Mirror of https://github.com/nodejs/node.git (synced 2025-04-28 13:40:37 +00:00)
build: use pathlib for paths
Use Python's `pathlib` library for paths and related operations instead of `os.path`.

Refs: https://github.com/nodejs/node/pull/47323#discussion_r1163799518
Refs: https://github.com/nodejs/node/pull/47323#issuecomment-1510350194
PR-URL: https://github.com/nodejs/node/pull/47581
Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com>
Reviewed-By: Christian Clauss <cclauss@me.com>
This commit is contained in: parent 109ab0a89c, commit d2156f1bf0
configure.py (110 changed lines)
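Before the diff itself, a minimal sketch of the os.path-to-pathlib equivalences the change leans on. This is illustrative only; none of the names below appear in configure.py.

    import os.path
    from pathlib import Path

    base = Path('tools')                     # a Path object instead of a plain string

    # Joining: the / operator replaces os.path.join
    assert str(base / 'gyp' / 'pylib') == os.path.join('tools', 'gyp', 'pylib')

    # Queries: methods replace the os.path predicates
    p = Path(__file__)
    assert p.is_file() == os.path.isfile(p)
    assert p.parent == Path(os.path.dirname(p))

    # Reading a whole file: read_text() opens, reads and closes in one call
    text = p.read_text(encoding='utf-8')
    with open(p, encoding='utf-8') as f:
        assert text == f.read()

The recurring pattern in the hunks below: build paths with Path and /, query them with .is_file()/.is_dir(), and convert back with str() only at boundaries where a consumer still expects a plain string (sys.path, gyp variables, os.environ).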
@@ -12,11 +12,12 @@ import subprocess
 import shutil
 import bz2
 import io
+from pathlib import Path

 from distutils.version import StrictVersion

 # If not run from node/, cd to node/.
-os.chdir(os.path.dirname(__file__) or '.')
+os.chdir(Path(__file__).parent)

 original_argv = sys.argv[1:]

@@ -25,11 +26,13 @@ original_argv = sys.argv[1:]
 CC = os.environ.get('CC', 'cc' if sys.platform == 'darwin' else 'gcc')
 CXX = os.environ.get('CXX', 'c++' if sys.platform == 'darwin' else 'g++')

-sys.path.insert(0, os.path.join('tools', 'gyp', 'pylib'))
+tools_path = Path('tools')
+
+sys.path.insert(0, str(tools_path / 'gyp' / 'pylib'))
 from gyp.common import GetFlavor

 # imports in tools/configure.d
-sys.path.insert(0, os.path.join('tools', 'configure.d'))
+sys.path.insert(0, str(tools_path / 'configure.d'))
 import nodedownload

 # imports in tools/
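Note the str() around the joined paths: sys.path entries are documented as plain strings, so the Path is converted back at that boundary. A small sketch of the pattern, with a hypothetical directory name:

    import sys
    from pathlib import Path

    vendor = Path('third_party')              # hypothetical vendored-package root

    # sys.path expects strings, so convert the Path explicitly at the boundary
    sys.path.insert(0, str(vendor / 'some_pkg'))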
@@ -53,8 +56,7 @@ valid_mips_arch = ('loongson', 'r1', 'r2', 'r6', 'rx')
 valid_mips_fpu = ('fp32', 'fp64', 'fpxx')
 valid_mips_float_abi = ('soft', 'hard')
 valid_intl_modes = ('none', 'small-icu', 'full-icu', 'system-icu')
-with open('tools/icu/icu_versions.json', encoding='utf-8') as f:
-  icu_versions = json.load(f)
+icu_versions = json.loads((tools_path / 'icu' / 'icu_versions.json').read_text(encoding='utf-8'))

 shareable_builtins = {'cjs_module_lexer/lexer': 'deps/cjs-module-lexer/lexer.js',
                       'cjs_module_lexer/dist/lexer': 'deps/cjs-module-lexer/dist/lexer.js',
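The with open()/json.load() pair collapses into one expression because Path.read_text() opens, reads and closes the file itself. A minimal, self-contained sketch (hypothetical file name and contents):

    import json
    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        cfg_file = Path(tmp, 'versions.json')          # hypothetical file
        cfg_file.write_text('{"current": 73}', encoding='utf-8')

        # One expression: read the whole file, then parse the JSON text
        versions = json.loads(cfg_file.read_text(encoding='utf-8'))
        assert versions['current'] == 73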
@@ -839,7 +841,7 @@ parser.add_argument('-C',
 (options, args) = parser.parse_known_args()

 # Expand ~ in the install prefix now, it gets written to multiple files.
-options.prefix = os.path.expanduser(options.prefix or '')
+options.prefix = str(Path(options.prefix or '').expanduser())

 # set up auto-download list
 auto_downloads = nodedownload.parse(options.download_list)
@@ -1199,7 +1201,7 @@ def configure_zos(o):
   o['variables']['node_static_zoslib'] = b(True)
   if options.static_zoslib_gyp:
     # Apply to all Node.js components for now
-    o['variables']['zoslib_include_dir'] = os.path.dirname(options.static_zoslib_gyp) + '/include'
+    o['variables']['zoslib_include_dir'] = Path(options.static_zoslib_gyp).parent + '/include'
     o['include_dirs'] += [o['variables']['zoslib_include_dir']]
   else:
     raise Exception('--static-zoslib-gyp=<path to zoslib.gyp file> is required.')
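One thing to watch in this hunk: os.path.dirname() returns a string, but Path(...).parent is a Path, and Path + '/include' raises TypeError because Path does not define +. If the z/OS branch is exercised, a join along these lines would be needed (an illustrative alternative, not what the commit does):

    from pathlib import Path

    gyp_file = Path('deps/zoslib/zoslib.gyp')   # hypothetical path, for illustration

    # Path objects do not support '+' with strings...
    try:
        include_dir = gyp_file.parent + '/include'
    except TypeError:
        # ...so join with '/' and convert to str where a string is required
        include_dir = str(gyp_file.parent / 'include')

    assert include_dir == str(Path('deps/zoslib/include'))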
@@ -1603,7 +1605,7 @@ def configure_static(o):

 def write(filename, data):
   print_verbose(f'creating {filename}')
-  with open(filename, 'w+', encoding='utf-8') as f:
+  with Path(filename).open(mode='w+', encoding='utf-8') as f:
     f.write(data)

 do_not_edit = '# Do not edit. Generated by the configure script.\n'
@@ -1619,8 +1621,8 @@ def glob_to_var(dir_base, dir_sub, patch_dir):
       # srcfile uses "slash" as dir separator as its output is consumed by gyp
       srcfile = f'{dir_sub}/{file}'
       if patch_dir:
-        patchfile = f'{dir_base}{patch_dir}{file}'
-        if os.path.isfile(patchfile):
+        patchfile = Path(dir_base, patch_dir, file)
+        if patchfile.is_file():
           srcfile = f'{patch_dir}/{file}'
           info(f'Using floating patch "{patchfile}" from "{dir_base}"')
       file_list.append(srcfile)
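Unlike the old f-string, which simply concatenated dir_base, patch_dir and file and therefore depended on the segments carrying their own separators, the multi-argument Path constructor inserts separators itself and collapses redundant ones. A minimal sketch with hypothetical segment values:

    from pathlib import Path

    # Path() accepts several segments and inserts separators where needed;
    # trailing slashes inside segments are collapsed rather than doubled.
    a = Path('deps/icu-small/', 'source/data/', 'in.txt')
    b = Path('deps/icu-small', 'source/data', 'in.txt')
    assert a == b
    assert str(b) == str(Path('deps/icu-small/source/data/in.txt'))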
@@ -1629,9 +1631,8 @@ def glob_to_var(dir_base, dir_sub, patch_dir):

 def configure_intl(o):
   def icu_download(path):
-    depFile = 'tools/icu/current_ver.dep'
-    with open(depFile, encoding='utf-8') as f:
-      icus = json.load(f)
+    depFile = tools_path / 'icu' / 'current_ver.dep'
+    icus = json.loads(depFile.read_text(encoding='utf-8'))
     # download ICU, if needed
     if not os.access(options.download_path, os.W_OK):
       error('''Cannot write to desired download path.
@@ -1646,13 +1647,13 @@ def configure_intl(o):
       For the entry {url},
       Expected one of these keys: {' '.join(allAlgos)}''')
       local = url.split('/')[-1]
-      targetfile = os.path.join(options.download_path, local)
-      if not os.path.isfile(targetfile):
+      targetfile = Path(options.download_path, local)
+      if not targetfile.is_file():
         if attemptdownload:
           nodedownload.retrievefile(url, targetfile)
       else:
         print(f'Re-using existing {targetfile}')
-      if os.path.isfile(targetfile):
+      if targetfile.is_file():
         print(f'Checking file integrity with {hashAlgo}:\r')
         gotHash = nodedownload.checkHash(targetfile, hashAlgo)
         print(f'{hashAlgo}: {gotHash} {targetfile}')
@@ -1739,14 +1740,15 @@ def configure_intl(o):
   icu_full_path = icu_deps_path

   # icu-tmp is used to download and unpack the ICU tarball.
-  icu_tmp_path = os.path.join(icu_parent_path, 'icu-tmp')
+  icu_tmp_path = Path(icu_parent_path, 'icu-tmp')

   # canned ICU. see tools/icu/README.md to update.
   canned_icu_dir = 'deps/icu-small'

   # use the README to verify what the canned ICU is
-  canned_is_full = os.path.isfile(os.path.join(canned_icu_dir, 'README-FULL-ICU.txt'))
-  canned_is_small = os.path.isfile(os.path.join(canned_icu_dir, 'README-SMALL-ICU.txt'))
+  canned_icu_path = Path(canned_icu_dir)
+  canned_is_full = (canned_icu_path / 'README-FULL-ICU.txt').is_file()
+  canned_is_small = (canned_icu_path / 'README-SMALL-ICU.txt').is_file()
   if canned_is_small:
     warn(f'Ignoring {canned_icu_dir} - in-repo small icu is no longer supported.')

@@ -1766,39 +1768,39 @@ def configure_intl(o):
     icu_config['variables']['icu_full_canned'] = 1
   # --with-icu-source processing
   # now, check that they didn't pass --with-icu-source=deps/icu
-  elif with_icu_source and os.path.abspath(icu_full_path) == os.path.abspath(with_icu_source):
+  elif with_icu_source and Path(icu_full_path).resolve() == Path(with_icu_source).resolve():
     warn(f'Ignoring redundant --with-icu-source={with_icu_source}')
     with_icu_source = None
   # if with_icu_source is still set, try to use it.
   if with_icu_source:
-    if os.path.isdir(icu_full_path):
+    if Path(icu_full_path).is_dir():
       print(f'Deleting old ICU source: {icu_full_path}')
       shutil.rmtree(icu_full_path)
     # now, what path was given?
-    if os.path.isdir(with_icu_source):
+    if Path(with_icu_source).is_dir():
       # it's a path. Copy it.
       print(f'{with_icu_source} -> {icu_full_path}')
       shutil.copytree(with_icu_source, icu_full_path)
     else:
       # could be file or URL.
       # Set up temporary area
-      if os.path.isdir(icu_tmp_path):
+      if Path(icu_tmp_path).is_dir():
         shutil.rmtree(icu_tmp_path)
-      os.mkdir(icu_tmp_path)
+      icu_tmp_path.mkdir()
       icu_tarball = None
-      if os.path.isfile(with_icu_source):
+      if Path(with_icu_source).is_file():
         # it's a file. Try to unpack it.
         icu_tarball = with_icu_source
       else:
         # Can we download it?
-        local = os.path.join(icu_tmp_path, with_icu_source.split('/')[-1]) # local part
+        local = icu_tmp_path / with_icu_source.split('/')[-1] # local part
         icu_tarball = nodedownload.retrievefile(with_icu_source, local)
       # continue with "icu_tarball"
       nodedownload.unpack(icu_tarball, icu_tmp_path)
       # Did it unpack correctly? Should contain 'icu'
-      tmp_icu = os.path.join(icu_tmp_path, 'icu')
-      if os.path.isdir(tmp_icu):
-        os.rename(tmp_icu, icu_full_path)
+      tmp_icu = icu_tmp_path / 'icu'
+      if tmp_icu.is_dir():
+        tmp_icu.rename(icu_full_path)
         shutil.rmtree(icu_tmp_path)
       else:
         shutil.rmtree(icu_tmp_path)
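Three pathlib idioms carry this hunk: Path.resolve() for comparing two spellings of the same location (where the old code used os.path.abspath), Path.mkdir() instead of os.mkdir(), and Path.rename() instead of os.rename(). A compact sketch under a temporary directory, with all names hypothetical:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)

        # resolve() normalizes '..' segments and symlinks before comparing
        assert (root / 'deps' / '..' / 'deps').resolve() == (root / 'deps').resolve()

        staging = root / 'icu-tmp'
        staging.mkdir()                      # equivalent of os.mkdir(staging)
        (staging / 'icu').mkdir()

        # os.rename(src, dst) becomes a method on the source Path
        (staging / 'icu').rename(root / 'icu')
        assert (root / 'icu').is_dir()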
@@ -1808,22 +1810,22 @@ def configure_intl(o):
   o['variables']['icu_gyp_path'] = 'tools/icu/icu-generic.gyp'
   # ICU source dir relative to tools/icu (for .gyp file)
   o['variables']['icu_path'] = icu_full_path
-  if not os.path.isdir(icu_full_path):
+  if not Path(icu_full_path).is_dir():
     # can we download (or find) a zipfile?
     localzip = icu_download(icu_full_path)
     if localzip:
       nodedownload.unpack(localzip, icu_parent_path)
     else:
-      warn("* ECMA-402 (Intl) support didn't find ICU in {icu_full_path}..")
-  if not os.path.isdir(icu_full_path):
+      warn(f"* ECMA-402 (Intl) support didn't find ICU in {icu_full_path}..")
+  if not Path(icu_full_path).is_dir():
     error(f'''Cannot build Intl without ICU in {icu_full_path}.
     Fix, or disable with "--with-intl=none"''')
   else:
     print_verbose(f'* Using ICU in {icu_full_path}')
   # Now, what version of ICU is it? We just need the "major", such as 54.
   # uvernum.h contains it as a #define.
-  uvernum_h = os.path.join(icu_full_path, 'source/common/unicode/uvernum.h')
-  if not os.path.isfile(uvernum_h):
+  uvernum_h = Path(icu_full_path, 'source', 'common', 'unicode', 'uvernum.h')
+  if not uvernum_h.is_file():
     error(f'Could not load {uvernum_h} - is ICU installed?')
   icu_ver_major = None
   matchVerExp = r'^\s*#define\s+U_ICU_VERSION_SHORT\s+"([^"]*)".*'
@@ -1843,17 +1845,15 @@ def configure_intl(o):
   icu_data_file_l = f'icudt{icu_ver_major}l.dat' # LE filename
   icu_data_file = f'icudt{icu_ver_major}{icu_endianness}.dat'
   # relative to configure
-  icu_data_path = os.path.join(icu_full_path,
-                               'source/data/in',
-                               icu_data_file_l) # LE
+  icu_data_path = Path(icu_full_path, 'source', 'data', 'in', icu_data_file_l) # LE
   compressed_data = f'{icu_data_path}.bz2'
-  if not os.path.isfile(icu_data_path) and os.path.isfile(compressed_data):
+  if not icu_data_path.is_file() and Path(compressed_data).is_file():
     # unpack. deps/icu is a temporary path
-    if os.path.isdir(icu_tmp_path):
+    if icu_tmp_path.is_dir():
       shutil.rmtree(icu_tmp_path)
-    os.mkdir(icu_tmp_path)
-    icu_data_path = os.path.join(icu_tmp_path, icu_data_file_l)
-    with open(icu_data_path, 'wb') as outf:
+    icu_tmp_path.mkdir()
+    icu_data_path = icu_tmp_path / icu_data_file_l
+    with icu_data_path.open(mode='wb') as outf:
       inf = bz2.BZ2File(compressed_data, 'rb')
       try:
         shutil.copyfileobj(inf, outf)
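Path.open() takes the same mode and encoding arguments as the built-in open(). Note also that the '.bz2' name is still produced with an f-string, since appending text to the final component is not something the / operator expresses. A minimal sketch with hypothetical file names:

    import bz2
    import shutil
    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        data_file = Path(tmp, 'icudt00l.dat')        # hypothetical data file name
        compressed = Path(f'{data_file}.bz2')        # append a suffix via f-string

        # Create a small compressed fixture so the sketch is self-contained
        with bz2.open(compressed, 'wb') as f:
            f.write(b'example payload')

        # Path.open() mirrors the built-in open(); here it stands in for open(path, 'wb')
        with data_file.open(mode='wb') as outf, bz2.BZ2File(compressed, 'rb') as inf:
            shutil.copyfileobj(inf, outf)

        assert data_file.read_bytes() == b'example payload'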
@@ -1862,20 +1862,18 @@ def configure_intl(o):
       # Now, proceed..

   # relative to dep..
-  icu_data_in = os.path.join('..', '..', icu_data_path)
-  if not os.path.isfile(icu_data_path) and icu_endianness != 'l':
+  icu_data_in = Path('..', '..', icu_data_path)
+  if not icu_data_path.is_file() and icu_endianness != 'l':
     # use host endianness
-    icu_data_path = os.path.join(icu_full_path,
-                                 'source/data/in',
-                                 icu_data_file) # will be generated
-  if not os.path.isfile(icu_data_path):
+    icu_data_path = Path(icu_full_path, 'source', 'data', 'in', icu_data_file) # will be generated
+  if not icu_data_path.is_file():
     # .. and we're not about to build it from .gyp!
     error(f'''ICU prebuilt data file {icu_data_path} does not exist.
     See the README.md.''')

   # this is the input '.dat' file to use .. icudt*.dat
   # may be little-endian if from a icu-project.org tarball
-  o['variables']['icu_data_in'] = icu_data_in
+  o['variables']['icu_data_in'] = str(icu_data_in)

   # map from variable name to subdirs
   icu_src = {
@@ -1973,16 +1971,16 @@ def make_bin_override():
       os.path.realpath(which_python) == os.path.realpath(sys.executable)):
     return

-  bin_override = os.path.abspath('out/tools/bin')
+  bin_override = Path('out', 'tools', 'bin').resolve()
   try:
-    os.makedirs(bin_override)
+    bin_override.mkdir(parents=True)
   except OSError as e:
     if e.errno != errno.EEXIST:
       raise e

-  python_link = os.path.join(bin_override, 'python')
+  python_link = bin_override / 'python'
   try:
-    os.unlink(python_link)
+    python_link.unlink()
   except OSError as e:
     if e.errno != errno.ENOENT:
       raise e
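bin_override.mkdir(parents=True) is the pathlib counterpart of os.makedirs(), and python_link.unlink() of os.unlink(); the surrounding errno checks are kept as-is by the commit. For what it's worth, on Python 3.8+ the same tolerance can be expressed directly with exist_ok=True and missing_ok=True, a possible simplification the commit does not make:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        override = Path(tmp, 'out', 'tools', 'bin')

        # parents=True creates missing ancestors; exist_ok=True swallows EEXIST
        override.mkdir(parents=True, exist_ok=True)
        override.mkdir(parents=True, exist_ok=True)   # second call is a no-op

        link = override / 'python'
        # missing_ok=True (Python 3.8+) swallows ENOENT, like the errno check did
        link.unlink(missing_ok=True)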
@@ -1991,7 +1989,7 @@ def make_bin_override():
   # We need to set the environment right now so that when gyp (in run_gyp)
   # shells out, it finds the right python (specifically at
   # https://github.com/nodejs/node/blob/d82e107/deps/v8/gypfiles/toolchain.gypi#L43)
-  os.environ['PATH'] = bin_override + ':' + os.environ['PATH']
+  os.environ['PATH'] = str(bin_override) + ':' + os.environ['PATH']

   return bin_override

@@ -2070,7 +2068,7 @@ write('config.gypi', do_not_edit +

 write('config.status', '#!/bin/sh\nset -x\nexec ./configure ' +
       ' '.join([shlex.quote(arg) for arg in original_argv]) + '\n')
-os.chmod('config.status', 0o775)
+Path('config.status').chmod(0o775)


 config = {
@@ -2100,7 +2098,7 @@ config_str = '\n'.join(config_lines)
 # On Windows there's no reason to search for a different python binary.
 bin_override = None if sys.platform == 'win32' else make_bin_override()
 if bin_override:
-  config_str = 'export PATH:=' + bin_override + ':$(PATH)\n' + config_str
+  config_str = 'export PATH:=' + str(bin_override) + ':$(PATH)\n' + config_str

 write('config.mk', do_not_edit + config_str)
