From 2cb6aa8fc0bc578228d06fd8cc7ae59894846cd0 Mon Sep 17 00:00:00 2001 From: Sam Clegg Date: Thu, 26 Mar 2026 09:48:08 -0700 Subject: [PATCH] Enable preview checks in ruff by default. NFC --- .circleci/config.yml | 2 - emcc.py | 6 +-- emcmake.py | 2 +- emconfigure.py | 2 +- emmake.py | 2 +- emrun.py | 30 ++++++------ pyproject.toml | 13 ++++++ requirements-dev.txt | 2 +- site/source/get_api_items.py | 4 +- site/source/get_wiki.py | 14 +++--- test/benchmark/benchmark_sse.py | 6 +-- test/browser_common.py | 17 ++++--- test/common.py | 30 ++++++------ test/gen_many_js_functions.py | 4 +- test/parallel_testsuite.py | 14 +++--- test/parse_benchmark_output.py | 4 +- test/runner.py | 4 +- test/test_benchmark.py | 16 ++++--- test/test_browser.py | 20 ++++---- test/test_core.py | 16 +++---- test/test_other.py | 63 +++++++++++++------------- test/test_sanity.py | 6 +-- tools/building.py | 32 ++++++------- tools/cmdline.py | 18 ++++---- tools/config.py | 6 +-- tools/diagnostics.py | 2 +- tools/emcoverage.py | 2 +- tools/empath-split.py | 16 +++---- tools/emprofile.py | 6 +-- tools/emscripten.py | 4 +- tools/extract_metadata.py | 6 +-- tools/file_packager.py | 16 +++---- tools/gen_struct_info.py | 6 +-- tools/install.py | 2 +- tools/js_optimizer.py | 6 +-- tools/link.py | 28 ++++++------ tools/maint/add_license.py | 24 +++++----- tools/maint/check_emcc_help_text.py | 4 +- tools/maint/check_for_closed_issues.py | 2 +- tools/maint/create_dom_pk_codes.py | 6 ++- tools/maint/create_entry_points.py | 28 +++++++----- tools/maint/create_release.py | 2 +- tools/maint/find_unused_settings.py | 2 +- tools/maint/heuristic_clear_cache.py | 4 +- tools/maint/npm_update.py | 4 +- tools/maint/rebaseline_tests.py | 2 +- tools/maint/simde_update.py | 8 ++-- tools/maint/update_docs.py | 4 +- tools/maint/update_settings_docs.py | 4 +- tools/ports/__init__.py | 4 +- tools/ports/cocos2d.py | 2 +- tools/ports/sdl2_image.py | 2 +- tools/ports/sdl2_mixer.py | 2 +- tools/ports/zlib.py | 1 - 
tools/response_file.py | 2 +- tools/settings.py | 11 ++--- tools/shared.py | 2 +- tools/system_libs.py | 8 ++-- tools/toolchain_profiler.py | 3 +- tools/wasm-sourcemap.py | 19 ++++---- tools/webidl_binder.py | 10 ++-- 61 files changed, 301 insertions(+), 286 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 35634c482ae61..cd96285b981b5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -539,8 +539,6 @@ jobs: - checkout - pip-install - run: ruff check - # TODO (cclauss): When ruff supports rules these errors without --preview, remove following line - - run: ruff check --preview --select=E20,E30,E221,E225,E226,E275 vulture: executor: ubuntu-lts steps: diff --git a/emcc.py b/emcc.py index a64a00eab4555..6814682d1266b 100644 --- a/emcc.py +++ b/emcc.py @@ -127,7 +127,7 @@ def make_relative(filename): reproduce_file.add(utils.path_from_root('emscripten-version.txt'), os.path.join(root, 'version.txt')) with shared.get_temp_files().get_file(suffix='.tar') as rsp_name: - with open(rsp_name, 'w') as rsp: + with open(rsp_name, 'w', encoding='utf-8') as rsp: ignore_next = False output_arg = None @@ -371,7 +371,7 @@ def get_next_arg(): add_link_arg(get_next_arg()) elif arg == '-s' or arg.startswith(('-l', '-L', '--js-library=', '-z', '-u')): add_link_arg(arg) - elif not arg.startswith('-o') and arg not in ('-nostdlib', '-nostartfiles', '-nolibc', '-nodefaultlibs', '-s'): + elif not arg.startswith('-o') and arg not in {'-nostdlib', '-nostartfiles', '-nolibc', '-nodefaultlibs', '-s'}: # All other flags are for the compiler compiler_args.append(arg) if skip: @@ -415,7 +415,7 @@ def phase_setup(state): # If we get here then the user specified both DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED # on the command line. 
This is no longer valid so report either an error or a warning (for # backwards compat with the old `DISABLE_EXCEPTION_CATCHING=2` - if user_settings['DISABLE_EXCEPTION_CATCHING'] in ('0', '2'): + if user_settings['DISABLE_EXCEPTION_CATCHING'] in {'0', '2'}: diagnostics.warning('deprecated', 'DISABLE_EXCEPTION_CATCHING=X is no longer needed when specifying EXCEPTION_CATCHING_ALLOWED') else: exit_with_error('DISABLE_EXCEPTION_CATCHING and EXCEPTION_CATCHING_ALLOWED are mutually exclusive') diff --git a/emcmake.py b/emcmake.py index 9c6befda002e7..b10d24599eb99 100755 --- a/emcmake.py +++ b/emcmake.py @@ -16,7 +16,7 @@ # Main run() function # def run(): - if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'): + if len(sys.argv) < 2 or sys.argv[1] in {'--version', '--help'}: print('''\ emcmake is a helper for cmake, setting various environment variables so that emcc etc. are used. Typical usage: diff --git a/emconfigure.py b/emconfigure.py index 701a32bc31634..83b35cf132fbf 100755 --- a/emconfigure.py +++ b/emconfigure.py @@ -27,7 +27,7 @@ # Main run() function # def run(): - if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'): + if len(sys.argv) < 2 or sys.argv[1] in {'--version', '--help'}: print('''\ emconfigure is a helper for configure, setting various environment variables so that emcc etc. are used. Typical usage: diff --git a/emmake.py b/emmake.py index 77c08fe1fd15c..776b7e87e9da0 100755 --- a/emmake.py +++ b/emmake.py @@ -33,7 +33,7 @@ # Main run() function # def run(): - if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'): + if len(sys.argv) < 2 or sys.argv[1] in {'--version', '--help'}: print('''\ emmake is a helper for make, setting various environment variables so that emcc etc. are used. 
Typical usage: diff --git a/emrun.py b/emrun.py index af3dda6dbd27a..260bca19afc02 100644 --- a/emrun.py +++ b/emrun.py @@ -40,10 +40,7 @@ from operator import itemgetter from urllib.parse import unquote, urlsplit -# We depend on python 3.8 features -if sys.version_info < (3, 8): # noqa: UP036 - print(f'error: emrun requires python 3.8 or above ({sys.executable} {sys.version})', file=sys.stderr) - sys.exit(1) +assert sys.version_info >= (3, 10), f'emscripten requires python 3.10 or above ({sys.executable} {sys.version})' # Populated from cmdline params emrun_options = None @@ -109,7 +106,7 @@ LINUX = True elif platform.system() == 'FreeBSD': FREEBSD = True -elif platform.mac_ver()[0] != '': +elif platform.mac_ver()[0]: MACOS = True import plistlib @@ -225,7 +222,7 @@ def delete_emrun_safe_firefox_profile(): def create_emrun_safe_firefox_profile(): global temp_firefox_profile_dir temp_firefox_profile_dir = tempfile.mkdtemp(prefix='temp_emrun_firefox_profile_') - with open(os.path.join(temp_firefox_profile_dir, 'prefs.js'), 'w') as f: + with open(os.path.join(temp_firefox_profile_dir, 'prefs.js'), 'w', encoding='utf-8') as f: f.write(''' // Old Firefox browsers have a maxPerDomain limit of 20. Newer Firefox browsers default to 512. Match the new // default here to help test spawning a lot of threads also on older Firefox versions. 
@@ -780,7 +777,7 @@ def get_cpu_info(): logical_cores = int(check_output(['sysctl', '-n', 'machdep.cpu.thread_count']).strip()) frequency = int(check_output(['sysctl', '-n', 'hw.cpufrequency']).strip()) // 1000000 elif LINUX: - for line in open('/proc/cpuinfo').readlines(): + for line in open('/proc/cpuinfo', encoding='utf-8').readlines(): if 'model name' in line: cpu_name = re.sub('.*model name.*:', '', line, count=1).strip() lscpu = check_output(['lscpu']) @@ -1031,7 +1028,7 @@ def win_get_file_properties(fname): strInfo = {} for propName in propNames: strInfoPath = u'\\StringFileInfo\\%04X%04X\\%s' % (lang, codepage, propName) - ## print str_info + # print str_info strInfo[propName] = win32api.GetFileVersionInfo(fname, strInfoPath) props['StringFileInfo'] = strInfo @@ -1043,7 +1040,7 @@ def get_computer_model(): try: if MACOS: try: - with open(os.path.join(os.getenv("HOME"), '.emrun.hwmodel.cached'), 'r') as f: + with open(os.path.join(os.getenv("HOME"), '.emrun.hwmodel.cached'), encoding='utf-8') as f: model = f.read() return model except IOError: @@ -1059,7 +1056,7 @@ def get_computer_model(): model = check_output(cmd) model = re.search('(.*)', model) model = model.group(1).strip() - with open(os.path.join(os.getenv("HOME"), '.emrun.hwmodel.cached'), 'w') as fh: + with open(os.path.join(os.getenv("HOME"), '.emrun.hwmodel.cached'), 'w', encoding='utf-8') as fh: fh.write(model) # Cache the hardware model to disk return model except Exception: @@ -1089,7 +1086,7 @@ def get_computer_model(): def get_os_version(): - bitness = ' (64bit)' if platform.machine() in ['AMD64', 'x86_64'] else ' (32bit)' + bitness = ' (64bit)' if platform.machine() in {'AMD64', 'x86_64'} else ' (32bit)' try: if WINDOWS: versionHandle = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion") @@ -1116,7 +1113,7 @@ def get_system_memory(): if emrun_options.android: lines = check_output([ADB, 'shell', 'cat', '/proc/meminfo']).split('\n') else: - mem = 
open('/proc/meminfo', 'r') + mem = open('/proc/meminfo', encoding='utf-8') lines = mem.readlines() mem.close() for i in lines: @@ -1383,13 +1380,14 @@ def get_system_info(format_json): return info.strip() else: try: - with open(os.path.expanduser('~/.emrun.generated.guid')) as fh: + with open(os.path.expanduser('~/.emrun.generated.guid'), encoding='utf-8') as fh: unique_system_id = fh.read().strip() except Exception: import uuid unique_system_id = str(uuid.uuid4()) try: - open(os.path.expanduser('~/.emrun.generated.guid'), 'w').write(unique_system_id) + with open(os.path.expanduser('~/.emrun.generated.guid'), 'w', encoding='utf-8') as f: + f.write(unique_system_id) except Exception as e: logv(e) @@ -1822,13 +1820,13 @@ def run(cmd): if options.log_stdout: global browser_stdout_handle - browser_stdout_handle = open(options.log_stdout, 'a') + browser_stdout_handle = open(options.log_stdout, 'a', encoding='utf-8') if options.log_stderr: global browser_stderr_handle if options.log_stderr == options.log_stdout: browser_stderr_handle = browser_stdout_handle else: - browser_stderr_handle = open(options.log_stderr, 'a') + browser_stderr_handle = open(options.log_stderr, 'a', encoding='utf-8') if options.run_browser: logv("Starting browser: %s" % ' '.join(browser)) # if browser[0] == 'cmd': diff --git a/pyproject.toml b/pyproject.toml index a6fa4911fe92d..263f46c9e5587 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,9 @@ requires-python = ">=3.10" [tool.ruff] +preview = true line-length = 100 +indent-width = 2 exclude = [ "./cache/", "./node_modules/", @@ -38,15 +40,26 @@ lint.ignore = [ "B011", # See https://github.com/PyCQA/flake8-bugbear/issues/66 "B023", "B026", + "E272", "E402", + "E241", + "E266", "E501", "E721", "E741", + "E111", # Does not seem to honor `indent-width = 2` above + "E114", # Does not seem to honor `indent-width = 2` above + "E261", "PERF203", "PERF401", "PLC0415", + "PLR0904", + "PLR0916", + "PLR0914", + "PLR1702", "PLR1704", "PLR5501", + 
"PLR6301", "PLW0602", "PLW0603", "PLW1510", diff --git a/requirements-dev.txt b/requirements-dev.txt index 66a04ecede54d..9cb5413753980 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,7 +7,7 @@ coverage[toml]==6.5 mypy==1.14 psutil==7.0.0 -ruff==0.14.1 +ruff==0.15.7 types-requests==2.32.0.20241016 unittest-xml-reporting==3.2.0 deadcode==2.3.1 diff --git a/site/source/get_api_items.py b/site/source/get_api_items.py index 0a7db796bd284..b1c8d827fb6c5 100755 --- a/site/source/get_api_items.py +++ b/site/source/get_api_items.py @@ -65,7 +65,7 @@ def addapiitems(matchobj): filepath = api_reference_directory + file print(file) # open file - with open(filepath) as infile: + with open(filepath, encoding='utf-8') as infile: for line in infile: # parse line for API items re.sub(r'^\.\.\s+((\w+)\:(\w+)\:\:(.*))', addapiitems, line) @@ -74,7 +74,7 @@ def addapiitems(matchobj): def exportItems(): """Export the API items into form for use in another script. """ - with open(api_item_filename, 'w') as infile: + with open(api_item_filename, 'w', encoding='utf-8') as infile: # write function lead in infile.write("# Auto-generated file (see get_api_items.py)\n\ndef get_mapped_items():\n mapped_wiki_inline_code = dict()\n") diff --git a/site/source/get_wiki.py b/site/source/get_wiki.py index 9d25a481fe5c3..84e2c72d4f84c 100755 --- a/site/source/get_wiki.py +++ b/site/source/get_wiki.py @@ -37,7 +37,7 @@ wiki_checkout = 'emscripten.wiki/' temp_set_of_codemarkup = set() -logfile = open(logfilename, 'w') +logfile = open(logfilename, 'w', encoding='utf-8') # snapshot_version_information = '.. note:: This is a **snapshot** of the wiki: %s\n\n' % strftime("%a, %d %b %Y %H:%M", gmtime()) snapshot_version_information = '.. note:: This article was migrated from the wiki (%s) and is now the "master copy" (the version in the wiki will be deleted). 
It may not be a perfect rendering of the original but we hope to fix that soon!\n\n' % time.strftime("%a, %d %b %Y %H:%M", time.gmtime()) @@ -91,7 +91,7 @@ def ConvertFilesToRst(): continue inputfilename = wiki_checkout + file - markdown = Path(inputfilename).read_text() + markdown = Path(inputfilename).read_text(encoding='utf-8') if 'This article has moved from the wiki to the new site' in markdown: continue if 'This page has been migrated to the main site' in markdown: @@ -127,16 +127,16 @@ def ConvertFilesToRst(): textinfile += snapshot_version_information - with open(outputfilename) as infile: + with open(outputfilename, encoding='utf-8') as infile: for line in infile: textinfile += line # print textinfile - with open(outputfilename, 'w') as outfile: + with open(outputfilename, 'w', encoding='utf-8') as outfile: outfile.write(textinfile) # write the index - with open(output_dir + 'index.rst', 'w') as outfile: + with open(output_dir + 'index.rst', 'w', encoding='utf-8') as outfile: outfile.write(indexfiletext) @@ -184,7 +184,7 @@ def fixcodemarkuplinks(matchobj): input_file = output_dir + file # print input_file textinfile = '' - with open(input_file) as infile: + with open(input_file, encoding='utf-8') as infile: for line in infile: textinfile += line @@ -195,7 +195,7 @@ def fixcodemarkuplinks(matchobj): # convert codemarkup to links if possible textinfile = fixWikiCodeMarkupToCodeLinks(textinfile) - with open(input_file, 'w') as outfile: + with open(input_file, 'w', encoding='utf-8') as outfile: outfile.write(textinfile) logfile.write('\n\nCODE MARKUP THAT WONT BE LINKED (add entry to mapped_wiki_inline_code if one of these need to be linked. The tool get-api-items.py can be used to generate the list of the documented API items. 
\n') diff --git a/test/benchmark/benchmark_sse.py b/test/benchmark/benchmark_sse.py index a240dbe0fc74d..6da83958f7b5b 100644 --- a/test/benchmark/benchmark_sse.py +++ b/test/benchmark/benchmark_sse.py @@ -22,7 +22,7 @@ from tools.config import V8_ENGINE from tools.shared import CLANG_CXX, EMCC -from tools.utils import WINDOWS, run_process +from tools.utils import WINDOWS, run_process, write_file # System info system_info = subprocess.check_output([EMRUN, '--system_info'], stderr=subprocess.STDOUT, text=True) @@ -279,13 +279,13 @@ def format_comparison(a, b): html += '' - open(results_file, 'w').write(html) + write_file(results_file, html) print('Wrote ' + str(len(html)) + ' bytes to file ' + results_file + '.') if __name__ == '__main__': suite = sys.argv[1].lower() if len(sys.argv) == 2 else None - if suite in ['sse', 'sse1']: + if suite in {'sse', 'sse1'}: run_benchmark(test_file('benchmark/benchmark_sse1.cpp'), 'results_sse1.html', ['-msse']) elif suite == 'sse2': run_benchmark(test_file('benchmark/benchmark_sse2.cpp'), 'results_sse2.html', ['-msse2']) diff --git a/test/browser_common.py b/test/browser_common.py index bdac8eea21b9d..991d0225f0980 100644 --- a/test/browser_common.py +++ b/test/browser_common.py @@ -355,8 +355,8 @@ def send_head(self): ctype = self.guess_type(path) self.send_header('Content-Type', ctype) pieces = self.headers.get('Range').split('=')[1].split('-') - start = int(pieces[0]) if pieces[0] != '' else 0 - end = int(pieces[1]) if pieces[1] != '' else fsize - 1 + start = int(pieces[0]) if pieces[0] else 0 + end = int(pieces[1]) if pieces[1] else fsize - 1 end = min(fsize - 1, end) length = end - start + 1 self.send_header('Content-Range', f'bytes {start}-{end}/{fsize}') @@ -404,7 +404,7 @@ def do_POST(self): # noqa: DC04 elif urlinfo.path.startswith('/status/'): code_str = urlinfo.path[len('/status/'):] code = int(code_str) - if code in (301, 302, 303, 307, 308): + if code in {301, 302, 303, 307, 308}: self.send_response(code) 
self.send_header('Location', '/status/200') self.end_headers() @@ -430,7 +430,7 @@ def do_GET(self): elif info.path.startswith('/status/'): code_str = info.path[len('/status/'):] code = int(code_str) - if code in (301, 302, 303, 307, 308): + if code in {301, 302, 303, 307, 308}: # Redirect to /status/200 self.send_response(code) self.send_header('Location', '/status/200') @@ -496,8 +496,8 @@ def do_GET(self): ctype = self.guess_type(path) self.send_header('Content-type', ctype) pieces = self.headers.get('Range').split('=')[1].split('-') - start = int(pieces[0]) if pieces[0] != '' else 0 - end = int(pieces[1]) if pieces[1] != '' else len(data) - 1 + start = int(pieces[0]) if pieces[0] else 0 + end = int(pieces[1]) if pieces[1] else len(data) - 1 end = min(len(data) - 1, end) length = end - start + 1 self.send_header('Content-Length', str(length)) @@ -605,15 +605,14 @@ def __enter__(self): time.sleep(0.1) # Return the locking count number try: - self.counter = int(open(f'{self.path}_counter').read()) + self.counter = int(utils.read_file(f'{self.path}_counter')) except Exception: pass return self.counter def __exit__(self, *a): # Increment locking count number before releasing the lock - with open(f'{self.path}_counter', 'w') as f: - f.write(str(self.counter + 1)) + utils.write_file(f'{self.path}_counter', str(self.counter + 1)) # And release the lock os.close(self.fd) try: diff --git a/test/common.py b/test/common.py index 5e0eaccebfafd..33f538c8c7829 100644 --- a/test/common.py +++ b/test/common.py @@ -98,7 +98,7 @@ def errlog(*args): def load_previous_test_run_results(): try: - return json.load(open(PREVIOUS_TEST_RUN_RESULTS_FILE)) + return json.loads(utils.read_file(PREVIOUS_TEST_RUN_RESULTS_FILE)) except FileNotFoundError: return {} except json.decoder.JSONDecodeError as e: @@ -135,7 +135,7 @@ def exe_suffix(cmd): def compiler_for(filename, force_c=False): - if utils.suffix(filename) in ('.cc', '.cxx', '.cpp') and not force_c: + if utils.suffix(filename) in 
{'.cc', '.cxx', '.cpp'} and not force_c: return EMXX else: return EMCC @@ -143,7 +143,8 @@ def compiler_for(filename, force_c=False): def record_flaky_test(test_name, attempt_count, max_attempts, exception_msg): logger.info(f'Retrying flaky test "{test_name}" (attempt {attempt_count}/{max_attempts} failed):\n{exception_msg}') - open(flaky_tests_log_filename, 'a').write(f'{test_name}\n') + with open(flaky_tests_log_filename, 'a', encoding='utf-8') as f: + f.write(f'{test_name}\n') def node_bigint_flags(node_version): @@ -519,7 +520,7 @@ def require_wasm64(self): self.fail('either d8 or node >= 24 required to run wasm64 tests. Use EMTEST_SKIP_WASM64 to skip') - def try_require_node_version(self, major, minor = 0, revision = 0): + def try_require_node_version(self, major, minor=0, revision=0): nodejs = get_nodejs() if not nodejs: self.skipTest('Test requires nodejs to run') @@ -867,9 +868,9 @@ def is_ldflag(f): args += self.ldflags if not main_file: for i, arg in enumerate(args): - if arg in ('--pre-js', '--post-js'): + if arg in {'--pre-js', '--post-js'}: args[i] = None - args[i + 1] = None + args[i + 1] = None # noqa: B909 args = [arg for arg in args if arg is not None] return args @@ -925,7 +926,7 @@ def build(self, filename, libraries=None, includes=None, force_c=False, cflags=N self.run_process(cmd, stderr=self.stderr_redirect if not DEBUG else None) self.assertExists(output) - if output_suffix in ('.js', '.mjs'): + if output_suffix in {'.js', '.mjs'}: # Make sure we produced correct line endings self.assertEqual(line_endings.check_line_endings(output), 0) @@ -996,8 +997,8 @@ def run_js(self, filename, engine=None, args=None, stderr = STDOUT else: stderr_file = self.in_dir('stderr') - stderr = open(stderr_file, 'w') - stdout = open(stdout_file, 'w') + stderr = open(stderr_file, 'w', encoding='utf-8') + stdout = open(stdout_file, 'w', encoding='utf-8') error = None timeout_error = None engine = self.get_engine_with_args(engine) @@ -1066,7 +1067,7 @@ def 
assertTextDataIdentical(self, text1, text2, msg=None, def assertIdentical(self, values, y, msg=None, fromfile='expected', tofile='actual'): - if type(values) not in (list, tuple): + if type(values) not in {list, tuple}: values = [values] for x in values: if x == y: @@ -1091,8 +1092,7 @@ def assertFileContents(self, filename, contents): contents = contents.replace('\r', '') if EMTEST_REBASELINE: - with open(filename, 'w') as f: - f.write(contents) + utils.write_file(filename, contents) return if not os.path.exists(filename): @@ -1115,7 +1115,7 @@ def assertContained(self, values, string, additional_info='', regex=False): self.assertTrue(match_any, 'Expected at least one of "%s" to match on:\n%s' % (values, limit_size(string))) return - if type(values) not in [list, tuple]: + if type(values) not in {list, tuple}: values = [values] if not any(v in string for v in values): @@ -1420,7 +1420,7 @@ def do_run_in_out_file_test(self, srcfile, **kwargs): utils.write_file(outfile, output) return output - ## Does a complete test - builds, runs, checks output, etc. + # Does a complete test - builds, runs, checks output, etc. 
def _build_and_run(self, filename, expected_output, args=None, no_build=False, assert_returncode=0, assert_identical=False, assert_all=False, @@ -1459,7 +1459,7 @@ def _build_and_run(self, filename, expected_output, args=None, interleaved_output=interleaved_output) js_output = js_output.replace('\r\n', '\n') if expected_output: - if type(expected_output) not in [list, tuple]: + if type(expected_output) not in {list, tuple}: expected_output = [expected_output] try: if assert_identical: diff --git a/test/gen_many_js_functions.py b/test/gen_many_js_functions.py index dccc74aca2b29..480dec05d7dda 100644 --- a/test/gen_many_js_functions.py +++ b/test/gen_many_js_functions.py @@ -15,7 +15,7 @@ def func_name(i): def generate_js_library_with_lots_of_functions(out_file): - with open(out_file, 'w') as f: + with open(out_file, 'w', encoding='utf-8') as f: f.write('var FunctionsLibrary = {\n') for i in range(NUM_FUNCS_TO_GENERATE): @@ -26,7 +26,7 @@ def generate_js_library_with_lots_of_functions(out_file): def generate_c_program_that_calls_js_library_with_lots_of_functions(out_file): - with open(out_file, 'w') as f: + with open(out_file, 'w', encoding='utf-8') as f: f.write('#include \n\n') for i in range(NUM_FUNCS_TO_GENERATE): diff --git a/test/parallel_testsuite.py b/test/parallel_testsuite.py index fc366a49d3ded..9363dd5152b68 100644 --- a/test/parallel_testsuite.py +++ b/test/parallel_testsuite.py @@ -37,7 +37,7 @@ def cap_max_workers_in_pool(max_workers, is_browser): if max_workers > 1 and is_browser and 'EMTEST_CORES' not in os.environ and 'EMCC_CORES' not in os.environ: # TODO experiment with this number. In browser tests we'll be creating # a browser instance per worker which is expensive. 
- max_workers = max_workers // 2 + max_workers //= 2 # Python has an issue that it can only use max 61 cores on Windows: https://github.com/python/cpython/issues/89240 if WINDOWS: return min(max_workers, 61) @@ -158,7 +158,7 @@ def run(self, result): # results may be be None if # of allowed errors was exceeded # and the harness aborted. if res: - if res.test_result not in ['success', 'skipped'] and allowed_failures_counter is not None: + if res.test_result not in {'success', 'skipped'} and allowed_failures_counter is not None: # Signal existing multiprocess pool runners so that they can exit early if needed. allowed_failures_counter.value -= 1 res.integrate_result(result) @@ -167,7 +167,7 @@ def run(self, result): # Send a task to each worker to tear down the browser and server. This # relies on the implementation detail in the worker pool that all workers # are cycled through once. - num_tear_downs = sum([pool.apply(tear_down, ()) for i in range(use_cores)]) + num_tear_downs = sum(pool.apply(tear_down, ()) for _ in range(use_cores)) # Assert the assumed behavior above hasn't changed. if num_tear_downs != use_cores and not buffer: errlog(f'Expected {use_cores} teardowns, got {num_tear_downs}') @@ -176,7 +176,7 @@ def run(self, result): previous_test_run_results = common.load_previous_test_run_results() for r in results: # Save a test result record with the specific suite name (e.g. 
"core0.test_foo") - test_failed = r.test_result not in ['success', 'skipped'] + test_failed = r.test_result not in {'success', 'skipped'} def update_test_results_to(test_name): fail_frequency = previous_test_run_results[test_name]['fail_frequency'] if test_name in previous_test_run_results else int(test_failed) @@ -193,7 +193,7 @@ def update_test_results_to(test_name): # for quick --failfast termination, in case a test fails in multiple suites update_test_results_to(r.test_name.split(' ')[0]) - json.dump(previous_test_run_results, open(common.PREVIOUS_TEST_RUN_RESULTS_FILE, 'w'), indent=2) + utils.write_file(common.PREVIOUS_TEST_RUN_RESULTS_FILE, json.dumps(previous_test_run_results, indent=2)) if EMTEST_VISUALIZE: self.visualize_results(results) @@ -224,7 +224,7 @@ def visualize_results(self, results): # shared data structures are hard in the python multi-processing world, so # use a file to share the flaky test information across test processes. - flaky_tests = open(common.flaky_tests_log_filename).read().split() if os.path.isfile(common.flaky_tests_log_filename) else [] + flaky_tests = utils.read_file(common.flaky_tests_log_filename).split() if os.path.isfile(common.flaky_tests_log_filename) else [] # Extract only the test short names flaky_tests = [x.split('.')[-1] for x in flaky_tests] @@ -315,7 +315,7 @@ def log_test_run_for_visualization(self, flaky_tests): dummy_test_task_counter = os.path.getsize(profiler_log_file) if os.path.isfile(profiler_log_file) else 0 # Remove the redundant 'test_' prefix from each test, since character space is at a premium in the visualized graph. 
test_name = self.test_short_name().removeprefix('test_') - with open(profiler_log_file, 'a') as prof: + with open(profiler_log_file, 'a', encoding='utf-8') as prof: prof.write(f',\n{{"pid":{dummy_test_task_counter},"op":"start","time":{self.start_time},"cmdLine":["{test_name}"],"color":"{color}"}}') prof.write(f',\n{{"pid":{dummy_test_task_counter},"op":"exit","time":{self.start_time + self.test_duration},"returncode":0}}') diff --git a/test/parse_benchmark_output.py b/test/parse_benchmark_output.py index d8e0d4b9569a3..c09b1deb8cfc8 100755 --- a/test/parse_benchmark_output.py +++ b/test/parse_benchmark_output.py @@ -25,7 +25,7 @@ def main(args): # other lines have: [benchmark name, result 1 , ..] matrix = [] - for line in open(args[0]).readlines(): + for line in open(args[0], encoding='utf-8').readlines(): line = line.strip() if line.startswith('test_'): benchmark = line.split(' ')[0][5:] @@ -57,7 +57,7 @@ def main(args): if len(line) >= 2: base = line[1] for i in range(1, len(line)): - line[i] = line[i] / base + line[i] /= base col0_width = max(len(r[0]) for r in matrix) diff --git a/test/runner.py b/test/runner.py index db91a8529c94e..4e1821f1d9bbc 100755 --- a/test/runner.py +++ b/test/runner.py @@ -290,7 +290,7 @@ def show(): def error_on_legacy_suite_names(args): for a in args: - if a.startswith('wasm') and not any(a.startswith(p) for p in ('wasm2js', 'wasmfs', 'wasm64')): + if a.startswith('wasm') and not a.startswith(('wasm2js', 'wasmfs', 'wasm64')): new = a.replace('wasm', 'core', 1) utils.exit_with_error('`%s` test suite has been replaced with `%s`', a, new) @@ -372,7 +372,7 @@ def sort_tests_failing_and_slowest_first_comparator(x, y): def use_parallel_suite(module): - suite_supported = module.__name__ not in ('test_sanity', 'test_benchmark', 'test_sockets', 'test_interactive', 'test_stress') + suite_supported = module.__name__ not in {'test_sanity', 'test_benchmark', 'test_sockets', 'test_interactive', 'test_stress'} if not common.EMTEST_SAVE_DIR and not 
shared.DEBUG: has_multiple_cores = parallel_testsuite.num_cores() > 1 if suite_supported and has_multiple_cores: diff --git a/test/test_benchmark.py b/test/test_benchmark.py index 95061ce919816..790ce7e3ad810 100644 --- a/test/test_benchmark.py +++ b/test/test_benchmark.py @@ -203,7 +203,9 @@ def build(self, parent, filename, shared_args, emcc_args, native_args, native_ex if lib_builder: env = {'CC': self.cc, 'CXX': self.cxx, 'CXXFLAGS': '-Wno-c++11-narrowing'} env.update(clang_native.get_clang_native_env()) - native_args = native_args + lib_builder(self.name, native=True, env_init=env) + # Avoid mutating incoming native_args list + native_args = native_args.copy() + native_args += lib_builder(self.name, native=True, env_init=env) if not native_exec: compiler = self.cxx if filename.endswith('cpp') else self.cc cmd = compiler + [ @@ -250,9 +252,9 @@ def build(self, parent, filename, shared_args, emcc_args, native_args, native_ex # systems (like zlib) if they see a CFLAGS it will override all their # default flags, including optimizations. env_init['CFLAGS'] = ' '.join(LLVM_FEATURE_FLAGS + [OPTIMIZATIONS] + self.cflags) - # This shouldn't be 'emcc_args += ...', because emcc_args is passed in as - # a parameter and changes will be visible to the caller. 
- emcc_args = emcc_args + lib_builder('js_' + llvm_root, native=False, env_init=env_init) + # Avoid mutating incoming emcc_args + emcc_args = emcc_args.copy() + emcc_args += lib_builder('js_' + llvm_root, native=False, env_init=env_init) final = os.path.dirname(filename) + os.path.sep + self.name + ('_' if self.name else '') + os.path.basename(filename) + '.js' final = final.replace('.cpp', '') utils.delete_file(final) @@ -327,7 +329,7 @@ def build(self, parent, filename, shared_args, emcc_args, native_args, native_ex if lib_builder: # build as "native" (so no emcc env stuff), but with all the cheerp stuff # set in the env - cheerp_args = cheerp_args + lib_builder(self.name, native=True, env_init={ + cheerp_args += lib_builder(self.name, native=True, env_init={ 'CC': CHEERP_BIN + 'clang', 'CXX': CHEERP_BIN + 'clang++', 'AR': CHEERP_BIN + '../libexec/cheerp-unknown-none-ar', @@ -382,7 +384,7 @@ def get_output_files(self): named_benchmarkers = { 'clang': NativeBenchmarker('clang', [CLANG_CC], [CLANG_CXX]), - 'gcc': NativeBenchmarker('gcc', ['gcc', '-no-pie'], ['g++', '-no-pie']), + 'gcc': NativeBenchmarker('gcc', ['gcc', '-no-pie'], ['g++', '-no-pie']), 'size': SizeBenchmarker('size'), 'v8': EmscriptenBenchmarker('v8', aot_v8), 'v8-lto': EmscriptenBenchmarker('v8-lto', aot_v8, ['-flto']), @@ -1101,7 +1103,7 @@ def lib_builder(name, native, env_init): ['src/BulletDynamics/libBulletDynamics.a', 'src/BulletCollision/libBulletCollision.a', 'src/LinearMath/libLinearMath.a'], - configure=['cmake', '.'], configure_args=['-DCMAKE_POLICY_VERSION_MINIMUM=3.5','-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF', '-DCMAKE_CXX_STANDARD=14', f'-DCMAKE_CXX_FLAGS={cflags}'], + configure=['cmake', '.'], configure_args=['-DCMAKE_POLICY_VERSION_MINIMUM=3.5', '-DBUILD_DEMOS=OFF', '-DBUILD_EXTRAS=OFF', '-DUSE_GLUT=OFF', '-DCMAKE_CXX_STANDARD=14', f'-DCMAKE_CXX_FLAGS={cflags}'], make=['cmake', '--build', '.', '--'], make_args=[], native=native, cache_name_extra=name, 
env_init=env_init) self.do_benchmark('bullet', src, '\nok.\n', diff --git a/test/test_browser.py b/test/test_browser.py index abb46f9fab06c..1b72247d44be0 100644 --- a/test/test_browser.py +++ b/test/test_browser.py @@ -1800,14 +1800,16 @@ def setup(): create_file('file2.txt', 'second') setup() - self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w')) + with open('script2.js', 'w', encoding='utf-8') as f: + self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=f) self.btest_exit('test_emscripten_async_load_script.c', cflags=['-sFORCE_FILESYSTEM']) # check using file packager to another dir self.clear() setup() ensure_dir('sub') - self.run_process([FILE_PACKAGER, 'sub/test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w')) + with open('script2.js', 'w', encoding='utf-8') as f: + self.run_process([FILE_PACKAGER, 'sub/test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=f) shutil.copy(Path('sub/test.data'), '.') self.btest_exit('test_emscripten_async_load_script.c', cflags=['-sFORCE_FILESYSTEM']) @@ -1822,8 +1824,10 @@ def setup(): create_file('sub/file2.txt', 'second') setup() - self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'sub/file1.txt@/target/file1.txt'], stdout=open('script1.js', 'w')) - self.run_process([FILE_PACKAGER, 'test2.data', '--preload', 'sub/file2.txt@/target/file2.txt'], stdout=open('script2.js', 'w')) + with open('script1.js', 'w', encoding='utf-8') as f: + self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'sub/file1.txt@/target/file1.txt'], stdout=f) + with open('script2.js', 'w', encoding='utf-8') as f: + self.run_process([FILE_PACKAGER, 'test2.data', '--preload', 'sub/file2.txt@/target/file2.txt'], stdout=f) self.btest_exit('test_emscripten_overlapped_package.c', cflags=['-sFORCE_FILESYSTEM']) self.clear() @@ -2820,7 +2824,7 @@ def test_locate_file(self, args): ''') create_file('data.txt', 
'load me right before...') create_file('pre.js', 'Module.locateFile = (x) => "sub/" + x;') - self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=open('data.js', 'w')) + self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=open('data.js', 'w', encoding='utf-8')) # put pre.js first, then the file packager data, so locateFile is there for the file loading code self.compile_btest('src.c', ['-O2', '-g', '--pre-js', 'pre.js', '--pre-js', 'data.js', '-o', 'page.html', '-sFORCE_FILESYSTEM'] + args, reporting=Reporting.JS_ONLY) ensure_dir('sub') @@ -4479,7 +4483,7 @@ def make_largefile(self): s = '12345678' for _ in range(14): s = s[::-1] + s # length of str will be 2^17=128KB - with open('largefile.txt', 'w') as f: + with open('largefile.txt', 'w', encoding='utf-8') as f: for _ in range(1024): f.write(s) @@ -4665,7 +4669,7 @@ def test_pthread_growth_mainthread(self, cflags, pthread_pool_size): @no_2gb('uses INITIAL_MEMORY') @no_4gb('uses INITIAL_MEMORY') @requires_growable_arraybuffers - def test_pthread_growth(self, cflags, pthread_pool_size = 1): + def test_pthread_growth(self, cflags, pthread_pool_size=1): self.set_setting('PTHREAD_POOL_SIZE', pthread_pool_size) if '-sGROWABLE_ARRAYBUFFERS' not in cflags: self.cflags.append('-Wno-pthreads-mem-growth') @@ -5383,7 +5387,7 @@ def test_4gb_fail(self): # Tests that Emscripten-compiled applications can be run when a slash in the URL query or fragment of the js file def test_browser_run_with_slash_in_query_and_hash(self): self.compile_btest('browser_test_hello_world.c', ['-o', 'test.html', '-O0']) - src = open('test.html').read() + src = utils.read_file('test.html') # Slash in query create_file('test-query.html', src.replace('test.js', 'test.js?type=pass/fail')) self.run_browser('test-query.html', '/report_result?0') diff --git a/test/test_core.py b/test/test_core.py index 6793c4ce375a6..ed24b3df28c29 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -204,7 +204,7 @@ 
def requires_x64_cpu(func): @wraps(func) def decorated(self, *args, **kwargs): - if platform.machine().lower() not in ['x86_64', 'amd64']: + if platform.machine().lower() not in {'x86_64', 'amd64'}: return self.skipTest(f'This test requires a native x64 CPU. Current CPU is {platform.machine()}.') return func(self, *args, **kwargs) @@ -3370,7 +3370,7 @@ def get_data_exports(wasm): data_exports = get_data_exports('test_dlfcn_self.wasm') # Certain exports are removed by wasm-emscripten-finalize, but this # tool is not run in all configurations, so ignore these exports. - data_exports = [d for d in data_exports if d not in ('__start_em_asm', '__stop_em_asm')] + data_exports = [d for d in data_exports if d not in {'__start_em_asm', '__stop_em_asm'}] data_exports = '\n'.join(sorted(data_exports)) + '\n' self.assertFileContents(test_file('core/test_dlfcn_self.exports'), data_exports) @@ -4025,9 +4025,7 @@ def dylink_testf(self, main, side=None, expected=None, force_c=False, main_cflag if getattr(self, 'dylink_reversed', False): # Test the reverse case. There we flip the role of the side module and main module. # - We add --no-entry since the side module doesn't have a `main` - side_ = side - side = main - main = side_ + side, main = main, side self.maybe_closure() # Same as dylink_test but takes source code as filenames on disc. old_args = self.cflags.copy() @@ -7901,7 +7899,7 @@ def test_source_map(self): # optimizer can deal with both types. map_filename = map_referent + '.map' - data = json.load(open(map_filename)) + data = json.loads(utils.read_file(map_filename)) if hasattr(data, 'file'): # the file attribute is optional, but if it is present it needs to refer # the output file. @@ -8193,7 +8191,7 @@ def test_asyncify_longjmp(self): # Test that a main with arguments is automatically asyncified. 
@with_asyncify_and_jspi def test_async_main(self): - create_file('main.c', r''' + create_file('main.c', r''' #include #include int main(int argc, char **argv) { @@ -8209,7 +8207,7 @@ def test_async_hello(self): # needs to flush stdio streams self.set_setting('EXIT_RUNTIME') - create_file('main.c', r''' + create_file('main.c', r''' #include #include void f(void *p) { @@ -8230,7 +8228,7 @@ def test_async_hello(self): @with_asyncify_and_jspi def test_async_loop(self): - create_file('main.c', r''' + create_file('main.c', r''' #include #include int main() { diff --git a/test/test_other.py b/test/test_other.py index f1ea94118a9b5..f5fab92327817 100644 --- a/test/test_other.py +++ b/test/test_other.py @@ -960,7 +960,7 @@ def test_cmake_bitcode_static_libraries(self): def test_cmake_compile_commands(self, args): self.run_process([EMCMAKE, 'cmake', test_file('cmake/static_lib'), '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON'] + args) self.assertExists('compile_commands.json') - compile_commands = json.load(open('compile_commands.json')) + compile_commands = json.loads(utils.read_file('compile_commands.json')) command = compile_commands[0]['command'] # Sometimes cmake puts the include dirs in an RSP file rsp = [p for p in command.split() if 'includes_CXX.rsp' in p] @@ -2889,7 +2889,7 @@ def test_prepost(self, no_initial_run, run_dep): # addRunDependency during preRun should prevent main, and post-run from # running. 
- with open('pre.js', 'a') as f: + with open('pre.js', 'a', encoding='utf-8') as f: f.write('Module["preRun"] = () => { out("add-dep"); addRunDependency("dep"); }\n') self.set_setting('DEFAULT_LIBRARY_FUNCS_TO_INCLUDE', '$addRunDependency') output = self.do_runf('hello_world.c', cflags=['--pre-js', 'pre.js', '-sRUNTIME_DEBUG', '-sWASM_ASYNC_COMPILATION=0', '-O2', '--closure=1']) @@ -2992,8 +2992,8 @@ def test_extern_prepost(self): 'safeHeap': (['safeHeap'],), 'object-literals': ([],), 'LittleEndianHeap': (['littleEndianHeap'],), - 'LittleEndianGrowableHeap': (['growableHeap','littleEndianHeap'],), - 'LittleEndianGrowableSafeHeap': (['safeHeap','growableHeap','littleEndianHeap'],), + 'LittleEndianGrowableHeap': (['growableHeap', 'littleEndianHeap'],), + 'LittleEndianGrowableSafeHeap': (['safeHeap', 'growableHeap', 'littleEndianHeap'],), }) @crossplatform def test_js_optimizer(self, passes, filename=None): @@ -3124,7 +3124,7 @@ def verify_dwarf_exists(self, wasm_file): # Verify if the given file name contains a source map def verify_source_map_exists(self, map_file): self.assertExists(map_file) - data = json.load(open(map_file)) + data = json.loads(utils.read_file(map_file)) # Simply check the existence of required sections self.assertIn('version', data) self.assertIn('sources', data) @@ -4191,7 +4191,7 @@ def test_demangle_malloc_infinite_loop_crash(self): output = self.run_js('a.out.js', assert_returncode=NON_ZERO) if output.count('Cannot enlarge memory arrays') > 5: print(output) - self.assertLess(output.count('Cannot enlarge memory arrays'), 6) + self.assertLess(output.count('Cannot enlarge memory arrays'), 6) @requires_node def test_module_exports_with_closure(self): @@ -4578,7 +4578,7 @@ def test_on_abort(self): def add_on_abort_and_verify(extra=''): js = read_file('a.out.js') - with open('a.out.js', 'w') as f: + with open('a.out.js', 'w', encoding='utf-8') as f: f.write("var Module = { onAbort: () => console.log('%s') };\n" % expected_output) f.write(extra + 
'\n') f.write(js) @@ -4625,7 +4625,7 @@ def add_on_abort_and_verify(extra=''): ''') self.run_process([EMCC, 'src.c', '-sWASM_ASYNC_COMPILATION=0']) js = read_file('a.out.js') - with open('a.out.js', 'w') as f: + with open('a.out.js', 'w', encoding='utf-8') as f: f.write("var Module = { onAbort: () => { console.log('%s'); throw 're-throw'; } };\n" % expected_output) f.write(js) out = self.run_js('a.out.js', assert_returncode=NON_ZERO) @@ -4908,9 +4908,9 @@ def test_warn_dylibs(self): for suffix in ('.o', '.bc', '.so', '.dylib', '.js', '.html'): print(suffix) cmd = [EMCC, test_file('hello_world.c'), '-o', 'out' + suffix] - if suffix in ['.o', '.bc']: + if suffix in {'.o', '.bc'}: cmd.append('-c') - if suffix in ['.dylib', '.so']: + if suffix in {'.dylib', '.so'}: cmd.append('-shared') err = self.run_process(cmd, stderr=PIPE).stderr warning = 'linking a library with `-shared` will emit a static object file' @@ -4943,7 +4943,7 @@ def get_minified_middle(symbols_file): def is_js_symbol_map(symbols_file): for _minified, full in read_symbol_map(symbols_file): # define symbolication file by JS specific entries - if full in ['FUNCTION_TABLE', 'HEAP32']: + if full in {'FUNCTION_TABLE', 'HEAP32'}: return True return False @@ -7441,14 +7441,14 @@ def test(page_diff): 'mimalloc_pthreads': ('mimalloc', ['-DWORKERS=4', '-pthread']), }) def test_malloc_multithreading(self, allocator, args): - args = args + [ + cflags = [ '-O2', '-DTOTAL=10000', '-sINITIAL_MEMORY=128mb', '-sTOTAL_STACK=1mb', f'-sMALLOC={allocator}', ] - self.do_other_test('test_malloc_multithreading.c', cflags=args) + self.do_other_test('test_malloc_multithreading.c', cflags=cflags + args) @parameterized({ '': ([], 'testbind.js'), @@ -8863,16 +8863,16 @@ def test_standalone_system_headers(self, prefix): # These headers cannot be included in isolation. 
# e.g: error: unknown type name 'EGLDisplay' # Don't include avxintrin.h and avx2inrin.h directly, include immintrin.h instead - if header in ['eglext.h', 'SDL_config_macosx.h', 'glext.h', 'gl2ext.h', 'avxintrin.h', 'avx2intrin.h']: + if header in {'eglext.h', 'SDL_config_macosx.h', 'glext.h', 'gl2ext.h', 'avxintrin.h', 'avx2intrin.h'}: continue # These headers are C++ only and cannot be included from C code. # But we still want to check they can be included on there own without # any errors or warnings. - cxx_only = header in [ + cxx_only = header in { 'wire.h', 'val.h', 'bind.h', # Some headers are not yet C compatible 'arm_neon.h', - ] + } if directory and directory != 'compat': header = f'{directory}/{header}' inc = f'#include <{header}>\n__attribute__((weak)) int foo;\n' @@ -8898,9 +8898,9 @@ def test_standalone_system_headers(self, prefix): 'closure': (False, True), }) @parameterized({ - '': (True,False), - 'disabled': (False,False), - 'binary_encode': (True,True), + '': (True, False), + 'disabled': (False, False), + 'binary_encode': (True, True), }) def test_single_file(self, debug_enabled, closure_enabled, single_file_enabled, single_file_binary_encoded): cmd = [EMCC, test_file('hello_world.c')] + self.get_cflags() @@ -9586,7 +9586,7 @@ def test_emcc_sourcemap_options(self, prefixes, sources): # "sourcesContent" contains source code iff -gsource-map=inline is specified. if sources: p = wasm_sourcemap.Prefixes(prefixes, preserve_deterministic_prefix=False) - for filepath in [src_file, lib_file]: + for filepath in {src_file, lib_file}: resolved_path = p.resolve(utils.normalize_path(filepath)) sources_content = json.dumps(read_file(resolved_path)) self.assertIn(sources_content, output) @@ -11280,7 +11280,7 @@ def test_files_and_module_assignment(self): def test_error(pre): create_file('pre.js', pre) expected = 'All preRun tasks that exist before user pre-js code should remain after; did you replace Module or modify Module.preRun?' 
- self.do_runf('src.c', expected, cflags=['--pre-js=pre.js', '--preload-file=src.c'], assert_returncode=NON_ZERO) + self.do_runf('src.c', expected, cflags=['--pre-js=pre.js', '--preload-file=src.c'], assert_returncode=NON_ZERO) # error if the user replaces Module or Module.preRun test_error('Module = { preRun: [] };') @@ -12647,7 +12647,7 @@ def create_o(name, i): count = 300 for i in range(count): name = 'a' + str(i) - name = name * 32 + name *= 32 create_o(name, i) create_file('main.c', '#include\n%s int main() { int value = 0; %s printf("%%d\\n", value); }' % (decls, calls)) @@ -13076,7 +13076,7 @@ def test_growable_arraybuffers(self): 'proxy': (['-sPROXY_TO_PTHREAD', '-sEXIT_RUNTIME'], 2), 'minimal': (['-sMINIMAL_RUNTIME', '-sMODULARIZE', '-sEXPORT_NAME=MyModule'],), }) - def test_pthread_growth(self, cflags, pthread_pool_size = 1): + def test_pthread_growth(self, cflags, pthread_pool_size=1): if WINDOWS and platform.machine() == 'ARM64': # https://github.com/emscripten-core/emscripten/issues/25627 # TODO: Switch this to a "require Node.js 24" check @@ -13414,7 +13414,7 @@ def check_for_es6(filename, expect): check_for_es6('test_closure.js', False) def test_node_prefix_transpile(self): - self.run_process([EMCC, test_file('hello_world.c'), '-sEXPORT_ES6']) + self.run_process([EMCC, test_file('hello_world.c'), '-sEXPORT_ES6']) content = read_file('a.out.js') self.assertContained('node:', content) @@ -14138,9 +14138,9 @@ def test_missing_struct_info(self): def run_wasi_test_suite_test(self, name): if not os.path.exists(path_from_root('test/third_party/wasi-test-suite')): self.fail('wasi-testsuite not found; run `git submodule update --init`') - wasm = path_from_root('test', 'third_party', 'wasi-test-suite', name + '.wasm') - with open(path_from_root('test', 'third_party', 'wasi-test-suite', name + '.json')) as f: - config = json.load(f) + wasm = path_from_root(f'test/third_party/wasi-test-suite/{name}.wasm') + json_data = 
utils.read_file(path_from_root(f'test/third_party/wasi-test-suite/{name}.json')) + config = json.loads(json_data) exit_code = config.get('exitCode', 0) args = config.get('args', []) env = config.get('env', []) @@ -14237,7 +14237,7 @@ def test_node_pthreads_err_out(self): @only_windows('This test verifies Windows batch script behavior against bug https://github.com/microsoft/terminal/issues/15212') @with_env_modify({'PATH': path_from_root() + os.pathsep + os.getenv('PATH')}) def test_windows_batch_file_dp0_expansion_bug(self): - create_file('build_with_quotes.bat', f'@"emcc" "{test_file("hello_world.c")}"') + create_file('build_with_quotes.bat', f'@"emcc" "{test_file("hello_world.c")}"') self.run_process(['build_with_quotes.bat']) @only_windows('Check that directory permissions are properly retrieved on Windows') @@ -14590,7 +14590,7 @@ def test_embind_no_exceptions(self): def test_embind_optional_val_no_bind(self): # Ensure passing std::optional to emscripten::val works if # was not included in the compilation unit using val. - self.run_process([EMXX,'-lembind', + self.run_process([EMXX, '-lembind', test_file('embind/test_optional_val_main.cpp'), test_file('embind/test_optional_val_lib.cpp')]) output = self.run_js('a.out.js') @@ -15031,7 +15031,7 @@ def test_instantiate_wasm(self): return {}; // Compiling asynchronously, no exports. }''') # Test with ASYNCIFY here to ensure that that wasmExports gets set to the wrapped version of the wasm exports. 
- self.do_runf('test_manual_wasm_instantiate.c', cflags=['--pre-js=pre.js','-sASYNCIFY','-DASYNCIFY_ENABLED']) + self.do_runf('test_manual_wasm_instantiate.c', cflags=['--pre-js=pre.js', '-sASYNCIFY', '-DASYNCIFY_ENABLED']) def test_late_module_api_assignment(self): # When sync instantiation is used (or when async/await is used in MODULARIZE mode) certain @@ -15342,8 +15342,7 @@ def test_empath_split(self): def has_defined_function(file, func): self.run_process([common.WASM_DIS, file, '-o', 'test.wast']) pattern = re.compile(r'^\s*\(\s*func\s+\$' + func + r'[\s\(\)]', flags=re.MULTILINE) - with open('test.wast') as f: - return pattern.search(f.read()) is not None + return pattern.search(utils.read_file('test.wast')) is not None # main.cpp self.assertTrue(has_defined_function('test_myapp.wasm', '__original_main')) diff --git a/test/test_sanity.py b/test/test_sanity.py index 05fd283b82156..26e54be4319e0 100644 --- a/test/test_sanity.py +++ b/test/test_sanity.py @@ -79,7 +79,7 @@ def make_fake_tool(filename, version, report_name=None, extra_output=None): report_name = os.path.basename(filename) print('make_fake_tool: %s' % filename) ensure_dir(os.path.dirname(filename)) - with open(filename, 'w') as f: + with open(filename, 'w', encoding='utf-8') as f: f.write('#!/bin/sh\n') f.write('echo "%s version %s"\n' % (report_name, version)) f.write('echo "..."\n') @@ -224,7 +224,7 @@ def make_new_executable(name): possible_nodes.append('/usr/bin/nodejs') self.assertIdentical(possible_nodes, re.search("^ *NODE_JS *= (.*)$", output, re.M).group(1)) - template_data = Path(path_from_root('tools/config_template.py')).read_text() + template_data = utils.read_file(path_from_root('tools/config_template.py')) self.assertNotContained('{{{', config_data) self.assertNotContained('}}}', config_data) self.assertContained('{{{', template_data) @@ -620,7 +620,7 @@ def test_js_engine_path(self): print(filename, engine) test_engine_path = os.path.join(test_path, filename) - with 
open(test_engine_path, 'w') as f: + with open(test_engine_path, 'w', encoding='utf-8') as f: f.write('#!/bin/sh\n') f.write('exec %s $@\n' % (engine)) make_executable(test_engine_path) diff --git a/tools/building.py b/tools/building.py index 93dcc057f99ec..9bae06e798ada 100644 --- a/tools/building.py +++ b/tools/building.py @@ -182,11 +182,11 @@ def lld_flags_for_executable(external_symbols): # wasm-ld can strip debug info for us. this strips both the Names # section and DWARF, so we can only use it when we don't need any of # those things. - if (not settings.GENERATE_DWARF and - not settings.EMIT_SYMBOL_MAP and - not settings.GENERATE_SOURCE_MAP and - not settings.EMIT_NAME_SECTION and - not settings.ASYNCIFY): + if (not settings.GENERATE_DWARF and + not settings.EMIT_SYMBOL_MAP and + not settings.GENERATE_SOURCE_MAP and + not settings.EMIT_NAME_SECTION and + not settings.ASYNCIFY): cmd.append('--strip-debug') if settings.LINKABLE: @@ -283,7 +283,7 @@ def lld_flags(args): # lld doesn't currently support --start-group/--end-group since the # semantics are more like the windows linker where there is no need for # grouping. - args = [a for a in args if a not in ('--start-group', '--end-group')] + args = [a for a in args if a not in {'--start-group', '--end-group'}] # Emscripten currently expects linkable output (SIDE_MODULE/MAIN_MODULE) to # include all archive contents. @@ -390,7 +390,7 @@ def acorn_optimizer(filename, passes, extra_info=None, return_output=False, work temp_files = shared.get_temp_files() temp = temp_files.get('.js', prefix='emcc_acorn_info_').name shutil.copyfile(filename, temp) - with open(temp, 'a') as f: + with open(temp, 'a', encoding='utf-8') as f: f.write('// EXTRA_INFO: ' + json.dumps(extra_info)) filename = temp cmd = config.NODE_JS + [optimizer, filename] + passes @@ -744,7 +744,7 @@ def move_to_safe_7bit_ascii_filename(filename): # Print input file (long wall of text!) 
if DEBUG == 2 and (proc.returncode != 0 or (len(proc.stderr.strip()) > 0 and closure_warnings['enabled'])): - input_file = open(filename).read().splitlines() + input_file = utils.read_file(filename).splitlines() for i in range(len(input_file)): sys.stderr.write(f'{i + 1}: {input_file[i]}\n') @@ -898,7 +898,7 @@ def metadce(js_file, wasm_file, debug_info, last): name = line.removeprefix('unused:').strip() # With dynamic linking we never want to strip the memory or the table # This can be removed once SIDE_MODULE_IMPORTS includes tables and memories. - if settings.MAIN_MODULE and name.split('$')[-1] in ('wasmMemory', 'wasmTable'): + if settings.MAIN_MODULE and name.split('$')[-1] in {'wasmMemory', 'wasmTable'}: continue # we only remove imports and exports in applyDCEGraphRemovals if name.startswith('emcc$import$'): @@ -996,7 +996,7 @@ def wasm2js(js_file, wasm_file, opt_level, use_closure_compiler, debug_info, sym # purpose JS minifier here. if use_closure_compiler == 2: temp = shared.get_temp_files().get('.js').name - with open(temp, 'a') as f: + with open(temp, 'a', encoding='utf-8') as f: f.write(wasm2js_js) temp = closure_compiler(temp, advanced=False) wasm2js_js = utils.read_file(temp) @@ -1015,7 +1015,7 @@ def wasm2js(js_file, wasm_file, opt_level, use_closure_compiler, debug_info, sym marker = finds[0] all_js = all_js.replace(marker, f'(\n{wasm2js_js}\n)') # replace the placeholder with the actual code - js_file = js_file + '.wasm2js.js' + js_file += '.wasm2js.js' utils.write_file(js_file, all_js) return js_file @@ -1135,7 +1135,7 @@ def is_ar(filename): logger.debug('is_ar failed to test whether file \'%s\' is a llvm archive file! 
Failed on exception: %s' % (filename, e)) return False - return header in (b'!\n', b'!\n') + return header in {b'!\n', b'!\n'} def is_wasm(filename): @@ -1153,7 +1153,7 @@ def is_wasm_dylib(filename): section = next(module.sections()) if section.type == webassembly.SecType.CUSTOM: module.seek(section.offset) - if module.read_string() in ('dylink', 'dylink.0'): + if module.read_string() in {'dylink', 'dylink.0'}: return True return False @@ -1168,7 +1168,7 @@ def emit_wasm_source_map(wasm_file, map_file, final_wasm): wasm_sourcemap = importlib.import_module('tools.wasm-sourcemap') sourcemap_cmd = [wasm_file, '--dwarfdump=' + LLVM_DWARFDUMP, - '-o', map_file, + '-o', map_file, '--basepath=' + base_path] if settings.SOURCE_MAP_PREFIXES: @@ -1214,7 +1214,7 @@ def check_binaryen(bindir): # Allow the expected version or the following one in order avoid needing to update both # emscripten and binaryen in lock step in emscripten-releases. - if version not in (EXPECTED_BINARYEN_VERSION, EXPECTED_BINARYEN_VERSION + 1): + if version not in {EXPECTED_BINARYEN_VERSION, EXPECTED_BINARYEN_VERSION + 1}: diagnostics.warning('version-check', 'unexpected binaryen version: %s (expected %s)', version, EXPECTED_BINARYEN_VERSION) @@ -1259,7 +1259,7 @@ def run_binaryen_command(tool, infile, outfile=None, args=None, debug=False, std # we must tell binaryen to update it # TODO: all tools should support source maps; wasm-ctor-eval does not atm, # for example - if settings.GENERATE_SOURCE_MAP and outfile and tool in ['wasm-opt', 'wasm-emscripten-finalize', 'wasm-metadce']: + if settings.GENERATE_SOURCE_MAP and outfile and tool in {'wasm-opt', 'wasm-emscripten-finalize', 'wasm-metadce'}: cmd += [f'--input-source-map={infile}.map'] cmd += [f'--output-source-map={outfile}.map'] if shared.SKIP_SUBPROCS: diff --git a/tools/cmdline.py b/tools/cmdline.py index 6436aec27f669..d708a09f5c5a7 100644 --- a/tools/cmdline.py +++ b/tools/cmdline.py @@ -407,7 +407,7 @@ def consume_arg_file(): 
settings.SEPARATE_DWARF = True settings.GENERATE_DWARF = 1 settings.DEBUG_LEVEL = 3 - elif debug_level in ['source-map', 'source-map=inline']: + elif debug_level in {'source-map', 'source-map=inline'}: settings.GENERATE_SOURCE_MAP = 1 if debug_level == 'source-map' else 2 newargs[i] = '-g' elif debug_level == 'z': @@ -507,9 +507,9 @@ def consume_arg_file(): options.cpu_profiler = True elif check_flag('--threadprofiler'): settings.PTHREADS_PROFILING = 1 - elif arg in ('-fcolor-diagnostics', '-fdiagnostics-color', '-fdiagnostics-color=always'): + elif arg in {'-fcolor-diagnostics', '-fdiagnostics-color', '-fdiagnostics-color=always'}: colored_logger.enable(force=True) - elif arg in ('-fno-color-diagnostics', '-fno-diagnostics-color', '-fdiagnostics-color=never'): + elif arg in {'-fno-color-diagnostics', '-fno-diagnostics-color', '-fdiagnostics-color=never'}: colored_logger.disable() elif arg == '-fno-exceptions': settings.DISABLE_EXCEPTION_CATCHING = 1 @@ -588,17 +588,17 @@ def consume_arg_file(): options.output_file = arg.removeprefix('-o') elif check_arg('-target') or check_arg('--target'): options.target = consume_arg() - if options.target not in ('wasm32', 'wasm64', 'wasm64-unknown-emscripten', 'wasm32-unknown-emscripten'): + if options.target not in {'wasm32', 'wasm64', 'wasm64-unknown-emscripten', 'wasm32-unknown-emscripten'}: exit_with_error(f'unsupported target: {options.target} (emcc only supports wasm64-unknown-emscripten and wasm32-unknown-emscripten)') elif check_arg('--use-port'): ports.handle_use_port_arg(settings, consume_arg()) - elif arg in ('-c', '--precompile'): + elif arg in {'-c', '--precompile'}: options.dash_c = True elif arg == '-S': options.dash_S = True elif arg == '-E': options.dash_E = True - elif arg in ('-M', '-MM'): + elif arg in {'-M', '-MM'}: options.dash_M = True elif arg.startswith('-x'): # TODO(sbc): Handle multiple -x flags on the same command line @@ -711,7 +711,7 @@ def parse_string_list(text): if text[-1] != ']': raise 
ValueError('unterminated string list. expected final character to be "]"') text = text[1:-1] - if text.strip() == "": + if not text.strip(): return [] return parse_string_list_members(text) @@ -724,7 +724,7 @@ def parse_string_list(text): return parse_string_list(text) # if we succeeded in parsing as json, check some properties of it before returning - if type(parsed) not in (str, list): + if type(parsed) not in {str, list}: raise ValueError(f'settings must be strings or lists (not {type(parsed)})') if type(parsed) is list: for elem in parsed: @@ -810,7 +810,7 @@ def normalize_boolean_setting(name, value): # (note that *non*-boolean setting values have special meanings, # and we can't just flip them, so leave them as-is to be # handled in a special way later) - if name.startswith('NO_') and value in ('0', '1'): + if name.startswith('NO_') and value in {'0', '1'}: name = name.removeprefix('NO_') value = str(1 - int(value)) return name, value diff --git a/tools/config.py b/tools/config.py index fb3b19d1dfacb..d22f2f017c65d 100644 --- a/tools/config.py +++ b/tools/config.py @@ -124,12 +124,12 @@ def parse_config_file(): env_var = 'EM_' + key env_value = os.environ.get(env_var) if env_value is not None: - if env_value in ('', '0'): + if env_value in {'', '0'}: env_value = None # Unlike the other keys these two should always be lists. 
- if env_var in ('EM_JS_ENGINES', 'EM_WASM_ENGINES'): + if env_var in {'EM_JS_ENGINES', 'EM_WASM_ENGINES'}: env_value = env_value.split(',') - if env_var in ('EM_CONFIG', 'EM_CACHE', 'EM_PORTS', 'EM_LLVM_ROOT', 'EM_BINARYEN_ROOT'): + if env_var in {'EM_CONFIG', 'EM_CACHE', 'EM_PORTS', 'EM_LLVM_ROOT', 'EM_BINARYEN_ROOT'}: if not os.path.isabs(env_value): exit_with_error(f'environment variable {env_var} must be an absolute path: {env_value}') globals()[key] = env_value diff --git a/tools/diagnostics.py b/tools/diagnostics.py index db4f1ed70a948..23312da18dcdb 100644 --- a/tools/diagnostics.py +++ b/tools/diagnostics.py @@ -38,7 +38,7 @@ def diag(level, msg, *args): prefix = level_prefixes[level] color = level_colors[level] if args: - msg = msg % args + msg %= args # Add colors prefix = colored_logger.with_bold_color(color, prefix) diff --git a/tools/emcoverage.py b/tools/emcoverage.py index 8695f3bb1e551..99ecb8d7af3cf 100755 --- a/tools/emcoverage.py +++ b/tools/emcoverage.py @@ -60,7 +60,7 @@ def main(): shutil.rmtree(store) return - if sys.argv[1] in ('html', 'report', 'xml'): + if sys.argv[1] in {'html', 'report', 'xml'}: old_argv = sys.argv sys.argv = ['coverage', 'combine'] + glob(os.path.join(store, '*')) with contextlib.suppress(SystemExit): diff --git a/tools/empath-split.py b/tools/empath-split.py index cb828602a4be9..c29841e16d9c4 100755 --- a/tools/empath-split.py +++ b/tools/empath-split.py @@ -142,9 +142,9 @@ def check_errors(args): # Check source map validity. Just perform simple checks to make sure mandatory # fields exist. 
+ json_data = utils.read_file(sourcemap) try: - with open(sourcemap) as f: - source_map_data = json.load(f) + source_map_data = json.loads(json_data) except json.JSONDecodeError: exit_with_error(f'Invalid JSON format in file {args.sourcemap}') for field in ['version', 'sources', 'mappings']: @@ -160,11 +160,11 @@ def get_sourceMappingURL(wasm, arg_sourcemap): def print_sources(sourcemap): - with open(sourcemap) as f: - sources = json.load(f).get('sources') - assert isinstance(sources, list) - for src in sources: - print(src) + contents = utils.read_file(sourcemap) + sources = json.loads(contents).get('sources') + assert isinstance(sources, list) + for src in sources: + print(src) def get_path_to_functions_map(wasm, sourcemap, paths): @@ -319,7 +319,7 @@ def main(): path_to_funcs = get_path_to_functions_map(args.wasm, sourcemap, all_paths) # Write .manifest file - f = tempfile.NamedTemporaryFile(suffix=".manifest", mode='w+', delete=False) + f = tempfile.NamedTemporaryFile(suffix=".manifest", mode='w', encoding='utf-8', delete=False) manifest = f.name try: for i, (module, paths) in enumerate(module_to_paths.items()): diff --git a/tools/emprofile.py b/tools/emprofile.py index c0cc6355dc1c4..12152fa2dea57 100755 --- a/tools/emprofile.py +++ b/tools/emprofile.py @@ -39,7 +39,7 @@ def create_profiling_graph(outfile): print(f'Processing {len(log_files)} profile log files in {profiler_logs_path}...') for f in log_files: print(f'Processing: {f}') - json_data = Path(f).read_text() + json_data = Path(f).read_text(encoding='utf-8') if len(json_data.strip()) == 0: continue lines = json_data.split('\n') @@ -62,8 +62,8 @@ def create_profiling_graph(outfile): emprofile_json_data = json.dumps(all_results, indent=2) html_file = outfile + '.html' - html_contents = Path(os.path.dirname(os.path.realpath(__file__)), 'toolchain_profiler.results_template.html').read_text().replace('{{{ emprofile_json_data }}}', emprofile_json_data) - Path(html_file).write_text(html_contents) + 
html_contents = Path(os.path.dirname(os.path.realpath(__file__)), 'toolchain_profiler.results_template.html').read_text(encoding='utf-8').replace('{{{ emprofile_json_data }}}', emprofile_json_data) + Path(html_file).write_text(html_contents, encoding='utf-8') print(f'Wrote "{html_file}"') return 0 diff --git a/tools/emscripten.py b/tools/emscripten.py index 00afe2a4944cb..d9e7384a0c934 100644 --- a/tools/emscripten.py +++ b/tools/emscripten.py @@ -211,7 +211,7 @@ def compile_javascript(symbols_only=False): if stderr_file: stderr_file = os.path.abspath(stderr_file) logger.info('logging stderr in js compiler phase into %s' % stderr_file) - stderr_file = open(stderr_file, 'w') + stderr_file = open(stderr_file, 'w', encoding='utf-8') # Save settings to a file to work around v8 issue 1579 settings_json = json.dumps(settings.external_dict(), sort_keys=True, indent=2) @@ -938,7 +938,7 @@ def install_debug_wrapper(sym): # `__trap` can occur before the runtime is initialized since it is used in abort. # `emscripten_get_sbrk_ptr` can be called prior to runtime initialization by # the dynamic linking code. 
- return sym not in ['__trap', 'emscripten_get_sbrk_ptr'] + return sym not in {'__trap', 'emscripten_get_sbrk_ptr'} def should_export(sym): diff --git a/tools/extract_metadata.py b/tools/extract_metadata.py index e57d76f6871f5..8459f3f723fa4 100644 --- a/tools/extract_metadata.py +++ b/tools/extract_metadata.py @@ -121,9 +121,9 @@ def parse_function_for_memory_inits(module, func_index, offset_map): assert False, "unknown: %s" % opcode case OpCode.ATOMIC_PREFIX: opcode = AtomicOpCode(module.read_byte()) - if opcode in (AtomicOpCode.ATOMIC_I32_RMW_CMPXCHG, AtomicOpCode.ATOMIC_I32_STORE, + if opcode in {AtomicOpCode.ATOMIC_I32_RMW_CMPXCHG, AtomicOpCode.ATOMIC_I32_STORE, AtomicOpCode.ATOMIC_NOTIFY, AtomicOpCode.ATOMIC_WAIT32, - AtomicOpCode.ATOMIC_WAIT64): + AtomicOpCode.ATOMIC_WAIT64}: module.read_uleb() module.read_uleb() else: @@ -276,7 +276,7 @@ def read_module_imports(module, metadata): if i.field in em_js_funcs: em_js_func_types[i.field] = types[i.type] imports.append(i.field) - elif i.kind in (webassembly.ExternType.GLOBAL, webassembly.ExternType.TAG): + elif i.kind in {webassembly.ExternType.GLOBAL, webassembly.ExternType.TAG}: imports.append(i.field) diff --git a/tools/file_packager.py b/tools/file_packager.py index daaceaec352b4..6591548a3c3a1 100755 --- a/tools/file_packager.py +++ b/tools/file_packager.py @@ -245,8 +245,8 @@ def escape(c): if c_symbol in used: counter = 2 while c_symbol + str(counter) in used: - counter = counter + 1 - c_symbol = c_symbol + str(counter) + counter += 1 + c_symbol += str(counter) used.add(c_symbol) return c_symbol @@ -261,7 +261,7 @@ def generate_object_file(data_files): for f in embed_files: f.c_symbol_name = '__em_file_data_%s' % to_c_symbol(f.dstpath, used) - with open(asm_file, 'w') as out: + with open(asm_file, 'w', encoding='utf-8') as out: out.write('# Emscripten embedded file data, generated by tools/file_packager.py\n') for f in embed_files: @@ -524,11 +524,11 @@ def main(): # noqa: C901, PLR0912, PLR0915 # If user 
has submitted a directory name as the destination but omitted # the destination filename, use the filename from source file if file_.dstpath.endswith('/'): - file_.dstpath = file_.dstpath + os.path.basename(file_.srcpath) + file_.dstpath += os.path.basename(file_.srcpath) # make destination path always relative to the root file_.dstpath = posixpath.normpath(os.path.join('/', file_.dstpath)) if DEBUG: - err('Packaging file "%s" to VFS in path "%s".' % (file_.srcpath, file_.dstpath)) + err('Packaging file "%s" to VFS in path "%s".' % (file_.srcpath, file_.dstpath)) # Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/) seen = set() @@ -552,7 +552,7 @@ def was_seen(name): if options.jsoutput: targets.append(data_target) targets.append(options.jsoutput) - with open(options.depfile, 'w') as f: + with open(options.depfile, 'w', encoding='utf-8') as f: for target in targets: if target: f.write(escape_for_makefile(target)) @@ -628,7 +628,7 @@ def generate_preload_js(data_target, data_files, metadata): ret += " var isNode = globalThis.process && globalThis.process.versions && globalThis.process.versions.node && globalThis.process.type != 'renderer';\n" if options.support_node and options.export_es6: - ret += '''if (isNode) { + ret += '''if (isNode) { const { createRequire } = await import('node:module'); /** @suppress{duplicate} */ var require = createRequire(import.meta.url); @@ -649,7 +649,7 @@ def generate_preload_js(data_target, data_files, metadata): assert file_.mode == 'preload' dirname = os.path.dirname(file_.dstpath) dirname = dirname.lstrip('/') # absolute paths start with '/', remove that - if dirname != '': + if dirname: parts = dirname.split('/') for i in range(len(parts)): partial = '/'.join(parts[:i + 1]) diff --git a/tools/gen_struct_info.py b/tools/gen_struct_info.py index dee74814d52d6..3f99e1a555302 100755 --- a/tools/gen_struct_info.py +++ b/tools/gen_struct_info.py @@ -290,7 +290,7 @@ def inspect_code(headers, cflags): 
def parse_json(path): header_files = [] - with open(path) as stream: + with open(path, encoding='utf-8') as stream: # Remove comments before loading the JSON. data = json.loads(re.sub(r'//.*\n', '', stream.read())) @@ -299,7 +299,7 @@ def parse_json(path): for item in data: for key in item: - if key not in ['file', 'defines', 'structs']: + if key not in {'file', 'defines', 'structs'}: raise 'Unexpected key in json file: %s' % key header = {'name': item['file'], 'structs': {}, 'defines': {}} @@ -393,7 +393,7 @@ def main(args): else: output_file = utils.path_from_root('src/struct_info_generated.json') - with open(output_file, 'w') as f: + with open(output_file, 'w', encoding='utf-8') as f: output_json(info, f) return 0 diff --git a/tools/install.py b/tools/install.py index a279df86d46d4..807ef6cff3c29 100755 --- a/tools/install.py +++ b/tools/install.py @@ -50,7 +50,7 @@ def add_revision_file(target): # text=True would be better than encoding here, but it's only supported in 3.7+ git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD'], encoding='utf-8').strip() - with open(os.path.join(target, 'emscripten-revision.txt'), 'w') as f: + with open(os.path.join(target, 'emscripten-revision.txt'), 'w', encoding='utf-8') as f: f.write(git_hash + '\n') diff --git a/tools/js_optimizer.py b/tools/js_optimizer.py index cb3836a074651..c5de59a38e6f9 100755 --- a/tools/js_optimizer.py +++ b/tools/js_optimizer.py @@ -91,7 +91,7 @@ def minify_shell(self, shell, minify_whitespace): self.globs = [] with temp_files.get_file('.minifyglobals.js') as temp_file: - with open(temp_file, 'w') as f: + with open(temp_file, 'w', encoding='utf-8') as f: f.write(shell) f.write('\n') f.write('// EXTRA_INFO:' + json.dumps(self.serialize())) @@ -282,7 +282,7 @@ def write_chunk(chunk, i): with temp_files.get_file('.cl.js') as cle: pre_1, pre_2 = pre.split(start_asm) post_1, post_2 = post.split(end_asm) - with open(cle, 'w') as f: + with open(cle, 'w', encoding='utf-8') as f: f.write(pre_1) 
f.write(cl_sep) f.write(post_2) @@ -322,7 +322,7 @@ def write_chunk(chunk, i): filename += '.jo.js' temp_files.note(filename) - with open(filename, 'w') as f: + with open(filename, 'w', encoding='utf-8') as f: with ToolchainProfiler.profile_block('write_pre'): f.write(pre) pre = None diff --git a/tools/link.py b/tools/link.py index 919d6ef5431d4..eb850b597b87e 100644 --- a/tools/link.py +++ b/tools/link.py @@ -462,7 +462,7 @@ def make_js_executable(script): logger.debug(f'adding `#!` to JavaScript file: {settings.EXECUTABLE}') # add shebang - with open(script, 'w') as f: + with open(script, 'w', encoding='utf-8') as f: f.write(f'#!{settings.EXECUTABLE}\n') f.write(src) try: @@ -924,14 +924,14 @@ def phase_linker_setup(options, linker_args): # noqa: C901, PLR0912, PLR0915 else: options.oformat = OFormat.JS - if options.oformat in (OFormat.WASM, OFormat.OBJECT): + if options.oformat in {OFormat.WASM, OFormat.OBJECT}: for s in JS_ONLY_SETTINGS: if s in user_settings: diagnostics.warning('unused-command-line-argument', f'{s} is only valid when generating JavaScript output') # When there is no final suffix or the suffix is `.out` (as in `a.out`) then default to # making the resulting file exectuable. 
- if settings.ENVIRONMENT_MAY_BE_NODE and options.oformat == OFormat.JS and final_suffix in ('', '.out'): + if settings.ENVIRONMENT_MAY_BE_NODE and options.oformat == OFormat.JS and final_suffix in {'', '.out'}: default_setting('EXECUTABLE', 1) if settings.EXECUTABLE and not settings.ENVIRONMENT_MAY_BE_NODE: @@ -964,7 +964,7 @@ def phase_linker_setup(options, linker_args): # noqa: C901, PLR0912, PLR0915 if settings.ABORT_ON_WASM_EXCEPTIONS: exit_with_error('WASM_ESM_INTEGRATION is not compatible with ABORT_ON_WASM_EXCEPTIONS') - if settings.MODULARIZE and settings.MODULARIZE not in [1, 'instance']: + if settings.MODULARIZE and settings.MODULARIZE not in {1, 'instance'}: exit_with_error(f'Invalid setting "{settings.MODULARIZE}" for MODULARIZE.') def limit_incoming_module_api(): @@ -992,7 +992,7 @@ def limit_incoming_module_api(): if settings.MINIMAL_RUNTIME and len(options.preload_files): exit_with_error('MINIMAL_RUNTIME is not compatible with --preload-file') - if options.oformat in (OFormat.WASM, OFormat.BARE): + if options.oformat in {OFormat.WASM, OFormat.BARE}: if options.emit_tsd: exit_with_error('Wasm only output is not compatible with --emit-tsd') # If the user asks directly for a wasm file then this *is* the target @@ -1005,7 +1005,7 @@ def limit_incoming_module_api(): # Otherwise the wasm file is produced alongside the final target. wasm_target = get_secondary_target(target, '.wasm') - if settings.SAFE_HEAP not in [0, 1, 2]: + if settings.SAFE_HEAP not in {0, 1, 2}: exit_with_error('SAFE_HEAP must be 0, 1 or 2') if not settings.WASM: @@ -1217,7 +1217,7 @@ def limit_incoming_module_api(): settings.USE_CLOSURE_COMPILER = 1 if 'CLOSURE_WARNINGS' in user_settings: - if settings.CLOSURE_WARNINGS not in ['quiet', 'warn', 'error']: + if settings.CLOSURE_WARNINGS not in {'quiet', 'warn', 'error'}: exit_with_error('invalid option -sCLOSURE_WARNINGS=%s specified! Allowed values are "quiet", "warn" or "error".' 
% settings.CLOSURE_WARNINGS) closure_warnings = diagnostics.manager.warnings['closure'] if settings.CLOSURE_WARNINGS == 'error': @@ -1309,7 +1309,7 @@ def limit_incoming_module_api(): # dyncalls which call into the wasm, which then does an indirect call. settings.DYNCALLS = 1 - if options.oformat != OFormat.OBJECT and final_suffix in ('.o', '.bc', '.so', '.dylib') and not settings.SIDE_MODULE: + if options.oformat != OFormat.OBJECT and final_suffix in {'.o', '.bc', '.so', '.dylib'} and not settings.SIDE_MODULE: diagnostics.warning('emcc', 'object file output extension (%s) used for non-object output. If you meant to build an object file please use `-c, `-r`, or `-shared`' % final_suffix) if settings.SUPPORT_BIG_ENDIAN: @@ -1606,7 +1606,7 @@ def limit_incoming_module_api(): if will_metadce() and \ settings.OPT_LEVEL >= 2 and \ settings.DEBUG_LEVEL <= 2 and \ - options.oformat not in (OFormat.WASM, OFormat.BARE) and \ + options.oformat not in {OFormat.WASM, OFormat.BARE} and \ settings.ASYNCIFY != 2 and \ not settings.LINKABLE and \ not settings.STANDALONE_WASM and \ @@ -1637,7 +1637,7 @@ def limit_incoming_module_api(): settings.INCOMING_MODULE_JS_API += ['loadSplitModule'] # wasm side modules have suffix .wasm - if settings.SIDE_MODULE and utils.suffix(target) in ('.js', '.mjs'): + if settings.SIDE_MODULE and utils.suffix(target) in {'.js', '.mjs'}: diagnostics.warning('emcc', 'JavaScript output suffix requested, but wasm side modules are just wasm files; emitting only a .wasm, no .js') if options.sanitize: @@ -1889,7 +1889,7 @@ def phase_post_link(options, in_wasm, wasm_target, target, js_syms, base_metadat settings.TARGET_BASENAME = unsuffixed_basename(target) - if options.oformat in (OFormat.JS, OFormat.MJS): + if options.oformat in {OFormat.JS, OFormat.MJS}: js_target = target else: js_target = get_secondary_target(target, '.js') @@ -2434,8 +2434,8 @@ def modularize(): # FIXME(https://github.com/emscripten-core/emscripten/issues/24558): Running acorn at this # 
late phase seems to cause OOM (some kind of infinite loop perhaps) in node. # Instead we minify src/modularize.js in isolation above. - #if settings.MINIFY_WHITESPACE: - # final_js = building.acorn_optimizer(final_js, ['--minify-whitespace']) + # if settings.MINIFY_WHITESPACE: + # final_js = building.acorn_optimizer(final_js, ['--minify-whitespace']) def module_export_name_substitution(): @@ -2854,7 +2854,7 @@ def process_dynamic_libs(dylibs, lib_dirs): settings.SIDE_MODULE_EXPORTS.extend(sorted(exports)) imports = webassembly.get_imports(dylib) - imports = [i.field for i in imports if i.kind in (webassembly.ExternType.FUNC, webassembly.ExternType.GLOBAL, webassembly.ExternType.TAG)] + imports = [i.field for i in imports if i.kind in {webassembly.ExternType.FUNC, webassembly.ExternType.GLOBAL, webassembly.ExternType.TAG}] # For now we ignore `invoke_` functions imported by side modules and rely # on the dynamic linker to create them on the fly. # TODO(sbc): Integrate with metadata.invoke_funcs that comes from the diff --git a/tools/maint/add_license.py b/tools/maint/add_license.py index d3e571558a11e..577f577fe6447 100755 --- a/tools/maint/add_license.py +++ b/tools/maint/add_license.py @@ -62,9 +62,9 @@ def process_file(filename): if any(filename.startswith(ex) for ex in exclude_filenames): return ext = os.path.splitext(filename)[1] - if ext not in ('.py', '.c', '.cpp', '.h', '.js'): + if ext not in {'.py', '.c', '.cpp', '.h', '.js'}: return - with open(filename) as f: + with open(filename, encoding='utf-8') as f: contents = f.read() header = '\n'.join(contents.splitlines()[:30]) if any(ex in header for ex in exclude_contents): @@ -72,7 +72,7 @@ def process_file(filename): output = subprocess.check_output(['git', 'log', '--pretty=format:%cd', '--date=format:%Y', filename]) year = output.splitlines()[-1].split()[0] print(filename) - with open(filename, 'w') as f: + with open(filename, 'w', encoding='utf-8') as f: if ext == '.py': if contents.startswith('#!'): 
line1, rest = contents.split('\n', 1) @@ -81,18 +81,18 @@ def process_file(filename): f.write(py_license % year) if not contents.startswith('\n'): f.write('\n') - elif ext in ('.c', '.h'): + elif ext in {'.c', '.h'}: f.write(c_license % year) if not contents.startswith('\n'): f.write('\n') - elif ext in ('.cpp', '.js'): - if contents.startswith('/*\n'): - contents = contents[3:] - f.write(c_license_base % year) - else: - f.write(cpp_license % year) - if not contents.startswith('\n'): - f.write('\n') + elif ext in {'.cpp', '.js'}: + if contents.startswith('/*\n'): + contents = contents[3:] + f.write(c_license_base % year) + else: + f.write(cpp_license % year) + if not contents.startswith('\n'): + f.write('\n') else: assert False f.write(contents) diff --git a/tools/maint/check_emcc_help_text.py b/tools/maint/check_emcc_help_text.py index f8e8399431b7d..79cc80d6377dd 100755 --- a/tools/maint/check_emcc_help_text.py +++ b/tools/maint/check_emcc_help_text.py @@ -23,8 +23,8 @@ def main(): print('doc build output not found: %s' % build_output) return 1 - emcc_docs_output = Path(build_output).read_text() - emcc_docs = Path(docs_file).read_text() + emcc_docs_output = Path(build_output).read_text(encoding='utf-8') + emcc_docs = Path(docs_file).read_text(encoding='utf-8') if emcc_docs_output != emcc_docs: print('contents of checked in docs/emcc.txt does not match build output:') diff --git a/tools/maint/check_for_closed_issues.py b/tools/maint/check_for_closed_issues.py index 7bbfd8ef47978..0f2d011a501cf 100755 --- a/tools/maint/check_for_closed_issues.py +++ b/tools/maint/check_for_closed_issues.py @@ -22,7 +22,7 @@ def run(*args, **kwargs): def is_closed(issue_number): output = run(['gh', 'issue', 'view', '--json', 'state', issue_number]) state = json.loads(output)['state'] - assert state in ['OPEN', 'CLOSED'] + assert state in {'OPEN', 'CLOSED'} return state == 'CLOSED' diff --git a/tools/maint/create_dom_pk_codes.py b/tools/maint/create_dom_pk_codes.py index 
6268e24320668..552461e594d81 100755 --- a/tools/maint/create_dom_pk_codes.py +++ b/tools/maint/create_dom_pk_codes.py @@ -30,6 +30,8 @@ # Use #include in your code to access these IDs. +# ruff: noqa: E241 + import os import random import sys @@ -281,8 +283,8 @@ def longest_key_code_length(): c_filename = os.path.join(root, 'system/lib/html5/dom_pk_codes.c') print(f'Writing: {h_filename}') print(f'Writing: {c_filename}') -h_file = open(h_filename, 'w') -c_file = open(c_filename, 'w') +h_file = open(h_filename, 'w', encoding='utf-8') +c_file = open(c_filename, 'w', encoding='utf-8') # Generate the output file: diff --git a/tools/maint/create_entry_points.py b/tools/maint/create_entry_points.py index 889a0e68106b5..0c7ca5fbec6e5 100755 --- a/tools/maint/create_entry_points.py +++ b/tools/maint/create_entry_points.py @@ -75,6 +75,16 @@ def maybe_remove(filename): os.remove(filename) +def read_file(filename): + with open(filename, encoding='utf-8') as f: + return f.read() + + +def write_file(filename, content): + with open(filename, 'w', encoding='utf-8') as f: + f.write(content) + + def main(all_platforms, use_exe_files): is_windows = sys.platform.startswith('win') is_msys2 = 'MSYSTEM' in os.environ @@ -85,12 +95,9 @@ def generate_entry_points(cmd, path): sh_file = path + '.sh' bat_file = path + '.bat' ps1_file = path + '.ps1' - with open(sh_file) as f: - sh_file = f.read() - with open(bat_file) as f: - bat_file = f.read() - with open(ps1_file) as f: - ps1_file = f.read() + sh_file = read_file(sh_file) + bat_file = read_file(bat_file) + ps1_file = read_file(ps1_file) for entry_point in cmd: sh_data = sh_file @@ -103,8 +110,7 @@ def generate_entry_points(cmd, path): launcher = os.path.join(__rootdir__, entry_point) if do_unix: - with open(launcher, 'w') as f: - f.write(sh_data) + write_file(launcher, sh_data) make_executable(launcher) if do_windows: @@ -114,10 +120,8 @@ def generate_entry_points(cmd, path): if use_exe_files: shutil.copyfile(windows_exe, launcher + 
'.exe') else: - with open(launcher + '.bat', 'w') as f: - f.write(bat_data) - with open(launcher + '.ps1', 'w') as f: - f.write(ps1_data) + write_file(launcher + '.bat', bat_data) + write_file(launcher + '.ps1', ps1_data) generate_entry_points(entry_points, os.path.join(__scriptdir__, 'run_python')) generate_entry_points(compiler_entry_points, os.path.join(__scriptdir__, 'run_python_compiler')) diff --git a/tools/maint/create_release.py b/tools/maint/create_release.py index 39c78e119b107..48e90b1e8a95d 100755 --- a/tools/maint/create_release.py +++ b/tools/maint/create_release.py @@ -85,7 +85,7 @@ def main(argv): print('Creating new release: %s' % release_version) if is_github_runner: # For GitHub Actions workflows - with open(os.environ['GITHUB_ENV'], 'a') as f: + with open(os.environ['GITHUB_ENV'], 'a', encoding='utf-8') as f: f.write(f'RELEASE_VERSION={release_version}') else: # Local use create_git_branch(release_version) diff --git a/tools/maint/find_unused_settings.py b/tools/maint/find_unused_settings.py index fed2ba8d55edb..60c9de1bf3908 100755 --- a/tools/maint/find_unused_settings.py +++ b/tools/maint/find_unused_settings.py @@ -26,7 +26,7 @@ def main(): print(f'Searching {len(settings.attrs)} settings') for key in settings.attrs: - cmd = ['git', 'grep', '-q', f'\\<{key}\\>', ':(exclude)src/settings.js', ':(exclude)src/settings_internal.js'] + cmd = ['git', 'grep', '-q', f'\\<{key}\\>', ':(exclude)src/settings.js', ':(exclude)src/settings_internal.js'] # git grep returns 0 if there is a match and non-zero when there is not if subprocess.run(cmd, check=False).returncode: print('NOT FOUND ANYWHERE:', key) diff --git a/tools/maint/heuristic_clear_cache.py b/tools/maint/heuristic_clear_cache.py index 8918e4901d1cd..154fdfee0bc64 100644 --- a/tools/maint/heuristic_clear_cache.py +++ b/tools/maint/heuristic_clear_cache.py @@ -44,7 +44,7 @@ def newest_mtime(paths): def heuristic_clear_cache(): mtime_file = cache.get_path('system_libs_mtime.txt') try: - 
system_libs_mtime = open(mtime_file).read() + system_libs_mtime = utils.read_file(mtime_file) except Exception: system_libs_mtime = 0 @@ -53,7 +53,7 @@ def heuristic_clear_cache(): if newest_system_libs_mtime != system_libs_mtime: print(f'Cache timestamp {system_libs_mtime} does not match with current timestamp {newest_system_libs_mtime}. Clearing cache...') cache.erase() - open(mtime_file, 'w').write(str(newest_system_libs_mtime)) + utils.write_file(mtime_file, str(newest_system_libs_mtime)) else: print('Cache timestamp is up to date, no clear needed.') diff --git a/tools/maint/npm_update.py b/tools/maint/npm_update.py index 63e5f3217773c..a66baf868b14b 100755 --- a/tools/maint/npm_update.py +++ b/tools/maint/npm_update.py @@ -35,9 +35,9 @@ def main(): message += 'This change was automatically generated by tools/maint/npm_update.py\n\n' lines = output.splitlines() assert lines[0].startswith('Upgrading ') - assert lines[1] == '' + assert not lines[1] assert lines[-1].startswith('Run npm install to install new versions') - assert lines[-2] == '' + assert not lines[-2] lines = lines[2:-2] message += '\n'.join(lines) + '\n' diff --git a/tools/maint/rebaseline_tests.py b/tools/maint/rebaseline_tests.py index 2270c0a8f69dc..d197e007077b1 100755 --- a/tools/maint/rebaseline_tests.py +++ b/tools/maint/rebaseline_tests.py @@ -41,7 +41,7 @@ def read_size_from_json(content): def process_changed_file(filename): - content = open(filename).read() + content = utils.read_file(filename) old_content = run(['git', 'show', f'HEAD:{filename}']) print(f'processing {filename}') diff --git a/tools/maint/simde_update.py b/tools/maint/simde_update.py index fd1e1e8efe4e8..821c0589d8d1e 100755 --- a/tools/maint/simde_update.py +++ b/tools/maint/simde_update.py @@ -19,9 +19,9 @@ sys.path.insert(0, __rootdir__) from tools.shared import get_emscripten_temp_dir +from tools.utils import path_from_root tmpdir = get_emscripten_temp_dir() -emdir = __rootdir__ def main(): @@ -50,9 +50,9 @@ def 
main(): return 1 try: - os.mkdir(path.join(emdir, "system", "include", "compat")) + os.mkdir(path_from_root('system/include/compat')) except FileExistsError: - if not path.isdir(path.join(emdir, "system", "include", "compat")): + if not path.isdir(path_from_root('system/include/compat')): print("system/include/compat exists and is not a directory, exiting...") return 1 @@ -70,7 +70,7 @@ def main(): return 1 neon_h_buf = neon_h_buf[:insert_location] + line_to_insert + neon_h_buf[insert_location:] - with open(path.join(emdir, "system", "include", "compat", "arm_neon.h"), "w+") as f: + with open(path_from_root('system/include/compat/arm_neon.h'), 'w', encoding='utf-8') as f: try: f.write("#define SIMDE_ARM_NEON_A32V7_ENABLE_NATIVE_ALIASES\n") f.write("#define SIMDE_ARM_NEON_A32V8_ENABLE_NATIVE_ALIASES\n") diff --git a/tools/maint/update_docs.py b/tools/maint/update_docs.py index 92bfedecba0c0..dd970decb3709 100755 --- a/tools/maint/update_docs.py +++ b/tools/maint/update_docs.py @@ -25,12 +25,12 @@ def is_git_clean(dirname): - return subprocess.check_output(['git', 'status', '-uno', '--porcelain'], text=True, cwd=dirname).strip() == '' + return not subprocess.check_output(['git', 'status', '-uno', '--porcelain'], text=True, cwd=dirname).strip() def get_changed_files(dirname): files_changed = subprocess.check_output(['git', 'status', '-uno', '--porcelain'], text=True, cwd=dirname).splitlines() - return [line[3:].strip() for line in files_changed] + return [line[3:].strip() for line in files_changed] def main(args): diff --git a/tools/maint/update_settings_docs.py b/tools/maint/update_settings_docs.py index 8140e6fd8ba6f..45a60becbdbd8 100755 --- a/tools/maint/update_settings_docs.py +++ b/tools/maint/update_settings_docs.py @@ -165,14 +165,14 @@ def main(args): if '--check' in args: safe_ensure_dirs(path_from_root('out')) tmp_output = path_from_root('out/settings_reference.rst') - with open(tmp_output, 'w') as f: + with open(tmp_output, 'w', encoding='utf-8') as f: 
write_file(f) if read_file(tmp_output) != read_file(output_file): print(f'{output_file} is out-of-date. Please run tools/maint/update_settings_docs.py') subprocess.call(['diff', '-u', output_file, tmp_output]) return 1 else: - with open(output_file, 'w') as f: + with open(output_file, 'w', encoding='utf-8') as f: write_file(f) return 0 diff --git a/tools/ports/__init__.py b/tools/ports/__init__.py index 56ab677311ba5..4e70d7e3f955e 100644 --- a/tools/ports/__init__.py +++ b/tools/ports/__init__.py @@ -237,7 +237,7 @@ def build_port(src_dir, output_path, port_name, includes=[], flags=[], cxxflags= dirs.remove(ex) for f in files: ext = utils.suffix(f) - if ext in ('.c', '.cpp') and not any((excluded in f) for excluded in exclude_files): + if ext in {'.c', '.cpp'} and not any((excluded in f) for excluded in exclude_files): srcs.append(os.path.join(root, f)) cflags = system_libs.get_base_cflags(build_dir) + ['-O2', '-I' + src_dir] + flags @@ -260,7 +260,7 @@ def build_port(src_dir, output_path, port_name, includes=[], flags=[], cxxflags= dirname = os.path.dirname(obj) os.makedirs(dirname, exist_ok=True) cmd = [shared.EMCC, '-c', src, '-o', obj] + cflags - if utils.suffix(src) in ('.cc', '.cxx', '.cpp'): + if utils.suffix(src) in {'.cc', '.cxx', '.cpp'}: cmd[0] = shared.EMXX cmd += cxxflags commands.append(cmd) diff --git a/tools/ports/cocos2d.py b/tools/ports/cocos2d.py index f018ba8d77a8b..5b3cc5c1e6601 100644 --- a/tools/ports/cocos2d.py +++ b/tools/ports/cocos2d.py @@ -79,7 +79,7 @@ def make_source_list(cocos2d_root, cocos2dx_root): sources = [] def add_makefile(makefile): - with open(makefile) as infile: + with open(makefile, encoding='utf-8') as infile: add_next = False for line in infile: if line.startswith('SOURCES'): diff --git a/tools/ports/sdl2_image.py b/tools/ports/sdl2_image.py index b6bd321fe3d7d..138fb3f09cd1b 100644 --- a/tools/ports/sdl2_image.py +++ b/tools/ports/sdl2_image.py @@ -39,7 +39,7 @@ def get_lib_name(settings): formats = 
'-'.join(sorted(get_formats(settings))) libname = 'libSDL2_image' - if formats != '': + if formats: libname += '-' + formats if settings.PTHREADS: libname += '-mt' diff --git a/tools/ports/sdl2_mixer.py b/tools/ports/sdl2_mixer.py index a7a2a7c1a5294..dff423a745284 100644 --- a/tools/ports/sdl2_mixer.py +++ b/tools/ports/sdl2_mixer.py @@ -40,7 +40,7 @@ def get_lib_name(settings): formats = '-'.join(sorted(get_formats(settings))) libname = 'libSDL2_mixer' - if formats != '': + if formats: libname += '-' + formats if settings.PTHREADS: libname += '-mt' diff --git a/tools/ports/zlib.py b/tools/ports/zlib.py index 18e1c9e253ced..8a37a714536f1 100644 --- a/tools/ports/zlib.py +++ b/tools/ports/zlib.py @@ -39,4 +39,3 @@ def clear(ports, settings, shared): def show(): return 'zlib (-sUSE_ZLIB or --use-port=zlib; zlib license)' - diff --git a/tools/response_file.py b/tools/response_file.py index 7b01ad7b8cd71..12555c5094fe9 100644 --- a/tools/response_file.py +++ b/tools/response_file.py @@ -103,7 +103,7 @@ def expand_response_file(arg): if DEBUG: logging.warning(f'failed to parse response file {response_filename} with guessed encoding "{guessed_encoding}". Trying default system encoding...') # If that fails, try with the Python default locale.getpreferredencoding() - with open(response_filename) as f: + with open(response_filename) as f: # noqa: PLW1514 args = f.read() args = shlex.split(args) diff --git a/tools/settings.py b/tools/settings.py index 7032b99b86552..ce511e75eb1d5 100644 --- a/tools/settings.py +++ b/tools/settings.py @@ -9,7 +9,7 @@ import re from typing import Any -from . import diagnostics +from . import diagnostics, utils from .utils import exit_with_error, path_from_root # Subset of settings that take a memory size (i.e. 1Gb, 64kb etc) @@ -281,8 +281,7 @@ def __init__(self): # Load the JS defaults into python. 
def read_js_settings(filename, attrs): - with open(filename) as fh: - settings = fh.read() + settings = utils.read_file(filename) # Use a bunch of regexs to convert the file from JS to python # TODO(sbc): This is kind hacky and we should probably convert # this file in format that python can read directly (since we @@ -403,16 +402,16 @@ def __setattr__(self, name, value): def check_type(self, name, value): # These settings have a variable type so cannot be easily type checked. - if name in ('EXECUTABLE', 'SUPPORT_LONGJMP', 'PTHREAD_POOL_SIZE', 'SEPARATE_DWARF', 'LTO', 'MODULARIZE'): + if name in {'EXECUTABLE', 'SUPPORT_LONGJMP', 'PTHREAD_POOL_SIZE', 'SEPARATE_DWARF', 'LTO', 'MODULARIZE'}: return expected_type = self.types.get(name) if not expected_type: return # Allow integers 1 and 0 for type `bool` if expected_type == bool: - if value in (1, 0): + if value in (1, 0): # noqa: PLR6201 value = bool(value) - if value in ('True', 'False', 'true', 'false'): + if value in ('True', 'False', 'true', 'false'): # noqa: PLR6201 exit_with_error(f'attempt to set `{name}` to `{value}`; use 1/0 to set boolean settings') if type(value) is not expected_type: exit_with_error(f'setting `{name}` expects `{expected_type.__name__}` but got `{type(value).__name__}`') diff --git a/tools/shared.py b/tools/shared.py index 040809e00b138..5982a59e075d8 100644 --- a/tools/shared.py +++ b/tools/shared.py @@ -579,7 +579,7 @@ def is_internal_global(name): def is_user_export(name): if is_internal_global(name): return False - return name not in ['__asyncify_data', '__asyncify_state', '__indirect_function_table', 'memory'] and not name.startswith(('dynCall_', 'orig$')) + return name not in {'__asyncify_data', '__asyncify_state', '__indirect_function_table', 'memory'} and not name.startswith(('dynCall_', 'orig$')) def asmjs_mangle(name): diff --git a/tools/system_libs.py b/tools/system_libs.py index 34b2d68f93f47..404a02efc850f 100644 --- a/tools/system_libs.py +++ b/tools/system_libs.py @@ -144,7 
+144,7 @@ def create_lib(libname, inputs): suffix = utils.suffix(libname) inputs = sorted(inputs, key=objectfile_sort_key) - if suffix in ('.bc', '.o'): + if suffix in {'.bc', '.o'}: if len(inputs) == 1: if inputs[0] != libname: shutil.copyfile(inputs[0], libname) @@ -169,7 +169,7 @@ def run_ninja(build_dir): def ensure_target_in_ninja_file(ninja_file, target): if os.path.isfile(ninja_file) and target in read_file(ninja_file): return - with open(ninja_file, 'a') as f: + with open(ninja_file, 'a', encoding='utf-8') as f: f.write(target + '\n') @@ -1755,7 +1755,7 @@ def __init__(self, **kwargs): super().__init__(**kwargs) def can_use(self): - return super().can_use() and self.eh_mode in (Exceptions.WASM_LEGACY, Exceptions.WASM) + return super().can_use() and self.eh_mode in {Exceptions.WASM_LEGACY, Exceptions.WASM} def get_cflags(self): cflags = super().get_cflags() @@ -1780,7 +1780,7 @@ class libmalloc(MTLibrary): def __init__(self, **kwargs): self.malloc = kwargs.pop('malloc') - if self.malloc not in ('dlmalloc', 'emmalloc', 'emmalloc-debug', 'emmalloc-memvalidate', 'emmalloc-verbose', 'emmalloc-memvalidate-verbose', 'mimalloc', 'none'): + if self.malloc not in {'dlmalloc', 'emmalloc', 'emmalloc-debug', 'emmalloc-memvalidate', 'emmalloc-verbose', 'emmalloc-memvalidate-verbose', 'mimalloc', 'none'}: raise Exception('malloc must be one of "emmalloc[-debug|-memvalidate][-verbose]", "mimalloc", "dlmalloc" or "none", see settings.js') self.is_tracing = kwargs.pop('is_tracing') diff --git a/tools/toolchain_profiler.py b/tools/toolchain_profiler.py index 5a834c3af55a9..660243b54e9c0 100644 --- a/tools/toolchain_profiler.py +++ b/tools/toolchain_profiler.py @@ -144,7 +144,8 @@ def log_access(): # the parent->child process spawns for the subprocessing pools. Therefore # any profiling events that the subprocess children generate are virtually # treated as if they were performed by the parent PID. 
- return open(os.path.join(ToolchainProfiler.profiler_logs_path, 'toolchain_profiler.pid_' + str(os.getpid()) + '.json'), 'a') + filename = 'toolchain_profiler.pid_' + str(os.getpid()) + '.json' + return open(os.path.join(ToolchainProfiler.profiler_logs_path, filename), 'a', encoding='utf-8') @staticmethod def escape_string(arg): diff --git a/tools/wasm-sourcemap.py b/tools/wasm-sourcemap.py index 14bd45bc0187e..7139089be10e1 100755 --- a/tools/wasm-sourcemap.py +++ b/tools/wasm-sourcemap.py @@ -110,8 +110,8 @@ def encode_vlq(n): x = (n << 1) if n >= 0 else ((-n << 1) + 1) result = "" while x > 31: - result = result + VLQ_CHARS[32 + (x & 31)] - x = x >> 5 + result += VLQ_CHARS[32 + (x & 31)] + x >>= 5 return result + VLQ_CHARS[x] @@ -119,11 +119,11 @@ def read_var_uint(wasm, pos): n = 0 shift = 0 b = ord(wasm[pos:pos + 1]) - pos = pos + 1 + pos += 1 while b >= 128: - n = n | ((b - 128) << shift) + n |= ((b - 128) << shift) b = ord(wasm[pos:pos + 1]) - pos = pos + 1 + pos += 1 shift += 7 return n + (b << shift), pos @@ -144,7 +144,7 @@ def strip_debug_sections(wasm): name = str(wasm[name_pos:name_end]) if name in {'linking', 'sourceMappingURL'} or name.startswith(('reloc..debug_', '.debug_')): continue # skip debug related sections - stripped = stripped + wasm[section_start:pos] + stripped += wasm[section_start:pos] return stripped @@ -153,7 +153,7 @@ def encode_uint_var(n): result = bytearray() while n > 127: result.append(128 | (n & 127)) - n = n >> 7 + n >>= 7 result.append(n) return bytes(result) @@ -174,7 +174,7 @@ def get_code_section_offset(wasm): section_size, pos = read_var_uint(wasm, pos_) if section_id == 10: return pos - pos = pos + section_size + pos += section_size def remove_dead_entries(entries): @@ -567,8 +567,7 @@ def get_function_id(address): if collect_sources: load_name = prefixes.load.resolve(file_name) try: - with open(load_name) as infile: - source_content = infile.read() + source_content = utils.read_file(load_name) 
sources_content.append(source_content) except OSError: print('Failed to read source: %s' % load_name) diff --git a/tools/webidl_binder.py b/tools/webidl_binder.py index 02e78224ae249..9db9e517cda5c 100644 --- a/tools/webidl_binder.py +++ b/tools/webidl_binder.py @@ -408,7 +408,7 @@ def render_function(class_name, func_name, sigs, return_type, non_pointer, # no To revalidate these numbers, run `ruff check --select=C901,PLR091`. """ - legacy_mode = CHECKS not in ['ALL', 'FAST'] + legacy_mode = CHECKS not in {'ALL', 'FAST'} all_checks = CHECKS == 'ALL' bindings_name = class_name + '_' + func_name @@ -632,9 +632,9 @@ def make_call_args(i): cast_self = 'dynamic_cast<' + type_to_c(func_scope) + '>(' + cast_self + ')' maybe_deref = deref_if_nonpointer(raw[0]) operator = operator.strip() - if operator in ["+", "-", "*", "/", "%", "^", "&", "|", "=", + if operator in {"+", "-", "*", "/", "%", "^", "&", "|", "=", "<", ">", "+=", "-=", "*=", "/=", "%=", "^=", "&=", "|=", "<<", ">>", ">>=", - "<<=", "==", "!=", "<=", ">=", "<=>", "&&", "||"]: + "<<=", "==", "!=", "<=", ">=", "<=>", "&&", "||"}: call = '(*%s %s %s%s)' % (cast_self, operator, maybe_deref, args[0]) elif operator == '[]': call = '((*%s)[%s%s])' % (cast_self, maybe_deref, args[0]) @@ -924,12 +924,12 @@ class %s : public %s { # Write -with open(cpp_output, 'w') as c: +with open(cpp_output, 'w', encoding='utf-8') as c: for x in pre_c: c.write(x) for x in mid_c: c.write(x) -with open(js_output, 'w') as js: +with open(js_output, 'w', encoding='utf-8') as js: for x in mid_js: js.write(x)