diff --git a/.style.yapf b/.style.yapf new file mode 100644 index 0000000000000..3c79e54c03a11 --- /dev/null +++ b/.style.yapf @@ -0,0 +1,11 @@ +[style] +based_on_style = yapf +# Defined in https://github.com/google/yapf/blob/20d0c8f1774cf3843f4032f3e9ab02338bf98c75/yapf/yapflib/style.py#L326 +# Docs and full list of knobs: +# https://github.com/google/yapf#knobs +split_before_first_argument = true +blank_line_before_module_docstring = true +# dedent_closing_brackets is required by coalesce_brackets +dedent_closing_brackets = true +coalesce_brackets = true +each_dict_entry_on_separate_line = false diff --git a/DEPS b/DEPS index ff14d7c44456e..f949503394ec3 100644 --- a/DEPS +++ b/DEPS @@ -173,6 +173,9 @@ deps = { 'src/third_party/boringssl': Var('github_git') + '/dart-lang/boringssl_gen.git' + '@' + Var('dart_boringssl_gen_rev'), + 'src/third_party/yapf': + Var('github_git') + '/google/yapf' + '@' + '212c5b5ad8e172d2d914ae454c121c89cccbcb35', + 'src/third_party/boringssl/src': 'https://boringssl.googlesource.com/boringssl.git' + '@' + Var('dart_boringssl_rev'), diff --git a/build/android_artifacts.py b/build/android_artifacts.py index 3f5de89b5a357..b7fe9d0fa048e 100644 --- a/build/android_artifacts.py +++ b/build/android_artifacts.py @@ -24,8 +24,13 @@ def cp_files(args): def main(): parser = argparse.ArgumentParser() - parser.add_argument('-i', dest='input_pairs', nargs=2, action='append', - help='The input file and its destination.') + parser.add_argument( + '-i', + dest='input_pairs', + nargs=2, + action='append', + help='The input file and its destination.' + ) cp_files(parser.parse_args()) return 0 diff --git a/build/copy_info_plist.py b/build/copy_info_plist.py index d9e9333674d77..89f67a2e23b53 100644 --- a/build/copy_info_plist.py +++ b/build/copy_info_plist.py @@ -20,34 +20,60 @@ import git_revision import os -def GetClangVersion(bitcode) : - clang_executable = str(os.path.join("..", "..", "buildtools", "mac-x64", "clang", "bin", "clang++")) + +def GetClangVersion(bitcode): + clang_executable = str( + os.path.join( + "..", "..", "buildtools", "mac-x64", "clang", "bin", "clang++" + ) + ) if bitcode: clang_executable = "clang++" version = subprocess.check_output([clang_executable, "--version"]) return version.splitlines()[0] + def main(): parser = argparse.ArgumentParser( - description='Copies the Info.plist and adds extra fields to it like the git hash of the engine') + description='Copies the Info.plist and adds extra fields to it like the git hash of the engine' + ) - parser.add_argument('--source', help='Path to Info.plist source template', type=str, required=True) - parser.add_argument('--destination', help='Path to destination Info.plist', type=str, required=True) - parser.add_argument('--bitcode', help='Built with bitcode', action='store_true') - parser.add_argument('--minversion', help='Minimum device OS version like "9.0"', type=str) + parser.add_argument( + '--source', + help='Path to Info.plist source template', + type=str, + required=True + ) + parser.add_argument( + '--destination', + help='Path to destination Info.plist', + type=str, + required=True + ) + parser.add_argument( + '--bitcode', help='Built with bitcode', action='store_true' + ) + parser.add_argument( + '--minversion', help='Minimum device OS version like "9.0"', type=str + ) args = parser.parse_args() text = open(args.source).read() engine_path = os.path.join(os.getcwd(), "..", "..", "flutter") revision = git_revision.GetRepositoryVersion(engine_path) - bitcode = args.bitcode is not None; + 
bitcode = args.bitcode is not None clang_version = GetClangVersion(bitcode) - text = text.format(revision = revision, clang_version = clang_version, min_version = args.minversion) + text = text.format( + revision=revision, + clang_version=clang_version, + min_version=args.minversion + ) with open(args.destination, "w") as outfile: outfile.write(text) + if __name__ == "__main__": main() diff --git a/build/dart/tools/dart_package_name.py b/build/dart/tools/dart_package_name.py index db8c9e93193ed..c6e245009f1b3 100755 --- a/build/dart/tools/dart_package_name.py +++ b/build/dart/tools/dart_package_name.py @@ -10,11 +10,13 @@ import os import sys + # TODO(johnmccutchan): Use the yaml package to parse. def PackageName(line): assert line.startswith("name:") return line.split(":")[1].strip() + def main(pubspec_file): source_file = open(pubspec_file, "r") for line in source_file: @@ -25,15 +27,19 @@ def main(pubspec_file): # Couldn't find it. return -1 + if __name__ == '__main__': parser = argparse.ArgumentParser( description="This script outputs the package name specified in the" - "pubspec.yaml") - parser.add_argument("--pubspec", - dest="pubspec_file", - metavar="", - type=str, - required=True, - help="Path to pubspec file") + "pubspec.yaml" + ) + parser.add_argument( + "--pubspec", + dest="pubspec_file", + metavar="", + type=str, + required=True, + help="Path to pubspec file" + ) args = parser.parse_args() sys.exit(main(args.pubspec_file)) diff --git a/build/dart/tools/dart_pkg.py b/build/dart/tools/dart_pkg.py index 9e7d38c8c47b3..1a8e2785ea8c2 100755 --- a/build/dart/tools/dart_pkg.py +++ b/build/dart/tools/dart_pkg.py @@ -16,8 +16,10 @@ USE_LINKS = sys.platform != "win32" -DART_ANALYZE = os.path.join(os.path.dirname(os.path.abspath(__file__)), - "dart_analyze.py") +DART_ANALYZE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "dart_analyze.py" +) + def dart_filter(path): if os.path.isdir(path): @@ -142,7 +144,7 @@ def remove_broken_symlinks(root_dir): def analyze_entrypoints(dart_sdk, package_root, entrypoints): - cmd = [ "python", DART_ANALYZE ] + cmd = ["python", DART_ANALYZE] cmd.append("--dart-sdk") cmd.append(dart_sdk) cmd.append("--entrypoints") @@ -161,60 +163,84 @@ def analyze_entrypoints(dart_sdk, package_root, entrypoints): def main(): parser = argparse.ArgumentParser(description='Generate a dart-pkg') - parser.add_argument('--dart-sdk', - action='store', - metavar='dart_sdk', - help='Path to the Dart SDK.') - parser.add_argument('--package-name', - action='store', - metavar='package_name', - help='Name of package', - required=True) - parser.add_argument('--pkg-directory', - metavar='pkg_directory', - help='Directory where dart_pkg should go', - required=True) - parser.add_argument('--package-root', - metavar='package_root', - help='packages/ directory', - required=True) - parser.add_argument('--stamp-file', - metavar='stamp_file', - help='timestamp file', - required=True) - parser.add_argument('--entries-file', - metavar='entries_file', - help='script entries file', - required=True) - parser.add_argument('--package-sources', - metavar='package_sources', - help='Package sources', - nargs='+') - parser.add_argument('--package-entrypoints', - metavar='package_entrypoints', - help='Package entry points for analyzer', - nargs='*', - default=[]) - parser.add_argument('--sdk-ext-directories', - metavar='sdk_ext_directories', - help='Directory containing .dart sources', - nargs='*', - default=[]) - parser.add_argument('--sdk-ext-files', - metavar='sdk_ext_files', - 
help='List of .dart files that are part of sdk_ext.', - nargs='*', - default=[]) - parser.add_argument('--sdk-ext-mappings', - metavar='sdk_ext_mappings', - help='Mappings for SDK extension libraries.', - nargs='*', - default=[]) - parser.add_argument('--read_only', - action='store_true', - dest='read_only', - help='Package is a read only package.', - default=False) + parser.add_argument( + '--dart-sdk', + action='store', + metavar='dart_sdk', + help='Path to the Dart SDK.' + ) + parser.add_argument( + '--package-name', + action='store', + metavar='package_name', + help='Name of package', + required=True + ) + parser.add_argument( + '--pkg-directory', + metavar='pkg_directory', + help='Directory where dart_pkg should go', + required=True + ) + parser.add_argument( + '--package-root', + metavar='package_root', + help='packages/ directory', + required=True + ) + parser.add_argument( + '--stamp-file', + metavar='stamp_file', + help='timestamp file', + required=True + ) + parser.add_argument( + '--entries-file', + metavar='entries_file', + help='script entries file', + required=True + ) + parser.add_argument( + '--package-sources', + metavar='package_sources', + help='Package sources', + nargs='+' + ) + parser.add_argument( + '--package-entrypoints', + metavar='package_entrypoints', + help='Package entry points for analyzer', + nargs='*', + default=[] + ) + parser.add_argument( + '--sdk-ext-directories', + metavar='sdk_ext_directories', + help='Directory containing .dart sources', + nargs='*', + default=[] + ) + parser.add_argument( + '--sdk-ext-files', + metavar='sdk_ext_files', + help='List of .dart files that are part of sdk_ext.', + nargs='*', + default=[] + ) + parser.add_argument( + '--sdk-ext-mappings', + metavar='sdk_ext_mappings', + help='Mappings for SDK extension libraries.', + nargs='*', + default=[] + ) + parser.add_argument( + '--read_only', + action='store_true', + dest='read_only', + help='Package is a read only package.', + default=False + ) args = parser.parse_args() # We must have a pubspec.yaml. @@ -233,14 +259,16 @@ def main(): sdkext_path = os.path.join(lib_path, '_sdkext') if mappings: with open(sdkext_path, 'w') as stream: - json.dump(mappings, stream, sort_keys=True, - indent=2, separators=(',', ': ')) + json.dump( + mappings, stream, sort_keys=True, indent=2, separators=(',', ': ') + ) else: remove_if_exists(sdkext_path) # Copy or symlink package sources into pkg directory. 
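# A hedged aside on the dirname(commonprefix(...)) idiom used just below:
# os.path.commonprefix compares strings character-by-character, so it can
# stop in the middle of a path component; wrapping it in os.path.dirname
# snaps the result back to a real directory boundary. Illustrative sketch
# (paths are hypothetical):
#
#   import os
#   sources = ['pkg/foo/lib/a.dart', 'pkg/foobar/lib/b.dart']
#   os.path.commonprefix(sources)                   # 'pkg/foo' - not a directory
#   os.path.dirname(os.path.commonprefix(sources))  # 'pkg'     - safe to strip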
- common_source_prefix = os.path.dirname(os.path.commonprefix( - args.package_sources)) + common_source_prefix = os.path.dirname( + os.path.commonprefix(args.package_sources) + ) for source in args.package_sources: relative_source = os.path.relpath(source, common_source_prefix) target = os.path.join(target_dir, relative_source) @@ -263,8 +291,9 @@ def main(): target = os.path.join(sdk_ext_dir, relative_source) copy_or_link(source, target) - common_source_prefix = os.path.dirname(os.path.commonprefix( - args.sdk_ext_files)) + common_source_prefix = os.path.dirname( + os.path.commonprefix(args.sdk_ext_files) + ) for source in args.sdk_ext_files: relative_source = os.path.relpath(source, common_source_prefix) target = os.path.join(sdk_ext_dir, relative_source) @@ -293,5 +322,6 @@ def main(): return 0 + if __name__ == '__main__': sys.exit(main()) diff --git a/build/generate_coverage.py b/build/generate_coverage.py index 4beecddb068f5..3279d301af878 100755 --- a/build/generate_coverage.py +++ b/build/generate_coverage.py @@ -11,8 +11,11 @@ import errno import shutil + def GetLLVMBinDirectory(): - buildtool_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../buildtools") + buildtool_dir = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../../buildtools" + ) platform_dir = "" if sys.platform.startswith('linux'): platform_dir = "linux-x64" @@ -20,7 +23,9 @@ def GetLLVMBinDirectory(): platform_dir = "mac-x64" else: raise Exception("Unknown/Unsupported platform.") - llvm_bin_dir = os.path.abspath(os.path.join(buildtool_dir, platform_dir, "clang/bin")) + llvm_bin_dir = os.path.abspath( + os.path.join(buildtool_dir, platform_dir, "clang/bin") + ) if not os.path.exists(llvm_bin_dir): raise Exception("LLVM directory %s double not be located." % llvm_bin_dir) return llvm_bin_dir @@ -34,23 +39,48 @@ def MakeDirs(new_dir): if e.errno != errno.EEXIST: raise + def RemoveIfExists(path): - if os.path.isdir(path) and not os.path.islink(path): - shutil.rmtree(path) - elif os.path.exists(path): - os.remove(path) + if os.path.isdir(path) and not os.path.islink(path): + shutil.rmtree(path) + elif os.path.exists(path): + os.remove(path) -def main(): - parser = argparse.ArgumentParser(); - parser.add_argument('-t', '--tests', nargs='+', dest='tests', - required=True, help='The unit tests to run and gather coverage data on.') - parser.add_argument('-o', '--output', dest='output', - required=True, help='The output directory for coverage results.') - parser.add_argument('-f', '--format', type=str, choices=['all', 'html', 'summary', 'lcov'], - required=True, help='The type of coverage information to be displayed.') - parser.add_argument('-a', '--args', nargs='+', dest='test_args', - required=False, help='The arguments to pass to the unit test executable being run.') +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument( + '-t', + '--tests', + nargs='+', + dest='tests', + required=True, + help='The unit tests to run and gather coverage data on.' + ) + parser.add_argument( + '-o', + '--output', + dest='output', + required=True, + help='The output directory for coverage results.' + ) + parser.add_argument( + '-f', + '--format', + type=str, + choices=['all', 'html', 'summary', 'lcov'], + required=True, + help='The type of coverage information to be displayed.' + ) + parser.add_argument( + '-a', + '--args', + nargs='+', + dest='test_args', + required=False, + help='The arguments to pass to the unit test executable being run.' 
+ ) args = parser.parse_args() @@ -73,7 +103,9 @@ def main(): print("Path %s does not exist." % absolute_test_path) return -1 - unstripped_test_path = os.path.join(absolute_test_dir, "exe.unstripped", test_name) + unstripped_test_path = os.path.join( + absolute_test_dir, "exe.unstripped", test_name + ) if os.path.exists(unstripped_test_path): binaries.append(unstripped_test_path) @@ -84,7 +116,10 @@ def main(): RemoveIfExists(raw_profile) - print("Running test %s to gather profile." % os.path.basename(absolute_test_path)) + print( + "Running test %s to gather profile." % + os.path.basename(absolute_test_path) + ) test_command = [absolute_test_path] @@ -93,9 +128,7 @@ def main(): if test_args is not None: test_command += test_args - subprocess.check_call(test_command, env={ - "LLVM_PROFILE_FILE": raw_profile - }) + subprocess.check_call(test_command, env={"LLVM_PROFILE_FILE": raw_profile}) if not os.path.exists(raw_profile): print("Could not find raw profile data for unit test run %s." % test) @@ -121,7 +154,8 @@ def main(): print("Merging %d raw profile(s) into single profile." % len(raw_profiles)) merged_profile_path = os.path.join(output, "all.profile") RemoveIfExists(merged_profile_path) - merge_command = [profdata_binary, "merge", "-sparse"] + raw_profiles + ["-o", merged_profile_path] + merge_command = [profdata_binary, "merge", "-sparse" + ] + raw_profiles + ["-o", merged_profile_path] subprocess.check_call(merge_command) print("Done.") @@ -137,11 +171,11 @@ def main(): if generate_all_reports or args.format == 'html': print("Generating HTML report.") show_command = [llvm_cov_binary, "show"] + binaries_flag + [ - instr_profile_flag, - "-format=html", - "-output-dir=%s" % output, - "-tab-size=2", - ignore_flags, + instr_profile_flag, + "-format=html", + "-output-dir=%s" % output, + "-tab-size=2", + ignore_flags, ] subprocess.check_call(show_command) print("Done.") @@ -150,8 +184,8 @@ def main(): if generate_all_reports or args.format == 'summary': print("Generating a summary report.") report_command = [llvm_cov_binary, "report"] + binaries_flag + [ - instr_profile_flag, - ignore_flags, + instr_profile_flag, + ignore_flags, ] subprocess.check_call(report_command) print("Done.") @@ -162,9 +196,9 @@ def main(): lcov_file = os.path.join(output, 'coverage.lcov') RemoveIfExists(lcov_file) lcov_command = [llvm_cov_binary, "export"] + binaries_flag + [ - instr_profile_flag, - ignore_flags, - "-format=lcov", + instr_profile_flag, + ignore_flags, + "-format=lcov", ] with open(lcov_file, 'w') as lcov_redirect: subprocess.check_call(lcov_command, stdout=lcov_redirect) @@ -172,5 +206,6 @@ def main(): return 0 + if __name__ == '__main__': sys.exit(main()) diff --git a/build/get_concurrent_jobs.py b/build/get_concurrent_jobs.py index 589937caa716a..24445ef7ff271 100755 --- a/build/get_concurrent_jobs.py +++ b/build/get_concurrent_jobs.py @@ -84,43 +84,45 @@ def __call__(self, parser, args, values, option_string=None): def Main(): - parser = argparse.ArgumentParser() - parser.add_argument( + parser = argparse.ArgumentParser() + parser.add_argument( '--memory-per-job', action=ParseSizeAction, default=[], nargs='*', help='Key value pairings (dart=1GB) giving an estimate of the amount of ' - 'memory needed for the class of job.') - parser.add_argument( + 'memory needed for the class of job.' 
+ ) + parser.add_argument( '--reserve-memory', type=ParseSize, default=0, - help='The amount of memory to be held out of the amount for jobs to use.') - args = parser.parse_args() + help='The amount of memory to be held out of the amount for jobs to use.' + ) + args = parser.parse_args() - total_memory = GetTotalMemory() + total_memory = GetTotalMemory() - # Ensure the total memory used in the calculation below is at least 0 - mem_total_bytes = max(0, total_memory - args.reserve_memory) + # Ensure the total memory used in the calculation below is at least 0 + mem_total_bytes = max(0, total_memory - args.reserve_memory) - # Ensure the number of cpus used in the calculation below is at least 1 - try: - cpu_cap = multiprocessing.cpu_count() - except: - cpu_cap = 1 + # Ensure the number of cpus used in the calculation below is at least 1 + try: + cpu_cap = multiprocessing.cpu_count() + except: + cpu_cap = 1 - concurrent_jobs = {} - for job, memory_per_job in args.memory_per_job: - # Calculate the number of jobs that will fit in memory. Ensure the - # value is at least 1. - num_concurrent_jobs = int(max(1, mem_total_bytes / memory_per_job)) - # Cap the number of jobs by the number of cpus available. - concurrent_jobs[job] = min(num_concurrent_jobs, cpu_cap) + concurrent_jobs = {} + for job, memory_per_job in args.memory_per_job: + # Calculate the number of jobs that will fit in memory. Ensure the + # value is at least 1. + num_concurrent_jobs = int(max(1, mem_total_bytes / memory_per_job)) + # Cap the number of jobs by the number of cpus available. + concurrent_jobs[job] = min(num_concurrent_jobs, cpu_cap) - print(json.dumps(concurrent_jobs)) + print(json.dumps(concurrent_jobs)) - return 0 + return 0 if __name__ == '__main__': diff --git a/build/git_revision.py b/build/git_revision.py index 287be26e03ca3..a79119faff8c1 100755 --- a/build/git_revision.py +++ b/build/git_revision.py @@ -11,6 +11,7 @@ import os import argparse + def IsWindows(): os_id = sys.platform return os_id.startswith('win32') or os_id.startswith('cygwin') @@ -25,11 +26,11 @@ def GetRepositoryVersion(repository): if IsWindows(): git = 'git.bat' version = subprocess.check_output([ - git, - '-C', - repository, - 'rev-parse', - 'HEAD', + git, + '-C', + repository, + 'rev-parse', + 'HEAD', ]) return str(version.strip(), 'utf-8') @@ -38,10 +39,12 @@ def GetRepositoryVersion(repository): def main(): parser = argparse.ArgumentParser() - parser.add_argument('--repository', - action='store', - help='Path to the Git repository.', - required=True) + parser.add_argument( + '--repository', + action='store', + help='Path to the Git repository.', + required=True + ) args = parser.parse_args() repository = os.path.abspath(args.repository) diff --git a/build/zip.py b/build/zip.py index 1a712421de21a..a56e11d784ec8 100755 --- a/build/zip.py +++ b/build/zip.py @@ -18,13 +18,14 @@ def _zip_dir(path, zip_file, prefix): for file in files: if os.path.islink(os.path.join(root, file)): add_symlink( - zip_file, - os.path.join(root, file), + zip_file, os.path.join(root, file), os.path.join(root.replace(path, prefix), file) ) continue - zip_file.write(os.path.join(root, file), os.path.join( - root.replace(path, prefix), file)) + zip_file.write( + os.path.join(root, file), + os.path.join(root.replace(path, prefix), file) + ) def add_symlink(zip_file, source, target): @@ -35,11 +36,12 @@ def add_symlink(zip_file, source, target): source: The full path to the symlink. target: The target path for the symlink within the zip file. 
""" - zip_info = zipfile.ZipInfo(target) - zip_info.create_system = 3 # Unix like system - unix_st_mode = (stat.S_IFLNK | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | - stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | - stat.S_IWOTH | stat.S_IXOTH) + zip_info = zipfile.ZipInfo(target) + zip_info.create_system = 3 # Unix like system + unix_st_mode = ( + stat.S_IFLNK | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP + | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH + ) zip_info.external_attr = unix_st_mode << 16 zip_file.writestr(zip_info, source) @@ -70,12 +72,24 @@ def main(args): if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='This script creates zip files.') - parser.add_argument('-o', dest='output', action='store', - help='The name of the output zip file.') - parser.add_argument('-i', dest='input_pairs', nargs=2, action='append', - help='The input file and its destination location in the zip archive.') - parser.add_argument('-f', dest='source_file', action='store', - help='The path to the file list to zip.') + parser = argparse.ArgumentParser(description='This script creates zip files.') + parser.add_argument( + '-o', + dest='output', + action='store', + help='The name of the output zip file.' + ) + parser.add_argument( + '-i', + dest='input_pairs', + nargs=2, + action='append', + help='The input file and its destination location in the zip archive.' + ) + parser.add_argument( + '-f', + dest='source_file', + action='store', + help='The path to the file list to zip.' + ) sys.exit(main(parser.parse_args())) diff --git a/ci/bin/format.dart b/ci/bin/format.dart index 16f2e842469e9..73e8f6a8f9a6e 100644 --- a/ci/bin/format.dart +++ b/ci/bin/format.dart @@ -45,21 +45,24 @@ enum MessageType { enum FormatCheck { clang, + gn, java, + python, whitespace, - gn, } FormatCheck nameToFormatCheck(String name) { switch (name) { case 'clang': return FormatCheck.clang; + case 'gn': + return FormatCheck.gn; case 'java': return FormatCheck.java; + case 'python': + return FormatCheck.python; case 'whitespace': return FormatCheck.whitespace; - case 'gn': - return FormatCheck.gn; default: throw FormattingException('Unknown FormatCheck type $name'); } @@ -69,12 +72,14 @@ String formatCheckToName(FormatCheck check) { switch (check) { case FormatCheck.clang: return 'C++/ObjC'; + case FormatCheck.gn: + return 'GN'; case FormatCheck.java: return 'Java'; + case FormatCheck.python: + return 'Python'; case FormatCheck.whitespace: return 'Trailing whitespace'; - case FormatCheck.gn: - return 'GN'; } } @@ -140,6 +145,15 @@ abstract class FormatChecker { allFiles: allFiles, messageCallback: messageCallback, ); + case FormatCheck.gn: + return GnFormatChecker( + processManager: processManager, + baseGitRef: baseGitRef, + repoDir: repoDir, + srcDir: srcDir, + allFiles: allFiles, + messageCallback: messageCallback, + ); case FormatCheck.java: return JavaFormatChecker( processManager: processManager, @@ -149,8 +163,8 @@ abstract class FormatChecker { allFiles: allFiles, messageCallback: messageCallback, ); - case FormatCheck.whitespace: - return WhitespaceFormatChecker( + case FormatCheck.python: + return PythonFormatChecker( processManager: processManager, baseGitRef: baseGitRef, repoDir: repoDir, @@ -158,8 +172,8 @@ abstract class FormatChecker { allFiles: allFiles, messageCallback: messageCallback, ); - case FormatCheck.gn: - return GnFormatChecker( + case FormatCheck.whitespace: + return WhitespaceFormatChecker( processManager: processManager, 
          baseGitRef: baseGitRef,
          repoDir: repoDir,
@@ -665,6 +679,93 @@ class GnFormatChecker extends FormatChecker {
   }
 }
 
+/// Checks the format of any .py files using the "yapf" command.
+class PythonFormatChecker extends FormatChecker {
+  PythonFormatChecker({
+    ProcessManager processManager = const LocalProcessManager(),
+    required String baseGitRef,
+    required Directory repoDir,
+    required Directory srcDir,
+    bool allFiles = false,
+    MessageCallback? messageCallback,
+  }) : super(
+          processManager: processManager,
+          baseGitRef: baseGitRef,
+          repoDir: repoDir,
+          srcDir: srcDir,
+          allFiles: allFiles,
+          messageCallback: messageCallback,
+        ) {
+    yapfBin = File(path.join(
+      repoDir.absolute.path,
+      'tools',
+      'yapf.sh',
+    ));
+    _yapfStyle = File(path.join(
+      repoDir.absolute.path,
+      '.style.yapf',
+    ));
+  }
+
+  late final File yapfBin;
+  late final File _yapfStyle;
+
+  @override
+  Future<bool> checkFormatting() async {
+    message('Checking Python formatting...');
+    return (await _runYapfCheck(fixing: false)) == 0;
+  }
+
+  @override
+  Future<bool> fixFormatting() async {
+    message('Fixing Python formatting...');
+    await _runYapfCheck(fixing: true);
+    // The yapf script shouldn't fail when fixing errors.
+    return true;
+  }
+
+  Future<int> _runYapfCheck({required bool fixing}) async {
+    final List<String> filesToCheck = await getFileList(<String>['*.py']);
+
+    final List<String> cmd = <String>[
+      yapfBin.path,
+      '--style', _yapfStyle.path,
+      if (!fixing) '--diff',
+      if (fixing) '--in-place',
+    ];
+    final List<WorkerJob> jobs = <WorkerJob>[];
+    for (final String file in filesToCheck) {
+      jobs.add(WorkerJob(<String>[...cmd, file]));
+    }
+    final ProcessPool yapfPool = ProcessPool(
+      processRunner: _processRunner,
+      printReport: namedReport('python format'),
+    );
+    final List<WorkerJob> completedJobs = await yapfPool.runToCompletion(jobs);
+    reportDone();
+    final List<String> incorrect = <String>[];
+    for (final WorkerJob job in completedJobs) {
+      if (job.result.exitCode == 1) {
+        incorrect.add('  ${job.command.last}\n${job.result.output}');
+      }
+    }
+    if (incorrect.isNotEmpty) {
+      final bool plural = incorrect.length > 1;
+      if (fixing) {
+        message('Fixed ${incorrect.length} python file${plural ? 's' : ''}'
+            ' which ${plural ? 'were' : 'was'} formatted incorrectly.');
+      } else {
+        error('Found ${incorrect.length} python file${plural ? 's' : ''}'
+            ' which ${plural ? 'were' : 'was'} formatted incorrectly:');
+        incorrect.forEach(stderr.writeln);
+      }
+    } else {
+      message('All python files formatted correctly.');
+    }
+    return incorrect.length;
+  }
+}
+
 @immutable
 class _GrepResult {
   const _GrepResult(this.file, [this.hits = const <String>[], this.lineNumbers = const <int>[]]);
diff --git a/ci/deps_parser.py b/ci/deps_parser.py
index b4e81045cde7f..c8944a685671a 100644
--- a/ci/deps_parser.py
+++ b/ci/deps_parser.py
@@ -20,83 +20,86 @@
 # Used in parsing the DEPS file.
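# A hedged illustration of the input this script consumes: a DEPS file is a
# Python fragment, e.g. (abridged from the DEPS change earlier in this patch):
#
#   deps = {
#     'src/third_party/yapf':
#         Var('github_git') + '/google/yapf' + '@' + '212c5b5ad8e1...',
#   }
#
# ParseDepsFile below exec()s that text with 'Var' bound to VarImpl.Lookup,
# so each Var(name) call resolves against the file's vars dictionary or the
# default host_cpu/host_os values before the dep strings are collected.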
class VarImpl(object): - _env_vars = { - "host_cpu": "x64", - "host_os": "linux", - } + _env_vars = { + "host_cpu": "x64", + "host_os": "linux", + } - def __init__(self, local_scope): - self._local_scope = local_scope + def __init__(self, local_scope): + self._local_scope = local_scope - def Lookup(self, var_name): - """Implements the Var syntax.""" - if var_name in self._local_scope.get("vars", {}): - return self._local_scope["vars"][var_name] - # Inject default values for env variables - if var_name in self._env_vars: - return self._env_vars[var_name] - raise Exception("Var is not defined: %s" % var_name) + def Lookup(self, var_name): + """Implements the Var syntax.""" + if var_name in self._local_scope.get("vars", {}): + return self._local_scope["vars"][var_name] + # Inject default values for env variables + if var_name in self._env_vars: + return self._env_vars[var_name] + raise Exception("Var is not defined: %s" % var_name) def ParseDepsFile(deps_file): - local_scope = {} - var = VarImpl(local_scope) - global_scope = { - 'Var': var.Lookup, - 'deps_os': {}, - } - # Read the content. - with open(deps_file, 'r') as fp: - deps_content = fp.read() - - # Eval the content. - exec (deps_content, global_scope, local_scope) - - # Extract the deps and filter. - deps = local_scope.get('deps', {}) - filtered_deps = [] - for k, v in deps.items(): - # We currently do not support packages or cipd which are represented - # as dictionaries. - if isinstance(v, str): - filtered_deps.append(v) - - return filtered_deps + local_scope = {} + var = VarImpl(local_scope) + global_scope = { + 'Var': var.Lookup, + 'deps_os': {}, + } + # Read the content. + with open(deps_file, 'r') as fp: + deps_content = fp.read() + + # Eval the content. + exec(deps_content, global_scope, local_scope) + + # Extract the deps and filter. + deps = local_scope.get('deps', {}) + filtered_deps = [] + for k, v in deps.items(): + # We currently do not support packages or cipd which are represented + # as dictionaries. + if isinstance(v, str): + filtered_deps.append(v) + + return filtered_deps def WriteManifest(deps, manifest_file): - print('\n'.join(sorted(deps))) - with open(manifest_file, 'w') as manifest: - manifest.write('\n'.join(sorted(deps))) + print('\n'.join(sorted(deps))) + with open(manifest_file, 'w') as manifest: + manifest.write('\n'.join(sorted(deps))) def ParseArgs(args): - args = args[1:] - parser = argparse.ArgumentParser( - description='A script to flatten a gclient DEPS file.') - - parser.add_argument( - '--deps', - '-d', - type=str, - help='Input DEPS file.', - default=os.path.join(CHECKOUT_ROOT, 'DEPS')) - parser.add_argument( - '--output', - '-o', - type=str, - help='Output flattened deps file.', - default=os.path.join(CHECKOUT_ROOT, 'deps_flatten.txt')) - - return parser.parse_args(args) + args = args[1:] + parser = argparse.ArgumentParser( + description='A script to flatten a gclient DEPS file.' 
+ ) + + parser.add_argument( + '--deps', + '-d', + type=str, + help='Input DEPS file.', + default=os.path.join(CHECKOUT_ROOT, 'DEPS') + ) + parser.add_argument( + '--output', + '-o', + type=str, + help='Output flattened deps file.', + default=os.path.join(CHECKOUT_ROOT, 'deps_flatten.txt') + ) + + return parser.parse_args(args) def Main(argv): - args = ParseArgs(argv) - deps = ParseDepsFile(args.deps) - WriteManifest(deps, args.output) - return 0 + args = ParseArgs(argv) + deps = ParseDepsFile(args.deps) + WriteManifest(deps, args.output) + return 0 if __name__ == '__main__': - sys.exit(Main(sys.argv)) + sys.exit(Main(sys.argv)) diff --git a/ci/firebase_testlab.py b/ci/firebase_testlab.py index 12a5b6cad5504..c36d9c07d44a7 100755 --- a/ci/firebase_testlab.py +++ b/ci/firebase_testlab.py @@ -25,27 +25,38 @@ def RunFirebaseTest(apk, results_dir): # See https://firebase.google.com/docs/test-lab/android/game-loop # Pixel 5. As of this commit, this is a highly available device in FTL. process = subprocess.Popen( - [ - 'gcloud', - '--project', 'flutter-infra', - 'firebase', 'test', 'android', 'run', - '--type', 'game-loop', - '--app', apk, - '--timeout', '2m', - '--results-bucket', bucket, - '--results-dir', results_dir, - '--device', 'model=redfin,version=30', - ], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, + [ + 'gcloud', + '--project', + 'flutter-infra', + 'firebase', + 'test', + 'android', + 'run', + '--type', + 'game-loop', + '--app', + apk, + '--timeout', + '2m', + '--results-bucket', + bucket, + '--results-dir', + results_dir, + '--device', + 'model=redfin,version=30', + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, ) return process def CheckLogcat(results_dir): logcat = subprocess.check_output([ - 'gsutil', 'cat', '%s/%s/*/logcat' % (bucket, results_dir) + 'gsutil', 'cat', + '%s/%s/*/logcat' % (bucket, results_dir) ]) if not logcat: sys.exit(1) @@ -60,7 +71,8 @@ def CheckLogcat(results_dir): def CheckTimeline(results_dir): du = subprocess.check_output([ 'gsutil', 'du', - '%s/%s/*/game_loop_results/results_scenario_0.json' % (bucket, results_dir) + '%s/%s/*/game_loop_results/results_scenario_0.json' % + (bucket, results_dir) ]).strip() if du == '0': print('Failed to produce a timeline.') @@ -69,11 +81,18 @@ def CheckTimeline(results_dir): def main(): parser = argparse.ArgumentParser() - parser.add_argument('--variant', dest='variant', action='store', - default='android_profile_arm64', help='The engine variant to run tests for.') - parser.add_argument('--build-id', + parser.add_argument( + '--variant', + dest='variant', + action='store', + default='android_profile_arm64', + help='The engine variant to run tests for.' + ) + parser.add_argument( + '--build-id', default=os.environ.get('SWARMING_TASK_ID', 'local_test'), - help='A unique build identifier for this test. Used to sort results in the GCS bucket.') + help='A unique build identifier for this test. Used to sort results in the GCS bucket.' 
+ ) args = parser.parse_args() @@ -84,12 +103,14 @@ def main(): print('No APKs found at %s' % apks_dir) return 1 - git_revision = subprocess.check_output( - ['git', 'rev-parse', 'HEAD'], cwd=script_dir).strip() + git_revision = subprocess.check_output(['git', 'rev-parse', 'HEAD'], + cwd=script_dir).strip() results = [] for apk in apks: - results_dir = '%s/%s/%s' % (os.path.basename(apk), git_revision, args.build_id) + results_dir = '%s/%s/%s' % ( + os.path.basename(apk), git_revision, args.build_id + ) process = RunFirebaseTest(apk, results_dir) results.append((results_dir, process)) diff --git a/ci/licenses_golden/licenses_flutter b/ci/licenses_golden/licenses_flutter index 917c13e628ac5..e58fc07647657 100644 --- a/ci/licenses_golden/licenses_flutter +++ b/ci/licenses_golden/licenses_flutter @@ -12,6 +12,7 @@ LIBRARY: txt ORIGIN: ../../../flutter/LICENSE TYPE: LicenseType.bsd FILE: ../../../flutter/.clang-tidy +FILE: ../../../flutter/.style.yapf FILE: ../../../flutter/DEPS FILE: ../../../flutter/assets/asset_manager.cc FILE: ../../../flutter/assets/asset_manager.h diff --git a/ci/licenses_golden/tool_signature b/ci/licenses_golden/tool_signature index e825cd0eee441..37b61dc38ad9e 100644 --- a/ci/licenses_golden/tool_signature +++ b/ci/licenses_golden/tool_signature @@ -1,2 +1,2 @@ -Signature: 7b0c7d76d9cfc331776ed8d447e52f67 +Signature: 4d80570261409484c1813b3fe5282783 diff --git a/impeller/tools/build_metal_library.py b/impeller/tools/build_metal_library.py index 004d6b9dc5a85..d5ae22d7c4707 100644 --- a/impeller/tools/build_metal_library.py +++ b/impeller/tools/build_metal_library.py @@ -9,104 +9,122 @@ import os import subprocess + def MakeDirectories(path): - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST and os.path.isdir(path): + pass + else: + raise + def Main(): parser = argparse.ArgumentParser() - parser.add_argument("--output", - type=str, required=True, - help="The location to generate the Metal library to.") - parser.add_argument("--depfile", - type=str, required=True, - help="The location of the depfile.") - parser.add_argument("--source", - type=str, action="append", required=True, - help="The source file to compile. Can be specified multiple times.") - parser.add_argument("--optimize", action="store_true", default=False, - help="If available optimizations must be applied to the compiled Metal sources.") - parser.add_argument("--platform", required=True, choices=["mac", "ios", "ios-simulator"], - help="Select the platform.") + parser.add_argument( + "--output", + type=str, + required=True, + help="The location to generate the Metal library to." + ) + parser.add_argument( + "--depfile", type=str, required=True, help="The location of the depfile." + ) + parser.add_argument( + "--source", + type=str, + action="append", + required=True, + help="The source file to compile. Can be specified multiple times." + ) + parser.add_argument( + "--optimize", + action="store_true", + default=False, + help="If available optimizations must be applied to the compiled Metal sources." + ) + parser.add_argument( + "--platform", + required=True, + choices=["mac", "ios", "ios-simulator"], + help="Select the platform." 
+ ) args = parser.parse_args() MakeDirectories(os.path.dirname(args.depfile)) command = [ - "xcrun", + "xcrun", ] if args.platform == "mac": command += [ - "-sdk", - "macosx", + "-sdk", + "macosx", ] elif args.platform == "ios": command += [ - "-sdk", - "iphoneos", + "-sdk", + "iphoneos", ] elif args.platform == "ios-simulator": command += [ - "-sdk", - "iphonesimulator", + "-sdk", + "iphonesimulator", ] command += [ - "metal", - # These warnings are from generated code and would make no sense to the GLSL - # author. - "-Wno-unused-variable", - # Both user and system header will be tracked. - "-MMD", - "-MF", - args.depfile, - "-o", - args.output, + "metal", + # These warnings are from generated code and would make no sense to the GLSL + # author. + "-Wno-unused-variable", + # Both user and system header will be tracked. + "-MMD", + "-MF", + args.depfile, + "-o", + args.output, ] # The Metal standard must match the specification in impellerc. if args.platform == "mac": command += [ - "--std=macos-metal1.2", + "--std=macos-metal1.2", ] elif args.platform == "ios": command += [ - "--std=ios-metal1.2", - "-mios-version-min=10.0", + "--std=ios-metal1.2", + "-mios-version-min=10.0", ] if args.optimize: command += [ - # Like -Os (and thus -O2), but reduces code size further. - "-Oz", - # Allow aggressive, lossy floating-point optimizations. - "-ffast-math", + # Like -Os (and thus -O2), but reduces code size further. + "-Oz", + # Allow aggressive, lossy floating-point optimizations. + "-ffast-math", ] else: command += [ - # Embeds both sources and driver options in the output. This aids in - # debugging but should be removed from release builds. - # TODO(chinmaygarde): Use -frecord-sources when CI upgrades to - # Xcode 13. - "-MO", - # Assist the sampling profiler. - "-gline-tables-only", - "-g", - # Optimize for debuggability. - "-Og", + # Embeds both sources and driver options in the output. This aids in + # debugging but should be removed from release builds. + # TODO(chinmaygarde): Use -frecord-sources when CI upgrades to + # Xcode 13. + "-MO", + # Assist the sampling profiler. + "-gline-tables-only", + "-g", + # Optimize for debuggability. + "-Og", ] command += args.source subprocess.check_call(command) + if __name__ == '__main__': if sys.platform != 'darwin': raise Exception("This script only runs on Mac") diff --git a/impeller/tools/check_licenses.py b/impeller/tools/check_licenses.py index 50dc921012277..a366099404fa3 100644 --- a/impeller/tools/check_licenses.py +++ b/impeller/tools/check_licenses.py @@ -27,39 +27,39 @@ def ContainsLicenseBlock(source_file): def IsSourceFile(path): known_extensions = [ - ".cc", - ".cpp", - ".c", - ".h", - ".hpp", - ".py", - ".sh", - ".gn", - ".gni", - ".glsl", - ".sl.h", - ".vert", - ".frag", - ".tesc", - ".tese", - ".yaml", - ".dart", + ".cc", + ".cpp", + ".c", + ".h", + ".hpp", + ".py", + ".sh", + ".gn", + ".gni", + ".glsl", + ".sl.h", + ".vert", + ".frag", + ".tesc", + ".tese", + ".yaml", + ".dart", ] for extension in known_extensions: if os.path.basename(path).endswith(extension): return True - return False; + return False # Checks that all source files have the same license preamble. def Main(): parser = argparse.ArgumentParser() - parser.add_argument("--source-root", - type=str, required=True, - help="The source root.") + parser.add_argument( + "--source-root", type=str, required=True, help="The source root." 
+ ) args = parser.parse_args() - assert(os.path.exists(args.source_root)) + assert (os.path.exists(args.source_root)) source_files = set() @@ -71,7 +71,10 @@ def Main(): for source_file in source_files: if not ContainsLicenseBlock(source_file): - raise Exception("Could not find valid license block in source ", source_file) + raise Exception( + "Could not find valid license block in source ", source_file + ) + if __name__ == '__main__': Main() diff --git a/impeller/tools/xxd.py b/impeller/tools/xxd.py index 3a8516320165b..50d2fdd926d12 100644 --- a/impeller/tools/xxd.py +++ b/impeller/tools/xxd.py @@ -9,35 +9,49 @@ import os import struct + def MakeDirectories(path): - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST and os.path.isdir(path): + pass + else: + raise + # Dump the bytes of file into a C translation unit. # This can be used to embed the file contents into a binary. def Main(): parser = argparse.ArgumentParser() - parser.add_argument("--symbol-name", - type=str, required=True, - help="The name of the symbol referencing the data.") - parser.add_argument("--output-header", - type=str, required=True, - help="The header file containing the symbol reference.") - parser.add_argument("--output-source", - type=str, required=True, - help="The source file containing the file bytes.") - parser.add_argument("--source", - type=str, required=True, - help="The source file whose contents to embed in the output source file.") + parser.add_argument( + "--symbol-name", + type=str, + required=True, + help="The name of the symbol referencing the data." + ) + parser.add_argument( + "--output-header", + type=str, + required=True, + help="The header file containing the symbol reference." + ) + parser.add_argument( + "--output-source", + type=str, + required=True, + help="The source file containing the file bytes." + ) + parser.add_argument( + "--source", + type=str, + required=True, + help="The source file whose contents to embed in the output source file." 
+ ) args = parser.parse_args() - assert(os.path.exists(args.source)) + assert (os.path.exists(args.source)) output_header = os.path.abspath(args.output_header) output_source = os.path.abspath(args.output_source) @@ -58,7 +72,9 @@ def Main(): data_len += 1 output.write(f"{ord(byte)},") output.write("};\n") - output.write(f"const unsigned long impeller_{args.symbol_name}_length = {data_len};\n") + output.write( + f"const unsigned long impeller_{args.symbol_name}_length = {data_len};\n" + ) with open(output_header, "w") as output: output.write("#pragma once\n") @@ -66,12 +82,17 @@ def Main(): output.write("extern \"C\" {\n") output.write("#endif\n\n") - output.write(f"extern const unsigned char impeller_{args.symbol_name}_data[];\n") - output.write(f"extern const unsigned long impeller_{args.symbol_name}_length;\n\n") + output.write( + f"extern const unsigned char impeller_{args.symbol_name}_data[];\n" + ) + output.write( + f"extern const unsigned long impeller_{args.symbol_name}_length;\n\n" + ) output.write("#ifdef __cplusplus\n") output.write("}\n") output.write("#endif\n") + if __name__ == '__main__': Main() diff --git a/shell/platform/fuchsia/flutter/build/asset_package.py b/shell/platform/fuchsia/flutter/build/asset_package.py index 5ec916f5ff027..89470a6c14863 100755 --- a/shell/platform/fuchsia/flutter/build/asset_package.py +++ b/shell/platform/fuchsia/flutter/build/asset_package.py @@ -13,20 +13,39 @@ def main(): parser = argparse.ArgumentParser(description='Package a Flutter application') - parser.add_argument('--flutter-root', type=str, required=True, - help='The root of the Flutter SDK') - parser.add_argument('--flutter-tools', type=str, required=True, - help='The executable for the Flutter tool') - parser.add_argument('--asset-dir', type=str, required=True, - help='The directory where to put intermediate files') - parser.add_argument('--app-dir', type=str, required=True, - help='The root of the app') - parser.add_argument('--packages', type=str, required=True, - help='The package map to use') + parser.add_argument( + '--flutter-root', + type=str, + required=True, + help='The root of the Flutter SDK' + ) + parser.add_argument( + '--flutter-tools', + type=str, + required=True, + help='The executable for the Flutter tool' + ) + parser.add_argument( + '--asset-dir', + type=str, + required=True, + help='The directory where to put intermediate files' + ) + parser.add_argument( + '--app-dir', type=str, required=True, help='The root of the app' + ) + parser.add_argument( + '--packages', type=str, required=True, help='The package map to use' + ) parser.add_argument('--manifest', type=str, help='The application manifest') - parser.add_argument('--component-name', type=str, help='The name of the component') - parser.add_argument('--asset-manifest-out', type=str, - help='Output path for the asset manifest used by the fuchsia packaging tool') + parser.add_argument( + '--component-name', type=str, help='The name of the component' + ) + parser.add_argument( + '--asset-manifest-out', + type=str, + help='Output path for the asset manifest used by the fuchsia packaging tool' + ) args = parser.parse_args() @@ -34,9 +53,9 @@ def main(): env['FLUTTER_ROOT'] = args.flutter_root call_args = [ - args.flutter_tools, - '--asset-dir=%s' % args.asset_dir, - '--packages=%s' % args.packages, + args.flutter_tools, + '--asset-dir=%s' % args.asset_dir, + '--packages=%s' % args.packages, ] if 'manifest' in args: call_args.append('--manifest=%s' % args.manifest) @@ -51,5 +70,6 @@ def main(): return result + if 
__name__ == '__main__': sys.exit(main()) diff --git a/shell/platform/fuchsia/flutter/build/gen_debug_wrapper_main.py b/shell/platform/fuchsia/flutter/build/gen_debug_wrapper_main.py index d5811d5cddb05..22bb1ac5d8928 100755 --- a/shell/platform/fuchsia/flutter/build/gen_debug_wrapper_main.py +++ b/shell/platform/fuchsia/flutter/build/gen_debug_wrapper_main.py @@ -7,16 +7,18 @@ import os import re import sys + + def main(): parser = argparse.ArgumentParser( - sys.argv[0], - description="Generate main file for Fuchsia dart test") - parser.add_argument("--out", - help="Path to .dart file to generate", - required=True) - parser.add_argument("--main-dart", - help="Path to main.dart file to import", - required=True) + sys.argv[0], description="Generate main file for Fuchsia dart test" + ) + parser.add_argument( + "--out", help="Path to .dart file to generate", required=True + ) + parser.add_argument( + "--main-dart", help="Path to main.dart file to import", required=True + ) args = parser.parse_args() out_dir = os.path.dirname(args.out) assert os.path.isfile(os.path.join(os.path.dirname(args.out), args.main_dart)) @@ -27,16 +29,19 @@ def main(): # may not always be synchronous across all functions. outfile.write('''// Generated by ''') outfile.write(os.path.basename(__file__)) - outfile.write(''' + outfile.write( + ''' // ignore_for_file: avoid_relative_lib_imports import 'dart:async'; import 'package:flutter_driver/driver_extension.dart'; -''') +''' + ) outfile.write("import '%s' as flutter_app_main;\n" % args.main_dart) - outfile.write(''' + outfile.write( + ''' void main() async { assert(await (() async { // TODO(awdavies): Use the logger instead. @@ -67,9 +72,10 @@ def main(): await res; } } -''') +''' + ) outfile.close() + if __name__ == '__main__': main() - diff --git a/sky/tools/create_ios_framework.py b/sky/tools/create_ios_framework.py index 431d2092a8270..201fe635b9199 100755 --- a/sky/tools/create_ios_framework.py +++ b/sky/tools/create_ios_framework.py @@ -12,30 +12,46 @@ from create_xcframework import create_xcframework -DSYMUTIL = os.path.join(os.path.dirname(__file__), '..', '..', '..', - 'buildtools', 'mac-x64', 'clang', 'bin', 'dsymutil') +DSYMUTIL = os.path.join( + os.path.dirname(__file__), '..', '..', '..', 'buildtools', 'mac-x64', + 'clang', 'bin', 'dsymutil' +) + def main(): - parser = argparse.ArgumentParser(description='Creates Flutter.framework and Flutter.xcframework') + parser = argparse.ArgumentParser( + description='Creates Flutter.framework and Flutter.xcframework' + ) parser.add_argument('--dst', type=str, required=True) parser.add_argument('--arm64-out-dir', type=str, required=True) parser.add_argument('--armv7-out-dir', type=str, required=False) # TODO(gw280): Remove --simulator-out-dir alias when all recipes are updated - parser.add_argument('--simulator-x64-out-dir', '--simulator-out-dir', type=str, required=True) + parser.add_argument( + '--simulator-x64-out-dir', '--simulator-out-dir', type=str, required=True + ) parser.add_argument('--simulator-arm64-out-dir', type=str, required=False) parser.add_argument('--strip', action="store_true", default=False) parser.add_argument('--dsym', action="store_true", default=False) - parser.add_argument('--strip-bitcode', dest='strip_bitcode', action="store_true", default=False) + parser.add_argument( + '--strip-bitcode', + dest='strip_bitcode', + action="store_true", + default=False + ) args = parser.parse_args() framework = os.path.join(args.dst, 'Flutter.framework') simulator_framework = os.path.join(args.dst, 'sim', 
'Flutter.framework') arm64_framework = os.path.join(args.arm64_out_dir, 'Flutter.framework') - simulator_x64_framework = os.path.join(args.simulator_x64_out_dir, 'Flutter.framework') + simulator_x64_framework = os.path.join( + args.simulator_x64_out_dir, 'Flutter.framework' + ) if args.simulator_arm64_out_dir is not None: - simulator_arm64_framework = os.path.join(args.simulator_arm64_out_dir, 'Flutter.framework') + simulator_arm64_framework = os.path.join( + args.simulator_arm64_out_dir, 'Flutter.framework' + ) simulator_arm64_dylib = os.path.join(simulator_arm64_framework, 'Flutter') arm64_dylib = os.path.join(arm64_framework, 'Flutter') @@ -74,12 +90,8 @@ def main(): # Create the arm64/x64 simulator fat framework. subprocess.check_call([ - 'lipo', - simulator_x64_dylib, - simulator_arm64_dylib, - '-create', - '-output', - simulator_framework_binary + 'lipo', simulator_x64_dylib, simulator_arm64_dylib, '-create', + '-output', simulator_framework_binary ]) process_framework(args, simulator_framework, simulator_framework_binary) simulator_framework = simulator_framework @@ -93,12 +105,8 @@ def main(): # Add the x64 simulator into the fat framework subprocess.check_call([ - 'lipo', - arm64_dylib, - simulator_x64_dylib, - '-create', - '-output', - framework_binary + 'lipo', arm64_dylib, simulator_x64_dylib, '-create', '-output', + framework_binary ]) process_framework(args, framework, framework_binary) @@ -106,7 +114,9 @@ def main(): def process_framework(args, framework, framework_binary): if args.strip_bitcode: - subprocess.check_call(['xcrun', 'bitcode_strip', '-r', framework_binary, '-o', framework_binary]) + subprocess.check_call([ + 'xcrun', 'bitcode_strip', '-r', framework_binary, '-o', framework_binary + ]) if args.dsym: dsym_out = os.path.splitext(framework)[0] + '.dSYM' diff --git a/sky/tools/create_macos_framework.py b/sky/tools/create_macos_framework.py index 4053c9b43773f..2f61b8094dabb 100755 --- a/sky/tools/create_macos_framework.py +++ b/sky/tools/create_macos_framework.py @@ -12,11 +12,16 @@ from create_xcframework import create_xcframework -DSYMUTIL = os.path.join(os.path.dirname(__file__), '..', '..', '..', - 'buildtools', 'mac-x64', 'clang', 'bin', 'dsymutil') +DSYMUTIL = os.path.join( + os.path.dirname(__file__), '..', '..', '..', 'buildtools', 'mac-x64', + 'clang', 'bin', 'dsymutil' +) + def main(): - parser = argparse.ArgumentParser(description='Creates FlutterMacOS.framework for macOS') + parser = argparse.ArgumentParser( + description='Creates FlutterMacOS.framework for macOS' + ) parser.add_argument('--dst', type=str, required=True) parser.add_argument('--arm64-out-dir', type=str, required=True) @@ -56,16 +61,13 @@ def main(): shutil.rmtree(fat_framework, True) shutil.copytree(arm64_framework, fat_framework, symlinks=True) - fat_framework_binary = os.path.join(fat_framework, 'Versions', 'A', 'FlutterMacOS') + fat_framework_binary = os.path.join( + fat_framework, 'Versions', 'A', 'FlutterMacOS' + ) # Create the arm64/x64 fat framework. 
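# For context, a hedged note: `lipo -create in1 in2 -output out` merges thin
# single-architecture Mach-O binaries into one fat binary. A sanity check one
# could run afterwards (illustrative only, not part of this script; assumes
# the Xcode command-line tools are installed):
#
#   archs = subprocess.check_output(
#       ['lipo', '-archs', fat_framework_binary]).decode().split()
#   assert sorted(archs) == ['arm64', 'x86_64']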
subprocess.check_call([ - 'lipo', - arm64_dylib, - x64_dylib, - '-create', - '-output', - fat_framework_binary + 'lipo', arm64_dylib, x64_dylib, '-create', '-output', fat_framework_binary ]) process_framework(args, fat_framework, fat_framework_binary) diff --git a/sky/tools/create_macos_gen_snapshots.py b/sky/tools/create_macos_gen_snapshots.py index 507ce2512f26b..7c689063da9ed 100755 --- a/sky/tools/create_macos_gen_snapshots.py +++ b/sky/tools/create_macos_gen_snapshots.py @@ -24,8 +24,7 @@ def main(): if args.x64_out_dir: generate_gen_snapshot( - args.x64_out_dir, - os.path.join(args.dst, 'gen_snapshot_x64') + args.x64_out_dir, os.path.join(args.dst, 'gen_snapshot_x64') ) if args.arm64_out_dir: @@ -47,11 +46,10 @@ def generate_gen_snapshot(directory, destination): print('Cannot find gen_snapshot at %s' % gen_snapshot_dir) sys.exit(1) - subprocess.check_call( - ['xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination] - ) + subprocess.check_call([ + 'xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination + ]) if __name__ == '__main__': sys.exit(main()) - diff --git a/sky/tools/create_xcframework.py b/sky/tools/create_xcframework.py index 774a3ef90d8c5..889dea35d84a8 100755 --- a/sky/tools/create_xcframework.py +++ b/sky/tools/create_xcframework.py @@ -4,10 +4,6 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. - - - - import argparse import errno import os @@ -15,19 +11,30 @@ import subprocess import sys + def main(): parser = argparse.ArgumentParser( - description='Creates an XCFramework consisting of the specified universal frameworks') - - parser.add_argument('--frameworks', - nargs='+', help='The framework paths used to create the XCFramework.', required=True) - parser.add_argument('--name', help='Name of the XCFramework', type=str, required=True) - parser.add_argument('--location', help='Output directory', type=str, required=True) + description='Creates an XCFramework consisting of the specified universal frameworks' + ) + + parser.add_argument( + '--frameworks', + nargs='+', + help='The framework paths used to create the XCFramework.', + required=True + ) + parser.add_argument( + '--name', help='Name of the XCFramework', type=str, required=True + ) + parser.add_argument( + '--location', help='Output directory', type=str, required=True + ) args = parser.parse_args() create_xcframework(args.location, args.name, args.frameworks) + def create_xcframework(location, name, frameworks): output_dir = os.path.abspath(location) output_xcframework = os.path.join(output_dir, '%s.xcframework' % name) @@ -40,10 +47,7 @@ def create_xcframework(location, name, frameworks): shutil.rmtree(output_xcframework) # xcrun xcodebuild -create-xcframework -framework foo/baz.framework -framework bar/baz.framework -output output/ - command = ['xcrun', - 'xcodebuild', - '-quiet', - '-create-xcframework'] + command = ['xcrun', 'xcodebuild', '-quiet', '-create-xcframework'] for framework in frameworks: command.extend(['-framework', os.path.abspath(framework)]) @@ -52,5 +56,6 @@ def create_xcframework(location, name, frameworks): subprocess.check_call(command, stdout=open(os.devnull, 'w')) + if __name__ == '__main__': sys.exit(main()) diff --git a/sky/tools/dist_dart_pkg.py b/sky/tools/dist_dart_pkg.py index 2a01a571e3ae2..f91cf24adaf6d 100755 --- a/sky/tools/dist_dart_pkg.py +++ b/sky/tools/dist_dart_pkg.py @@ -15,8 +15,12 @@ def main(): parser = argparse.ArgumentParser(description='Copy a Dart package') - 
parser.add_argument('--source', type=str, help='Source directory assembled by dart_pkg.py') - parser.add_argument('--dest', type=str, help='Destination directory for the package') + parser.add_argument( + '--source', type=str, help='Source directory assembled by dart_pkg.py' + ) + parser.add_argument( + '--dest', type=str, help='Destination directory for the package' + ) args = parser.parse_args() @@ -25,7 +29,10 @@ def main(): # dart_pkg.py will create a packages directory within the package. # Do not copy this into the release output. - shutil.copytree(args.source, args.dest, ignore=shutil.ignore_patterns('packages')) + shutil.copytree( + args.source, args.dest, ignore=shutil.ignore_patterns('packages') + ) + if __name__ == '__main__': sys.exit(main()) diff --git a/sky/tools/gen_snapshots.py b/sky/tools/gen_snapshots.py index bd4b8662cd40f..3dde468131fbe 100755 --- a/sky/tools/gen_snapshots.py +++ b/sky/tools/gen_snapshots.py @@ -37,11 +37,10 @@ def generate_gen_snapshot(directory, destination): print('Cannot find gen_snapshot at %s' % gen_snapshot_dir) sys.exit(1) - subprocess.check_call( - ['xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination] - ) + subprocess.check_call([ + 'xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination + ]) if __name__ == '__main__': sys.exit(main()) - diff --git a/sky/tools/install_framework_headers.py b/sky/tools/install_framework_headers.py index 1defe100b1ea9..4cc631d0e47f3 100755 --- a/sky/tools/install_framework_headers.py +++ b/sky/tools/install_framework_headers.py @@ -15,10 +15,15 @@ def main(): parser = argparse.ArgumentParser( description='Removes existing files and installs the specified headers' + - 'at the given location.') - - parser.add_argument('--headers', - nargs='+', help='The headers to install at the location.', required=True) + 'at the given location.' + ) + + parser.add_argument( + '--headers', + nargs='+', + help='The headers to install at the location.', + required=True + ) parser.add_argument('--location', type=str, required=True) args = parser.parse_args() @@ -37,9 +42,9 @@ def main(): # Copy all files specified in the args. for header_file in args.headers: - shutil.copyfile(header_file, - os.path.join(args.location, os.path.basename(header_file))) - + shutil.copyfile( + header_file, os.path.join(args.location, os.path.basename(header_file)) + ) if __name__ == '__main__': diff --git a/sky/tools/objcopy.py b/sky/tools/objcopy.py index ed2256bc5f209..a1fdedd143468 100755 --- a/sky/tools/objcopy.py +++ b/sky/tools/objcopy.py @@ -11,22 +11,25 @@ # BFD architecture names recognized by objcopy. BFD_ARCH = { - 'arm': 'arm', - 'arm64': 'aarch64', - 'x86': 'i386', - 'x64': 'i386:x86-64', + 'arm': 'arm', + 'arm64': 'aarch64', + 'x86': 'i386', + 'x64': 'i386:x86-64', } # BFD target names recognized by objcopy. 
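# (Hedged aside: with `-I binary`, objcopy also synthesizes
# _binary_<name>_start, _binary_<name>_end and _binary_<name>_size symbols
# from the input file name, mapping non-alphanumeric characters to
# underscores. That is why main() below invokes objcopy with cwd=input_dir
# and a relative input path: the generated symbol names then do not embed
# the absolute build directory.)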
BFD_TARGET = { - 'arm': 'elf32-littlearm', - 'arm64': 'elf64-littleaarch64', - 'x86': 'elf32-i386', - 'x64': 'elf64-x86-64', + 'arm': 'elf32-littlearm', + 'arm64': 'elf64-littleaarch64', + 'x86': 'elf32-i386', + 'x64': 'elf64-x86-64', } + def main(): - parser = argparse.ArgumentParser(description='Convert a data file to an object file') + parser = argparse.ArgumentParser( + description='Convert a data file to an object file' + ) parser.add_argument('--objcopy', type=str, required=True) parser.add_argument('--input', type=str, required=True) parser.add_argument('--output', type=str, required=True) @@ -38,13 +41,18 @@ def main(): output_path = os.path.abspath(args.output) subprocess.check_call([ - args.objcopy, - '-I', 'binary', - '-O', BFD_TARGET[args.arch], - '-B', BFD_ARCH[args.arch], - input_file, - output_path, - ], cwd=input_dir) + args.objcopy, + '-I', + 'binary', + '-O', + BFD_TARGET[args.arch], + '-B', + BFD_ARCH[args.arch], + input_file, + output_path, + ], + cwd=input_dir) + if __name__ == '__main__': sys.exit(main()) diff --git a/sky/tools/roll/patch.py b/sky/tools/roll/patch.py index 1ffc6a88aae1e..651f187f25b28 100755 --- a/sky/tools/roll/patch.py +++ b/sky/tools/roll/patch.py @@ -8,11 +8,13 @@ import subprocess import utils + def patch_and_filter(dest_dir, relative_patches_dir): os.chdir(dest_dir) - utils.filter_file("build/landmines.py", - lambda line: not "gyp_environment" in line) + utils.filter_file( + "build/landmines.py", lambda line: not "gyp_environment" in line + ) utils.commit("filter gyp_environment out of build/landmines.py") patch(dest_dir, relative_patches_dir) @@ -28,8 +30,9 @@ def patch(dest_dir, relative_patches_dir=os.curdir): Raises: subprocess.CalledProcessError if the patch couldn't be applied. """ - patches_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), - relative_patches_dir) + patches_dir = os.path.join( + os.path.dirname(os.path.realpath(__file__)), relative_patches_dir + ) assert os.path.isdir(patches_dir) os.chdir(dest_dir) diff --git a/sky/tools/roll/roll.py b/sky/tools/roll/roll.py index a4d03aabdc2d2..0c0f754fef799 100755 --- a/sky/tools/roll/roll.py +++ b/sky/tools/roll/roll.py @@ -91,49 +91,51 @@ def rev(source_dir, dest_dir, dirs_to_rev, name, revision_file=None): - for dir_to_rev in dirs_to_rev: - if type(dir_to_rev) is tuple: - d, file_subset = dir_to_rev - else: - d = dir_to_rev - file_subset = None - print("removing directory %s" % d) - try: - system(["git", "rm", "-r", d], cwd=dest_dir) - except subprocess.CalledProcessError: - print("Could not remove %s" % d) - print("cloning directory %s" % d) - - if file_subset is None: - files = system(["git", "ls-files", d], cwd=source_dir).splitlines() - else: - files = [os.path.join(d, f) for f in file_subset] - - for f in files: - source_path = os.path.join(source_dir, f) - if not os.path.isfile(source_path): - continue - dest_path = os.path.join(dest_dir, f) - system(["mkdir", "-p", os.path.dirname(dest_path)], cwd=source_dir) - system(["cp", source_path, dest_path], cwd=source_dir) - system(["git", "add", d], cwd=dest_dir) - - for f in files_not_to_roll: - system(["git", "checkout", "HEAD", f], cwd=dest_dir) - - src_commit = system(["git", "rev-parse", "HEAD"], cwd=source_dir).strip() - - if revision_file: - with open(revision_file, 'w') as f: - f.write(src_commit) - - system(["git", "add", "."], cwd=dest_dir) - commit("Update to %s %s" % (name, src_commit), cwd=dest_dir) + for dir_to_rev in dirs_to_rev: + if type(dir_to_rev) is tuple: + d, file_subset = dir_to_rev + else: + d = 
dir_to_rev + file_subset = None + print("removing directory %s" % d) + try: + system(["git", "rm", "-r", d], cwd=dest_dir) + except subprocess.CalledProcessError: + print("Could not remove %s" % d) + print("cloning directory %s" % d) + + if file_subset is None: + files = system(["git", "ls-files", d], cwd=source_dir).splitlines() + else: + files = [os.path.join(d, f) for f in file_subset] + + for f in files: + source_path = os.path.join(source_dir, f) + if not os.path.isfile(source_path): + continue + dest_path = os.path.join(dest_dir, f) + system(["mkdir", "-p", os.path.dirname(dest_path)], cwd=source_dir) + system(["cp", source_path, dest_path], cwd=source_dir) + system(["git", "add", d], cwd=dest_dir) + + for f in files_not_to_roll: + system(["git", "checkout", "HEAD", f], cwd=dest_dir) + + src_commit = system(["git", "rev-parse", "HEAD"], cwd=source_dir).strip() + + if revision_file: + with open(revision_file, 'w') as f: + f.write(src_commit) + + system(["git", "add", "."], cwd=dest_dir) + commit("Update to %s %s" % (name, src_commit), cwd=dest_dir) def main(): - parser = argparse.ArgumentParser(description="Update the mojo repo's " + - "snapshot of things imported from chromium.") + parser = argparse.ArgumentParser( + description="Update the mojo repo's " + + "snapshot of things imported from chromium." + ) parser.add_argument("--mojo-dir", type=str) parser.add_argument("--chromium-dir", type=str) parser.add_argument("--dest-dir", type=str) @@ -143,18 +145,26 @@ def main(): dest_dir = os.path.abspath(args.dest_dir) if args.mojo_dir: - rev(os.path.abspath(args.mojo_dir), dest_dir, dirs_from_mojo, 'mojo', - revision_file='mojo/VERSION') + rev( + os.path.abspath(args.mojo_dir), + dest_dir, + dirs_from_mojo, + 'mojo', + revision_file='mojo/VERSION' + ) if args.chromium_dir: - rev(os.path.abspath(args.chromium_dir), dest_dir, dirs_from_chromium, 'chromium') - - try: - patch.patch_and_filter(dest_dir, os.path.join('patches', 'chromium')) - except subprocess.CalledProcessError: - print("ERROR: Roll failed due to a patch not applying") - print("Fix the patch to apply, commit the result, and re-run this script") - return 1 + rev( + os.path.abspath(args.chromium_dir), dest_dir, dirs_from_chromium, + 'chromium' + ) + + try: + patch.patch_and_filter(dest_dir, os.path.join('patches', 'chromium')) + except subprocess.CalledProcessError: + print("ERROR: Roll failed due to a patch not applying") + print("Fix the patch to apply, commit the result, and re-run this script") + return 1 return 0 diff --git a/sky/tools/roll/utils.py b/sky/tools/roll/utils.py index c5008eec30b33..a5234d3416074 100755 --- a/sky/tools/roll/utils.py +++ b/sky/tools/roll/utils.py @@ -8,12 +8,15 @@ import os import subprocess + def commit(message, cwd=None): subprocess.call(['git', 'commit', '-a', '-m', message], cwd=cwd) + def system(command, cwd=None): return subprocess.check_output(command, cwd=cwd) + def find(patterns, start='.'): for path, dirs, files in os.walk(start): for basename in files + dirs: @@ -21,6 +24,7 @@ def find(patterns, start='.'): filename = os.path.join(path, basename) yield filename + def filter_file(path, predicate): with open(path, 'r+') as f: lines = f.readlines() diff --git a/testing/android_systrace_test.py b/testing/android_systrace_test.py index 3bc1bd8f56ddc..2367df9ae6138 100755 --- a/testing/android_systrace_test.py +++ b/testing/android_systrace_test.py @@ -9,7 +9,9 @@ import subprocess import sys -BUILDROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..')) 
+BUILDROOT_DIR = os.path.abspath( + os.path.join(os.path.realpath(__file__), '..', '..', '..') +) PERFETTO_SESSION_KEY = 'session1' PERFETTO_TRACE_FILE = '/data/misc/perfetto-traces/trace' @@ -32,19 +34,23 @@ } ''' + def InstallApk(apk_path, package_name, adb_path='adb'): print('Installing APK') subprocess.check_output([adb_path, 'shell', 'am', 'force-stop', package_name]) # Allowed to fail if APK was never installed. - subprocess.call([adb_path, 'uninstall', package_name], stdout=subprocess.DEVNULL) + subprocess.call([adb_path, 'uninstall', package_name], + stdout=subprocess.DEVNULL) subprocess.check_output([adb_path, 'install', apk_path]) def StartPerfetto(package_name, adb_path='adb'): print('Starting trace') - cmd = [adb_path, 'shell', 'echo' , "'" + PERFETTO_CONFIG % package_name + "'", '|', - 'perfetto', '-c', '-', '--txt', '-o', PERFETTO_TRACE_FILE, - '--detach', PERFETTO_SESSION_KEY] + cmd = [ + adb_path, 'shell', 'echo', "'" + PERFETTO_CONFIG % package_name + "'", + '|', 'perfetto', '-c', '-', '--txt', '-o', PERFETTO_TRACE_FILE, + '--detach', PERFETTO_SESSION_KEY + ] subprocess.check_output(cmd, stderr=subprocess.STDOUT) @@ -52,30 +58,44 @@ def StartPerfetto(package_name, adb_path='adb'): def LaunchPackage(package_name, activity_name, adb_path='adb'): print('Scanning logcat') subprocess.check_output([adb_path, 'logcat', '-c'], stderr=subprocess.STDOUT) - logcat = subprocess.Popen([adb_path, 'logcat'], stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, universal_newlines=True) + logcat = subprocess.Popen([adb_path, 'logcat'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True) print('Launching %s (%s)' % (package_name, activity_name)) - subprocess.check_output( - [adb_path, 'shell', 'am ', 'start', '-n', - '%s/%s' % (package_name, activity_name)], stderr=subprocess.STDOUT) + subprocess.check_output([ + adb_path, 'shell', 'am ', 'start', '-n', + '%s/%s' % (package_name, activity_name) + ], + stderr=subprocess.STDOUT) for line in logcat.stdout: print('>>>>>>>> ' + line.strip()) - if ('Observatory listening' in line) or ('Dart VM service is listening' in line): + if ('Observatory listening' in line) or ('Dart VM service is listening' + in line): logcat.kill() break -def CollectAndValidateTrace(adb_path = 'adb'): +def CollectAndValidateTrace(adb_path='adb'): print('Fetching trace') - subprocess.check_output([adb_path, 'shell', 'perfetto', '--attach', - PERFETTO_SESSION_KEY, '--stop'], stderr=subprocess.STDOUT) - subprocess.check_output([adb_path, 'pull', PERFETTO_TRACE_FILE, 'trace.pb'], stderr=subprocess.STDOUT) + subprocess.check_output([ + adb_path, 'shell', 'perfetto', '--attach', PERFETTO_SESSION_KEY, '--stop' + ], + stderr=subprocess.STDOUT) + subprocess.check_output([adb_path, 'pull', PERFETTO_TRACE_FILE, 'trace.pb'], + stderr=subprocess.STDOUT) print('Validating trace') - traceconv = os.path.join(BUILDROOT_DIR, 'third_party', - 'android_tools', 'trace_to_text', 'trace_to_text') - traceconv_output = subprocess.check_output([traceconv, 'systrace', 'trace.pb'], stderr=subprocess.STDOUT, universal_newlines=True) + traceconv = os.path.join( + BUILDROOT_DIR, 'third_party', 'android_tools', 'trace_to_text', + 'trace_to_text' + ) + traceconv_output = subprocess.check_output([ + traceconv, 'systrace', 'trace.pb' + ], + stderr=subprocess.STDOUT, + universal_newlines=True) print('Trace output:') print(traceconv_output) @@ -90,16 +110,33 @@ def CollectAndValidateTrace(adb_path = 'adb'): def main(): parser = argparse.ArgumentParser() - 
parser.add_argument('--apk-path', dest='apk_path', action='store', - help='Provide the path to the APK to install') - parser.add_argument('--package-name', dest='package_name', action='store', - help='The package name of the APK, e.g. dev.flutter.scenarios') - parser.add_argument('--activity-name', dest='activity_name', action='store', + parser.add_argument( + '--apk-path', + dest='apk_path', + action='store', + help='Provide the path to the APK to install' + ) + parser.add_argument( + '--package-name', + dest='package_name', + action='store', + help='The package name of the APK, e.g. dev.flutter.scenarios' + ) + parser.add_argument( + '--activity-name', + dest='activity_name', + action='store', help='The activity to launch as it appears in AndroidManifest.xml, ' - 'e.g. .TextPlatformViewActivity') - parser.add_argument('--adb-path', dest='adb_path', action='store', - default='adb', help='Provide the path of adb used for android tests. ' - 'By default it looks on $PATH.') + 'e.g. .TextPlatformViewActivity' + ) + parser.add_argument( + '--adb-path', + dest='adb_path', + action='store', + default='adb', + help='Provide the path of adb used for android tests. ' + 'By default it looks on $PATH.' + ) args = parser.parse_args() diff --git a/testing/benchmark/displaylist_benchmark_parser.py b/testing/benchmark/displaylist_benchmark_parser.py index 6d121625c50b4..a8b49c6b95579 100755 --- a/testing/benchmark/displaylist_benchmark_parser.py +++ b/testing/benchmark/displaylist_benchmark_parser.py @@ -11,7 +11,9 @@ import matplotlib.pyplot as plt from matplotlib.backends.backend_pdf import PdfPages as pdfp + class BenchmarkResult: + def __init__(self, name, backend, timeUnit, drawCallCount): self.name = name self.series = {} @@ -24,11 +26,13 @@ def __init__(self, name, backend, timeUnit, drawCallCount): self.optionalValues = {} def __repr__(self): - return 'Name: % s\nBackend: % s\nSeries: % s\nSeriesLabels: % s\n' % (self.name, self.backend, self.series, self.seriesLabels) + return 'Name: % s\nBackend: % s\nSeries: % s\nSeriesLabels: % s\n' % ( + self.name, self.backend, self.series, self.seriesLabels + ) def addDataPoint(self, family, x, y): if family not in self.series: - self.series[family] = { 'x': [], 'y': [] } + self.series[family] = {'x': [], 'y': []} self.series[family]['x'].append(x) self.series[family]['y'].append(y) @@ -56,7 +60,11 @@ def plot(self): figures.append(plt.figure(dpi=1200, frameon=False, figsize=(11, 8.5))) for family in self.series: - plt.plot(self.series[family]['x'], self.series[family]['y'], label = self.seriesLabels[family]) + plt.plot( + self.series[family]['x'], + self.series[family]['y'], + label=self.seriesLabels[family] + ) plt.xlabel('Benchmark Seed') plt.ylabel('Time (' + self.timeUnit + ')') @@ -83,7 +91,11 @@ def plot(self): # Plot again but with the full Y axis visible figures.append(plt.figure(dpi=1200, frameon=False, figsize=(11, 8.5))) for family in self.series: - plt.plot(self.series[family]['x'], self.series[family]['y'], label = self.seriesLabels[family]) + plt.plot( + self.series[family]['x'], + self.series[family]['y'], + label=self.seriesLabels[family] + ) plt.xlabel('Benchmark Seed') plt.ylabel('Time (' + self.timeUnit + ')') @@ -123,24 +135,42 @@ def writeCSV(self, writer): row.append(y_values[series][line]) writer.writerow(row) + def main(): parser = argparse.ArgumentParser() - parser.add_argument('filename', action='store', - help='Path to the JSON output from Google Benchmark') - parser.add_argument('-o', '--output-pdf', dest='outputPDF', 
action='store', default='output.pdf', - help='Filename to output the PDF of graphs to.') - parser.add_argument('-c', '--output-csv', dest='outputCSV', action='store', default='output.csv', - help='Filename to output the CSV data to.') + parser.add_argument( + 'filename', + action='store', + help='Path to the JSON output from Google Benchmark' + ) + parser.add_argument( + '-o', + '--output-pdf', + dest='outputPDF', + action='store', + default='output.pdf', + help='Filename to output the PDF of graphs to.' + ) + parser.add_argument( + '-c', + '--output-csv', + dest='outputCSV', + action='store', + default='output.csv', + help='Filename to output the CSV data to.' + ) args = parser.parse_args() jsonData = parseJSON(args.filename) return processBenchmarkData(jsonData, args.outputPDF, args.outputCSV) + def error(message): print(message) exit(1) + def extractAttributesLabel(benchmarkResult): # Possible attribute keys are: # AntiAliasing @@ -159,6 +189,7 @@ def extractAttributesLabel(benchmarkResult): return label[:-2] + def processBenchmarkData(benchmarkJSON, outputPDF, outputCSV): benchmarkResultsData = {} @@ -175,9 +206,9 @@ def processBenchmarkData(benchmarkJSON, outputPDF, outputCSV): # First split is always the benchmark function name benchmarkName = benchmarkVariant[0] # The last split is always the seeded value into the benchmark - benchmarkSeededValue = benchmarkVariant[splits-1] + benchmarkSeededValue = benchmarkVariant[splits - 1] # The second last split is always the backend - benchmarkBackend = benchmarkVariant[splits-2] + benchmarkBackend = benchmarkVariant[splits - 2] # Time taken (wall clock time) for benchmark to run benchmarkRealTime = benchmarkResult['real_time'] benchmarkUnit = benchmarkResult['time_unit'] @@ -186,7 +217,7 @@ def processBenchmarkData(benchmarkJSON, outputPDF, outputCSV): benchmarkFamilyLabel = '' if splits > 3: - for i in range(1, splits-2): + for i in range(1, splits - 2): benchmarkFamilyLabel += benchmarkVariant[i] + ', ' benchmarkFamilyAttributes = extractAttributesLabel(benchmarkResult) @@ -201,17 +232,28 @@ def processBenchmarkData(benchmarkJSON, outputPDF, outputCSV): else: benchmarkDrawCallCount = -1 - optional_keys = ['DrawCallCount_Varies', 'VerbCount', 'PointCount', 'VertexCount', 'GlyphCount'] + optional_keys = [ + 'DrawCallCount_Varies', 'VerbCount', 'PointCount', 'VertexCount', + 'GlyphCount' + ] if benchmarkName not in benchmarkResultsData: - benchmarkResultsData[benchmarkName] = BenchmarkResult(benchmarkName, benchmarkBackend, benchmarkUnit, benchmarkDrawCallCount) + benchmarkResultsData[benchmarkName] = BenchmarkResult( + benchmarkName, benchmarkBackend, benchmarkUnit, benchmarkDrawCallCount + ) for key in optional_keys: if key in benchmarkResult: - benchmarkResultsData[benchmarkName].addOptionalValue(key, benchmarkSeededValue, benchmarkResult[key]) + benchmarkResultsData[benchmarkName].addOptionalValue( + key, benchmarkSeededValue, benchmarkResult[key] + ) - benchmarkResultsData[benchmarkName].addDataPoint(benchmarkFamilyIndex, benchmarkSeededValue, benchmarkRealTime) - benchmarkResultsData[benchmarkName].setFamilyLabel(benchmarkFamilyIndex, benchmarkFamilyLabel) + benchmarkResultsData[benchmarkName].addDataPoint( + benchmarkFamilyIndex, benchmarkSeededValue, benchmarkRealTime + ) + benchmarkResultsData[benchmarkName].setFamilyLabel( + benchmarkFamilyIndex, benchmarkFamilyLabel + ) pp = pdfp(outputPDF) @@ -239,5 +281,6 @@ def parseJSON(filename): return jsonData['benchmarks'] + if __name__ == '__main__': sys.exit(main()) diff --git 
a/testing/rules/run_gradle.py b/testing/rules/run_gradle.py index 2f24c028ee8a0..10a7c723ff225 100644 --- a/testing/rules/run_gradle.py +++ b/testing/rules/run_gradle.py @@ -16,18 +16,33 @@ SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__)) BAT = '.bat' if sys.platform.startswith(('cygwin', 'win')) else '' -GRADLE_BIN = os.path.normpath(os.path.join(SCRIPT_PATH, '..', '..', '..', - 'third_party', 'gradle', 'bin', 'gradle%s' % BAT)) - -ANDROID_HOME = os.path.normpath(os.path.join(SCRIPT_PATH, '..', '..', '..', - 'third_party', 'android_tools', 'sdk')) +GRADLE_BIN = os.path.normpath( + os.path.join( + SCRIPT_PATH, '..', '..', '..', 'third_party', 'gradle', 'bin', + 'gradle%s' % BAT + ) +) + +ANDROID_HOME = os.path.normpath( + os.path.join( + SCRIPT_PATH, '..', '..', '..', 'third_party', 'android_tools', 'sdk' + ) +) if platform.system() == 'Darwin': - JAVA_HOME = os.path.normpath(os.path.join(SCRIPT_PATH, '..', '..', '..', - 'third_party', 'java', 'openjdk', 'Contents', 'Home')) + JAVA_HOME = os.path.normpath( + os.path.join( + SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk', + 'Contents', 'Home' + ) + ) else: - JAVA_HOME = os.path.normpath(os.path.join(SCRIPT_PATH, '..', '..', '..', - 'third_party', 'java', 'openjdk')) + JAVA_HOME = os.path.normpath( + os.path.join( + SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk' + ) + ) + def main(): if not os.path.isdir(ANDROID_HOME): @@ -35,9 +50,9 @@ def main(): android_dir = sys.argv[1] subprocess.check_output( - args=[GRADLE_BIN] + sys.argv[2:], - cwd=android_dir, - env=dict(os.environ, ANDROID_HOME=ANDROID_HOME, JAVA_HOME=JAVA_HOME), + args=[GRADLE_BIN] + sys.argv[2:], + cwd=android_dir, + env=dict(os.environ, ANDROID_HOME=ANDROID_HOME, JAVA_HOME=JAVA_HOME), ) return 0 diff --git a/testing/run_tests.py b/testing/run_tests.py index 71e43c6c3f56f..c96a41ae1f9c1 100755 --- a/testing/run_tests.py +++ b/testing/run_tests.py @@ -17,10 +17,14 @@ import sys import time -buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..')) +buildroot_dir = os.path.abspath( + os.path.join(os.path.realpath(__file__), '..', '..', '..') +) out_dir = os.path.join(buildroot_dir, 'out') golden_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'resources') -fonts_dir = os.path.join(buildroot_dir, 'flutter', 'third_party', 'txt', 'third_party', 'fonts') +fonts_dir = os.path.join( + buildroot_dir, 'flutter', 'third_party', 'txt', 'third_party', 'fonts' +) roboto_font_path = os.path.join(fonts_dir, 'Roboto-Regular.ttf') font_subset_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'font-subset') @@ -43,14 +47,24 @@ def RunCmd(cmd, forbidden_output=[], expect_failure=False, env=None, **kwargs): start_time = time.time() stdout_pipe = sys.stdout if not forbidden_output else subprocess.PIPE stderr_pipe = sys.stderr if not forbidden_output else subprocess.PIPE - process = subprocess.Popen(cmd, stdout=stdout_pipe, stderr=stderr_pipe, env=env, universal_newlines=True, **kwargs) + process = subprocess.Popen( + cmd, + stdout=stdout_pipe, + stderr=stderr_pipe, + env=env, + universal_newlines=True, + **kwargs + ) stdout, stderr = process.communicate() end_time = time.time() if process.returncode != 0 and not expect_failure: PrintDivider('!') - print('Failed Command:\n\n%s\n\nExit Code: %d\n' % (command_string, process.returncode)) + print( + 'Failed Command:\n\n%s\n\nExit Code: %d\n' % + (command_string, process.returncode) + ) if stdout: print('STDOUT: \n%s' % stdout) @@ -60,18 +74,28 @@ def RunCmd(cmd, 
forbidden_output=[], expect_failure=False, env=None, **kwargs): PrintDivider('!') - raise Exception('Command "%s" exited with code %d.' % (command_string, process.returncode)) + raise Exception( + 'Command "%s" exited with code %d.' % + (command_string, process.returncode) + ) if stdout or stderr: print(stdout) print(stderr) for forbidden_string in forbidden_output: - if (stdout and forbidden_string in stdout) or (stderr and forbidden_string in stderr): - raise Exception('command "%s" contained forbidden string %s' % (command_string, forbidden_string)) + if (stdout and forbidden_string in stdout) or (stderr and + forbidden_string in stderr): + raise Exception( + 'command "%s" contained forbidden string %s' % + (command_string, forbidden_string) + ) PrintDivider('<') - print('Command run successfully in %.2f seconds: %s' % (end_time - start_time, command_string)) + print( + 'Command run successfully in %.2f seconds: %s' % + (end_time - start_time, command_string) + ) def IsMac(): @@ -106,7 +130,9 @@ def FindExecutablePath(path): raise Exception('Executable %s does not exist!' % path) -def BuildEngineExecutableCommand(build_dir, executable_name, flags=[], coverage=False, gtest=False): +def BuildEngineExecutableCommand( + build_dir, executable_name, flags=[], coverage=False, gtest=False +): unstripped_exe = os.path.join(build_dir, 'exe.unstripped', executable_name) # We cannot run the unstripped binaries directly when coverage is enabled. if IsLinux() and os.path.exists(unstripped_exe) and not coverage: @@ -116,24 +142,40 @@ def BuildEngineExecutableCommand(build_dir, executable_name, flags=[], coverage= else: executable = FindExecutablePath(os.path.join(build_dir, executable_name)) - coverage_script = os.path.join(buildroot_dir, 'flutter', 'build', 'generate_coverage.py') + coverage_script = os.path.join( + buildroot_dir, 'flutter', 'build', 'generate_coverage.py' + ) if coverage: - coverage_flags = ['-t', executable, '-o', os.path.join(build_dir, 'coverage', executable_name), '-f', 'html'] + coverage_flags = [ + '-t', executable, '-o', + os.path.join(build_dir, 'coverage', executable_name), '-f', 'html' + ] updated_flags = ['--args=%s' % ' '.join(flags)] - test_command = [ coverage_script ] + coverage_flags + updated_flags + test_command = [coverage_script] + coverage_flags + updated_flags else: - test_command = [ executable ] + flags + test_command = [executable] + flags if gtest: - gtest_parallel = os.path.join(buildroot_dir, 'third_party', 'gtest-parallel', 'gtest-parallel') + gtest_parallel = os.path.join( + buildroot_dir, 'third_party', 'gtest-parallel', 'gtest-parallel' + ) test_command = ['python', gtest_parallel] + test_command return test_command -def RunEngineExecutable(build_dir, executable_name, filter, flags=[], - cwd=buildroot_dir, forbidden_output=[], expect_failure=False, coverage=False, - extra_env={}, gtest=False): +def RunEngineExecutable( + build_dir, + executable_name, + filter, + flags=[], + cwd=buildroot_dir, + forbidden_output=[], + expect_failure=False, + coverage=False, + extra_env={}, + gtest=False +): if filter is not None and executable_name not in filter: print('Skipping %s due to filter.' 
% executable_name) return @@ -150,7 +192,11 @@ def RunEngineExecutable(build_dir, executable_name, filter, flags=[], print('Running %s in %s' % (executable_name, cwd)) test_command = BuildEngineExecutableCommand( - build_dir, executable_name, flags=flags, coverage=coverage, gtest=gtest, + build_dir, + executable_name, + flags=flags, + coverage=coverage, + gtest=gtest, ) if not env: @@ -160,7 +206,13 @@ def RunEngineExecutable(build_dir, executable_name, filter, flags=[], env[key] = value try: - RunCmd(test_command, cwd=cwd, forbidden_output=forbidden_output, expect_failure=expect_failure, env=env) + RunCmd( + test_command, + cwd=cwd, + forbidden_output=forbidden_output, + expect_failure=expect_failure, + env=env + ) except: # The LUCI environment may provide a variable containing a directory path # for additional output files that will be uploaded to cloud storage. @@ -168,21 +220,39 @@ def RunEngineExecutable(build_dir, executable_name, filter, flags=[], # the dump and output a report that will be uploaded. luci_test_outputs_path = os.environ.get('FLUTTER_TEST_OUTPUTS_DIR') core_path = os.path.join(cwd, 'core') - if luci_test_outputs_path and os.path.exists(core_path) and os.path.exists(unstripped_exe): - dump_path = os.path.join(luci_test_outputs_path, '%s_%s.txt' % (executable_name, sys.platform)) + if luci_test_outputs_path and os.path.exists(core_path) and os.path.exists( + unstripped_exe): + dump_path = os.path.join( + luci_test_outputs_path, '%s_%s.txt' % (executable_name, sys.platform) + ) print('Writing core dump analysis to %s' % dump_path) subprocess.call([ - os.path.join(buildroot_dir, 'flutter', 'testing', 'analyze_core_dump.sh'), - buildroot_dir, unstripped_exe, core_path, dump_path, + os.path.join( + buildroot_dir, 'flutter', 'testing', 'analyze_core_dump.sh' + ), + buildroot_dir, + unstripped_exe, + core_path, + dump_path, ]) os.unlink(core_path) raise class EngineExecutableTask(object): - def __init__(self, build_dir, executable_name, filter, flags=[], - cwd=buildroot_dir, forbidden_output=[], expect_failure=False, - coverage=False, extra_env={}): + + def __init__( + self, + build_dir, + executable_name, + filter, + flags=[], + cwd=buildroot_dir, + forbidden_output=[], + expect_failure=False, + coverage=False, + extra_env={} + ): self.build_dir = build_dir self.executable_name = executable_name self.filter = filter @@ -195,16 +265,23 @@ def __init__(self, build_dir, executable_name, filter, flags=[], def __call__(self, *args): RunEngineExecutable( - self.build_dir, self.executable_name, self.filter, flags=self.flags, - cwd=self.cwd, forbidden_output=self.forbidden_output, - expect_failure=self.expect_failure, coverage=self.coverage, - extra_env=self.extra_env, + self.build_dir, + self.executable_name, + self.filter, + flags=self.flags, + cwd=self.cwd, + forbidden_output=self.forbidden_output, + expect_failure=self.expect_failure, + coverage=self.coverage, + extra_env=self.extra_env, ) def __str__(self): command = BuildEngineExecutableCommand( - self.build_dir, self.executable_name, flags=self.flags, - coverage=self.coverage + self.build_dir, + self.executable_name, + flags=self.flags, + coverage=self.coverage ) return " ".join(command) @@ -214,101 +291,128 @@ def RunCCTests(build_dir, filter, coverage, capture_core_dump): if capture_core_dump and IsLinux(): import resource - resource.setrlimit(resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY)) + resource.setrlimit( + resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY) + ) 
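+    # An unlimited core file size lets crashing engine tests write complete
+    # core dumps for the crash-analysis step in RunEngineExecutable.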
shuffle_flags = [ - "--gtest_repeat=2", - "--gtest_shuffle", + "--gtest_repeat=2", + "--gtest_shuffle", ] repeat_flags = [ - "--repeat=2", + "--repeat=2", ] def make_test(name, flags=repeat_flags, extra_env={}): return (name, flags, extra_env) unittests = [ - make_test('client_wrapper_glfw_unittests'), - make_test('client_wrapper_unittests'), - make_test('common_cpp_core_unittests'), - make_test('common_cpp_unittests'), - make_test('dart_plugin_registrant_unittests'), - make_test('display_list_rendertests'), - make_test('display_list_unittests'), - make_test('embedder_proctable_unittests'), - make_test('embedder_unittests'), - make_test('fml_unittests', flags=[ fml_unittests_filter ] + repeat_flags), - make_test('no_dart_plugin_registrant_unittests'), - make_test('runtime_unittests'), - make_test('testing_unittests'), - make_test('tonic_unittests'), - # The image release unit test can take a while on slow machines. - make_test('ui_unittests', flags=repeat_flags + ['--timeout=90']), + make_test('client_wrapper_glfw_unittests'), + make_test('client_wrapper_unittests'), + make_test('common_cpp_core_unittests'), + make_test('common_cpp_unittests'), + make_test('dart_plugin_registrant_unittests'), + make_test('display_list_rendertests'), + make_test('display_list_unittests'), + make_test('embedder_proctable_unittests'), + make_test('embedder_unittests'), + make_test('fml_unittests', flags=[fml_unittests_filter] + repeat_flags), + make_test('no_dart_plugin_registrant_unittests'), + make_test('runtime_unittests'), + make_test('testing_unittests'), + make_test('tonic_unittests'), + # The image release unit test can take a while on slow machines. + make_test('ui_unittests', flags=repeat_flags + ['--timeout=90']), ] if not IsWindows(): unittests += [ - # https://github.com/google/googletest/issues/2490 - make_test('android_external_view_embedder_unittests'), - make_test('jni_unittests'), - make_test('platform_view_android_delegate_unittests'), - # https://github.com/flutter/flutter/issues/36295 - make_test('shell_unittests'), + # https://github.com/google/googletest/issues/2490 + make_test('android_external_view_embedder_unittests'), + make_test('jni_unittests'), + make_test('platform_view_android_delegate_unittests'), + # https://github.com/flutter/flutter/issues/36295 + make_test('shell_unittests'), ] if IsWindows(): unittests += [ - # The accessibility library only supports Mac and Windows. - make_test('accessibility_unittests'), - make_test('client_wrapper_windows_unittests'), - make_test('flutter_windows_unittests'), + # The accessibility library only supports Mac and Windows. + make_test('accessibility_unittests'), + make_test('client_wrapper_windows_unittests'), + make_test('flutter_windows_unittests'), ] # These unit-tests are Objective-C and can only run on Darwin. if IsMac(): unittests += [ - # The accessibility library only supports Mac and Windows. - make_test('accessibility_unittests'), - make_test('flutter_channels_unittests'), + # The accessibility library only supports Mac and Windows. 
+ make_test('accessibility_unittests'), + make_test('flutter_channels_unittests'), ] if IsLinux(): flow_flags = [ - '--golden-dir=%s' % golden_dir, - '--font-file=%s' % roboto_font_path, + '--golden-dir=%s' % golden_dir, + '--font-file=%s' % roboto_font_path, + ] + icu_flags = [ + '--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat') ] - icu_flags = ['--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat')] unittests += [ - make_test('flow_unittests', flags=repeat_flags + ['--'] + flow_flags), - make_test('flutter_glfw_unittests'), - make_test('flutter_linux_unittests', extra_env={'G_DEBUG': 'fatal-criticals'}), - # https://github.com/flutter/flutter/issues/36296 - make_test('txt_unittests', flags=repeat_flags + ['--'] + icu_flags), + make_test('flow_unittests', flags=repeat_flags + ['--'] + flow_flags), + make_test('flutter_glfw_unittests'), + make_test( + 'flutter_linux_unittests', extra_env={'G_DEBUG': 'fatal-criticals'} + ), + # https://github.com/flutter/flutter/issues/36296 + make_test('txt_unittests', flags=repeat_flags + ['--'] + icu_flags), ] else: flow_flags = ['--gtest_filter=-PerformanceOverlayLayer.Gold'] unittests += [ - make_test('flow_unittests', flags=repeat_flags + flow_flags), + make_test('flow_unittests', flags=repeat_flags + flow_flags), ] for test, flags, extra_env in unittests: - RunEngineExecutable(build_dir, test, filter, flags, coverage=coverage, - extra_env=extra_env, gtest=True) + RunEngineExecutable( + build_dir, + test, + filter, + flags, + coverage=coverage, + extra_env=extra_env, + gtest=True + ) if IsMac(): # flutter_desktop_darwin_unittests uses global state that isn't handled # correctly by gtest-parallel. # https://github.com/flutter/flutter/issues/104789 - RunEngineExecutable(build_dir, 'flutter_desktop_darwin_unittests', filter, shuffle_flags, coverage=coverage) + RunEngineExecutable( + build_dir, + 'flutter_desktop_darwin_unittests', + filter, + shuffle_flags, + coverage=coverage + ) # Impeller tests are only supported on macOS for now. - RunEngineExecutable(build_dir, 'impeller_unittests', filter, shuffle_flags, coverage=coverage) + RunEngineExecutable( + build_dir, + 'impeller_unittests', + filter, + shuffle_flags, + coverage=coverage + ) def RunEngineBenchmarks(build_dir, filter): print("Running Engine Benchmarks.") - icu_flags = ['--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat')] + icu_flags = [ + '--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat') + ] RunEngineExecutable(build_dir, 'shell_benchmarks', filter, icu_flags) @@ -320,13 +424,21 @@ def RunEngineBenchmarks(build_dir, filter): RunEngineExecutable(build_dir, 'txt_benchmarks', filter, icu_flags) -def GatherDartTest(build_dir, test_packages, dart_file, verbose_dart_snapshot, - multithreaded, enable_observatory=False, - expect_failure=False, alternative_tester=False): +def GatherDartTest( + build_dir, + test_packages, + dart_file, + verbose_dart_snapshot, + multithreaded, + enable_observatory=False, + expect_failure=False, + alternative_tester=False +): kernel_file_name = os.path.basename(dart_file) + '.dill' kernel_file_output = os.path.join(build_dir, 'gen', kernel_file_name) error_message = "%s doesn't exist. 
Please run the build that populates %s" % ( - kernel_file_output, build_dir) + kernel_file_output, build_dir + ) assert os.path.isfile(kernel_file_output), error_message command_args = [] @@ -334,16 +446,19 @@ def GatherDartTest(build_dir, test_packages, dart_file, verbose_dart_snapshot, command_args.append('--disable-observatory') dart_file_contents = open(dart_file, 'r') - custom_options = re.findall("// FlutterTesterOptions=(.*)", dart_file_contents.read()) + custom_options = re.findall( + "// FlutterTesterOptions=(.*)", dart_file_contents.read() + ) dart_file_contents.close() command_args.extend(custom_options) command_args += [ - '--use-test-fonts', - '--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat'), - '--flutter-assets-dir=%s' % os.path.join(build_dir, 'gen', 'flutter', 'lib', 'ui', 'assets'), - '--disable-asset-fonts', - kernel_file_output, + '--use-test-fonts', + '--icu-data-file-path=%s' % os.path.join(build_dir, 'icudtl.dat'), + '--flutter-assets-dir=%s' % + os.path.join(build_dir, 'gen', 'flutter', 'lib', 'ui', 'assets'), + '--disable-asset-fonts', + kernel_file_output, ] if multithreaded: @@ -355,11 +470,20 @@ def GatherDartTest(build_dir, test_packages, dart_file, verbose_dart_snapshot, tester_name = 'flutter_tester' if alternative_tester: tester_name = 'flutter_tester_fractional_translation' - print("Running test '%s' using '%s' (%s)" % (kernel_file_name, tester_name, threading)) - forbidden_output = [] if 'unopt' in build_dir or expect_failure else ['[ERROR'] + print( + "Running test '%s' using '%s' (%s)" % + (kernel_file_name, tester_name, threading) + ) + forbidden_output = [] if 'unopt' in build_dir or expect_failure else [ + '[ERROR' + ] return EngineExecutableTask( - build_dir, tester_name, None, command_args, - forbidden_output=forbidden_output, expect_failure=expect_failure, + build_dir, + tester_name, + None, + command_args, + forbidden_output=forbidden_output, + expect_failure=expect_failure, ) @@ -369,7 +493,8 @@ def EnsureDebugUnoptSkyPackagesAreBuilt(): message.append('gn --runtime-mode debug --unopt --no-lto') message.append('ninja -C %s flutter/sky/packages' % variant_out_dir) final_message = '%s doesn\'t exist. Please run the following commands: \n%s' % ( - variant_out_dir, '\n'.join(message)) + variant_out_dir, '\n'.join(message) + ) assert os.path.exists(variant_out_dir), final_message @@ -378,11 +503,15 @@ def EnsureIosTestsAreBuilt(ios_out_dir): tmp_out_dir = os.path.join(out_dir, ios_out_dir) ios_test_lib = os.path.join(tmp_out_dir, 'libios_test_flutter.dylib') message = [] - message.append('gn --ios --unoptimized --runtime-mode=debug --no-lto --simulator') + message.append( + 'gn --ios --unoptimized --runtime-mode=debug --no-lto --simulator' + ) message.append('autoninja -C %s ios_test_flutter' % ios_out_dir) final_message = '%s or %s doesn\'t exist. 
Please run the following commands: \n%s' % ( - ios_out_dir, ios_test_lib, '\n'.join(message)) - assert os.path.exists(tmp_out_dir) and os.path.exists(ios_test_lib), final_message + ios_out_dir, ios_test_lib, '\n'.join(message) + ) + assert os.path.exists(tmp_out_dir + ) and os.path.exists(ios_test_lib), final_message def AssertExpectedXcodeVersion(): @@ -396,9 +525,14 @@ def AssertExpectedXcodeVersion(): def JavaHome(): script_path = os.path.dirname(os.path.realpath(__file__)) if IsMac(): - return os.path.join(script_path, '..', '..', 'third_party', 'java', 'openjdk', 'Contents', 'Home') + return os.path.join( + script_path, '..', '..', 'third_party', 'java', 'openjdk', 'Contents', + 'Home' + ) else: - return os.path.join(script_path, '..', '..', 'third_party', 'java', 'openjdk') + return os.path.join( + script_path, '..', '..', 'third_party', 'java', 'openjdk' + ) def JavaBin(): @@ -407,24 +541,35 @@ def JavaBin(): def RunJavaTests(filter, android_variant='android_debug_unopt'): """Runs the Java JUnit unit tests for the Android embedding""" - test_runner_dir = os.path.join(buildroot_dir, 'flutter', 'shell', 'platform', 'android', 'test_runner') - gradle_bin = os.path.join(buildroot_dir, 'third_party', 'gradle', 'bin', 'gradle.bat' if IsWindows() else 'gradle') + test_runner_dir = os.path.join( + buildroot_dir, 'flutter', 'shell', 'platform', 'android', 'test_runner' + ) + gradle_bin = os.path.join( + buildroot_dir, 'third_party', 'gradle', 'bin', + 'gradle.bat' if IsWindows() else 'gradle' + ) flutter_jar = os.path.join(out_dir, android_variant, 'flutter.jar') - android_home = os.path.join(buildroot_dir, 'third_party', 'android_tools', 'sdk') - build_dir = os.path.join(out_dir, android_variant, 'robolectric_tests', 'build') - gradle_cache_dir = os.path.join(out_dir, android_variant, 'robolectric_tests', '.gradle') + android_home = os.path.join( + buildroot_dir, 'third_party', 'android_tools', 'sdk' + ) + build_dir = os.path.join( + out_dir, android_variant, 'robolectric_tests', 'build' + ) + gradle_cache_dir = os.path.join( + out_dir, android_variant, 'robolectric_tests', '.gradle' + ) test_class = filter if filter else '*' command = [ - gradle_bin, - '-Pflutter_jar=%s' % flutter_jar, - '-Pbuild_dir=%s' % build_dir, - 'testDebugUnitTest', - '--tests=%s' % test_class, - '--rerun-tasks', - '--no-daemon', - '--project-cache-dir=%s' % gradle_cache_dir, - '--gradle-user-home=%s' % gradle_cache_dir, + gradle_bin, + '-Pflutter_jar=%s' % flutter_jar, + '-Pbuild_dir=%s' % build_dir, + 'testDebugUnitTest', + '--tests=%s' % test_class, + '--rerun-tasks', + '--no-daemon', + '--project-cache-dir=%s' % gradle_cache_dir, + '--gradle-user-home=%s' % gradle_cache_dir, ] env = dict(os.environ, ANDROID_HOME=android_home, JAVA_HOME=JavaHome()) @@ -441,13 +586,17 @@ def RunAndroidTests(android_variant='android_debug_unopt', adb_path=None): RunCmd([adb_path, 'push', tests_path, remote_path], cwd=buildroot_dir) RunCmd([adb_path, 'shell', remote_tests_path]) - systrace_test = os.path.join(buildroot_dir, 'flutter', 'testing', - 'android_systrace_test.py') - scenario_apk = os.path.join(out_dir, android_variant, 'firebase_apks', - 'scenario_app.apk') - RunCmd([systrace_test, '--adb-path', adb_path, '--apk-path', scenario_apk, - '--package-name', 'dev.flutter.scenarios', - '--activity-name', '.TextPlatformViewActivity']) + systrace_test = os.path.join( + buildroot_dir, 'flutter', 'testing', 'android_systrace_test.py' + ) + scenario_apk = os.path.join( + out_dir, android_variant, 'firebase_apks', 'scenario_app.apk' + 
) + RunCmd([ + systrace_test, '--adb-path', adb_path, '--apk-path', scenario_apk, + '--package-name', 'dev.flutter.scenarios', '--activity-name', + '.TextPlatformViewActivity' + ]) def RunObjcTests(ios_variant='ios_debug_sim_unopt', test_filter=None): @@ -456,19 +605,21 @@ def RunObjcTests(ios_variant='ios_debug_sim_unopt', test_filter=None): ios_out_dir = os.path.join(out_dir, ios_variant) EnsureIosTestsAreBuilt(ios_out_dir) - ios_unit_test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'ios', 'IosUnitTests') + ios_unit_test_dir = os.path.join( + buildroot_dir, 'flutter', 'testing', 'ios', 'IosUnitTests' + ) # Avoid using xcpretty unless the following can be addressed: # - Make sure all relevant failure output is printed on a failure. # - Make sure that a failing exit code is set for CI. # See https://github.com/flutter/flutter/issues/63742 command = [ - 'xcodebuild ' - '-sdk iphonesimulator ' - '-scheme IosUnitTests ' - "-destination platform='iOS Simulator,name=iPhone 11' " - 'test ' - 'FLUTTER_ENGINE=' + ios_variant + 'xcodebuild ' + '-sdk iphonesimulator ' + '-scheme IosUnitTests ' + "-destination platform='iOS Simulator,name=iPhone 11' " + 'test ' + 'FLUTTER_ENGINE=' + ios_variant ] if test_filter != None: command[0] = command[0] + " -only-testing:%s" % test_filter @@ -476,7 +627,12 @@ def RunObjcTests(ios_variant='ios_debug_sim_unopt', test_filter=None): def GatherDartTests(build_dir, filter, verbose_dart_snapshot): - dart_tests_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'dart',) + dart_tests_dir = os.path.join( + buildroot_dir, + 'flutter', + 'testing', + 'dart', + ) # This one is a bit messy. The pubspec.yaml at flutter/testing/dart/pubspec.yaml # has dependencies that are hardcoded to point to the sky packages at host_debug_unopt/ @@ -485,14 +641,16 @@ def GatherDartTests(build_dir, filter, verbose_dart_snapshot): # Now that we have the Sky packages at the hardcoded location, run `dart pub get`. RunEngineExecutable( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=['pub', 'get', '--offline'], - cwd=dart_tests_dir, + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=['pub', 'get', '--offline'], + cwd=dart_tests_dir, ) - dart_observatory_tests = glob.glob('%s/observatory/*_test.dart' % dart_tests_dir) + dart_observatory_tests = glob.glob( + '%s/observatory/*_test.dart' % dart_tests_dir + ) dart_tests = glob.glob('%s/*_test.dart' % dart_tests_dir) test_packages = os.path.join(dart_tests_dir, '.packages') @@ -501,26 +659,60 @@ def GatherDartTests(build_dir, filter, verbose_dart_snapshot): if filter is not None and os.path.basename(dart_test_file) not in filter: print("Skipping '%s' due to filter." 
% dart_test_file) else: - print("Gathering dart test '%s' with observatory enabled" % dart_test_file) - yield GatherDartTest(build_dir, test_packages, dart_test_file, verbose_dart_snapshot, True, True) - yield GatherDartTest(build_dir, test_packages, dart_test_file, verbose_dart_snapshot, False, True) + print( + "Gathering dart test '%s' with observatory enabled" % dart_test_file + ) + yield GatherDartTest( + build_dir, test_packages, dart_test_file, verbose_dart_snapshot, + True, True + ) + yield GatherDartTest( + build_dir, test_packages, dart_test_file, verbose_dart_snapshot, + False, True + ) # Smoke test with tester variant that has no raster cache and enabled fractional translation - yield GatherDartTest(build_dir, test_packages, dart_test_file, verbose_dart_snapshot, False, True, True) + yield GatherDartTest( + build_dir, test_packages, dart_test_file, verbose_dart_snapshot, + False, True, True + ) for dart_test_file in dart_tests: if filter is not None and os.path.basename(dart_test_file) not in filter: print("Skipping '%s' due to filter." % dart_test_file) else: print("Gathering dart test '%s'" % dart_test_file) - yield GatherDartTest(build_dir, test_packages, dart_test_file, verbose_dart_snapshot, True) - yield GatherDartTest(build_dir, test_packages, dart_test_file, verbose_dart_snapshot, False) + yield GatherDartTest( + build_dir, test_packages, dart_test_file, verbose_dart_snapshot, True + ) + yield GatherDartTest( + build_dir, test_packages, dart_test_file, verbose_dart_snapshot, False + ) def GatherDartSmokeTest(build_dir, verbose_dart_snapshot): - smoke_test = os.path.join(buildroot_dir, "flutter", "testing", "smoke_test_failure", "fail_test.dart") - test_packages = os.path.join(buildroot_dir, "flutter", "testing", "smoke_test_failure", ".packages") - yield GatherDartTest(build_dir, test_packages, smoke_test, verbose_dart_snapshot, True, expect_failure=True) - yield GatherDartTest(build_dir, test_packages, smoke_test, verbose_dart_snapshot, False, expect_failure=True) + smoke_test = os.path.join( + buildroot_dir, "flutter", "testing", "smoke_test_failure", + "fail_test.dart" + ) + test_packages = os.path.join( + buildroot_dir, "flutter", "testing", "smoke_test_failure", ".packages" + ) + yield GatherDartTest( + build_dir, + test_packages, + smoke_test, + verbose_dart_snapshot, + True, + expect_failure=True + ) + yield GatherDartTest( + build_dir, + test_packages, + smoke_test, + verbose_dart_snapshot, + False, + expect_failure=True + ) def GatherFrontEndServerTests(build_dir): @@ -528,77 +720,78 @@ def GatherFrontEndServerTests(build_dir): dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: opts = [ - '--disable-dart-dev', - dart_test_file, - build_dir, - os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'), - os.path.join(build_dir, 'flutter_patched_sdk')] + '--disable-dart-dev', dart_test_file, build_dir, + os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'), + os.path.join(build_dir, 'flutter_patched_sdk') + ] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def GatherConstFinderTests(build_dir): - test_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'const_finder', 'test') + test_dir = os.path.join( + buildroot_dir, 'flutter', 'tools', 'const_finder', 'test' + ) opts = [ - '--disable-dart-dev', - os.path.join(test_dir, 
'const_finder_test.dart'), - os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'), - os.path.join(build_dir, 'flutter_patched_sdk')] + '--disable-dart-dev', + os.path.join(test_dir, 'const_finder_test.dart'), + os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'), + os.path.join(build_dir, 'flutter_patched_sdk') + ] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def GatherLitetestTests(build_dir): test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'litetest') dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: - opts = [ - '--disable-dart-dev', - dart_test_file] + opts = ['--disable-dart-dev', dart_test_file] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def RunBenchmarkTests(build_dir): test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'benchmark') dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: - opts = [ - '--disable-dart-dev', - dart_test_file] + opts = ['--disable-dart-dev', dart_test_file] RunEngineExecutable( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def GatherGithooksTests(build_dir): test_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'githooks') dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: - opts = [ - '--disable-dart-dev', - dart_test_file] + opts = ['--disable-dart-dev', dart_test_file] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def GatherClangTidyTests(build_dir): @@ -606,16 +799,17 @@ def GatherClangTidyTests(build_dir): dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: opts = [ - '--disable-dart-dev', - dart_test_file, - os.path.join(build_dir, 'compile_commands.json'), - os.path.join(buildroot_dir, 'flutter')] + '--disable-dart-dev', dart_test_file, + os.path.join(build_dir, 'compile_commands.json'), + os.path.join(buildroot_dir, 'flutter') + ] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def GatherApiConsistencyTests(build_dir): @@ -623,15 +817,16 @@ def GatherApiConsistencyTests(build_dir): dart_tests = glob.glob('%s/test/*_test.dart' % test_dir) for dart_test_file in dart_tests: opts = [ - '--disable-dart-dev', - dart_test_file, - os.path.join(buildroot_dir, 'flutter')] + '--disable-dart-dev', dart_test_file, + os.path.join(buildroot_dir, 'flutter') + ] yield EngineExecutableTask( - build_dir, - os.path.join('dart-sdk', 'bin', 'dart'), - None, - flags=opts, - cwd=test_dir) + build_dir, + os.path.join('dart-sdk', 'bin', 'dart'), + None, + flags=opts, + cwd=test_dir + ) def RunEngineTasksInParallel(tasks): @@ -649,9 +844,7 @@ def RunEngineTasksInParallel(tasks): max_processes = 60 pool = 
multiprocessing.Pool(processes=max_processes) - async_results = [ - (t, pool.apply_async(t, ())) for t in tasks - ] + async_results = [(t, pool.apply_async(t, ())) for t in tasks] failures = [] for task, async_result in async_results: try: @@ -665,37 +858,99 @@ def RunEngineTasksInParallel(tasks): print("%s\n" % str(task)) raise Exception() + def main(): parser = argparse.ArgumentParser() - all_types = ['engine', 'dart', 'benchmarks', 'java', 'android', 'objc', 'font-subset'] - - parser.add_argument('--variant', dest='variant', action='store', - default='host_debug_unopt', help='The engine build variant to run the tests for.') - parser.add_argument('--type', type=str, default='all', help='A list of test types, default is "all" (equivalent to "%s")' % (','.join(all_types))) - parser.add_argument('--engine-filter', type=str, default='', - help='A list of engine test executables to run.') - parser.add_argument('--dart-filter', type=str, default='', - help='A list of Dart test scripts to run.') - parser.add_argument('--java-filter', type=str, default='', - help='A single Java test class to run (example: "io.flutter.SmokeTest")') - parser.add_argument('--android-variant', dest='android_variant', action='store', + all_types = [ + 'engine', 'dart', 'benchmarks', 'java', 'android', 'objc', 'font-subset' + ] + + parser.add_argument( + '--variant', + dest='variant', + action='store', + default='host_debug_unopt', + help='The engine build variant to run the tests for.' + ) + parser.add_argument( + '--type', + type=str, + default='all', + help='A list of test types, default is "all" (equivalent to "%s")' % + (','.join(all_types)) + ) + parser.add_argument( + '--engine-filter', + type=str, + default='', + help='A list of engine test executables to run.' + ) + parser.add_argument( + '--dart-filter', + type=str, + default='', + help='A list of Dart test scripts to run.' 
+  )
+  parser.add_argument(
+      '--java-filter',
+      type=str,
+      default='',
+      help='A single Java test class to run (example: "io.flutter.SmokeTest")'
+  )
+  parser.add_argument(
+      '--android-variant',
+      dest='android_variant',
+      action='store',
       default='android_debug_unopt',
-      help='The engine build variant to run java or android tests for')
-  parser.add_argument('--ios-variant', dest='ios_variant', action='store',
+      help='The engine build variant to run Java or Android tests for'
+  )
+  parser.add_argument(
+      '--ios-variant',
+      dest='ios_variant',
+      action='store',
       default='ios_debug_sim_unopt',
-      help='The engine build variant to run objective-c tests for')
-  parser.add_argument('--verbose-dart-snapshot', dest='verbose_dart_snapshot', action='store_true',
-      default=False, help='Show extra dart snapshot logging.')
-  parser.add_argument('--objc-filter', type=str, default=None,
-      help='Filter parameter for which objc tests to run (example: "IosUnitTestsTests/SemanticsObjectTest/testShouldTriggerAnnouncement")')
-  parser.add_argument('--coverage', action='store_true', default=None,
-      help='Generate coverage reports for each unit test framework run.')
-  parser.add_argument('--engine-capture-core-dump', dest='engine_capture_core_dump', action='store_true',
-      default=False, help='Capture core dumps from crashes of engine tests.')
-  parser.add_argument('--use-sanitizer-suppressions', dest='sanitizer_suppressions', action='store_true',
-      default=False, help='Provide the sanitizer suppressions lists to the via environment to the tests.')
-  parser.add_argument('--adb-path', dest='adb_path', action='store',
-      default=None, help='Provide the path of adb used for android tests. By default it looks on $PATH.')
+      help='The engine build variant to run Objective-C tests for'
+  )
+  parser.add_argument(
+      '--verbose-dart-snapshot',
+      dest='verbose_dart_snapshot',
+      action='store_true',
+      default=False,
+      help='Show extra dart snapshot logging.'
+  )
+  parser.add_argument(
+      '--objc-filter',
+      type=str,
+      default=None,
+      help='Filter parameter for which objc tests to run (example: "IosUnitTestsTests/SemanticsObjectTest/testShouldTriggerAnnouncement")'
+  )
+  parser.add_argument(
+      '--coverage',
+      action='store_true',
+      default=None,
+      help='Generate coverage reports for each unit test framework run.'
+  )
+  parser.add_argument(
+      '--engine-capture-core-dump',
+      dest='engine_capture_core_dump',
+      action='store_true',
+      default=False,
+      help='Capture core dumps from crashes of engine tests.'
+  )
+  parser.add_argument(
+      '--use-sanitizer-suppressions',
+      dest='sanitizer_suppressions',
+      action='store_true',
+      default=False,
+      help='Provide the sanitizer suppressions lists to the tests via the environment.'
+  )
+  parser.add_argument(
+      '--adb-path',
+      dest='adb_path',
+      action='store',
+      default=None,
+      help='Provide the path of adb used for Android tests. By default it looks on $PATH.'
+  )
 
   args = parser.parse_args()
 
@@ -706,27 +961,34 @@ def main():
   build_dir = os.path.join(out_dir, args.variant)
 
   if args.type != 'java' and args.type != 'android':
-    assert os.path.exists(build_dir), 'Build variant directory %s does not exist!' % build_dir
+    assert os.path.exists(
+        build_dir
+    ), 'Build variant directory %s does not exist!' % build_dir
 
   if args.sanitizer_suppressions:
-    assert IsLinux() or IsMac(), "The sanitizer suppressions flag is only supported on Linux and Mac."
+    assert IsLinux() or IsMac(
+    ), "The sanitizer suppressions flag is only supported on Linux and Mac."
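+    # Run the suppressions script in a clean shell and capture the
+    # environment variables it exports, so they can be copied into
+    # os.environ for the test processes spawned below.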
    file_dir = os.path.dirname(os.path.abspath(__file__))
     command = [
-        "env", "-i", "bash",
-        "-c", "source {}/sanitizer_suppressions.sh >/dev/null && env".format(file_dir)
+        "env", "-i", "bash", "-c",
+        "source {}/sanitizer_suppressions.sh >/dev/null && env"
+        .format(file_dir)
     ]
     process = subprocess.Popen(command, stdout=subprocess.PIPE)
     for line in process.stdout:
       key, _, value = line.decode('ascii').strip().partition("=")
       os.environ[key] = value
-    process.communicate() # Avoid pipe deadlock while waiting for termination.
+    process.communicate()  # Avoid pipe deadlock while waiting for termination.
 
   engine_filter = args.engine_filter.split(',') if args.engine_filter else None
   if 'engine' in types:
-    RunCCTests(build_dir, engine_filter, args.coverage, args.engine_capture_core_dump)
+    RunCCTests(
+        build_dir, engine_filter, args.coverage, args.engine_capture_core_dump
+    )
 
   if 'dart' in types:
-    assert not IsWindows(), "Dart tests can't be run on windows. https://github.com/flutter/flutter/issues/36301."
+    assert not IsWindows(
+    ), "Dart tests can't be run on Windows. https://github.com/flutter/flutter/issues/36301."
     dart_filter = args.dart_filter.split(',') if args.dart_filter else None
     tasks = list(GatherDartSmokeTest(build_dir, args.verbose_dart_snapshot))
     tasks += list(GatherLitetestTests(build_dir))
@@ -735,14 +997,19 @@ def main():
     tasks += list(GatherApiConsistencyTests(build_dir))
     tasks += list(GatherConstFinderTests(build_dir))
     tasks += list(GatherFrontEndServerTests(build_dir))
-    tasks += list(GatherDartTests(build_dir, dart_filter, args.verbose_dart_snapshot))
+    tasks += list(
+        GatherDartTests(build_dir, dart_filter, args.verbose_dart_snapshot)
+    )
     RunEngineTasksInParallel(tasks)
 
   if 'java' in types:
     assert not IsWindows(), "Android engine files can't be compiled on Windows."
     java_filter = args.java_filter
     if ',' in java_filter or '*' in java_filter:
-      print('Can only filter JUnit4 tests by single entire class name, eg "io.flutter.SmokeTest". Ignoring filter=' + java_filter)
+      print(
+          'Can only filter JUnit4 tests by single entire class name, e.g. "io.flutter.SmokeTest". Ignoring filter='
+          + java_filter
+      )
       java_filter = None
     RunJavaTests(java_filter, args.android_variant)
@@ -760,7 +1027,8 @@ def main():
     RunEngineBenchmarks(build_dir, engine_filter)
 
   variants_to_skip = ['host_release', 'host_profile']
-  if ('engine' in types or 'font-subset' in types) and args.variant not in variants_to_skip:
+  if ('engine' in types or
+      'font-subset' in types) and args.variant not in variants_to_skip:
     RunCmd(['python', 'test.py'], cwd=font_subset_dir)
diff --git a/tools/activate_emsdk.py b/tools/activate_emsdk.py
index 91a05a5a2c1ba..ad9a8bc131db5 100644
--- a/tools/activate_emsdk.py
+++ b/tools/activate_emsdk.py
@@ -16,18 +16,23 @@
 
 # See lib/web_ui/README.md for instructions on updating the EMSDK version.
EMSDK_VERSION = '3.1.3' + def main(): - try: - subprocess.check_call([sys.executable, EMSDK_PATH, 'install', EMSDK_VERSION]) - except subprocess.CalledProcessError: - print ('Failed to install emsdk') - return 1 - try: - subprocess.check_call([sys.executable, EMSDK_PATH, 'activate', EMSDK_VERSION]) - except subprocess.CalledProcessError: - print ('Failed to activate emsdk') - return 1 + try: + subprocess.check_call([ + sys.executable, EMSDK_PATH, 'install', EMSDK_VERSION + ]) + except subprocess.CalledProcessError: + print('Failed to install emsdk') + return 1 + try: + subprocess.check_call([ + sys.executable, EMSDK_PATH, 'activate', EMSDK_VERSION + ]) + except subprocess.CalledProcessError: + print('Failed to activate emsdk') + return 1 if __name__ == '__main__': - sys.exit(main()) \ No newline at end of file + sys.exit(main()) diff --git a/tools/android_illegal_imports.py b/tools/android_illegal_imports.py index 0cb70958e962d..47e8a7ad202c6 100644 --- a/tools/android_illegal_imports.py +++ b/tools/android_illegal_imports.py @@ -29,8 +29,11 @@ def CheckBadFiles(bad_files, bad_class, good_class): return False + def main(): - parser = argparse.ArgumentParser(description='Checks Flutter Android library for forbidden imports') + parser = argparse.ArgumentParser( + description='Checks Flutter Android library for forbidden imports' + ) parser.add_argument('--stamp', type=str, required=True) parser.add_argument('--files', type=str, required=True, nargs='+') args = parser.parse_args() @@ -42,7 +45,8 @@ def main(): for file in args.files: if (file.endswith(os.path.join('io', 'flutter', 'Log.java')) or - file.endswith(os.path.join('io', 'flutter', 'util', 'TraceSection.java'))): + file.endswith(os.path.join('io', 'flutter', 'util', 'TraceSection.java') + )): continue with open(file) as f: contents = f.read() @@ -51,8 +55,12 @@ def main(): if ANDROIDX_TRACE_CLASS in contents or ANDROID_TRACE_CLASS in contents: bad_trace_files.append(file) - has_bad_files = CheckBadFiles(bad_log_files, ANDROID_LOG_CLASS, FLUTTER_LOG_CLASS) - has_bad_files = has_bad_files or CheckBadFiles(bad_trace_files, 'android[x].tracing.Trace', FLUTTER_TRACE_CLASS) + has_bad_files = CheckBadFiles( + bad_log_files, ANDROID_LOG_CLASS, FLUTTER_LOG_CLASS + ) + has_bad_files = has_bad_files or CheckBadFiles( + bad_trace_files, 'android[x].tracing.Trace', FLUTTER_TRACE_CLASS + ) if has_bad_files: return 1 diff --git a/tools/androidx/generate_pom_file.py b/tools/androidx/generate_pom_file.py index 1807b1ddadbf8..4d9231c31a3cc 100644 --- a/tools/androidx/generate_pom_file.py +++ b/tools/androidx/generate_pom_file.py @@ -36,7 +36,7 @@ ''' -MAVEN_METADATA_CONTENT =''' +MAVEN_METADATA_CONTENT = ''' io.flutter {0} @@ -63,22 +63,41 @@ ''' + def utf8(s): return str(s, 'utf-8') if isinstance(s, (bytes, bytearray)) else s + def main(): - with open (os.path.join(THIS_DIR, 'files.json')) as f: + with open(os.path.join(THIS_DIR, 'files.json')) as f: dependencies = json.load(f) - parser = argparse.ArgumentParser(description='Generate the POM file for the engine artifacts') - parser.add_argument('--engine-artifact-id', type=utf8, required=True, - help='The artifact id. e.g. 
android_arm_release') - parser.add_argument('--engine-version', type=utf8, required=True, - help='The engine commit hash') - parser.add_argument('--destination', type=utf8, required=True, - help='The destination directory absolute path') - parser.add_argument('--include-embedding-dependencies', type=bool, - help='Include the dependencies for the embedding') + parser = argparse.ArgumentParser( + description='Generate the POM file for the engine artifacts' + ) + parser.add_argument( + '--engine-artifact-id', + type=utf8, + required=True, + help='The artifact id. e.g. android_arm_release' + ) + parser.add_argument( + '--engine-version', + type=utf8, + required=True, + help='The engine commit hash' + ) + parser.add_argument( + '--destination', + type=utf8, + required=True, + help='The destination directory absolute path' + ) + parser.add_argument( + '--include-embedding-dependencies', + type=bool, + help='Include the dependencies for the embedding' + ) args = parser.parse_args() engine_artifact_id = args.engine_artifact_id @@ -97,12 +116,23 @@ def main(): # Write the POM file. with open(os.path.join(args.destination, out_file_name), 'w') as f: - f.write(POM_FILE_CONTENT.format(engine_artifact_id, artifact_version, pom_dependencies)) + f.write( + POM_FILE_CONTENT.format( + engine_artifact_id, artifact_version, pom_dependencies + ) + ) # Write the Maven metadata file. - with open(os.path.join(args.destination, '%s.maven-metadata.xml' % engine_artifact_id), 'w') as f: + with open(os.path.join(args.destination, + '%s.maven-metadata.xml' % engine_artifact_id), + 'w') as f: timestamp = datetime.datetime.utcnow().strftime("%Y%m%d.%H%M%S") - f.write(MAVEN_METADATA_CONTENT.format(engine_artifact_id, artifact_version, timestamp)) + f.write( + MAVEN_METADATA_CONTENT.format( + engine_artifact_id, artifact_version, timestamp + ) + ) + if __name__ == '__main__': sys.exit(main()) diff --git a/tools/dia_dll.py b/tools/dia_dll.py index 0c72174dd6af3..8d367ca691620 100644 --- a/tools/dia_dll.py +++ b/tools/dia_dll.py @@ -16,11 +16,11 @@ import stat import sys - # Path constants. (All of these should be absolute paths.) THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -LLVM_BUILD_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', 'buildtools', - 'windows-x64', 'clang')) +LLVM_BUILD_DIR = os.path.abspath( + os.path.join(THIS_DIR, '..', '..', 'buildtools', 'windows-x64', 'clang') +) def GetDiaDll(): @@ -28,11 +28,11 @@ def GetDiaDll(): # Bump after VC updates. DIA_DLL = { - '2013': 'msdia120.dll', - '2015': 'msdia140.dll', - '2017': 'msdia140.dll', - '2019': 'msdia140.dll', - '2022': 'msdia140.dll', + '2013': 'msdia120.dll', + '2015': 'msdia140.dll', + '2017': 'msdia140.dll', + '2019': 'msdia140.dll', + '2022': 'msdia140.dll', } # Don't let vs_toolchain overwrite our environment. diff --git a/tools/download_fuchsia_sdk.py b/tools/download_fuchsia_sdk.py index 0a493d794d761..67fe3c772fcf2 100755 --- a/tools/download_fuchsia_sdk.py +++ b/tools/download_fuchsia_sdk.py @@ -6,7 +6,6 @@ # The return code of this script will always be 0, even if there is an error, # unless the --fail-loudly flag is passed. - import argparse import tarfile import json @@ -15,10 +14,13 @@ import subprocess import sys -SRC_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +SRC_ROOT = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +) FUCHSIA_SDK_DIR = os.path.join(SRC_ROOT, 'fuchsia', 'sdk') FLUTTER_DIR = os.path.join(SRC_ROOT, 'flutter') + # Prints to stderr. 
def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) @@ -43,28 +45,38 @@ def DownloadFuchsiaSDKFromGCS(sdk_path, verbose): # Ensure destination folder exists. os.makedirs(FUCHSIA_SDK_DIR, exist_ok=True) curl_command = [ - 'curl', - '--retry', '3', - '--continue-at', '-', '--location', - '--output', dest, - url, + 'curl', + '--retry', + '3', + '--continue-at', + '-', + '--location', + '--output', + dest, + url, ] if verbose: print('Running: "%s"' % (' '.join(curl_command))) curl_result = subprocess.run( - curl_command, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + curl_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, ) if curl_result.returncode == 0 and verbose: - print('curl output:stdout:\n{}\nstderr:\n{}'.format( - curl_result.stdout, curl_result.stderr, - )) + print( + 'curl output:stdout:\n{}\nstderr:\n{}'.format( + curl_result.stdout, + curl_result.stderr, + ) + ) elif curl_result.returncode != 0: - eprint('Failed to download: stdout:\n{}\nstderr:\n{}'.format( - curl_result.stdout, curl_result.stderr, - )) + eprint( + 'Failed to download: stdout:\n{}\nstderr:\n{}'.format( + curl_result.stdout, + curl_result.stderr, + ) + ) return None return dest @@ -111,24 +123,25 @@ def ExtractGzipArchive(archive, host_os, verbose): def Main(): parser = argparse.ArgumentParser() parser.add_argument( - '--fail-loudly', - action='store_true', - default=False, - help="Return an error code if a prebuilt couldn't be fetched and extracted") + '--fail-loudly', + action='store_true', + default=False, + help="Return an error code if a prebuilt couldn't be fetched and extracted" + ) parser.add_argument( - '--verbose', - action='store_true', - default='LUCI_CONTEXT' in os.environ, - help='Emit verbose output') + '--verbose', + action='store_true', + default='LUCI_CONTEXT' in os.environ, + help='Emit verbose output' + ) - parser.add_argument( - '--host-os', - help='The host os') + parser.add_argument('--host-os', help='The host os') parser.add_argument( - '--fuchsia-sdk-path', - help='The path in gcs to the fuchsia sdk to download') + '--fuchsia-sdk-path', + help='The path in gcs to the fuchsia sdk to download' + ) args = parser.parse_args() fail_loudly = 1 if args.fail_loudly else 0 diff --git a/tools/font-subset/test.py b/tools/font-subset/test.py index fe49e67465e25..fff6f89b86507 100755 --- a/tools/font-subset/test.py +++ b/tools/font-subset/test.py @@ -14,7 +14,6 @@ import sys from zipfile import ZipFile - # Dictionary to map the platform name to the output directory # of the font artifacts. 
PLATFORM_2_PATH = { @@ -26,7 +25,6 @@ 'win32': 'windows-x64', } - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, '..', '..', '..')) MATERIAL_TTF = os.path.join(SCRIPT_DIR, 'fixtures', 'MaterialIcons-Regular.ttf') @@ -34,45 +32,82 @@ EXE = '.exe' if IS_WINDOWS else '' BAT = '.bat' if IS_WINDOWS else '' FONT_SUBSET = os.path.join(SRC_DIR, 'out', 'host_debug', 'font-subset' + EXE) -FONT_SUBSET_ZIP = os.path.join(SRC_DIR, 'out', 'host_debug', 'zip_archives', PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip') +FONT_SUBSET_ZIP = os.path.join( + SRC_DIR, 'out', 'host_debug', 'zip_archives', + PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip' +) if not os.path.isfile(FONT_SUBSET): - FONT_SUBSET = os.path.join(SRC_DIR, 'out', 'host_debug_unopt', 'font-subset' + EXE) - FONT_SUBSET_ZIP = os.path.join(SRC_DIR, 'out', 'host_debug_unopt', 'zip_archives', PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip') + FONT_SUBSET = os.path.join( + SRC_DIR, 'out', 'host_debug_unopt', 'font-subset' + EXE + ) + FONT_SUBSET_ZIP = os.path.join( + SRC_DIR, 'out', 'host_debug_unopt', 'zip_archives', + PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip' + ) if not os.path.isfile(FONT_SUBSET): - raise Exception('Could not locate font-subset%s in host_debug or host_debug_unopt - build before running this script.' % EXE) + raise Exception( + 'Could not locate font-subset%s in host_debug or host_debug_unopt - build before running this script.' + % EXE + ) COMPARE_TESTS = ( - (True, '1.ttf', MATERIAL_TTF, [r'57347']), - (True, '1.ttf', MATERIAL_TTF, [r'0xE003']), - (True, '1.ttf', MATERIAL_TTF, [r'\uE003']), - (False, '1.ttf', MATERIAL_TTF, [r'57348']), # False because different codepoint - (True, '2.ttf', MATERIAL_TTF, [r'0xE003', r'0xE004']), - (True, '2.ttf', MATERIAL_TTF, [r'0xE003', r'0xE004', r'57347',]), # Duplicated codepoint - (True, '3.ttf', MATERIAL_TTF, [r'0xE003', r'0xE004', r'0xE021',]), + (True, '1.ttf', MATERIAL_TTF, [r'57347']), + (True, '1.ttf', MATERIAL_TTF, [r'0xE003']), + (True, '1.ttf', MATERIAL_TTF, [r'\uE003']), + (False, '1.ttf', MATERIAL_TTF, [r'57348' + ]), # False because different codepoint + (True, '2.ttf', MATERIAL_TTF, [r'0xE003', r'0xE004']), + (True, '2.ttf', MATERIAL_TTF, [ + r'0xE003', + r'0xE004', + r'57347', + ]), # Duplicated codepoint + (True, '3.ttf', MATERIAL_TTF, [ + r'0xE003', + r'0xE004', + r'0xE021', + ]), ) FAIL_TESTS = [ - ([FONT_SUBSET, 'output.ttf', 'does-not-exist.ttf'], ['1',]), # non-existent input font - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['0xFFFFFFFF',]), # Value too big. - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['-1',]), # invalid value - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['foo',]), # no valid values - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['0xE003', '0x12', '0xE004',]), # codepoint not in font - ([FONT_SUBSET, 'non-existent-dir/output.ttf', MATERIAL_TTF], ['0xE003',]), # dir doesn't exist - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [' ',]), # empty input - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], []), # empty input - ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['']), # empty input + ([FONT_SUBSET, 'output.ttf', 'does-not-exist.ttf'], [ + '1', + ]), # non-existent input font + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [ + '0xFFFFFFFF', + ]), # Value too big. 
+ ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [ + '-1', + ]), # invalid value + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [ + 'foo', + ]), # no valid values + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [ + '0xE003', + '0x12', + '0xE004', + ]), # codepoint not in font + ([FONT_SUBSET, 'non-existent-dir/output.ttf', MATERIAL_TTF], [ + '0xE003', + ]), # dir doesn't exist + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], [ + ' ', + ]), # empty input + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], []), # empty input + ([FONT_SUBSET, 'output.ttf', MATERIAL_TTF], ['']), # empty input ] + def RunCmd(cmd, codepoints, fail=False): print('Running command:') print(' %s' % ' '.join(cmd)) print('STDIN: "%s"' % ' '.join(codepoints)) p = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stdin=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=SRC_DIR + cmd, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=SRC_DIR ) stdout_data, stderr_data = p.communicate(input=' '.join(codepoints).encode()) if p.returncode != 0 and fail == False: @@ -133,4 +168,3 @@ def main(): if __name__ == '__main__': sys.exit(main()) - diff --git a/tools/fuchsia/build_fuchsia_artifacts.py b/tools/fuchsia/build_fuchsia_artifacts.py index 2d4321485c2a1..0768d5206cb11 100755 --- a/tools/fuchsia/build_fuchsia_artifacts.py +++ b/tools/fuchsia/build_fuchsia_artifacts.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Builds all Fuchsia artifacts vended by Flutter. """ @@ -56,8 +57,10 @@ def RunExecutable(command): def RunGN(variant_dir, flags): - print('Running gn for variant "%s" with flags: %s' % - (variant_dir, ','.join(flags))) + print( + 'Running gn for variant "%s" with flags: %s' % + (variant_dir, ','.join(flags)) + ) RunExecutable([ os.path.join('flutter', 'tools', 'gn'), ] + flags) @@ -112,12 +115,18 @@ def CopyGenSnapshotIfExists(source, destination): destination_base = os.path.join(destination, 'dart_binaries') FindFileAndCopyTo('gen_snapshot', source_root, destination_base) FindFileAndCopyTo('gen_snapshot_product', source_root, destination_base) - FindFileAndCopyTo('kernel_compiler.dart.snapshot', source_root, - destination_base, 'kernel_compiler.snapshot') - FindFileAndCopyTo('frontend_server.dart.snapshot', source_root, - destination_base, 'flutter_frontend_server.snapshot') - FindFileAndCopyTo('list_libraries.dart.snapshot', source_root, - destination_base, 'list_libraries.snapshot') + FindFileAndCopyTo( + 'kernel_compiler.dart.snapshot', source_root, destination_base, + 'kernel_compiler.snapshot' + ) + FindFileAndCopyTo( + 'frontend_server.dart.snapshot', source_root, destination_base, + 'flutter_frontend_server.snapshot' + ) + FindFileAndCopyTo( + 'list_libraries.dart.snapshot', source_root, destination_base, + 'list_libraries.snapshot' + ) def CopyFlutterTesterBinIfExists(source, destination): @@ -125,12 +134,16 @@ def CopyFlutterTesterBinIfExists(source, destination): destination_base = os.path.join(destination, 'flutter_binaries') FindFileAndCopyTo('flutter_tester', source_root, destination_base) + def CopyZirconFFILibIfExists(source, destination): source_root = os.path.join(_out_dir, source) destination_base = os.path.join(destination, 'flutter_binaries') FindFileAndCopyTo('libzircon_ffi.so', source_root, destination_base) -def CopyToBucketWithMode(source, destination, aot, product, runner_type, api_level): + +def CopyToBucketWithMode( + source, 
destination, aot, product, runner_type, api_level +): mode = 'aot' if aot else 'jit' product_suff = '_product' if product else '' runner_name = '%s_%s%s_runner' % (runner_type, mode, product_suff) @@ -160,6 +173,7 @@ def CopyToBucket(src, dst, product=False): CopyToBucketWithMode(src, dst, False, product, 'dart', api_level) CopyToBucketWithMode(src, dst, True, product, 'dart', api_level) + def ReadTargetAPILevel(): filename = os.path.join(os.path.dirname(__file__), 'target_api_level') with open(filename) as f: @@ -171,8 +185,14 @@ def CopyVulkanDepsToBucket(src, dst, arch): sdk_path = GetFuchsiaSDKPath() deps_bucket_path = os.path.join(_bucket_directory, dst) if not os.path.exists(deps_bucket_path): - FindFileAndCopyTo('VkLayer_khronos_validation.json', '%s/pkg' % (sdk_path), deps_bucket_path) - FindFileAndCopyTo('VkLayer_khronos_validation.so', '%s/arch/%s' % (sdk_path, arch), deps_bucket_path) + FindFileAndCopyTo( + 'VkLayer_khronos_validation.json', '%s/pkg' % (sdk_path), + deps_bucket_path + ) + FindFileAndCopyTo( + 'VkLayer_khronos_validation.so', '%s/arch/%s' % (sdk_path, arch), + deps_bucket_path + ) def CopyIcuDepsToBucket(src, dst): @@ -202,11 +222,8 @@ def CopyBuildToBucket(runtime_mode, arch, optimized, product): bucket_root = os.path.join(_bucket_directory, 'flutter') licenses_root = os.path.join(_src_root_dir, 'flutter/ci/licenses_golden') license_files = [ - 'licenses_flutter', - 'licenses_fuchsia', - 'licenses_gpu', - 'licenses_skia', - 'licenses_third_party' + 'licenses_flutter', 'licenses_fuchsia', 'licenses_gpu', 'licenses_skia', + 'licenses_third_party' ] for license in license_files: src_path = os.path.join(licenses_root, license) @@ -217,11 +234,11 @@ def CopyBuildToBucket(runtime_mode, arch, optimized, product): def CheckCIPDPackageExists(package_name, tag): '''Check to see if the current package/tag combo has been published''' command = [ - 'cipd', - 'search', - package_name, - '-tag', - tag, + 'cipd', + 'search', + package_name, + '-tag', + tag, ] stdout = subprocess.check_output(command) match = re.search(r'No matching instances\.', stdout) @@ -256,11 +273,11 @@ def ProcessCIPDPackage(upload, engine_version): # Everything after this point will only run iff `upload==true` and # `IsLinux() == true` - assert(upload) - assert(IsLinux()) + assert (upload) + assert (IsLinux()) if engine_version is None: - print('--upload requires --engine-version to be specified.') - return + print('--upload requires --engine-version to be specified.') + return tag = 'git_revision:%s' % engine_version already_exists = CheckCIPDPackageExists('flutter/fuchsia', tag) @@ -269,14 +286,28 @@ def ProcessCIPDPackage(upload, engine_version): return RunCIPDCommandWithRetries([ - 'cipd', 'create', '-pkg-def', 'fuchsia.cipd.yaml', '-ref', 'latest', + 'cipd', + 'create', + '-pkg-def', + 'fuchsia.cipd.yaml', + '-ref', + 'latest', '-tag', tag, ]) -def BuildTarget(runtime_mode, arch, optimized, enable_lto, enable_legacy, - asan, dart_version_git_info, prebuilt_dart_sdk, additional_targets=[]): +def BuildTarget( + runtime_mode, + arch, + optimized, + enable_lto, + enable_legacy, + asan, + dart_version_git_info, + prebuilt_dart_sdk, + additional_targets=[] +): unopt = "_unopt" if not optimized else "" out_dir = 'fuchsia_%s%s_%s' % (runtime_mode, unopt, arch) flags = [ @@ -301,7 +332,7 @@ def BuildTarget(runtime_mode, arch, optimized, enable_lto, enable_legacy, flags.append('--no-prebuilt-dart-sdk') RunGN(out_dir, flags) - BuildNinjaTargets(out_dir, [ 'flutter' ] + additional_targets) + 
BuildNinjaTargets(out_dir, ['flutter'] + additional_targets) return @@ -313,83 +344,99 @@ def main(): '--cipd-dry-run', default=False, action='store_true', - help='If set, creates the CIPD package but does not upload it.') + help='If set, creates the CIPD package but does not upload it.' + ) parser.add_argument( '--upload', default=False, action='store_true', - help='If set, uploads the CIPD package and tags it as the latest.') + help='If set, uploads the CIPD package and tags it as the latest.' + ) parser.add_argument( '--engine-version', required=False, - help='Specifies the flutter engine SHA.') + help='Specifies the flutter engine SHA.' + ) parser.add_argument( '--unoptimized', action='store_true', default=False, - help='If set, disables compiler optimization for the build.') + help='If set, disables compiler optimization for the build.' + ) parser.add_argument( '--runtime-mode', type=str, choices=['debug', 'profile', 'release', 'all'], - default='all') + default='all' + ) parser.add_argument( - '--archs', type=str, choices=['x64', 'arm64', 'all'], default='all') + '--archs', type=str, choices=['x64', 'arm64', 'all'], default='all' + ) parser.add_argument( '--asan', action='store_true', default=False, - help='If set, enables address sanitization (including leak sanitization) for the build.') + help='If set, enables address sanitization (including leak sanitization) for the build.' + ) parser.add_argument( '--no-lto', action='store_true', default=False, - help='If set, disables LTO for the build.') + help='If set, disables LTO for the build.' + ) parser.add_argument( '--no-legacy', action='store_true', default=False, - help='If set, disables legacy code for the build.') + help='If set, disables legacy code for the build.' + ) parser.add_argument( '--skip-build', action='store_true', default=False, - help='If set, skips building and just creates packages.') + help='If set, skips building and just creates packages.' + ) parser.add_argument( '--targets', default='', - help=('Comma-separated list; adds additional targets to build for ' - 'Fuchsia.')) + help=( + 'Comma-separated list; adds additional targets to build for ' + 'Fuchsia.' + ) + ) parser.add_argument( '--no-dart-version-git-info', action='store_true', default=False, - help='If set, turns off the Dart SDK git hash check.') + help='If set, turns off the Dart SDK git hash check.' + ) parser.add_argument( '--no-prebuilt-dart-sdk', action='store_true', default=False, - help='If set, builds the Dart SDK locally instead of using the prebuilt Dart SDK.') + help='If set, builds the Dart SDK locally instead of using the prebuilt Dart SDK.' + ) parser.add_argument( - '--copy-unoptimized-debug-artifacts', - action='store_true', - default=False, - help='If set, unoptimized debug artifacts will be copied into CIPD along ' - 'with optimized builds. This is a hack to allow infra to make ' - 'and copy two debug builds, one with ASAN and one without.') + '--copy-unoptimized-debug-artifacts', + action='store_true', + default=False, + help='If set, unoptimized debug artifacts will be copied into CIPD along ' + 'with optimized builds. This is a hack to allow infra to make ' + 'and copy two debug builds, one with ASAN and one without.' 
+ ) args = parser.parse_args() RemoveDirectoryIfExists(_bucket_directory) @@ -410,10 +457,12 @@ def main(): product = product_modes[i] if build_mode == 'all' or runtime_mode == build_mode: if not args.skip_build: - BuildTarget(runtime_mode, arch, optimized, enable_lto, enable_legacy, - args.asan, not args.no_dart_version_git_info, - not args.no_prebuilt_dart_sdk, - args.targets.split(",") if args.targets else []) + BuildTarget( + runtime_mode, arch, optimized, enable_lto, enable_legacy, + args.asan, not args.no_dart_version_git_info, + not args.no_prebuilt_dart_sdk, + args.targets.split(",") if args.targets else [] + ) CopyBuildToBucket(runtime_mode, arch, optimized, product) # This is a hack. The recipe for building and uploading Fuchsia to CIPD diff --git a/tools/fuchsia/compile_cml.py b/tools/fuchsia/compile_cml.py index ae058d2f53946..48a59dfe74b6a 100755 --- a/tools/fuchsia/compile_cml.py +++ b/tools/fuchsia/compile_cml.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Compiles a cml manifest file. """ @@ -11,15 +12,17 @@ import subprocess import sys + def main(): parser = argparse.ArgumentParser() parser.add_argument( - '--cmc-bin', dest='cmc_bin', action='store', required=True) - parser.add_argument( - '--output', dest='output', action='store', required=True) + '--cmc-bin', dest='cmc_bin', action='store', required=True + ) + parser.add_argument('--output', dest='output', action='store', required=True) parser.add_argument( - '--manifest-file', dest='manifest_file', action='store', required=True) + '--manifest-file', dest='manifest_file', action='store', required=True + ) parser.add_argument( '--includepath', dest='includepath', @@ -38,7 +41,7 @@ def main(): '--output', args.output, args.manifest_file, - ] + (args.includepath and [ '--includepath' ] + args.includepath)) + ] + (args.includepath and ['--includepath'] + args.includepath)) return 0 diff --git a/tools/fuchsia/copy_debug_symbols.py b/tools/fuchsia/copy_debug_symbols.py index 61c48b0fdc870..d92a0be603bc9 100755 --- a/tools/fuchsia/copy_debug_symbols.py +++ b/tools/fuchsia/copy_debug_symbols.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Gather the build_id, prefix_dir, and exec_name given the path to executable also copies to the specified destination. 
@@ -27,7 +28,7 @@ def HashFile(filepath): digest = hashlib.sha1() with open(filepath, 'rb') as f: while True: - chunk = f.read(1024*1024) + chunk = f.read(1024 * 1024) if not chunk: break digest.update(chunk) @@ -43,11 +44,13 @@ def GetBuildIdParts(exec_path, read_elf): sha1_pattern = re.compile(r'[0-9a-fA-F\-]+') file_out = subprocess.check_output([read_elf, '-n', exec_path]) build_id_line = file_out.splitlines()[-1].split() - if (build_id_line[0] != b'Build' or - build_id_line[1] != b'ID:' or not - sha1_pattern.match(str(build_id_line[-1])) or not - len(build_id_line[-1]) > 2): - raise Exception('Expected the last line of llvm-readelf to match "Build ID " Got: %s' % file_out) + if (build_id_line[0] != b'Build' or build_id_line[1] != b'ID:' or + not sha1_pattern.match(str(build_id_line[-1])) or + not len(build_id_line[-1]) > 2): + raise Exception( + 'Expected the last line of llvm-readelf to match "Build ID " Got: %s' + % file_out + ) build_id = build_id_line[-1] return { @@ -72,7 +75,8 @@ def main(): dest='exec_path', action='store', required=True, - help='Path to the executable on the filesystem.') + help='Path to the executable on the filesystem.' + ) parser.add_argument( '--destination-base', dest='dest', @@ -85,26 +89,28 @@ def main(): dest='stripped', action='store_true', default=True, - help='Executable at the specified path is stripped.') + help='Executable at the specified path is stripped.' + ) parser.add_argument( '--unstripped', dest='stripped', action='store_false', - help='Executable at the specified path is unstripped.') + help='Executable at the specified path is unstripped.' + ) parser.add_argument( '--read-elf', dest='read_elf', action='store', required=True, - help='Path to read-elf executable.') + help='Path to read-elf executable.' + ) args = parser.parse_args() - assert os.path.exists(args.exec_path), ( - 'exec_path "%s" does not exist' % args.exec_path) - assert os.path.exists(args.dest), ( - 'dest "%s" does not exist' % args.dest) - assert os.path.exists(args.read_elf), ( - 'read_elf "%s" does not exist' % args.read_elf) + assert os.path.exists(args.exec_path + ), ('exec_path "%s" does not exist' % args.exec_path) + assert os.path.exists(args.dest), ('dest "%s" does not exist' % args.dest) + assert os.path.exists(args.read_elf + ), ('read_elf "%s" does not exist' % args.read_elf) parts = GetBuildIdParts(args.exec_path, args.read_elf) dbg_prefix_base = os.path.join(args.dest, parts['prefix_dir']) @@ -129,7 +135,8 @@ def main(): # If the debug file hasn't changed, don't rewrite the debug and completion # file, speeding up incremental builds. - if os.path.exists(dbg_file_path) and HashFile(args.exec_path) == HashFile(dbg_file_path): + if os.path.exists(dbg_file_path) and HashFile(args.exec_path + ) == HashFile(dbg_file_path): return 0 shutil.copyfile(args.exec_path, dbg_file_path) diff --git a/tools/fuchsia/copy_path.py b/tools/fuchsia/copy_path.py index 335c262ce3dee..5f3be36637364 100755 --- a/tools/fuchsia/copy_path.py +++ b/tools/fuchsia/copy_path.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Copies paths, creates if they do not exist. 
""" @@ -50,7 +51,8 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument( - '--file-list', dest='file_list', action='store', required=True) + '--file-list', dest='file_list', action='store', required=True + ) args = parser.parse_args() diff --git a/tools/fuchsia/dart/gen_app_invocation.py b/tools/fuchsia/dart/gen_app_invocation.py index b7988d3a19175..bfd5bd8bff245 100755 --- a/tools/fuchsia/dart/gen_app_invocation.py +++ b/tools/fuchsia/dart/gen_app_invocation.py @@ -11,34 +11,39 @@ def main(): - parser = argparse.ArgumentParser( - description='Generate a script that invokes a Dart application') - parser.add_argument( - '--out', help='Path to the invocation file to generate', required=True) - parser.add_argument('--dart', help='Path to the Dart binary', required=True) - parser.add_argument( - '--snapshot', help='Path to the app snapshot', required=True) - args = parser.parse_args() - - app_file = args.out - app_path = os.path.dirname(app_file) - if not os.path.exists(app_path): - os.makedirs(app_path) - - script_template = string.Template( - '''#!/bin/sh + parser = argparse.ArgumentParser( + description='Generate a script that invokes a Dart application' + ) + parser.add_argument( + '--out', help='Path to the invocation file to generate', required=True + ) + parser.add_argument('--dart', help='Path to the Dart binary', required=True) + parser.add_argument( + '--snapshot', help='Path to the app snapshot', required=True + ) + args = parser.parse_args() + + app_file = args.out + app_path = os.path.dirname(app_file) + if not os.path.exists(app_path): + os.makedirs(app_path) + + script_template = string.Template( + '''#!/bin/sh $dart \\ $snapshot \\ "$$@" -''') - with open(app_file, 'w') as file: - file.write(script_template.substitute(args.__dict__)) - permissions = ( - stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | - stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH) - os.chmod(app_file, permissions) +''' + ) + with open(app_file, 'w') as file: + file.write(script_template.substitute(args.__dict__)) + permissions = ( + stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP + | stat.S_IXGRP | stat.S_IROTH + ) + os.chmod(app_file, permissions) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/dart/gen_dart_package_config.py b/tools/fuchsia/dart/gen_dart_package_config.py index 537e98abe1043..02a876f871f1a 100755 --- a/tools/fuchsia/dart/gen_dart_package_config.py +++ b/tools/fuchsia/dart/gen_dart_package_config.py @@ -2,6 +2,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """Reads the contents of a package config file generated by the build and converts it to a real package_config.json file """ @@ -14,126 +15,133 @@ import sys THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -sys.path += [os.path.join( - THIS_DIR, '..', '..', '..', '..', 'third_party', 'pyyaml', 'lib3')] +sys.path += [ + os.path.join( + THIS_DIR, '..', '..', '..', '..', 'third_party', 'pyyaml', 'lib3' + ) +] import yaml DEFAULT_LANGUAGE_VERSION = '2.8' Package = collections.namedtuple( - 'Package', ['name', 'rootUri', 'languageVersion', 'packageUri']) + 'Package', ['name', 'rootUri', 'languageVersion', 'packageUri'] +) class PackageConfig: - # The version of the package config. - VERSION = 2 + # The version of the package config. 
+ VERSION = 2 - # The name of the generator which gets written to the json output - GENERATOR_NAME = os.path.basename(__file__) + # The name of the generator which gets written to the json output + GENERATOR_NAME = os.path.basename(__file__) - def __init__(self, packages): - self.packages = packages + def __init__(self, packages): + self.packages = packages - def asdict(self): - """Converts the package config to a dictionary""" - return { - 'configVersion': self.VERSION, - 'packages': [p._asdict() for p in sorted(self.packages)], - 'generator': self.GENERATOR_NAME, - } + def asdict(self): + """Converts the package config to a dictionary""" + return { + 'configVersion': self.VERSION, + 'packages': [p._asdict() for p in sorted(self.packages)], + 'generator': self.GENERATOR_NAME, + } def language_version_from_pubspec(pubspec): - """Parse the content of a pubspec.yaml""" - with open(pubspec) as pubspec: - parsed = yaml.safe_load(pubspec) - if not parsed: - return DEFAULT_LANGUAGE_VERSION + """Parse the content of a pubspec.yaml""" + with open(pubspec) as pubspec: + parsed = yaml.safe_load(pubspec) + if not parsed: + return DEFAULT_LANGUAGE_VERSION - # If a format like sdk: '>=a.b' or sdk: 'a.b' is found, we'll use a.b. - # In all other cases we default to "2.8" - env_sdk = parsed.get('environment', {}).get('sdk', 'any') - match = re.search(r'^(>=)?((0|[1-9]\d*)\.(0|[1-9]\d*))', env_sdk) - if match: - min_sdk_version = match.group(2) - else: - min_sdk_version = DEFAULT_LANGUAGE_VERSION + # If a format like sdk: '>=a.b' or sdk: 'a.b' is found, we'll use a.b. + # In all other cases we default to "2.8" + env_sdk = parsed.get('environment', {}).get('sdk', 'any') + match = re.search(r'^(>=)?((0|[1-9]\d*)\.(0|[1-9]\d*))', env_sdk) + if match: + min_sdk_version = match.group(2) + else: + min_sdk_version = DEFAULT_LANGUAGE_VERSION - return min_sdk_version + return min_sdk_version def collect_packages(items, relative_to): - """Reads metadata produced by GN to create lists of packages and pubspecs. + """Reads metadata produced by GN to create lists of packages and pubspecs. - items: a list of objects collected from gn - relative_to: The directory which the packages are relative to. 
This is the location that contains the package_config.json file Returns None if there was a problem parsing packages """ - packages = [] - pubspec_paths = [] - for item in items: - if 'language_version' in item: - language_version = item['language_version'] - elif 'pubspec_path' in item: - pubspec_paths.append(item['pubspec_path']) - language_version = language_version_from_pubspec( - item['pubspec_path']) - else: - language_version = DEFAULT_LANGUAGE_VERSION - - package = Package( - name=item['name'], - rootUri=os.path.relpath(item['root_uri'], relative_to), - languageVersion=language_version, - packageUri=item['package_uri']) - - # TODO(fxbug.dev/56428): enable once we sort out our duplicate packages - # for p in packages: - # if p.rootUri == package.rootUri: - # print('Failed to create package_config.json file') - # print('The following packages contain the same package root ' + p.rootUri) - # print(' - ' + p.rootUri) - # print(' - ' + package.rootUri) - # return None - - packages.append(package) - - return packages, pubspec_paths + packages = [] + pubspec_paths = [] + for item in items: + if 'language_version' in item: + language_version = item['language_version'] + elif 'pubspec_path' in item: + pubspec_paths.append(item['pubspec_path']) + language_version = language_version_from_pubspec(item['pubspec_path']) + else: + language_version = DEFAULT_LANGUAGE_VERSION + + package = Package( + name=item['name'], + rootUri=os.path.relpath(item['root_uri'], relative_to), + languageVersion=language_version, + packageUri=item['package_uri'] + ) + + # TODO(fxbug.dev/56428): enable once we sort out our duplicate packages + # for p in packages: + # if p.rootUri == package.rootUri: + # print('Failed to create package_config.json file') + # print('The following packages contain the same package root ' + p.rootUri) + # print(' - ' + p.rootUri) + # print(' - ' + package.rootUri) + # return None + + packages.append(package) + + return packages, pubspec_paths def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - '--input', help='Path to original package_config', required=True) - parser.add_argument( - '--output', help='Path to the updated package_config', required=True) - parser.add_argument('--root', help='Path to fuchsia root', required=True) - parser.add_argument('--depfile', help='Path to the depfile', required=True) - args = parser.parse_args() - - with open(args.input, 'r') as input_file: - contents = json.load(input_file) - - output_dir = os.path.dirname(os.path.abspath(args.output)) - packages, pubspec_paths = collect_packages(contents, output_dir) - if packages is None: - return 1 - - with open(args.depfile, 'w') as depfile: - depfile.write('%s: %s' % (args.output, ' '.join(pubspec_paths))) - - with open(args.output, 'w') as output_file: - package_config = PackageConfig(packages) - json.dump( - package_config.asdict(), - output_file, - indent=2, - sort_keys=True, - separators=(',', ': ')) - - return 0 + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--input', help='Path to original package_config', required=True + ) + parser.add_argument( + '--output', help='Path to the updated package_config', required=True + ) + parser.add_argument('--root', help='Path to fuchsia root', required=True) + parser.add_argument('--depfile', help='Path to the depfile', required=True) + args = parser.parse_args() + + with open(args.input, 'r') as input_file: + contents = json.load(input_file) + + output_dir = 
os.path.dirname(os.path.abspath(args.output)) + packages, pubspec_paths = collect_packages(contents, output_dir) + if packages is None: + return 1 + + with open(args.depfile, 'w') as depfile: + depfile.write('%s: %s' % (args.output, ' '.join(pubspec_paths))) + + with open(args.output, 'w') as output_file: + package_config = PackageConfig(packages) + json.dump( + package_config.asdict(), + output_file, + indent=2, + sort_keys=True, + separators=(',', ': ') + ) + + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/dart/kernel/convert_manifest_to_json.py b/tools/fuchsia/dart/kernel/convert_manifest_to_json.py index fd64c84f246d0..a5f329ab169ba 100755 --- a/tools/fuchsia/dart/kernel/convert_manifest_to_json.py +++ b/tools/fuchsia/dart/kernel/convert_manifest_to_json.py @@ -2,6 +2,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + '''Reads the contents of a kernel manifest generated by the build and converts it to a format suitable for distribution_entries ''' @@ -16,45 +17,49 @@ def collect(path_prefix, lines): - '''Reads the kernel manifest and creates an array of Entry objects. + '''Reads the kernel manifest and creates an array of Entry objects. - lines: a list of lines from the manifest ''' - entries = [] - for line in lines: - values = line.split("=", 1) - entries.append(Entry(source=path_prefix + values[1], dest=values[0])) + entries = [] + for line in lines: + values = line.split("=", 1) + entries.append(Entry(source=path_prefix + values[1], dest=values[0])) - return entries + return entries def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - '--path_prefix', help='Directory path containing the manifest entry sources', required=True) - parser.add_argument( - '--input', help='Path to original manifest', required=True) - parser.add_argument( - '--output', help='Path to the updated json file', required=True) - args = parser.parse_args() + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--path_prefix', + help='Directory path containing the manifest entry sources', + required=True + ) + parser.add_argument( + '--input', help='Path to original manifest', required=True + ) + parser.add_argument( + '--output', help='Path to the updated json file', required=True + ) + args = parser.parse_args() - with open(args.input, 'r') as input_file: - contents = input_file.read().splitlines() + with open(args.input, 'r') as input_file: + contents = input_file.read().splitlines() - entries = collect(args.path_prefix, contents) + entries = collect(args.path_prefix, contents) - if entries is None: - return 1 + if entries is None: + return 1 - with open(args.output, 'w') as output_file: - json.dump( - [e._asdict() for e in entries], - output_file, - indent=2, - sort_keys=True, - separators=(',', ': ')) + with open(args.output, 'w') as output_file: + json.dump([e._asdict() for e in entries], + output_file, + indent=2, + sort_keys=True, + separators=(',', ': ')) - return 0 + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/dart/merge_deps_sources.py b/tools/fuchsia/dart/merge_deps_sources.py index 2a53afa36682c..58e49c4f3751e 100755 --- a/tools/fuchsia/dart/merge_deps_sources.py +++ b/tools/fuchsia/dart/merge_deps_sources.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 + """Merges sources of a Dart target and its dependencies""" # 
Copyright 2013 The Flutter Authors. All rights reserved. @@ -12,40 +13,45 @@ def main(): - parser = argparse.ArgumentParser( - 'Merges sources of a Dart target and its dependencies', - fromfile_prefix_chars='@') - parser.add_argument( - '--output', - help='Path to output the final list', - type=argparse.FileType('w'), - required=True) - parser.add_argument( - '--depfile', - help='Path to the depfile to generate', - type=argparse.FileType('w'), - required=True) - parser.add_argument( - '--sources', - help='Sources of this target', - nargs='*', - ) - parser.add_argument( - '--source_lists', - help='Files containing lists of Dart sources', - nargs='*') - args = parser.parse_args() - - args.depfile.write( - '{}: {}\n'.format(args.output.name, ' '.join(args.source_lists))) - - # Merges sources of this target, and all of its dependencies. - all_sources = set(args.sources) - for f in args.source_lists: - with open(f, 'r') as f: - all_sources.update(json.load(f)) - json.dump(sorted(all_sources), args.output) + parser = argparse.ArgumentParser( + 'Merges sources of a Dart target and its dependencies', + fromfile_prefix_chars='@' + ) + parser.add_argument( + '--output', + help='Path to output the final list', + type=argparse.FileType('w'), + required=True + ) + parser.add_argument( + '--depfile', + help='Path to the depfile to generate', + type=argparse.FileType('w'), + required=True + ) + parser.add_argument( + '--sources', + help='Sources of this target', + nargs='*', + ) + parser.add_argument( + '--source_lists', + help='Files containing lists of Dart sources', + nargs='*' + ) + args = parser.parse_args() + + args.depfile.write( + '{}: {}\n'.format(args.output.name, ' '.join(args.source_lists)) + ) + + # Merges sources of this target, and all of its dependencies. + all_sources = set(args.sources) + for f in args.source_lists: + with open(f, 'r') as f: + all_sources.update(json.load(f)) + json.dump(sorted(all_sources), args.output) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/dart/verify_sources.py b/tools/fuchsia/dart/verify_sources.py index a24be6bc8aa12..b76b49875f395 100755 --- a/tools/fuchsia/dart/verify_sources.py +++ b/tools/fuchsia/dart/verify_sources.py @@ -11,62 +11,62 @@ def main(): - parser = argparse.ArgumentParser( - "Verifies that all .dart files are included in sources, and sources don't include nonexsitent files" - ) - parser.add_argument( - "--source_dir", - help="Path to the directory containing the package sources", - required=True) - parser.add_argument( - "--stamp", - help="File to touch when source checking succeeds", - required=True) - parser.add_argument( - "sources", help="source files", nargs=argparse.REMAINDER) - args = parser.parse_args() - - actual_sources = set() - # Get all dart sources from source directory. 
-    src_dir_path = pathlib.Path(args.source_dir)
-    for (dirpath, dirnames, filenames) in os.walk(src_dir_path, topdown=True):
-        relpath_to_src_root = pathlib.Path(dirpath).relative_to(src_dir_path)
-        actual_sources.update(
-            os.path.normpath(relpath_to_src_root.joinpath(filename))
-            for filename in filenames
-            if pathlib.Path(filename).suffix == ".dart")
+  parser = argparse.ArgumentParser(
+      "Verifies that all .dart files are included in sources, and sources don't include nonexistent files"
+  )
+  parser.add_argument(
+      "--source_dir",
+      help="Path to the directory containing the package sources",
+      required=True
+  )
+  parser.add_argument(
+      "--stamp",
+      help="File to touch when source checking succeeds",
+      required=True
+  )
+  parser.add_argument("sources", help="source files", nargs=argparse.REMAINDER)
+  args = parser.parse_args()

-    expected_sources = set(args.sources)
-    # It is possible for sources to include dart files outside of source_dir.
+  actual_sources = set()
+  # Get all dart sources from source directory.
+  src_dir_path = pathlib.Path(args.source_dir)
+  for (dirpath, dirnames, filenames) in os.walk(src_dir_path, topdown=True):
+    relpath_to_src_root = pathlib.Path(dirpath).relative_to(src_dir_path)
     actual_sources.update(
-        [
-            s for s in (expected_sources - actual_sources)
-            if src_dir_path.joinpath(s).resolve().exists()
-        ],
+        os.path.normpath(relpath_to_src_root.joinpath(filename))
+        for filename in filenames
+        if pathlib.Path(filename).suffix == ".dart"
     )

-    if actual_sources == expected_sources:
-        with open(args.stamp, "w") as stamp:
-            stamp.write("Success!")
-        return 0
+  expected_sources = set(args.sources)
+  # It is possible for sources to include dart files outside of source_dir.
+  actual_sources.update([
+      s for s in (expected_sources - actual_sources)
+      if src_dir_path.joinpath(s).resolve().exists()
+  ],)
+
+  if actual_sources == expected_sources:
+    with open(args.stamp, "w") as stamp:
+      stamp.write("Success!")
+    return 0

-    def sources_to_abs_path(sources):
-        return sorted(str(src_dir_path.joinpath(s)) for s in sources)
+  def sources_to_abs_path(sources):
+    return sorted(str(src_dir_path.joinpath(s)) for s in sources)

-    missing_sources = actual_sources - expected_sources
-    if missing_sources:
-        print(
-            '\nSource files found that were missing from the "sources" parameter:\n{}\n'
-            .format("\n".join(sources_to_abs_path(missing_sources))),
-        )
-    nonexistent_sources = expected_sources - actual_sources
-    if nonexistent_sources:
-        print(
-            '\nSource files listed in "sources" parameter but not found:\n{}\n'.
-            format("\n".join(sources_to_abs_path(nonexistent_sources))),
-        )
-        return 1
+  missing_sources = actual_sources - expected_sources
+  if missing_sources:
+    print(
+        '\nSource files found that were missing from the "sources" parameter:\n{}\n'
+        .format("\n".join(sources_to_abs_path(missing_sources))),
+    )
+  nonexistent_sources = expected_sources - actual_sources
+  if nonexistent_sources:
+    print(
+        '\nSource files listed in "sources" parameter but not found:\n{}\n'
+        .format("\n".join(sources_to_abs_path(nonexistent_sources))),
+    )
+  return 1

 if __name__ == "__main__":
-    sys.exit(main())
+  sys.exit(main())
diff --git a/tools/fuchsia/depfile_path_to_relative.py b/tools/fuchsia/depfile_path_to_relative.py
index a65005fb70f68..27a9c2d9491da 100755
--- a/tools/fuchsia/depfile_path_to_relative.py
+++ b/tools/fuchsia/depfile_path_to_relative.py
@@ -10,27 +10,28 @@

 def main():
-    parser = argparse.ArgumentParser(
-        description=
-        'Executes a command, then rewrites the depfile, converts all absolute paths to relative'
-    )
-    parser.add_argument(
-        '--depfile', help='Path to the depfile to rewrite', required=True)
-    parser.add_argument(
-        'command', nargs='+', help='Positional args for the command to run')
-    args = parser.parse_args()
+  parser = argparse.ArgumentParser(
+      description='Executes a command, then rewrites the depfile, converting all absolute paths to relative'
+  )
+  parser.add_argument(
+      '--depfile', help='Path to the depfile to rewrite', required=True
+  )
+  parser.add_argument(
+      'command', nargs='+', help='Positional args for the command to run'
+  )
+  args = parser.parse_args()

-    retval = subprocess.call(args.command)
-    if retval != 0:
-        return retval
+  retval = subprocess.call(args.command)
+  if retval != 0:
+    return retval

-    lines = []
-    with open(args.depfile, 'r') as f:
-        for line in f:
-            lines.append(' '.join(os.path.relpath(p) for p in line.split()))
-    with open(args.depfile, 'w') as f:
-        f.write('\n'.join(lines))
+  lines = []
+  with open(args.depfile, 'r') as f:
+    for line in f:
+      lines.append(' '.join(os.path.relpath(p) for p in line.split()))
+  with open(args.depfile, 'w') as f:
+    f.write('\n'.join(lines))

 if __name__ == '__main__':
-    sys.exit(main())
+  sys.exit(main())
diff --git a/tools/fuchsia/fidl/gen_response_file.py b/tools/fuchsia/fidl/gen_response_file.py
index 8be6ebea3295d..c4efdc328c380 100755
--- a/tools/fuchsia/fidl/gen_response_file.py
+++ b/tools/fuchsia/fidl/gen_response_file.py
@@ -10,113 +10,124 @@

 def read_libraries(libraries_path):
-    with open(libraries_path) as f:
-        lines = f.readlines()
-    return [l.rstrip("\n") for l in lines]
+  with open(libraries_path) as f:
+    lines = f.readlines()
+  return [l.rstrip("\n") for l in lines]

 def write_libraries(libraries_path, libraries):
-    directory = os.path.dirname(libraries_path)
-    if not os.path.exists(directory):
-        os.makedirs(directory)
-    with open(libraries_path, "w+") as f:
-        for library in libraries:
-            f.write(library)
-            f.write("\n")
+  directory = os.path.dirname(libraries_path)
+  if not os.path.exists(directory):
+    os.makedirs(directory)
+  with open(libraries_path, "w+") as f:
+    for library in libraries:
+      f.write(library)
+      f.write("\n")

 def main():
-    parser = argparse.ArgumentParser(
-        description="Generate response file for FIDL frontend")
-    parser.add_argument(
-        "--out-response-file",
-        help="The path for the response file to generate",
-        required=True)
-    parser.add_argument(
-        "--out-libraries",
-        help="The path for the libraries file to generate",
-        required=True)
-    parser.add_argument(
-        "--json",
help="The path for the JSON file to generate, if any") - parser.add_argument( - "--tables", help="The path for the tables file to generate, if any") - parser.add_argument( - "--deprecated-fuchsia-only-c-client", - help="The path for the C simple client file to generate, if any") - parser.add_argument( - "--deprecated-fuchsia-only-c-header", - help="The path for the C header file to generate, if any") - parser.add_argument( - "--deprecated-fuchsia-only-c-server", - help="The path for the C simple server file to generate, if any") - parser.add_argument( - "--name", help="The name for the generated FIDL library, if any") - parser.add_argument( - "--depfile", help="The name for the generated depfile, if any") - parser.add_argument( - "--sources", help="List of FIDL source files", nargs="*") - parser.add_argument( - "--dep-libraries", help="List of dependent libraries", nargs="*") - parser.add_argument( - "--experimental-flag", - help="List of experimental flags", - action="append") - args = parser.parse_args() - - target_libraries = [] - - for dep_libraries_path in args.dep_libraries or []: - dep_libraries = read_libraries(dep_libraries_path) - for library in dep_libraries: - if library in target_libraries: - continue - target_libraries.append(library) - - target_libraries.append(" ".join(sorted(args.sources))) - write_libraries(args.out_libraries, target_libraries) - - response_file = [] - - response_file.append('--experimental new_syntax_only') - - if args.json: - response_file.append("--json %s" % args.json) - - if args.tables: - response_file.append("--tables %s" % args.tables) - - if args.deprecated_fuchsia_only_c_client: - response_file.append( - "--deprecated-fuchsia-only-c-client %s" % - args.deprecated_fuchsia_only_c_client) - - if args.deprecated_fuchsia_only_c_header: - response_file.append( - "--deprecated-fuchsia-only-c-header %s" % - args.deprecated_fuchsia_only_c_header) - - if args.deprecated_fuchsia_only_c_server: - response_file.append( - "--deprecated-fuchsia-only-c-server %s" % - args.deprecated_fuchsia_only_c_server) - - if args.name: - response_file.append("--name %s" % args.name) - - if args.depfile: - response_file.append("--depfile %s" % args.depfile) - - if args.experimental_flag: - for experimental_flag in args.experimental_flag: - response_file.append("--experimental %s" % experimental_flag) - - response_file.extend( - ["--files %s" % library for library in target_libraries]) - - with open(args.out_response_file, "w+") as f: - f.write(" ".join(response_file)) - f.write("\n") + parser = argparse.ArgumentParser( + description="Generate response file for FIDL frontend" + ) + parser.add_argument( + "--out-response-file", + help="The path for the response file to generate", + required=True + ) + parser.add_argument( + "--out-libraries", + help="The path for the libraries file to generate", + required=True + ) + parser.add_argument( + "--json", help="The path for the JSON file to generate, if any" + ) + parser.add_argument( + "--tables", help="The path for the tables file to generate, if any" + ) + parser.add_argument( + "--deprecated-fuchsia-only-c-client", + help="The path for the C simple client file to generate, if any" + ) + parser.add_argument( + "--deprecated-fuchsia-only-c-header", + help="The path for the C header file to generate, if any" + ) + parser.add_argument( + "--deprecated-fuchsia-only-c-server", + help="The path for the C simple server file to generate, if any" + ) + parser.add_argument( + "--name", help="The name for the generated FIDL library, if any" 
+ ) + parser.add_argument( + "--depfile", help="The name for the generated depfile, if any" + ) + parser.add_argument("--sources", help="List of FIDL source files", nargs="*") + parser.add_argument( + "--dep-libraries", help="List of dependent libraries", nargs="*" + ) + parser.add_argument( + "--experimental-flag", help="List of experimental flags", action="append" + ) + args = parser.parse_args() + + target_libraries = [] + + for dep_libraries_path in args.dep_libraries or []: + dep_libraries = read_libraries(dep_libraries_path) + for library in dep_libraries: + if library in target_libraries: + continue + target_libraries.append(library) + + target_libraries.append(" ".join(sorted(args.sources))) + write_libraries(args.out_libraries, target_libraries) + + response_file = [] + + response_file.append('--experimental new_syntax_only') + + if args.json: + response_file.append("--json %s" % args.json) + + if args.tables: + response_file.append("--tables %s" % args.tables) + + if args.deprecated_fuchsia_only_c_client: + response_file.append( + "--deprecated-fuchsia-only-c-client %s" % + args.deprecated_fuchsia_only_c_client + ) + + if args.deprecated_fuchsia_only_c_header: + response_file.append( + "--deprecated-fuchsia-only-c-header %s" % + args.deprecated_fuchsia_only_c_header + ) + + if args.deprecated_fuchsia_only_c_server: + response_file.append( + "--deprecated-fuchsia-only-c-server %s" % + args.deprecated_fuchsia_only_c_server + ) + + if args.name: + response_file.append("--name %s" % args.name) + + if args.depfile: + response_file.append("--depfile %s" % args.depfile) + + if args.experimental_flag: + for experimental_flag in args.experimental_flag: + response_file.append("--experimental %s" % experimental_flag) + + response_file.extend(["--files %s" % library for library in target_libraries]) + + with open(args.out_response_file, "w+") as f: + f.write(" ".join(response_file)) + f.write("\n") if __name__ == "__main__": - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/gather_flutter_runner_artifacts.py b/tools/fuchsia/gather_flutter_runner_artifacts.py index 942d02fed14f8..9438a0d5120e6 100755 --- a/tools/fuchsia/gather_flutter_runner_artifacts.py +++ b/tools/fuchsia/gather_flutter_runner_artifacts.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Gather all the fuchsia artifacts to a destination directory. """ @@ -16,10 +17,8 @@ import sys _ARTIFACT_PATH_TO_DST = { - 'flutter_jit_runner': 'flutter_jit_runner', - 'icudtl.dat': 'data/icudtl.dat', - 'dart_runner': 'dart_runner', - 'flutter_patched_sdk': 'flutter_patched_sdk' + 'flutter_jit_runner': 'flutter_jit_runner', 'icudtl.dat': 'data/icudtl.dat', + 'dart_runner': 'dart_runner', 'flutter_patched_sdk': 'flutter_patched_sdk' } @@ -74,9 +73,11 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument( - '--artifacts-root', dest='artifacts_root', action='store', required=True) + '--artifacts-root', dest='artifacts_root', action='store', required=True + ) parser.add_argument( - '--dest-dir', dest='dst_dir', action='store', required=True) + '--dest-dir', dest='dst_dir', action='store', required=True + ) args = parser.parse_args() diff --git a/tools/fuchsia/gen_package.py b/tools/fuchsia/gen_package.py index 5ebc4ee555ac3..ab4d423186ddc 100755 --- a/tools/fuchsia/gen_package.py +++ b/tools/fuchsia/gen_package.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. 
All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Generate a Fuchsia FAR Archive from an asset manifest. """ @@ -15,6 +16,7 @@ from gather_flutter_runner_artifacts import CreateMetaPackage + # Generates the manifest and returns the file. def GenerateManifest(package_dir): full_paths = [] @@ -23,7 +25,9 @@ def GenerateManifest(package_dir): common_prefix = os.path.commonprefix([root, package_dir]) rel_path = os.path.relpath(os.path.join(root, f), common_prefix) from_package = os.path.abspath(os.path.join(package_dir, rel_path)) - assert from_package, 'Failed to create from_package for %s' % os.path.join(root, f) + assert from_package, 'Failed to create from_package for %s' % os.path.join( + root, f + ) full_paths.append('%s=%s' % (rel_path, from_package)) parent_dir = os.path.abspath(os.path.join(package_dir, os.pardir)) @@ -39,7 +43,8 @@ def CreateFarPackage(pm_bin, package_dir, signing_key, dst_dir, api_level): manifest_path = GenerateManifest(package_dir) pm_command_base = [ - pm_bin, '-m', manifest_path, '-k', signing_key, '-o', dst_dir, '--api-level', api_level + pm_bin, '-m', manifest_path, '-k', signing_key, '-o', dst_dir, + '--api-level', api_level ] # Build the package @@ -56,15 +61,23 @@ def main(): parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True) parser.add_argument( - '--package-dir', dest='package_dir', action='store', required=True) + '--package-dir', dest='package_dir', action='store', required=True + ) parser.add_argument( - '--manifest-file', dest='manifest_file', action='store', required=False) + '--manifest-file', dest='manifest_file', action='store', required=False + ) parser.add_argument( - '--manifest-json-file', dest='manifest_json_file', action='store', required=True) + '--manifest-json-file', + dest='manifest_json_file', + action='store', + required=True + ) parser.add_argument( - '--far-name', dest='far_name', action='store', required=False) + '--far-name', dest='far_name', action='store', required=False + ) parser.add_argument( - '--api-level', dest='api_level', action='store', required=False) + '--api-level', dest='api_level', action='store', required=False + ) args = parser.parse_args() @@ -100,31 +113,41 @@ def main(): # Use check_output so if anything goes wrong we get the output. 
try: - build_command = ['build', '--output-package-manifest', args.manifest_json_file] + build_command = [ + 'build', '--output-package-manifest', args.manifest_json_file + ] if args.api_level is not None: build_command = ['--api-level', args.api_level] + build_command - archive_command = ['archive', '--output='+ os.path.join(os.path.dirname(output_dir), args.far_name + "-0")] - - pm_commands = [ - build_command, - archive_command + archive_command = [ + 'archive', '--output=' + + os.path.join(os.path.dirname(output_dir), args.far_name + "-0") ] + pm_commands = [build_command, archive_command] + for pm_command in pm_commands: subprocess.check_output(pm_command_base + pm_command) except subprocess.CalledProcessError as e: - print('==================== Manifest contents =========================================') + print( + '==================== Manifest contents =========================================' + ) with open(manifest_file, 'r') as manifest: sys.stdout.write(manifest.read()) - print('==================== End manifest contents =====================================') + print( + '==================== End manifest contents =====================================' + ) meta_contents_path = os.path.join(output_dir, 'meta', 'contents') if os.path.exists(meta_contents_path): - print('==================== meta/contents =============================================') + print( + '==================== meta/contents =============================================' + ) with open(meta_contents_path, 'r') as meta_contents: sys.stdout.write(meta_contents.read()) - print('==================== End meta/contents =========================================') + print( + '==================== End meta/contents =========================================' + ) raise return 0 diff --git a/tools/fuchsia/gen_repo.py b/tools/fuchsia/gen_repo.py index 604fe33cb2367..b121b505eec68 100755 --- a/tools/fuchsia/gen_repo.py +++ b/tools/fuchsia/gen_repo.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Generate a Fuchsia repo capable of serving Fuchsia archives over the network. """ @@ -19,9 +20,11 @@ def main(): parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True) parser.add_argument( - '--repo-dir', dest='repo_dir', action='store', required=True) + '--repo-dir', dest='repo_dir', action='store', required=True + ) parser.add_argument( - '--archive', dest='archives', action='append', required=True) + '--archive', dest='archives', action='append', required=True + ) args = parser.parse_args() diff --git a/tools/fuchsia/gn-sdk/prepare_package_inputs.py b/tools/fuchsia/gn-sdk/prepare_package_inputs.py index d29660147c870..930e45fc88351 100755 --- a/tools/fuchsia/gn-sdk/prepare_package_inputs.py +++ b/tools/fuchsia/gn-sdk/prepare_package_inputs.py @@ -6,6 +6,7 @@ # # Derivative work of https://chromium.googlesource.com/chromium/src/+/HEAD/build/config/fuchsia/prepare_package_inputs.py # + """Creates an archive manifest used for Fuchsia package generation.""" import argparse @@ -20,7 +21,7 @@ def make_package_path(file_path, roots): - """Computes a path for |file_path| relative to one of the |roots|. + """Computes a path for |file_path| relative to one of the |roots|. Args: file_path: The file path to relativize.
@@ -33,353 +34,364 @@ def make_package_path(file_path, roots): * make_package_path('/foo/out/Debug/bar.exe', ['/foo/', '/foo/out/Debug/']) 'bar.exe' """ - # Prevents greedily matching against a shallow path when a deeper, better - # matching path exists. - roots.sort(key=len, reverse=True) + # Prevents greedily matching against a shallow path when a deeper, better + # matching path exists. + roots.sort(key=len, reverse=True) - for next_root in roots: - if not next_root.endswith(os.sep): - next_root += os.sep + for next_root in roots: + if not next_root.endswith(os.sep): + next_root += os.sep - if file_path.startswith(next_root): - relative_path = file_path[len(next_root):] - return relative_path + if file_path.startswith(next_root): + relative_path = file_path[len(next_root):] + return relative_path - return file_path + return file_path def _get_stripped_path(bin_path): - """Finds the stripped version of |bin_path| in the build output directory. + """Finds the stripped version of |bin_path| in the build output directory. returns |bin_path| if no stripped path is found. """ - stripped_path = bin_path.replace('lib.unstripped/', - 'lib/').replace('exe.unstripped/', '') - if os.path.exists(stripped_path): - return stripped_path - else: - return bin_path + stripped_path = bin_path.replace('lib.unstripped/', + 'lib/').replace('exe.unstripped/', '') + if os.path.exists(stripped_path): + return stripped_path + else: + return bin_path def _is_binary(path): - """Checks if the file at |path| is an ELF executable. + """Checks if the file at |path| is an ELF executable. This is done by inspecting its FourCC header. """ - with open(path, 'rb') as f: - file_tag = f.read(4) - return file_tag == b'\x7fELF' + with open(path, 'rb') as f: + file_tag = f.read(4) + return file_tag == b'\x7fELF' def _write_build_ids_txt(binary_paths, ids_txt_path): - """Writes an index text file mapping build IDs to unstripped binaries.""" - - READELF_FILE_PREFIX = 'File: ' - READELF_BUILD_ID_PREFIX = 'Build ID: ' - - # List of binaries whose build IDs are awaiting processing by readelf. - # Entries are removed as readelf's output is parsed. - unprocessed_binary_paths = set(binary_paths) - build_ids_map = {} - - # Sanity check that unstripped binaries do not also have their stripped - # counterpart listed. - for binary_path in binary_paths: - stripped_binary_path = _get_stripped_path(binary_path) - if stripped_binary_path != binary_path: - unprocessed_binary_paths.discard(stripped_binary_path) - - with open(ids_txt_path, 'w') as ids_file: - # TODO(richkadel): This script (originally from the Fuchsia GN SDK) was - # changed, adding this `if unprocessed_binary_paths` check, because for - # the Dart packages I tested (child-view and parent-view), this was - # empty. Update the Fuchsia GN SDK? (Or figure out if the Dart packages - # _should_ have at least one unprocessed_binary_path?) - if unprocessed_binary_paths: - # Create a set to dedupe stripped binary paths in case both the stripped and - # unstripped versions of a binary are specified. - readelf_stdout = subprocess.check_output( - ['readelf', '-n'] + sorted(unprocessed_binary_paths)).decode('utf8') - - if len(binary_paths) == 1: - # Readelf won't report a binary's path if only one was provided to the - # tool. 
- binary_path = binary_paths[0] - else: - binary_path = None - - for line in readelf_stdout.split('\n'): - line = line.strip() - - if line.startswith(READELF_FILE_PREFIX): - binary_path = line[len(READELF_FILE_PREFIX):] - assert binary_path in unprocessed_binary_paths - - elif line.startswith(READELF_BUILD_ID_PREFIX): - # Paths to the unstripped executables listed in "ids.txt" are specified - # as relative paths to that file. - unstripped_rel_path = os.path.relpath( - os.path.abspath(binary_path), - os.path.dirname(os.path.abspath(ids_txt_path))) - - build_id = line[len(READELF_BUILD_ID_PREFIX):] - build_ids_map[build_id] = unstripped_rel_path - unprocessed_binary_paths.remove(binary_path) - - for id_and_path in sorted(build_ids_map.items()): - ids_file.write(id_and_path[0] + ' ' + id_and_path[1] + '\n') - - # Did readelf forget anything? Make sure that all binaries are accounted for. - assert not unprocessed_binary_paths + """Writes an index text file mapping build IDs to unstripped binaries.""" + + READELF_FILE_PREFIX = 'File: ' + READELF_BUILD_ID_PREFIX = 'Build ID: ' + + # List of binaries whose build IDs are awaiting processing by readelf. + # Entries are removed as readelf's output is parsed. + unprocessed_binary_paths = set(binary_paths) + build_ids_map = {} + + # Sanity check that unstripped binaries do not also have their stripped + # counterpart listed. + for binary_path in binary_paths: + stripped_binary_path = _get_stripped_path(binary_path) + if stripped_binary_path != binary_path: + unprocessed_binary_paths.discard(stripped_binary_path) + + with open(ids_txt_path, 'w') as ids_file: + # TODO(richkadel): This script (originally from the Fuchsia GN SDK) was + # changed, adding this `if unprocessed_binary_paths` check, because for + # the Dart packages I tested (child-view and parent-view), this was + # empty. Update the Fuchsia GN SDK? (Or figure out if the Dart packages + # _should_ have at least one unprocessed_binary_path?) + if unprocessed_binary_paths: + # Create a set to dedupe stripped binary paths in case both the stripped and + # unstripped versions of a binary are specified. + readelf_stdout = subprocess.check_output(['readelf', '-n'] + + sorted(unprocessed_binary_paths) + ).decode('utf8') + + if len(binary_paths) == 1: + # Readelf won't report a binary's path if only one was provided to the + # tool. + binary_path = binary_paths[0] + else: + binary_path = None + + for line in readelf_stdout.split('\n'): + line = line.strip() + + if line.startswith(READELF_FILE_PREFIX): + binary_path = line[len(READELF_FILE_PREFIX):] + assert binary_path in unprocessed_binary_paths + + elif line.startswith(READELF_BUILD_ID_PREFIX): + # Paths to the unstripped executables listed in "ids.txt" are specified + # as relative paths to that file. + unstripped_rel_path = os.path.relpath( + os.path.abspath(binary_path), + os.path.dirname(os.path.abspath(ids_txt_path)) + ) + + build_id = line[len(READELF_BUILD_ID_PREFIX):] + build_ids_map[build_id] = unstripped_rel_path + unprocessed_binary_paths.remove(binary_path) + + for id_and_path in sorted(build_ids_map.items()): + ids_file.write(id_and_path[0] + ' ' + id_and_path[1] + '\n') + + # Did readelf forget anything? Make sure that all binaries are accounted for. 
+ assert not unprocessed_binary_paths def _parse_component(component_info_file): - component_info = json.load(open(component_info_file, 'r')) - return component_info + component_info = json.load(open(component_info_file, 'r')) + return component_info def _get_component_manifests(component_info): - return [c for c in component_info if c.get('type') == 'manifest'] + return [c for c in component_info if c.get('type') == 'manifest'] # TODO(richkadel): Changed, from the Fuchsia GN SDK version to add this function # and related code, to include support for a file of resources that aren't known # until compile time. def _get_resource_items_from_json_items(component_info): - nested_resources = [] - files = [c.get('source') for c in component_info if c.get('type') == 'json_of_resources'] - for json_file in files: - for resource in _parse_component(json_file): - nested_resources.append(resource) - return nested_resources + nested_resources = [] + files = [ + c.get('source') + for c in component_info + if c.get('type') == 'json_of_resources' + ] + for json_file in files: + for resource in _parse_component(json_file): + nested_resources.append(resource) + return nested_resources def _get_resource_items(component_info): - return ( - [c for c in component_info if c.get('type') == 'resource'] + - _get_resource_items_from_json_items(component_info) - ) + return ([c for c in component_info if c.get('type') == 'resource'] + + _get_resource_items_from_json_items(component_info)) def _get_expanded_files(runtime_deps_file): - """ Process the runtime deps file for file paths, recursively walking + """ Process the runtime deps file for file paths, recursively walking directories as needed. Returns a set of expanded files referenced by the runtime deps file. """ - # runtime_deps may contain duplicate paths, so use a set for - # de-duplication. - expanded_files = set() - for next_path in open(runtime_deps_file, 'r'): - next_path = next_path.strip() - if os.path.isdir(next_path): - for root, _, files in os.walk(next_path): - for current_file in files: - if current_file.startswith('.'): - continue - expanded_files.add( - os.path.normpath(os.path.join(root, current_file))) - else: - expanded_files.add(os.path.normpath(next_path)) - return expanded_files + # runtime_deps may contain duplicate paths, so use a set for + # de-duplication. + expanded_files = set() + for next_path in open(runtime_deps_file, 'r'): + next_path = next_path.strip() + if os.path.isdir(next_path): + for root, _, files in os.walk(next_path): + for current_file in files: + if current_file.startswith('.'): + continue + expanded_files.add(os.path.normpath(os.path.join(root, current_file))) + else: + expanded_files.add(os.path.normpath(next_path)) + return expanded_files def _write_gn_deps_file( - depfile_path, package_manifest, component_manifests, out_dir, - expanded_files): - with open(depfile_path, 'w') as depfile: - deps_list = [os.path.relpath(f, out_dir) for f in expanded_files] - deps_list.extend(component_manifests) + depfile_path, package_manifest, component_manifests, out_dir, expanded_files +): + with open(depfile_path, 'w') as depfile: + deps_list = [os.path.relpath(f, out_dir) for f in expanded_files] + deps_list.extend(component_manifests) - # The deps file is space-delimited, so filenames containing spaces - # must have them escaped. - deps_list = [f.replace(' ','\\ ') for f in deps_list] + # The deps file is space-delimited, so filenames containing spaces + # must have them escaped. 
+ deps_list = [f.replace(' ', '\\ ') for f in deps_list] - deps_string = ' '.join(sorted(deps_list)) - depfile.write('%s: %s' % (package_manifest, deps_string)) + deps_string = ' '.join(sorted(deps_list)) + depfile.write('%s: %s' % (package_manifest, deps_string)) def _write_meta_package_manifest( - manifest_entries, manifest_path, app_name, out_dir, package_version): - # Write meta/package manifest file and add to archive manifest. - meta_package = os.path.join(os.path.dirname(manifest_path), 'package') - with open(meta_package, 'w') as package_json: - json_payload = {'version': package_version, 'name': app_name} - json.dump(json_payload, package_json) - package_json_filepath = os.path.relpath(package_json.name, out_dir) - manifest_entries['meta/package'] = package_json_filepath + manifest_entries, manifest_path, app_name, out_dir, package_version +): + # Write meta/package manifest file and add to archive manifest. + meta_package = os.path.join(os.path.dirname(manifest_path), 'package') + with open(meta_package, 'w') as package_json: + json_payload = {'version': package_version, 'name': app_name} + json.dump(json_payload, package_json) + package_json_filepath = os.path.relpath(package_json.name, out_dir) + manifest_entries['meta/package'] = package_json_filepath def _write_component_manifest( - manifest_entries, component_info, archive_manifest_path, out_dir): - """Copy component manifest files and add to archive manifest. + manifest_entries, component_info, archive_manifest_path, out_dir +): + """Copy component manifest files and add to archive manifest. Raises an exception if a component uses an unknown manifest version. """ - for component_manifest in _get_component_manifests(component_info): - manifest_version = component_manifest.get('manifest_version') - - if manifest_version not in MANIFEST_VERSION_EXTENSIONS: - raise Exception( - 'Unknown manifest_version: {}'.format(manifest_version)) - - # TODO(richkadel): Changed, from the Fuchsia GN SDK version, to assume - # the given `output_name` already includes its extension. This change - # has not been fully validated, in particular, it has not been tested - # with CF v2 `.cm` (from `.cml`) files.
Original implementation was: + # + # extension = MANIFEST_VERSION_EXTENSIONS.get(manifest_version) + # manifest_dest_file_path = os.path.join( + # os.path.dirname(archive_manifest_path), + # component_manifest.get('output_name') + extension) + manifest_dest_file_path = os.path.join( + os.path.dirname(archive_manifest_path), + component_manifest.get('output_name') + ) + # Add the 'meta/' subdir, for example, if `output_name` includes it + os.makedirs(os.path.dirname(manifest_dest_file_path), exist_ok=True) + shutil.copy(component_manifest.get('source'), manifest_dest_file_path) + + manifest_entries['meta/%s' % os.path.basename(manifest_dest_file_path) + ] = os.path.relpath(manifest_dest_file_path, out_dir) + return manifest_dest_file_path def _write_package_manifest( - manifest_entries, expanded_files, out_dir, exclude_file, root_dir, - component_info): - """Writes the package manifest for a Fuchsia package + manifest_entries, expanded_files, out_dir, exclude_file, root_dir, + component_info +): + """Writes the package manifest for a Fuchsia package Returns a list of binaries in the package. Raises an exception if excluded files are not found.""" - gen_dir = os.path.normpath(os.path.join(out_dir, 'gen')) - excluded_files_set = set(exclude_file) - roots = [gen_dir, root_dir, out_dir] - - # Filter out component manifests. These are written out elsewhere. - excluded_files_set.update( - [ - make_package_path( - os.path.relpath(cf.get('source'), out_dir), roots) - for cf in _get_component_manifests(component_info) - if os.path.relpath(cf.get('source'), out_dir) in expanded_files - ]) - - # Filter out json_of_resources since only their contents are written, and we - # don't know the contained resources until late in the build cycle - excluded_files_set.update( - [ - make_package_path( - os.path.relpath(cf.get('source'), out_dir), roots) - for cf in component_info if cf.get('type') == 'json_of_resources' and - os.path.relpath(cf.get('source'), out_dir) in expanded_files - ]) - - # Write out resource files with specific package paths, and exclude them from - # the list of expanded files so they are not listed twice in the manifest. - for resource in _get_resource_items(component_info): - relative_src_file = os.path.relpath(resource.get('source'), out_dir) - resource_path = make_package_path(relative_src_file, roots) - manifest_entries[resource.get('dest')] = relative_src_file - if resource.get('type') == 'resource': - excluded_files_set.add(resource_path) - - for current_file in expanded_files: - current_file = _get_stripped_path(current_file) - # make_package_path() may relativize to either the source root or - # output directory. - in_package_path = make_package_path(current_file, roots) - - if in_package_path in excluded_files_set: - excluded_files_set.remove(in_package_path) - else: - manifest_entries[in_package_path] = current_file - - if excluded_files_set: - raise Exception( - 'Some files were excluded with --exclude-file but ' - 'not found in the deps list, or a resource (data) file ' - 'was added and not filtered out. Excluded files and resources: ' - '%s' % ', '.join(excluded_files_set)) + gen_dir = os.path.normpath(os.path.join(out_dir, 'gen')) + excluded_files_set = set(exclude_file) + roots = [gen_dir, root_dir, out_dir] + + # Filter out component manifests. These are written out elsewhere. 
+ excluded_files_set.update([ + make_package_path(os.path.relpath(cf.get('source'), out_dir), roots) + for cf in _get_component_manifests(component_info) + if os.path.relpath(cf.get('source'), out_dir) in expanded_files + ]) + + # Filter out json_of_resources since only their contents are written, and we + # don't know the contained resources until late in the build cycle + excluded_files_set.update([ + make_package_path(os.path.relpath(cf.get('source'), out_dir), roots) + for cf in component_info + if cf.get('type') == 'json_of_resources' and + os.path.relpath(cf.get('source'), out_dir) in expanded_files + ]) + + # Write out resource files with specific package paths, and exclude them from + # the list of expanded files so they are not listed twice in the manifest. + for resource in _get_resource_items(component_info): + relative_src_file = os.path.relpath(resource.get('source'), out_dir) + resource_path = make_package_path(relative_src_file, roots) + manifest_entries[resource.get('dest')] = relative_src_file + if resource.get('type') == 'resource': + excluded_files_set.add(resource_path) + + for current_file in expanded_files: + current_file = _get_stripped_path(current_file) + # make_package_path() may relativize to either the source root or + # output directory. + in_package_path = make_package_path(current_file, roots) + + if in_package_path in excluded_files_set: + excluded_files_set.remove(in_package_path) + else: + manifest_entries[in_package_path] = current_file + + if excluded_files_set: + raise Exception( + 'Some files were excluded with --exclude-file but ' + 'not found in the deps list, or a resource (data) file ' + 'was added and not filtered out. Excluded files and resources: ' + '%s' % ', '.join(excluded_files_set) + ) def _build_manifest(args): - # Use a sorted list to make sure the manifest order is deterministic. - expanded_files = sorted(_get_expanded_files(args.runtime_deps_file)) - component_info = _parse_component(args.json_file) - component_manifests = [] - - # Collect the manifest entries in a map since duplication happens - # because of runtime libraries. - manifest_entries = {} - _write_meta_package_manifest( - manifest_entries, args.manifest_path, args.app_name, args.out_dir, - args.package_version) - for component_item in component_info: - _write_package_manifest( - manifest_entries, expanded_files, args.out_dir, args.exclude_file, - args.root_dir, component_item) - component_manifests.append( - _write_component_manifest( - manifest_entries, component_item, args.manifest_path, - args.out_dir)) - - with open(args.manifest_path, 'w') as manifest: - for key in sorted(manifest_entries.keys()): - manifest.write('%s=%s\n' % (key, manifest_entries[key])) - - binaries = [f for f in expanded_files if _is_binary(f)] - _write_build_ids_txt(sorted(binaries), args.build_ids_file) - - # Omit any excluded_files from the expanded_files written to the depfile. - gen_dir = os.path.normpath(os.path.join(args.out_dir, 'gen')) - roots = [gen_dir, args.root_dir, args.out_dir] - excluded_files_set = set(args.exclude_file) - expanded_deps_files = [path for path in expanded_files - if make_package_path(path, roots) - not in excluded_files_set] - - _write_gn_deps_file( - args.depfile_path, args.manifest_path, component_manifests, - args.out_dir, expanded_deps_files) - return 0 + # Use a sorted list to make sure the manifest order is deterministic. 
+ expanded_files = sorted(_get_expanded_files(args.runtime_deps_file)) + component_info = _parse_component(args.json_file) + component_manifests = [] + + # Collect the manifest entries in a map since duplication happens + # because of runtime libraries. + manifest_entries = {} + _write_meta_package_manifest( + manifest_entries, args.manifest_path, args.app_name, args.out_dir, + args.package_version + ) + for component_item in component_info: + _write_package_manifest( + manifest_entries, expanded_files, args.out_dir, args.exclude_file, + args.root_dir, component_item + ) + component_manifests.append( + _write_component_manifest( + manifest_entries, component_item, args.manifest_path, args.out_dir + ) + ) + + with open(args.manifest_path, 'w') as manifest: + for key in sorted(manifest_entries.keys()): + manifest.write('%s=%s\n' % (key, manifest_entries[key])) + + binaries = [f for f in expanded_files if _is_binary(f)] + _write_build_ids_txt(sorted(binaries), args.build_ids_file) + + # Omit any excluded_files from the expanded_files written to the depfile. + gen_dir = os.path.normpath(os.path.join(args.out_dir, 'gen')) + roots = [gen_dir, args.root_dir, args.out_dir] + excluded_files_set = set(args.exclude_file) + expanded_deps_files = [ + path for path in expanded_files + if make_package_path(path, roots) not in excluded_files_set + ] + + _write_gn_deps_file( + args.depfile_path, args.manifest_path, component_manifests, args.out_dir, + expanded_deps_files + ) + return 0 def main(): - parser = argparse.ArgumentParser() - parser.add_argument( - '--root-dir', required=True, help='Build root directory') - parser.add_argument( - '--out-dir', required=True, help='Build output directory') - parser.add_argument('--app-name', required=True, help='Package name') - parser.add_argument( - '--runtime-deps-file', - required=True, - help='File with the list of runtime dependencies.') - parser.add_argument( - '--depfile-path', required=True, help='Path to write GN deps file.') - parser.add_argument( - '--exclude-file', - action='append', - default=[], - help='Package-relative file path to exclude from the package.') - parser.add_argument( - '--manifest-path', required=True, help='Manifest output path.') - parser.add_argument( - '--build-ids-file', required=True, help='Debug symbol index path.') - parser.add_argument('--json-file', required=True) - parser.add_argument( - '--package-version', default='0', help='Version of the package') - - args = parser.parse_args() - - return _build_manifest(args) + parser = argparse.ArgumentParser() + parser.add_argument('--root-dir', required=True, help='Build root directory') + parser.add_argument('--out-dir', required=True, help='Build output directory') + parser.add_argument('--app-name', required=True, help='Package name') + parser.add_argument( + '--runtime-deps-file', + required=True, + help='File with the list of runtime dependencies.' + ) + parser.add_argument( + '--depfile-path', required=True, help='Path to write GN deps file.' + ) + parser.add_argument( + '--exclude-file', + action='append', + default=[], + help='Package-relative file path to exclude from the package.' + ) + parser.add_argument( + '--manifest-path', required=True, help='Manifest output path.' + ) + parser.add_argument( + '--build-ids-file', required=True, help='Debug symbol index path.' 
+ ) + parser.add_argument('--json-file', required=True) + parser.add_argument( + '--package-version', default='0', help='Version of the package' + ) + + args = parser.parse_args() + + return _build_manifest(args) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools/fuchsia/interpolate_test_suite.py b/tools/fuchsia/interpolate_test_suite.py index 3e61f6520fb38..bfa3869546614 100755 --- a/tools/fuchsia/interpolate_test_suite.py +++ b/tools/fuchsia/interpolate_test_suite.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Interpolates test suite information into a cml file. """ diff --git a/tools/fuchsia/make_build_info.py b/tools/fuchsia/make_build_info.py index d4565702a199c..cde629296d18a 100755 --- a/tools/fuchsia/make_build_info.py +++ b/tools/fuchsia/make_build_info.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + ''' Interpolates build environment information into a file. ''' @@ -15,66 +16,67 @@ def GetDartSdkGitRevision(buildroot): - project_root = path.join(buildroot, 'third_party', 'dart') - return subprocess.check_output( - ['git', '-C', project_root, 'rev-parse', 'HEAD']).strip() + project_root = path.join(buildroot, 'third_party', 'dart') + return subprocess.check_output([ + 'git', '-C', project_root, 'rev-parse', 'HEAD' + ]).strip() def GetDartSdkSemanticVersion(buildroot): - project_root = path.join(buildroot, 'third_party', 'dart') - return subprocess.check_output( - ['git', '-C', project_root, 'describe', '--abbrev=0']).strip() + project_root = path.join(buildroot, 'third_party', 'dart') + return subprocess.check_output([ + 'git', '-C', project_root, 'describe', '--abbrev=0' + ]).strip() def GetFlutterEngineGitRevision(buildroot): - project_root = path.join(buildroot, 'flutter') - return subprocess.check_output( - ['git', '-C', project_root, 'rev-parse', 'HEAD']).strip() + project_root = path.join(buildroot, 'flutter') + return subprocess.check_output([ + 'git', '-C', project_root, 'rev-parse', 'HEAD' + ]).strip() def GetFuchsiaSdkVersion(buildroot): - with open(path.join( - buildroot, - 'fuchsia', - 'sdk', - 'linux' if sys.platform.startswith('linux') else 'mac', - 'meta', - 'manifest.json'), - 'r') as fuchsia_sdk_manifest: - return json.load(fuchsia_sdk_manifest)['id'] + with open(path.join(buildroot, 'fuchsia', 'sdk', + 'linux' if sys.platform.startswith('linux') else 'mac', + 'meta', 'manifest.json'), 'r') as fuchsia_sdk_manifest: + return json.load(fuchsia_sdk_manifest)['id'] def main(): - # Parse arguments. - parser = ArgumentParser() - parser.add_argument( - '--input', action='store', help='input file path', required=True) - parser.add_argument( - '--output', action='store', help='output file path', required=True) - parser.add_argument( - '--buildroot', - action='store', - help='path to the flutter engine buildroot', - required=True) - args = parser.parse_args() + # Parse arguments. + parser = ArgumentParser() + parser.add_argument( + '--input', action='store', help='input file path', required=True + ) + parser.add_argument( + '--output', action='store', help='output file path', required=True + ) + parser.add_argument( + '--buildroot', + action='store', + help='path to the flutter engine buildroot', + required=True + ) + args = parser.parse_args() - # Read, interpolate, write. 
- with open(args.input, 'r') as i, open(args.output, 'w') as o: - o.write( - i.read() - .replace( - '{{DART_SDK_GIT_REVISION}}', - GetDartSdkGitRevision(args.buildroot).decode('utf-8')) - .replace( - '{{DART_SDK_SEMANTIC_VERSION}}', - GetDartSdkSemanticVersion(args.buildroot).decode('utf-8')) - .replace( - '{{FLUTTER_ENGINE_GIT_REVISION}}', - GetFlutterEngineGitRevision(args.buildroot).decode('utf-8')) - .replace( - '{{FUCHSIA_SDK_VERSION}}', - GetFuchsiaSdkVersion(args.buildroot))) + # Read, interpolate, write. + with open(args.input, 'r') as i, open(args.output, 'w') as o: + o.write( + i.read().replace( + '{{DART_SDK_GIT_REVISION}}', + GetDartSdkGitRevision(args.buildroot).decode('utf-8') + ).replace( + '{{DART_SDK_SEMANTIC_VERSION}}', + GetDartSdkSemanticVersion(args.buildroot).decode('utf-8') + ).replace( + '{{FLUTTER_ENGINE_GIT_REVISION}}', + GetFlutterEngineGitRevision(args.buildroot).decode('utf-8') + ).replace( + '{{FUCHSIA_SDK_VERSION}}', GetFuchsiaSdkVersion(args.buildroot) + ) + ) if __name__ == '__main__': - main() + main() diff --git a/tools/fuchsia/merge_and_upload_debug_symbols.py b/tools/fuchsia/merge_and_upload_debug_symbols.py index e1c71b6eb9883..6e1038ed7ce1f 100755 --- a/tools/fuchsia/merge_and_upload_debug_symbols.py +++ b/tools/fuchsia/merge_and_upload_debug_symbols.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Merges the debug symbols and uploads them to cipd. """ @@ -56,11 +57,11 @@ def WriteCIPDDefinition(target_arch, out_dir, symbol_dirs): def CheckCIPDPackageExists(package_name, tag): '''Check to see if the current package/tag combo has been published''' command = [ - 'cipd', - 'search', - package_name, - '-tag', - tag, + 'cipd', + 'search', + package_name, + '-tag', + tag, ] stdout = subprocess.check_output(command) match = re.search(r'No matching instances\.', stdout) @@ -74,22 +75,29 @@ def ProcessCIPDPackage(upload, cipd_yaml, engine_version, out_dir, target_arch): _packaging_dir = GetPackagingDir(out_dir) tag = 'git_revision:%s' % engine_version package_name = 'flutter/fuchsia-debug-symbols-%s' % target_arch - already_exists = CheckCIPDPackageExists( - package_name, - tag) + already_exists = CheckCIPDPackageExists(package_name, tag) if already_exists: print('CIPD package %s tag %s already exists!' % (package_name, tag)) if upload and IsLinux() and not already_exists: command = [ - 'cipd', 'create', '-pkg-def', cipd_yaml, '-ref', 'latest', '-tag', - tag, '-verification-timeout', '10m0s', + 'cipd', + 'create', + '-pkg-def', + cipd_yaml, + '-ref', + 'latest', + '-tag', + tag, + '-verification-timeout', + '10m0s', ] else: command = [ 'cipd', 'pkg-build', '-pkg-def', cipd_yaml, '-out', - os.path.join(_packaging_dir, - 'fuchsia-debug-symbols-%s.cipd' % target_arch) + os.path.join( + _packaging_dir, 'fuchsia-debug-symbols-%s.cipd' % target_arch + ) ] # Retry up to three times. We've seen CIPD fail on verification in some @@ -105,6 +113,7 @@ def ProcessCIPDPackage(upload, cipd_yaml, engine_version, out_dir, target_arch): if tries == num_tries - 1: raise + # Recursively hardlinks contents from one directory to another, # skipping over collisions. 
def HardlinkContents(dirA, dirB): @@ -134,6 +143,7 @@ def HardlinkContents(dirA, dirB): os.link(src, dest) return internal_symbol_dirs + def main(): parser = argparse.ArgumentParser() @@ -148,13 +158,16 @@ def main(): required=True, action='store', dest='out_dir', - help='Output directory where the executables will be placed.') + help='Output directory where the executables will be placed.' + ) parser.add_argument( - '--target-arch', type=str, choices=['x64', 'arm64'], required=True) + '--target-arch', type=str, choices=['x64', 'arm64'], required=True + ) parser.add_argument( '--engine-version', required=True, - help='Specifies the flutter engine SHA.') + help='Specifies the flutter engine SHA.' + ) parser.add_argument('--upload', default=False, action='store_true') diff --git a/tools/fuchsia/parse_manifest.py b/tools/fuchsia/parse_manifest.py index 3863e6bae3199..3cd0584c18327 100755 --- a/tools/fuchsia/parse_manifest.py +++ b/tools/fuchsia/parse_manifest.py @@ -3,6 +3,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """ Parses manifest file and dumps it to json. """ @@ -12,13 +13,16 @@ import sys import hashlib + def main(): parser = argparse.ArgumentParser() parser.add_argument( - '--input', dest='file_path', action='store', required=True) + '--input', dest='file_path', action='store', required=True + ) parser.add_argument( - '--clang-cpu', dest='clang_cpu', action='store', required=True) + '--clang-cpu', dest='clang_cpu', action='store', required=True + ) args = parser.parse_args() diff --git a/tools/fuchsia/toolchain/copy.py b/tools/fuchsia/toolchain/copy.py index b142cbd4f113d..1f56a60fe8fc4 100755 --- a/tools/fuchsia/toolchain/copy.py +++ b/tools/fuchsia/toolchain/copy.py @@ -2,6 +2,7 @@ # Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. + """Emulation of `rm -f out && cp -af` in out. This is necessary on Mac in order to preserve nanoseconds of mtime. See https://fxbug.dev/56376#c5.""" @@ -11,38 +12,38 @@ def main(): - if len(sys.argv) != 3: - print('usage: copy.py source dest', file=sys.stderr) - return 1 - source = sys.argv[1] - dest = sys.argv[2] - - if os.path.isdir(source): - print( - f'{source} is a directory, tool "copy" does not support directory copies' - ) - return 1 - - if os.path.exists(dest): - if os.path.isdir(dest): - - def _on_error(fn, path, dummy_excinfo): - # The operation failed, possibly because the file is set to - # read-only. If that's why, make it writable and try the op - # again. - if not os.access(path, os.W_OK): - os.chmod(path, stat.S_IWRITE) - fn(path) - - shutil.rmtree(dest, onerror=_on_error) - else: - if not os.access(dest, os.W_OK): - # Attempt to make the file writable before deleting it. - os.chmod(dest, stat.S_IWRITE) - os.unlink(dest) - - shutil.copy2(source, dest) + if len(sys.argv) != 3: + print('usage: copy.py source dest', file=sys.stderr) + return 1 + source = sys.argv[1] + dest = sys.argv[2] + + if os.path.isdir(source): + print( + f'{source} is a directory, tool "copy" does not support directory copies' + ) + return 1 + + if os.path.exists(dest): + if os.path.isdir(dest): + + def _on_error(fn, path, dummy_excinfo): + # The operation failed, possibly because the file is set to + # read-only. If that's why, make it writable and try the op + # again. 
+ if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWRITE) + fn(path) + + shutil.rmtree(dest, onerror=_on_error) + else: + if not os.access(dest, os.W_OK): + # Attempt to make the file writable before deleting it. + os.chmod(dest, stat.S_IWRITE) + os.unlink(dest) + + shutil.copy2(source, dest) if __name__ == '__main__': - main() + main() diff --git a/tools/gen_android_buildconfig.py b/tools/gen_android_buildconfig.py index 485d8f99987af..d75d273fc1a53 100644 --- a/tools/gen_android_buildconfig.py +++ b/tools/gen_android_buildconfig.py @@ -28,8 +28,11 @@ }} """ + def main(): - parser = argparse.ArgumentParser(description='Generate BuildConfig.java for Android') + parser = argparse.ArgumentParser( + description='Generate BuildConfig.java for Android' + ) parser.add_argument('--runtime-mode', type=str, required=True) parser.add_argument('--out', type=str, required=True) @@ -42,7 +45,15 @@ def main(): assert debug or profile or release or jit_release with open(os.path.abspath(args.out), 'w+') as output_file: - output_file.write(BUILD_CONFIG_TEMPLATE.format(str(debug).lower(), str(profile).lower(), str(release).lower(), str(jit_release).lower())) + output_file.write( + BUILD_CONFIG_TEMPLATE.format( + str(debug).lower(), + str(profile).lower(), + str(release).lower(), + str(jit_release).lower() + ) + ) + if __name__ == '__main__': sys.exit(main()) diff --git a/tools/githooks/setup.py b/tools/githooks/setup.py index e045f0a26938f..ea8edf467a838 100755 --- a/tools/githooks/setup.py +++ b/tools/githooks/setup.py @@ -11,8 +11,11 @@ import subprocess import sys - -SRC_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +SRC_ROOT = os.path.dirname( + os.path.dirname( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + ) +) FLUTTER_DIR = os.path.join(SRC_ROOT, 'flutter') @@ -28,11 +31,12 @@ def Main(argv): git = 'git.bat' githooks = os.path.join(githooks, 'windows') result = subprocess.run([ - git, - 'config', - 'core.hooksPath', - githooks, - ], cwd=FLUTTER_DIR) + git, + 'config', + 'core.hooksPath', + githooks, + ], + cwd=FLUTTER_DIR) return result.returncode diff --git a/tools/gn_test.py b/tools/gn_test.py index 3f490cf6cf05d..a399d7e779c8d 100644 --- a/tools/gn_test.py +++ b/tools/gn_test.py @@ -5,32 +5,36 @@ import os import imp + SKY_TOOLS = os.path.dirname(os.path.abspath(__file__)) gn = imp.load_source('gn', os.path.join(SKY_TOOLS, 'gn')) class GNTestCase(unittest.TestCase): - def _expect_build_dir(self, arg_list, expected_build_dir): - args = gn.parse_args(['gn'] + arg_list) - self.assertEquals(gn.get_out_dir(args), expected_build_dir) - def test_get_out_dir(self): - self._expect_build_dir(['--debug'], 'out/Debug') - self._expect_build_dir(['--release'], 'out/Release') - self._expect_build_dir(['--ios'], 'out/ios_Debug') - self._expect_build_dir(['--ios', '--release'], 'out/ios_Release') - self._expect_build_dir(['--android'], 'out/android_Debug') - self._expect_build_dir(['--android', '--release'], 'out/android_Release') + def _expect_build_dir(self, arg_list, expected_build_dir): + args = gn.parse_args(['gn'] + arg_list) + self.assertEquals(gn.get_out_dir(args), expected_build_dir) + + def test_get_out_dir(self): + self._expect_build_dir(['--debug'], 'out/Debug') + self._expect_build_dir(['--release'], 'out/Release') + self._expect_build_dir(['--ios'], 'out/ios_Debug') + self._expect_build_dir(['--ios', '--release'], 'out/ios_Release') + self._expect_build_dir(['--android'], 'out/android_Debug') + 
self._expect_build_dir(['--android', '--release'], 'out/android_Release') - def _gn_args(self, arg_list): - args = gn.parse_args(['gn'] + arg_list) - return gn.to_gn_args(args) + def _gn_args(self, arg_list): + args = gn.parse_args(['gn'] + arg_list) + return gn.to_gn_args(args) - def test_to_gn_args(self): - # This would not necessarily be true on a 32-bit machine? - self.assertEquals(self._gn_args(['--ios', '--simulator'])['target_cpu'], 'x64') - self.assertEquals(self._gn_args(['--ios'])['target_cpu'], 'arm') + def test_to_gn_args(self): + # This would not necessarily be true on a 32-bit machine? + self.assertEquals( + self._gn_args(['--ios', '--simulator'])['target_cpu'], 'x64' + ) + self.assertEquals(self._gn_args(['--ios'])['target_cpu'], 'arm') if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools/javadoc/gen_javadoc.py b/tools/javadoc/gen_javadoc.py index 97a4cfb5d3e89..e10a046cb051c 100755 --- a/tools/javadoc/gen_javadoc.py +++ b/tools/javadoc/gen_javadoc.py @@ -13,71 +13,96 @@ SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) + def JavadocBin(): if sys.platform == 'darwin': - return os.path.join(SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'Contents', 'Home', 'bin', 'javadoc') + return os.path.join( + SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', + 'Contents', 'Home', 'bin', 'javadoc' + ) elif sys.platform.startswith(('cygwin', 'win')): - return os.path.join(SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', 'javadoc.exe') - else : - return os.path.join(SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', 'javadoc') + return os.path.join( + SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', + 'javadoc.exe' + ) + else: + return os.path.join( + SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', + 'javadoc' + ) def main(): - parser = argparse.ArgumentParser(description='Runs javadoc on Flutter Android libraries') + parser = argparse.ArgumentParser( + description='Runs javadoc on Flutter Android libraries' + ) parser.add_argument('--out-dir', type=str, required=True) - parser.add_argument('--android-source-root', type=str, default=ANDROID_SRC_ROOT) + parser.add_argument( + '--android-source-root', type=str, default=ANDROID_SRC_ROOT + ) parser.add_argument('--build-config-path', type=str) parser.add_argument('--third-party', type=str, default='third_party') parser.add_argument('--quiet', default=False, action='store_true') args = parser.parse_args() if not os.path.exists(args.android_source_root): - print('This script must be run at the root of the Flutter source tree, or ' - 'the --android-source-root must be set.') + print( + 'This script must be run at the root of the Flutter source tree, or ' + 'the --android-source-root must be set.' 
+ ) return 1 if not os.path.exists(args.out_dir): os.makedirs(args.out_dir) classpath = [ - args.android_source_root, - os.path.join(args.third_party, 'android_tools/sdk/platforms/android-32/android.jar'), - os.path.join(args.third_party, 'android_embedding_dependencies', 'lib', '*'), + args.android_source_root, + os.path.join( + args.third_party, 'android_tools/sdk/platforms/android-32/android.jar' + ), + os.path.join( + args.third_party, 'android_embedding_dependencies', 'lib', '*' + ), ] if args.build_config_path: classpath.append(args.build_config_path) packages = [ - 'io.flutter.app', - 'io.flutter.embedding.android', - 'io.flutter.embedding.engine', - 'io.flutter.embedding.engine.dart', - 'io.flutter.embedding.engine.loader', - 'io.flutter.embedding.engine.mutatorsstack', - 'io.flutter.embedding.engine.plugins', - 'io.flutter.embedding.engine.plugins.activity', - 'io.flutter.embedding.engine.plugins.broadcastreceiver', - 'io.flutter.embedding.engine.plugins.contentprovider', - 'io.flutter.embedding.engine.plugins.lifecycle', - 'io.flutter.embedding.engine.plugins.service', - 'io.flutter.embedding.engine.plugins.shim', - 'io.flutter.embedding.engine.renderer', - 'io.flutter.embedding.engine.systemchannels', - 'io.flutter.plugin.common', - 'io.flutter.plugin.editing', - 'io.flutter.plugin.platform', - 'io.flutter.util', - 'io.flutter.view', + 'io.flutter.app', + 'io.flutter.embedding.android', + 'io.flutter.embedding.engine', + 'io.flutter.embedding.engine.dart', + 'io.flutter.embedding.engine.loader', + 'io.flutter.embedding.engine.mutatorsstack', + 'io.flutter.embedding.engine.plugins', + 'io.flutter.embedding.engine.plugins.activity', + 'io.flutter.embedding.engine.plugins.broadcastreceiver', + 'io.flutter.embedding.engine.plugins.contentprovider', + 'io.flutter.embedding.engine.plugins.lifecycle', + 'io.flutter.embedding.engine.plugins.service', + 'io.flutter.embedding.engine.plugins.shim', + 'io.flutter.embedding.engine.renderer', + 'io.flutter.embedding.engine.systemchannels', + 'io.flutter.plugin.common', + 'io.flutter.plugin.editing', + 'io.flutter.plugin.platform', + 'io.flutter.util', + 'io.flutter.view', ] android_package_list = os.path.join(SCRIPT_DIR, 'android_reference') command = [ - JavadocBin(), - '-classpath', ':'.join(classpath), - '-d', args.out_dir, - '-linkoffline', 'https://developer.android.com/reference/', android_package_list, - '-source', '1.8', + JavadocBin(), + '-classpath', + ':'.join(classpath), + '-d', + args.out_dir, + '-linkoffline', + 'https://developer.android.com/reference/', + android_package_list, + '-source', + '1.8', ] + packages if not args.quiet: diff --git a/tools/licenses/lib/main.dart b/tools/licenses/lib/main.dart index 9a6b1a54b666c..fdd323d06f274 100644 --- a/tools/licenses/lib/main.dart +++ b/tools/licenses/lib/main.dart @@ -1844,6 +1844,7 @@ class _RepositoryRootThirdPartyDirectory extends _RepositoryGenericThirdPartyDir && entry.name != 'mockito' // only used by tests && entry.name != 'pymock' // presumably only used by tests && entry.name != 'pyyaml' // build-time dependency only + && entry.name != 'yapf' // only used for code formatting && entry.name != 'android_embedding_dependencies' // testing framework for android && entry.name != 'yasm' // build-time dependency only && entry.name != 'binutils' // build-time dependency only diff --git a/tools/luci/build.py b/tools/luci/build.py index 5aa1bc7ea711d..1cfcd5cab4d6f 100755 --- a/tools/luci/build.py +++ b/tools/luci/build.py @@ -20,27 +20,27 @@ def GetAllBuilders(): curl_command = [ 
- 'curl', - 'https://ci.chromium.org/p/flutter/g/engine/builders', + 'curl', + 'https://ci.chromium.org/p/flutter/g/engine/builders', ] curl_result = subprocess.run( - curl_command, - universal_newlines=True, - capture_output=True, + curl_command, + universal_newlines=True, + capture_output=True, ) if curl_result.returncode != 0: print('Failed to fetch builder list: stderr:\n%s' % curl_result.stderr) return [] sed_command = [ - 'sed', - '-En', - 's:.*aria-label="builder buildbucket/luci\\.flutter\\.prod/([^/]+)".*:\\1:p', + 'sed', + '-En', + 's:.*aria-label="builder buildbucket/luci\\.flutter\\.prod/([^/]+)".*:\\1:p', ] sed_result = subprocess.run( - sed_command, - input=curl_result.stdout, - capture_output=True, - universal_newlines=True, + sed_command, + input=curl_result.stdout, + capture_output=True, + universal_newlines=True, ) if sed_result.returncode != 0: print('Failed to fetch builder list: stderr:\n%s' % sed_result.stderr) @@ -50,28 +50,23 @@ def GetAllBuilders(): def Main(): parser = argparse.ArgumentParser(description='Reruns Engine LUCI prod builds') parser.add_argument( - '--force-upload', - action='store_true', - default=False, - help='Force artifact upload, overwriting existing artifacts.') - parser.add_argument( - '--all', - action='store_true', - default=False, - help='Re-run all builds.') + '--force-upload', + action='store_true', + default=False, + help='Force artifact upload, overwriting existing artifacts.' + ) parser.add_argument( - '--builder', - type=str, - help='The builder to rerun.') + '--all', action='store_true', default=False, help='Re-run all builds.' + ) + parser.add_argument('--builder', type=str, help='The builder to rerun.') parser.add_argument( - '--commit', - type=str, - required=True, - help='The commit to rerun.') + '--commit', type=str, required=True, help='The commit to rerun.' + ) parser.add_argument( - '--dry-run', - action='store_true', - help='Print what would be done, but do nothing.'
+ ) args = parser.parse_args() if 'help' in vars(args) and args.help: @@ -87,32 +82,39 @@ def Main(): builders = [args.builder] auth_command = [ - 'gcloud', - 'auth', - 'print-identity-token', + 'gcloud', + 'auth', + 'print-identity-token', ] auth_result = subprocess.run( - auth_command, - universal_newlines=True, - capture_output=True, + auth_command, + universal_newlines=True, + capture_output=True, ) if auth_result.returncode != 0: - print('Auth failed:\nstdout:\n%s\nstderr:\n%s' % (auth_result.stdout, auth_result.stderr)) + print( + 'Auth failed:\nstdout:\n%s\nstderr:\n%s' % + (auth_result.stdout, auth_result.stderr) + ) return 1 auth_token = auth_result.stdout.rstrip() for builder in builders: if args.force_upload: - params = ('{"Commit": "%s", "Builder": "%s", "Repo": "engine", "Properties": {"force_upload":true}}' - % (args.commit, builder)) + params = ( + '{"Commit": "%s", "Builder": "%s", "Repo": "engine", "Properties": {"force_upload":true}}' + % (args.commit, builder) + ) else: - params = '{"Commit": "%s", "Builder": "%s", "Repo": "engine"}' % (args.commit, builder) + params = '{"Commit": "%s", "Builder": "%s", "Repo": "engine"}' % ( + args.commit, builder + ) curl_command = [ - 'curl', - 'http://flutter-dashboard.appspot.com/api/reset-prod-task', - "-d %s" % params, - '-H', - 'X-Flutter-IdToken: %s' % auth_token, + 'curl', + 'http://flutter-dashboard.appspot.com/api/reset-prod-task', + "-d %s" % params, + '-H', + 'X-Flutter-IdToken: %s' % auth_token, ] if args.dry_run: print('Running: %s' % ' '.join(curl_command)) diff --git a/tools/pub_get_offline.py b/tools/pub_get_offline.py index 4795871e40412..21bdb1421a9fb 100644 --- a/tools/pub_get_offline.py +++ b/tools/pub_get_offline.py @@ -15,22 +15,22 @@ import sys ALL_PACKAGES = [ - os.path.join("src", "flutter", "ci"), - os.path.join("src", "flutter", "flutter_frontend_server"), - os.path.join("src", "flutter", "shell", "vmservice"), - os.path.join("src", "flutter", "testing", "benchmark"), - os.path.join("src", "flutter", "testing", "dart"), - os.path.join("src", "flutter", "testing", "litetest"), - os.path.join("src", "flutter", "testing", "android_background_image"), - os.path.join("src", "flutter", "testing", "scenario_app"), - os.path.join("src", "flutter", "testing", "smoke_test_failure"), - os.path.join("src", "flutter", "testing", "symbols"), - os.path.join("src", "flutter", "tools", "api_check"), - os.path.join("src", "flutter", "tools", "android_lint"), - os.path.join("src", "flutter", "tools", "clang_tidy"), - os.path.join("src", "flutter", "tools", "const_finder"), - os.path.join("src", "flutter", "tools", "githooks"), - os.path.join("src", "flutter", "tools", "licenses"), + os.path.join("src", "flutter", "ci"), + os.path.join("src", "flutter", "flutter_frontend_server"), + os.path.join("src", "flutter", "shell", "vmservice"), + os.path.join("src", "flutter", "testing", "benchmark"), + os.path.join("src", "flutter", "testing", "dart"), + os.path.join("src", "flutter", "testing", "litetest"), + os.path.join("src", "flutter", "testing", "android_background_image"), + os.path.join("src", "flutter", "testing", "scenario_app"), + os.path.join("src", "flutter", "testing", "smoke_test_failure"), + os.path.join("src", "flutter", "testing", "symbols"), + os.path.join("src", "flutter", "tools", "api_check"), + os.path.join("src", "flutter", "tools", "android_lint"), + os.path.join("src", "flutter", "tools", "clang_tidy"), + os.path.join("src", "flutter", "tools", "const_finder"), + os.path.join("src", "flutter", "tools", 
"githooks"), + os.path.join("src", "flutter", "tools", "licenses"), ] @@ -38,8 +38,10 @@ def FetchPackage(pub, package): try: subprocess.check_output(pub, cwd=package, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as err: - print("'%s' failed in '%s' with status %d:\n%s" % - (' '.join(pub), package, err.returncode, err.output)) + print( + "'%s' failed in '%s' with status %d:\n%s" % + (' '.join(pub), package, err.returncode, err.output) + ) return 1 return 0 @@ -57,18 +59,27 @@ def CheckPackage(package): print("Error: package '%s' was fetched from pub" % package_name) pub_count = pub_count + 1 if pub_count > 0: - print("Error: %d packages were fetched from pub for %s" % (pub_count, package)) - print("Please fix the pubspec.yaml for %s " - "so that all dependencies are path dependencies" % package) + print( + "Error: %d packages were fetched from pub for %s" % + (pub_count, package) + ) + print( + "Please fix the pubspec.yaml for %s " + "so that all dependencies are path dependencies" % package + ) return pub_count def Main(): - leading = os.path.join("src", "third_party", "dart", "tools", "sdks", "dart-sdk", "bin") + leading = os.path.join( + "src", "third_party", "dart", "tools", "sdks", "dart-sdk", "bin" + ) dart = "dart" if os.name == "nt": dart = "dart.exe" - pubcmd = [os.path.abspath(os.path.join(leading, dart)), "pub", "get", "--offline"] + pubcmd = [ + os.path.abspath(os.path.join(leading, dart)), "pub", "get", "--offline" + ] pub_count = 0 for package in ALL_PACKAGES: diff --git a/tools/yapf.sh b/tools/yapf.sh new file mode 100755 index 0000000000000..af6e7021ddcc1 --- /dev/null +++ b/tools/yapf.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# Copyright 2013 The Flutter Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates objc docs for Flutter iOS libraries. + +set -e + +# On Mac OS, readlink -f doesn't work, so follow_links traverses the path one +# link at a time, and then cds into the link destination and find out where it +# ends up. +# +# The function is enclosed in a subshell to avoid changing the working directory +# of the caller. +function follow_links() ( + cd -P "$(dirname -- "$1")" + file="$PWD/$(basename -- "$1")" + while [[ -h "$file" ]]; do + cd -P "$(dirname -- "$file")" + file="$(readlink -- "$file")" + cd -P "$(dirname -- "$file")" + file="$PWD/$(basename -- "$file")" + done + echo "$file" +) + +SCRIPT_DIR=$(follow_links "$(dirname -- "${BASH_SOURCE[0]}")") +SRC_DIR="$(cd "$SCRIPT_DIR/../.."; pwd -P)" +YAPF_DIR="$(cd "$SRC_DIR/third_party/yapf"; pwd -P)" + +PYTHONPATH="$YAPF_DIR" python3 "$YAPF_DIR/yapf" "$@"