# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
# some enhancements since the commands on Mac are slightly different than on
# Linux.

import("//build/config/apple/symbols.gni")
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/coverage/coverage.gni")
import("//build/config/rust.gni")
import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")

# TODO(crbug.com/1370527): This import is required to detect whether the
# build is for the catalyst environment in order to disable the hermetic
# swift compiler (as it does not include support for catalyst). Remove it
# once the support is available.
if (is_ios) {
  import("//build/config/ios/config.gni")
}

assert((target_os == "ios" && host_os == "mac") || host_os != "win")

declare_args() {
  # This controls whether whole module optimization is enabled when building
  # Swift modules. If enabled, the compiler will compile the module as one
  # unit, generating just one single object file. Otherwise, it will generate
  # one object file per .swift file. If unspecified, will default to "true"
  # for official builds, and "false" for all other builds.
  swift_whole_module_optimization = -1

  # If unspecified, will use the toolchain downloaded via deps.
  swift_toolchain_path = -1
}

# TODO(crbug.com/1370527): Remove this and replace with `build_with_chromium`
# once the support for catalyst is available in the hermetic swift compiler.
_can_use_hermetic_swift =
    build_with_chromium && is_ios && target_environment != "catalyst"

if (swift_toolchain_path == -1) {
  if (_can_use_hermetic_swift) {
    # Version of the hermetic compiler. Needs to be updated when a new version
    # of the compiler is rolled to ensure that all outputs are regenerated. It
    # must be kept in sync with the `version` of `third_party/swift-toolchain`
    # in //DEPS.
    swiftc_version = "swift-5.7-release"

    # Use the hermetic swift toolchain.
    swift_toolchain_path = "//third_party/swift-toolchain/"
  } else {
    swift_toolchain_path = ""
  }
}

if (swift_whole_module_optimization == -1) {
  swift_whole_module_optimization = is_official_build
}

# When implementing tools using Python scripts, a TOOL_VERSION=N env
# variable is placed in front of the command. The N should be incremented
# whenever the script is changed, so that the build system rebuilds all
# edges that utilize the script. Ideally this should be changed to use
# proper input-dirty checking, but that could be expensive. Instead, use a
# script to get the tool scripts' modification time to use as the version.
# This won't cause a re-generation of GN files when the tool script changes
# but it will cause edges to be marked as dirty if the ninja files are
# regenerated. See https://crbug.com/619083 for details. A proper fix
# would be to have inputs to tools (https://crbug.com/621119).
tool_versions =
    exec_script("get_tool_mtime.py",
                rebase_path([
                              "//build/toolchain/apple/filter_libtool.py",
                              "//build/toolchain/apple/linker_driver.py",
                              "//build/toolchain/ios/compile_xcassets.py",
                              "//build/toolchain/ios/swiftc.py",
                            ],
                            root_build_dir),
                "trim scope")
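
# For illustration only: the arguments declared above can be overridden in
# args.gn. The values below are hypothetical examples, not defaults.
#
#   # args.gn
#   swift_whole_module_optimization = true
#   swift_toolchain_path = ""  # Skips -swift-toolchain-path below, so the
#                              # swiftc wrapper uses its default toolchain.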
This is titled "single_apple_toolchain" # because it makes exactly one toolchain. Callers will normally want to # invoke instead "apple_toolchain" which may make an additional toolchain # without sanitizers. template("single_apple_toolchain") { toolchain(target_name) { # When invoking this toolchain not as the default one, these args will be # passed to the build. They are ignored when this is the default toolchain. assert(defined(invoker.toolchain_args), "Toolchains must declare toolchain_args") toolchain_args = { # Populate toolchain args from the invoker. forward_variables_from(invoker.toolchain_args, "*") # The host toolchain value computed by the default toolchain's setup # needs to be passed through unchanged to all secondary toolchains to # ensure that it's always the same, regardless of the values that may be # set on those toolchains. host_toolchain = host_toolchain # Similarly for the host toolchain which can be used to make .dylibs # that will successfully load into prebuilt tools. host_toolchain_no_sanitizers = host_toolchain_no_sanitizers } # When the invoker has explicitly overridden use_goma or cc_wrapper in the # toolchain args, use those values, otherwise default to the global one. # This works because the only reasonable override that toolchains might # supply for these values are to force-disable them. if (defined(toolchain_args.use_remoteexec)) { toolchain_uses_remoteexec = toolchain_args.use_remoteexec } else { toolchain_uses_remoteexec = use_remoteexec } if (defined(toolchain_args.use_goma)) { toolchain_uses_goma = toolchain_args.use_goma } else { toolchain_uses_goma = use_goma } if (defined(toolchain_args.cc_wrapper)) { toolchain_cc_wrapper = toolchain_args.cc_wrapper } else { toolchain_cc_wrapper = cc_wrapper } assert(!(toolchain_uses_remoteexec && toolchain_uses_goma), "Goma and re-client can't be used together.") assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec), "re-client and cc_wrapper can't be used together.") assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma), "Goma and cc_wrapper can't be used together.") if (defined(toolchain_args.use_lld)) { toolchain_uses_lld = toolchain_args.use_lld } else { toolchain_uses_lld = use_lld } # The value of all global variables (such as `is_component_build`) is the # one from the default toolchain when evaluating a secondary toolchain # (see https://crbug.com/gn/286). This mean that the value may change when # evaluating target/configs in the new toolchain if the variable default # value depends on variable set in `toolchain_args`. # # For this reason, "ios" needs to override `is_component_build` as its # default value depends on `current_os`. Use the overridden value if it # is set in `toolchain_args`. if (defined(toolchain_args.is_component_build)) { toolchain_is_component_build = toolchain_args.is_component_build } else { toolchain_is_component_build = is_component_build } prefix = rebase_path("$clang_base_path/bin/", root_build_dir) _cc = "${prefix}clang" _cxx = "${prefix}clang++" swiftmodule_switch = "-Wl,-add_ast_path," # Compute the compiler prefix. if (toolchain_uses_remoteexec) { if (defined(toolchain_args.rbe_cc_cfg_file)) { toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file } else { toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file } # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true. 
compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} " } else if (toolchain_uses_goma) { assert(toolchain_cc_wrapper == "", "Goma and cc_wrapper can't be used together.") compiler_prefix = "$goma_dir/gomacc " if (use_goma_rust) { rust_compiler_prefix = compiler_prefix } } else if (toolchain_cc_wrapper != "") { compiler_prefix = toolchain_cc_wrapper + " " } else { compiler_prefix = "" } cc = compiler_prefix + _cc cxx = compiler_prefix + _cxx ld = _cxx # Set the explicit search path for clang++ so it uses the right linker # binary. if (!toolchain_uses_lld) { ld += " -B " + invoker.bin_path } if (defined(toolchain_args.coverage_instrumentation_input_file)) { toolchain_coverage_instrumentation_input_file = toolchain_args.coverage_instrumentation_input_file } else { toolchain_coverage_instrumentation_input_file = coverage_instrumentation_input_file } _use_clang_coverage_wrapper = toolchain_coverage_instrumentation_input_file != "" if (_use_clang_coverage_wrapper) { _coverage_wrapper = rebase_path("//build/toolchain/clang_code_coverage_wrapper.py", root_build_dir) + " --files-to-instrument=" + rebase_path(toolchain_coverage_instrumentation_input_file, root_build_dir) + " --target-os=" + target_os cc = "$python_path $_coverage_wrapper ${cc}" cxx = "$python_path $_coverage_wrapper ${cxx}" } linker_driver = "TOOL_VERSION=${tool_versions.linker_driver} " + rebase_path("//build/toolchain/apple/linker_driver.py", root_build_dir) # Specify an explicit path for the strip binary. _strippath = invoker.bin_path + "strip" _installnametoolpath = invoker.bin_path + "install_name_tool" linker_driver += " -Wcrl,strippath,${_strippath} -Wcrl,installnametoolpath,${_installnametoolpath}" _enable_dsyms = enable_dsyms _save_unstripped_output = save_unstripped_output # Make these apply to all tools below. lib_switch = "-l" lib_dir_switch = "-L" # Object files go in this directory. Use label_name instead of # target_output_name since labels will generally have no spaces and will be # unique in the directory. object_subdir = "{{target_out_dir}}/{{label_name}}" # If dSYMs are enabled, this flag will be added to the link tools. if (_enable_dsyms) { dsym_switch = " -Wcrl,dsym,{{root_out_dir}} " dsym_switch += "-Wcrl,dsymutilpath," + rebase_path("//tools/clang/dsymutil/bin/dsymutil", root_build_dir) + " " dsym_output_dir = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM" dsym_output = [ "$dsym_output_dir/Contents/Info.plist", "$dsym_output_dir/Contents/Resources/DWARF/" + "{{target_output_name}}{{output_extension}}", ] } else { dsym_switch = "" } if (_save_unstripped_output) { _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped" } if (toolchain_has_rust) { if (!defined(rust_compiler_prefix)) { rust_compiler_prefix = "" } rustc_bin = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) rustc = "$rust_compiler_prefix${rustc_bin}" rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") tool("rust_staticlib") { libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = "$libname.rsp" depfile = "$libname.d" default_output_extension = ".a" output_prefix = "lib" default_output_dir = "{{root_out_dir}}" description = "RUST(STATICLIB) {{output}}" outputs = [ libname ] # TODO(danakj): When `!toolchain_uses_lld` do we need to specify a path # to libtool like the "alink" rule? 
rspfile_content = "{{rustdeps}} {{externs}}" command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"$_cxx\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" rust_sysroot = rust_sysroot_relative_to_out } tool("rust_rlib") { # We must always prefix with `lib` even if the library already starts # with that prefix or else our stdlib is unable to find libc.rlib (or # actually liblibc.rlib). rlibname = "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" depfile = "$rlibname.d" # Do not use rsp files in this (common) case because they occupy the # ninja main thread, and {{rlibs}} have shorter command lines than # fully linked targets. default_output_extension = ".rlib" # This is prefixed unconditionally in `rlibname`. # output_prefix = "lib" default_output_dir = "{{root_out_dir}}" description = "RUST {{output}}" outputs = [ rlibname ] command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"$_cxx\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}" rust_sysroot = rust_sysroot_relative_to_out } tool("rust_bin") { exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = "$exename.rsp" depfile = "$exename.d" pool = "//build/toolchain:link_pool($default_toolchain)" # TODO(danakj): solink can generate TOC files for re-exporting library # symbols, and we should do the same here. default_output_dir = "{{root_out_dir}}" description = "RUST(BIN) {{output}}" outputs = [ exename ] # TODO(danakj): Support dsym_switch like C++ targets. # link_command += dsym_switch # if (_enable_dsyms) { # outputs += dsym_output # } # if (_save_unstripped_output) { # outputs += [ _unstripped_output ] # } rspfile_content = "{{rustdeps}} {{externs}}" command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" rust_sysroot = rust_sysroot_relative_to_out } tool("rust_cdylib") { dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = "$dllname.rsp" depfile = "$dllname.d" pool = "//build/toolchain:link_pool($default_toolchain)" # TODO(danakj): solink can generate TOC files for re-exporting library # symbols, and we should do the same here. default_output_extension = ".dylib" output_prefix = "lib" default_output_dir = "{{root_out_dir}}" description = "RUST(CDYLIB) {{output}}" outputs = [ dllname ] # TODO(danakj): Support dsym_switch like C++ targets. # link_command += dsym_switch # if (_enable_dsyms) { # outputs += dsym_output # } # if (_save_unstripped_output) { # outputs += [ _unstripped_output ] # } rspfile_content = "{{rustdeps}} {{externs}}" command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" rust_sysroot = rust_sysroot_relative_to_out } tool("rust_macro") { dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = "$dllname.rsp" depfile = "$dllname.d" pool = "//build/toolchain:link_pool($default_toolchain)" # TODO(danakj): solink can generate TOC files for re-exporting library # symbols, and we should do the same here. 
default_output_extension = ".dylib" output_prefix = "lib" default_output_dir = "{{root_out_dir}}" description = "RUST(MACRO) {{output}}" outputs = [ dllname ] # TODO(danakj): Support dsym_switch like C++ targets. # link_command += dsym_switch # if (_enable_dsyms) { # outputs += dsym_output # } # if (_save_unstripped_output) { # outputs += [ _unstripped_output ] # } rspfile_content = "{{rustdeps}} {{externs}}" command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" rust_sysroot = rust_sysroot_relative_to_out } } tool("cc") { depfile = "{{output}}.d" precompiled_header_type = "gcc" command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "CC {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] } tool("cxx") { depfile = "{{output}}.d" precompiled_header_type = "gcc" command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "CXX {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] } tool("asm") { # For GCC we can just use the C compiler to compile assembly. depfile = "{{output}}.d" command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "ASM {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] } tool("objc") { depfile = "{{output}}.d" precompiled_header_type = "gcc" command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "OBJC {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] } tool("objcxx") { depfile = "{{output}}.d" precompiled_header_type = "gcc" command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "OBJCXX {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] } tool("alink") { rspfile = "{{output}}.rsp" rspfile_content = "{{inputs}}" if (!toolchain_uses_lld) { script = rebase_path("//build/toolchain/apple/filter_libtool.py", root_build_dir) # Specify explicit path for libtool. libtool = invoker.bin_path + "libtool" command = "rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} $python_path $script $libtool -static -D {{arflags}} -o {{output}} @$rspfile" description = "LIBTOOL-STATIC {{output}}" } else { ar = "${prefix}llvm-ar" command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @$rspfile" # Remove the output file first so that ar doesn't try to modify the # existing file. command = "rm -f {{output}} && $command" description = "AR {{output}}" } outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ] default_output_dir = "{{target_out_dir}}" default_output_extension = ".a" output_prefix = "lib" } tool("solink") { # E.g. 
"./libfoo.dylib": dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = dylib + ".rsp" pool = "//build/toolchain:link_pool($default_toolchain)" # These variables are not built into GN but are helpers that implement # (1) linking to produce a .dylib, (2) extracting the symbols from that # file to a temporary file, (3) if the temporary file has differences from # the existing .TOC file, overwrite it, otherwise, don't change it. # # As a special case, if the library reexports symbols from other dynamic # libraries, we always update the .TOC and skip the temporary file and # diffing steps, since that library always needs to be re-linked. tocname = dylib + ".TOC" temporary_tocname = dylib + ".tmp" # Use explicit paths to binaries. The binaries present on the default # search path in /usr/bin are thin wrappers around xcrun, which requires a # full CommandLineTools or Xcode install, and still may not choose the # appropriate binary if there are multiple installs. if (host_os == "mac") { nm = invoker.bin_path + "nm" otool = invoker.bin_path + "otool" } else { nm = "${prefix}llvm-nm" otool = "${prefix}llvm-otool" } does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || $otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB" link_command = "$linker_driver $ld -shared " if (toolchain_is_component_build) { link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" " } link_command += dsym_switch link_command += "{{ldflags}} -o \"$dylib\" \"@$rspfile\"" replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\"" extract_toc_command = "{ $otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; $nm -gPp \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }" command = "if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi" rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" description = "SOLINK {{output}}" # Use this for {{output_extension}} expansions unless a target manually # overrides it (in which case {{output_extension}} will be what the target # specifies). default_output_dir = "{{root_out_dir}}" default_output_extension = ".dylib" output_prefix = "lib" # Since the above commands only updates the .TOC file when it changes, ask # Ninja to check if the timestamp actually changed to know if downstream # dependencies should be recompiled. restat = true # Tell GN about the output files. It will link to the dylib but use the # tocname for dependency management. outputs = [ dylib, tocname, ] link_output = dylib depend_output = tocname if (_enable_dsyms) { outputs += dsym_output } if (_save_unstripped_output) { outputs += [ _unstripped_output ] } } tool("solink_module") { # E.g. "./libfoo.so": sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = sofile + ".rsp" pool = "//build/toolchain:link_pool($default_toolchain)" link_command = "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" \"@$rspfile\"" link_command += dsym_switch command = link_command rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" description = "SOLINK_MODULE {{output}}" # Use this for {{output_extension}} expansions unless a target manually # overrides it (in which case {{output_extension}} will be what the target # specifies). 
default_output_dir = "{{root_out_dir}}" default_output_extension = ".so" outputs = [ sofile ] if (_enable_dsyms) { outputs += dsym_output } if (_save_unstripped_output) { outputs += [ _unstripped_output ] } } tool("link") { outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" rspfile = "$outfile.rsp" pool = "//build/toolchain:link_pool($default_toolchain)" command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" \"@$rspfile\"" description = "LINK $outfile" rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" outputs = [ outfile ] if (_enable_dsyms) { outputs += dsym_output } if (_save_unstripped_output) { outputs += [ _unstripped_output ] } default_output_dir = "{{root_out_dir}}" } # These two are really entirely generic, but have to be repeated in # each toolchain because GN doesn't allow a template to be used here. # See //build/toolchain/toolchain.gni for details. tool("stamp") { command = stamp_command description = stamp_description } tool("copy") { command = copy_command description = copy_description } tool("copy_bundle_data") { # copy_command use hardlink if possible but this does not work with # directories. Also when running EG2 tests from Xcode, Xcode tries to # copy some files into the application bundle which fails if source # and destination are hardlinked together. # # Instead use clonefile to copy the files which is as efficient as # hardlink but ensure the file have distinct metadata (thus avoid the # error with ditto, see https://crbug.com/1042182). if (host_os == "mac") { command = "rm -rf {{output}} && /bin/cp -Rc {{source}} {{output}}" } else { command = "rm -rf {{output}} && /bin/cp -Rld {{source}} {{output}}" } description = "COPY_BUNDLE_DATA {{source}} {{output}}" pool = "//build/toolchain/apple:bundle_pool($default_toolchain)" } # Swift is only used on iOS, not macOS. We want to minimize the number # of Xcode-based tools used by the macOS toolchain, so we intentionally # disallow future uses of Swift on macOS. https://crbug.com/965663. if (toolchain_args.current_os == "ios") { tool("swift") { _tool = rebase_path("//build/toolchain/ios/swiftc.py", root_build_dir) depfile = "{{target_out_dir}}/{{module_name}}.d" depsformat = "gcc" outputs = [ # The module needs to be the first output listed. The blank line after # the module is required to prevent `gn format` from changing the file # order. "{{target_gen_dir}}/{{module_name}}.swiftmodule", "{{target_gen_dir}}/{{target_output_name}}.h", "{{target_gen_dir}}/{{module_name}}.swiftdoc", "{{target_gen_dir}}/{{module_name}}.swiftsourceinfo", ] # Additional flags passed to the wrapper script but that are only # set conditionally. _extra_flags = "" if (swift_whole_module_optimization) { _extra_flags += " -whole-module-optimization" _objects_dir = "{{target_out_dir}}" outputs += [ "$_objects_dir/{{module_name}}.o" ] } else { _objects_dir = "{{target_out_dir}}/{{label_name}}" partial_outputs = [ "$_objects_dir/{{source_name_part}}.o" ] } _env_vars = "TOOL_VERSION=${tool_versions.swiftc}" if (invoker.sdk_developer_dir != "") { _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}" } # Starting with version 5.6, the Swift compiler will always # generates precompiled headers. In anterior version, it was # used when bridging headers and whole module optimisation # where enabled, and it could be disabled with the parameter # `-disable-bridging-pch`. # # The precompiled headers are binary files (i.e. 
        # Starting with version 5.6, the Swift compiler always generates
        # precompiled headers. In earlier versions, they were only generated
        # when bridging headers and whole module optimization were enabled,
        # and generation could be disabled with the parameter
        # `-disable-bridging-pch`.
        #
        # The precompiled headers are binary files (i.e. they are not
        # regular Objective-C header files and cannot be loaded as such).
        #
        # There is a hidden requirement that the compiler needs to
        # be told where to save those .pch files (via the parameter
        # `-pch-output-dir $dir`). If this parameter is not passed, the
        # compiler will silently write them to an incorrect location,
        # leading later passes to try to load those .pch files as either
        # regular header files (.h) or object files (.o), causing
        # compilation failures.
        #
        # List the directory where the precompiled header is generated
        # as an output, but do not list the .pch file itself. This is
        # because the names include two hashes (one corresponding to
        # the compiler revision, and the other probably derived from
        # the module itself) that are difficult to generate.
        #
        # Still, we want to avoid creating a directory that has the same
        # name as a file generated by another rule, so explicitly list
        # the directory in `outputs` so that gn can warn if it conflicts
        # with another output file.
        _pch_output_dir = "{{target_out_dir}}/{{module_name}}:pch/"
        outputs += [ _pch_output_dir ]

        # Include the version of the compiler on the command-line. This causes
        # `ninja` to consider all the compilation outputs to be dirty when the
        # version changes.
        if (defined(swiftc_version)) {
          _extra_flags += " -swiftc-version $swiftc_version"
        }

        # Include the version of Xcode on the command-line (if specified via
        # toolchain_args). This causes `ninja` to consider all the compilation
        # outputs to be dirty when the version changes.
        #
        # This is required because module dependencies sometimes change
        # between different versions of Xcode (e.g. when moving from Xcode 14
        # beta 6 to Xcode 14 RC). If the swiftmodules are not rebuilt when the
        # version changes, they may encode dependencies on frameworks that no
        # longer exist, ultimately causing linker failures.
        if (defined(toolchain_args.xcode_build)) {
          _extra_flags += " -xcode-version ${toolchain_args.xcode_build}"
        }

        if (swift_toolchain_path != "") {
          _extra_flags += " -swift-toolchain-path " +
                          rebase_path(swift_toolchain_path, root_build_dir)
        }

        # The Swift compiler assumes that the generated header will be used by
        # Objective-C code compiled with module support enabled (-fmodules).
        #
        # The import looks like this in the generated header:
        #
        #   #if __has_feature(modules)
        #   @import UIKit;
        #   #endif
        #
        # As Chromium code is compiled without support for modules (i.e. the
        # code is compiled without `-fmodules`), the dependent modules are not
        # imported from the generated header, which causes compilation
        # failures if the client code does not first import the required
        # modules (see https://crbug.com/1316061 for details).
        #
        # Secondly, clang ToT always returns `1` when `__has_feature(modules)`
        # is evaluated, even if building with `-fno-modules` when building
        # with `-std=c++20` (see https://crbug.com/1284275 for details). This
        # causes the `@import` lines to be reached and the build to fail
        # (since the support for modules is not enabled).
        #
        # Instruct swiftc.py to rewrite the generated header to use the old
        # import pre-processor instructions (#import <UIKit/UIKit.h>) to work
        # around those two issues.
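        #
        # For illustration only (the module name is an example, not taken
        # from the build): after the rewrite, the guarded `@import UIKit;`
        # snippet above is expected to become a plain pre-processor include
        # such as
        #
        #   #import <UIKit/UIKit.h>
        #
        # which compiles even without `-fmodules`.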
_extra_flags += " -fix-module-imports" command = "$_env_vars $python_path $_tool -module-name {{module_name}} " + "-root-dir " + rebase_path("//", root_build_dir) + " " + "-object-dir $_objects_dir -pch-output-dir $_pch_output_dir " + "-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " + "-header-path {{target_gen_dir}}/{{target_output_name}}.h " + "-depfile {{target_out_dir}}/{{module_name}}.d " + "-bridge-header {{bridge_header}} $_extra_flags " + "{{swiftflags}} {{include_dirs}} {{module_dirs}} {{inputs}}" } } # xcassets are only used on iOS, not macOS. We want to minimize the number # of Xcode-based tools used by the macOS toolchain, so we intentionally # disallow future uses of xcassets on macOS. https://crbug.com/965663. if (toolchain_args.current_os == "ios") { tool("compile_xcassets") { _tool = rebase_path("//build/toolchain/ios/compile_xcassets.py", root_build_dir) _env_vars = "TOOL_VERSION=${tool_versions.compile_xcassets}" if (invoker.sdk_developer_dir != "") { _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}" } command = "$_env_vars $python_path $_tool " + "-p '${toolchain_args.current_os}' " + "-e '${invoker.target_environment}' " + "-t '${invoker.deployment_target}' " + "-T '{{bundle_product_type}}' " + "-P '{{bundle_partial_info_plist}}' " + "-o {{output}} {{inputs}}" description = "COMPILE_XCASSETS {{output}}" pool = "//build/toolchain/apple:bundle_pool($default_toolchain)" } } tool("action") { pool = "//build/toolchain:action_pool($default_toolchain)" } } } # Makes a single Apple toolchain, or possibly two if we need a # sanitizer-free equivalent. template("apple_toolchain") { single_apple_toolchain(target_name) { assert(defined(invoker.toolchain_args), "Toolchains must declare toolchain_args") forward_variables_from(invoker, "*", [ "visibility", "test_only", ]) # No need to forward visibility and test_only as they apply to targets not # toolchains, but presubmit checks require that we explicitly exclude them } if (using_sanitizer) { # Make an additional toolchain with no sanitizers. single_apple_toolchain("${target_name}_no_sanitizers") { assert(defined(invoker.toolchain_args), "Toolchains must declare toolchain_args") forward_variables_from(invoker, "*", [ "toolchain_args", "visibility", "test_only", ]) toolchain_args = { # Populate toolchain args from the invoker. forward_variables_from(invoker.toolchain_args, "*") toolchain_disables_sanitizers = true } } } }