diff --git a/src/main/java/com/google/devtools/build/lib/analysis/BUILD b/src/main/java/com/google/devtools/build/lib/analysis/BUILD
index b630ac7daeac91..cd72f4c6dc8846 100644
--- a/src/main/java/com/google/devtools/build/lib/analysis/BUILD
+++ b/src/main/java/com/google/devtools/build/lib/analysis/BUILD
@@ -336,6 +336,7 @@ java_library(
         ":package_specification_provider",
         ":platform_options",
         ":provider_collection",
+        ":repo_mapping_manifest_action",
         ":required_config_fragments_provider",
         ":resolved_toolchain_context",
         ":rule_configured_object_value",
@@ -983,6 +984,26 @@ java_library(
     ],
 )
 
+java_library(
+    name = "repo_mapping_manifest_action",
+    srcs = ["RepoMappingManifestAction.java"],
+    deps = [
+        ":actions/abstract_file_write_action",
+        ":actions/deterministic_writer",
+        "//src/main/java/com/google/devtools/build/lib/actions",
+        "//src/main/java/com/google/devtools/build/lib/actions:artifacts",
+        "//src/main/java/com/google/devtools/build/lib/actions:commandline_item",
+        "//src/main/java/com/google/devtools/build/lib/cmdline",
+        "//src/main/java/com/google/devtools/build/lib/collect/nestedset",
+        "//src/main/java/com/google/devtools/build/lib/packages",
+        "//src/main/java/com/google/devtools/build/lib/util",
+        "//src/main/java/com/google/devtools/build/lib/vfs:ospathpolicy",
+        "//src/main/java/net/starlark/java/eval",
+        "//third_party:guava",
+        "//third_party:jsr305",
+    ],
+)
+
 java_library(
     name = "required_config_fragments_provider",
     srcs = ["RequiredConfigFragmentsProvider.java"],
diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java b/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java
new file mode 100644
index 00000000000000..791d68de2effae
--- /dev/null
+++ b/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java
@@ -0,0 +1,147 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package com.google.devtools.build.lib.analysis;
+
+import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static java.nio.charset.StandardCharsets.ISO_8859_1;
+import static java.util.Comparator.comparing;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSortedMap;
+import com.google.devtools.build.lib.actions.ActionExecutionContext;
+import com.google.devtools.build.lib.actions.ActionKeyContext;
+import com.google.devtools.build.lib.actions.ActionOwner;
+import com.google.devtools.build.lib.actions.Artifact;
+import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
+import com.google.devtools.build.lib.actions.CommandLineExpansionException;
+import com.google.devtools.build.lib.actions.ExecException;
+import com.google.devtools.build.lib.analysis.actions.AbstractFileWriteAction;
+import com.google.devtools.build.lib.analysis.actions.DeterministicWriter;
+import com.google.devtools.build.lib.cmdline.RepositoryMapping;
+import com.google.devtools.build.lib.cmdline.RepositoryName;
+import com.google.devtools.build.lib.collect.nestedset.NestedSet;
+import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
+import com.google.devtools.build.lib.collect.nestedset.Order;
+import com.google.devtools.build.lib.packages.Package;
+import com.google.devtools.build.lib.util.Fingerprint;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.UUID;
+import javax.annotation.Nullable;
+import net.starlark.java.eval.EvalException;
+
+/**
+ * Creates a manifest file describing the repos and mappings relevant for a runfile tree.
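+ *
+ * <p>Each line of the manifest is a comma-separated triple: the canonical name of the repo in
+ * which an apparent repo name is used, the apparent name itself, and the canonical name of the
+ * repo it maps to (the workspace name is written for the main repo, whose canonical name is the
+ * empty string). Only mappings that point at repos contributing runfiles are emitted, sorted by
+ * source repo and then by apparent name. As an illustrative sketch (repo names borrowed from the
+ * tests; the real contents depend on the build):
+ *
+ * <pre>
+ * ,bbb,bbb~1.0
+ * bbb~1.0,ddd,ddd~2.0
+ * </pre>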
+ */
+public class RepoMappingManifestAction extends AbstractFileWriteAction {
+  private static final UUID MY_UUID = UUID.fromString("458e351c-4d30-433d-b927-da6cddd4737f");
+
+  private final NestedSet<Package> transitivePackages;
+  private final NestedSet<Artifact> runfilesArtifacts;
+  private final String workspaceName;
+
+  public RepoMappingManifestAction(
+      ActionOwner owner,
+      Artifact output,
+      NestedSet<Package> transitivePackages,
+      NestedSet<Artifact> runfilesArtifacts,
+      String workspaceName) {
+    super(owner, NestedSetBuilder.emptySet(Order.STABLE_ORDER), output, /*makeExecutable=*/ false);
+    this.transitivePackages = transitivePackages;
+    this.runfilesArtifacts = runfilesArtifacts;
+    this.workspaceName = workspaceName;
+  }
+
+  @Override
+  public String getMnemonic() {
+    return "RepoMappingManifest";
+  }
+
+  @Override
+  protected String getRawProgressMessage() {
+    return "writing repo mapping manifest for " + getOwner().getLabel();
+  }
+
+  @Override
+  protected void computeKey(
+      ActionKeyContext actionKeyContext,
+      @Nullable ArtifactExpander artifactExpander,
+      Fingerprint fp)
+      throws CommandLineExpansionException, EvalException, InterruptedException {
+    fp.addUUID(MY_UUID);
+    actionKeyContext.addNestedSetToFingerprint(fp, transitivePackages);
+    actionKeyContext.addNestedSetToFingerprint(fp, runfilesArtifacts);
+    fp.addString(workspaceName);
+  }
+
+  @Override
+  public DeterministicWriter newDeterministicWriter(ActionExecutionContext ctx)
+      throws InterruptedException, ExecException {
+    return out -> {
+      Writer writer = new BufferedWriter(new OutputStreamWriter(out, ISO_8859_1));
+
+      ImmutableSet<RepositoryName> reposContributingRunfiles =
+          runfilesArtifacts.toList().stream()
+              .filter(a -> a.getOwner() != null)
+              .map(a -> a.getOwner().getRepository())
+              .collect(toImmutableSet());
+      Map<RepositoryName, RepositoryMapping> reposAndMappings = new HashMap<>();
+      for (Package pkg : transitivePackages.toList()) {
+        reposAndMappings.putIfAbsent(
+            pkg.getPackageIdentifier().getRepository(), pkg.getRepositoryMapping());
+      }
+      for (Entry<RepositoryName, RepositoryMapping> repoAndMapping :
+          ImmutableSortedMap.copyOf(reposAndMappings, comparing(RepositoryName::getName))
+              .entrySet()) {
+        writeRepoMapping(
+            writer, reposContributingRunfiles, repoAndMapping.getKey(), repoAndMapping.getValue());
+      }
+      writer.flush();
+    };
+  }
+
+  private void writeRepoMapping(
+      Writer writer,
+      ImmutableSet<RepositoryName> reposContributingRunfiles,
+      RepositoryName repoName,
+      RepositoryMapping repoMapping)
+      throws IOException {
+    for (Entry<String, RepositoryName> mappingEntry :
+        ImmutableSortedMap.copyOf(repoMapping.entries()).entrySet()) {
+      if (mappingEntry.getKey().isEmpty()) {
+        // The apparent repo name can only be empty for the main repo. We skip this line.
+        continue;
+      }
+      if (!reposContributingRunfiles.contains(mappingEntry.getValue())) {
+        // We only write entries for repos that actually contribute runfiles.
+        continue;
+      }
+      writer.write(repoName.getName());
+      writer.write(',');
+      writer.write(mappingEntry.getKey());
+      writer.write(',');
+      if (mappingEntry.getValue().isMain()) {
+        // The canonical name of the main repo is the empty string, but we use the "workspace
+        // name" as the name of the directory under the runfiles tree for it.
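+        // For example (hypothetical names), a mapping from the apparent name "aaa_ws" back to
+        // the main repo is written as "<source repo>,aaa_ws,<workspace name>" rather than ending
+        // in an empty canonical name.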
+        writer.write(workspaceName);
+      } else {
+        writer.write(mappingEntry.getValue().getName());
+      }
+      writer.write(System.lineSeparator());
+    }
+  }
+}
diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java b/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java
index ef668a7974a8be..7f9bf6ae295454 100644
--- a/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java
+++ b/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java
@@ -28,6 +28,7 @@
 import com.google.devtools.build.lib.collect.nestedset.NestedSet;
 import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
 import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
+import com.google.devtools.build.lib.packages.Package;
 import com.google.devtools.build.lib.packages.TargetUtils;
 import com.google.devtools.build.lib.packages.Type;
 import com.google.devtools.build.lib.vfs.FileSystemUtils;
@@ -76,11 +77,13 @@ public final class RunfilesSupport {
   private static final String RUNFILES_DIR_EXT = ".runfiles";
   private static final String INPUT_MANIFEST_EXT = ".runfiles_manifest";
   private static final String OUTPUT_MANIFEST_BASENAME = "MANIFEST";
+  private static final String REPO_MAPPING_MANIFEST_BASENAME = "_repo_mapping";
 
   private final Runfiles runfiles;
 
   private final Artifact runfilesInputManifest;
   private final Artifact runfilesManifest;
+  private final Artifact repoMappingManifest;
   private final Artifact runfilesMiddleman;
   private final Artifact owningExecutable;
   private final boolean buildRunfileLinks;
@@ -132,8 +135,11 @@ private static RunfilesSupport create(
       runfilesInputManifest = null;
       runfilesManifest = null;
     }
+    Artifact repoMappingManifest =
+        createRepoMappingManifestAction(ruleContext, runfiles, owningExecutable);
     Artifact runfilesMiddleman =
-        createRunfilesMiddleman(ruleContext, owningExecutable, runfiles, runfilesManifest);
+        createRunfilesMiddleman(
+            ruleContext, owningExecutable, runfiles, runfilesManifest, repoMappingManifest);
     boolean runfilesEnabled = ruleContext.getConfiguration().runfilesEnabled();
@@ -141,6 +147,7 @@
         runfiles,
         runfilesInputManifest,
         runfilesManifest,
+        repoMappingManifest,
         runfilesMiddleman,
         owningExecutable,
         buildRunfileLinks,
@@ -153,6 +160,7 @@ private RunfilesSupport(
       Runfiles runfiles,
       Artifact runfilesInputManifest,
       Artifact runfilesManifest,
+      Artifact repoMappingManifest,
      Artifact runfilesMiddleman,
       Artifact owningExecutable,
       boolean buildRunfileLinks,
@@ -162,6 +170,7 @@ private RunfilesSupport(
     this.runfiles = runfiles;
     this.runfilesInputManifest = runfilesInputManifest;
     this.runfilesManifest = runfilesManifest;
+    this.repoMappingManifest = repoMappingManifest;
     this.runfilesMiddleman = runfilesMiddleman;
     this.owningExecutable = owningExecutable;
     this.buildRunfileLinks = buildRunfileLinks;
@@ -268,6 +277,16 @@ public Artifact getRunfilesManifest() {
     return runfilesManifest;
   }
 
+  /**
+   * Returns the foo.runfiles/_repo_mapping file if Bazel is run with transitive package tracking
+   * turned on (see {@code SkyframeExecutor#getForcedSingleSourceRootIfNoExecrootSymlinkCreation}).
+   * Otherwise, returns null.
+   */
+  @Nullable
+  public Artifact getRepoMappingManifest() {
+    return repoMappingManifest;
+  }
+
   /** Returns the root directory of the runfiles symlink farm; otherwise, returns null. */
   @Nullable
   public Path getRunfilesDirectory() {
@@ -327,12 +346,16 @@ private static Artifact createRunfilesMiddleman(
       ActionConstructionContext context,
       Artifact owningExecutable,
       Runfiles runfiles,
-      @Nullable Artifact runfilesManifest) {
+      @Nullable Artifact runfilesManifest,
+      Artifact repoMappingManifest) {
     NestedSetBuilder<Artifact> deps = NestedSetBuilder.stableOrder();
     deps.addTransitive(runfiles.getAllArtifacts());
     if (runfilesManifest != null) {
       deps.add(runfilesManifest);
     }
+    if (repoMappingManifest != null) {
+      deps.add(repoMappingManifest);
+    }
     return context
         .getAnalysisEnvironment()
         .getMiddlemanFactory()
@@ -495,4 +518,38 @@ public static Path inputManifestPath(Path runfilesDir) {
   public static Path outputManifestPath(Path runfilesDir) {
     return runfilesDir.getRelative(OUTPUT_MANIFEST_BASENAME);
   }
+
+  @Nullable
+  private static Artifact createRepoMappingManifestAction(
+      RuleContext ruleContext, Runfiles runfiles, Artifact owningExecutable) {
+    NestedSet<Package> transitivePackages =
+        ruleContext.getTransitivePackagesForRunfileRepoMappingManifest();
+    if (transitivePackages == null) {
+      // For environments where transitive packages are not tracked, we don't have external repos,
+      // so don't build the repo mapping manifest in such cases.
+      return null;
+    }
+
+    PathFragment executablePath =
+        (owningExecutable != null)
+            ? owningExecutable.getOutputDirRelativePath(
+                ruleContext.getConfiguration().isSiblingRepositoryLayout())
+            : ruleContext.getPackageDirectory().getRelative(ruleContext.getLabel().getName());
+    Artifact repoMappingManifest =
+        ruleContext.getDerivedArtifact(
+            executablePath
+                .replaceName(executablePath.getBaseName() + RUNFILES_DIR_EXT)
+                .getRelative(REPO_MAPPING_MANIFEST_BASENAME),
+            ruleContext.getBinDirectory());
+    ruleContext
+        .getAnalysisEnvironment()
+        .registerAction(
+            new RepoMappingManifestAction(
+                ruleContext.getActionOwner(),
+                repoMappingManifest,
+                transitivePackages,
+                runfiles.getAllArtifacts(),
+                ruleContext.getWorkspaceName()));
+    return repoMappingManifest;
+  }
 }
diff --git a/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryMapping.java b/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryMapping.java
index e504ff95f39d8c..0254f1d826a8a8 100644
--- a/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryMapping.java
+++ b/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryMapping.java
@@ -35,7 +35,8 @@ public abstract class RepositoryMapping {
   // Always fallback to the requested name
   public static final RepositoryMapping ALWAYS_FALLBACK =
       createAllowingFallback(ImmutableMap.of());
-  abstract ImmutableMap<String, RepositoryName> repositoryMapping();
+  /** Returns all the entries in this repo mapping. */
+  public abstract ImmutableMap<String, RepositoryName> entries();
 
   /**
    * The owner repo of this repository mapping. It is for providing useful debug information when
@@ -64,7 +65,7 @@ public static RepositoryMapping createAllowingFallback(
    */
   public RepositoryMapping withAdditionalMappings(Map<String, RepositoryName> additionalMappings) {
     HashMap<String, RepositoryName> allMappings = new HashMap<>(additionalMappings);
-    allMappings.putAll(repositoryMapping());
+    allMappings.putAll(entries());
     return new AutoValue_RepositoryMapping(ImmutableMap.copyOf(allMappings), ownerRepo());
   }
 
@@ -74,7 +75,7 @@ public RepositoryMapping withAdditionalMappings(Map addi
    * repo of the given additional mappings is ignored.
    */
   public RepositoryMapping withAdditionalMappings(RepositoryMapping additionalMappings) {
-    return withAdditionalMappings(additionalMappings.repositoryMapping());
+    return withAdditionalMappings(additionalMappings.entries());
   }
 
   /**
@@ -82,7 +83,7 @@ public RepositoryMapping withAdditionalMappings(RepositoryMapp
    * provided apparent repo name is assumed to be valid.
    */
   public RepositoryName get(String preMappingName) {
-    RepositoryName canonicalRepoName = repositoryMapping().get(preMappingName);
+    RepositoryName canonicalRepoName = entries().get(preMappingName);
     if (canonicalRepoName != null) {
       return canonicalRepoName;
     }
diff --git a/src/test/java/com/google/devtools/build/lib/analysis/BUILD b/src/test/java/com/google/devtools/build/lib/analysis/BUILD
index da2be4dce0d157..940d84fe7dc476 100644
--- a/src/test/java/com/google/devtools/build/lib/analysis/BUILD
+++ b/src/test/java/com/google/devtools/build/lib/analysis/BUILD
@@ -35,6 +35,7 @@ java_library(
         "JDKJavaLauncherRunfilesSupportTest.java",
         "PackageGroupBuildViewTest.java",
         "RuleConfiguredTargetTest.java",
+        "RunfilesRepoMappingManifestTest.java",
         "SourceManifestActionTest.java",
         "AnalysisFailureInfoTest.java",
     ],
 )
@@ -355,6 +356,28 @@ java_test(
     ],
 )
 
+java_test(
+    name = "RunfilesRepoMappingManifestTest",
+    srcs = ["RunfilesRepoMappingManifestTest.java"],
+    deps = [
+        "//src/main/java/com/google/devtools/build/lib/actions",
+        "//src/main/java/com/google/devtools/build/lib/analysis:blaze_directories",
+        "//src/main/java/com/google/devtools/build/lib/analysis:repo_mapping_manifest_action",
+        "//src/main/java/com/google/devtools/build/lib/bazel/bzlmod:resolution_impl",
+        "//src/main/java/com/google/devtools/build/lib/bazel/repository:repository_options",
+        "//src/main/java/com/google/devtools/build/lib/skyframe:precomputed_value",
+        "//src/main/java/com/google/devtools/build/lib/skyframe:sky_functions",
+        "//src/main/java/com/google/devtools/build/lib/vfs",
+        "//src/main/java/com/google/devtools/build/skyframe",
+        "//src/main/java/com/google/devtools/build/skyframe:skyframe-objects",
+        "//src/test/java/com/google/devtools/build/lib/analysis/util",
+        "//src/test/java/com/google/devtools/build/lib/bazel/bzlmod:util",
+        "//third_party:guava",
+        "//third_party:junit4",
+        "//third_party:truth",
+    ],
+)
+
 java_test(
     name = "TransitiveValidationPropagationTest",
     srcs = ["TransitiveValidationPropagationTest.java"],
diff --git a/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java b/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java
new file mode 100644
index 00000000000000..86f99e86771f3f
--- /dev/null
+++ b/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java
@@ -0,0 +1,305 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package com.google.devtools.build.lib.analysis;
+
+import static com.google.common.collect.ImmutableList.toImmutableList;
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.devtools.build.lib.bazel.bzlmod.BzlmodTestUtil.createModuleKey;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import com.google.devtools.build.lib.actions.Action;
+import com.google.devtools.build.lib.analysis.util.AnalysisMock;
+import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
+import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction;
+import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry;
+import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction;
+import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode;
+import com.google.devtools.build.lib.skyframe.PrecomputedValue;
+import com.google.devtools.build.lib.skyframe.PrecomputedValue.Injected;
+import com.google.devtools.build.lib.skyframe.SkyFunctions;
+import com.google.devtools.build.lib.vfs.Path;
+import com.google.devtools.build.skyframe.SkyFunction;
+import com.google.devtools.build.skyframe.SkyFunctionName;
+import java.io.IOException;
+import java.util.Map.Entry;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Tests that the repo mapping manifest file is properly generated for runfiles. */
+@RunWith(JUnit4.class)
+public class RunfilesRepoMappingManifestTest extends BuildViewTestCase {
+  private Path moduleRoot;
+  private FakeRegistry registry;
+
+  @Override
+  protected ImmutableList<Injected> extraPrecomputedValues() {
+    try {
+      moduleRoot = scratch.dir("modules");
+    } catch (IOException e) {
+      throw new IllegalStateException(e);
+    }
+    registry = FakeRegistry.DEFAULT_FACTORY.newFakeRegistry(moduleRoot.getPathString());
+    return ImmutableList.of(
+        PrecomputedValue.injected(
+            ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())),
+        PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false),
+        PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()),
+        PrecomputedValue.injected(
+            BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING));
+  }
+
+  @Override
+  protected AnalysisMock getAnalysisMock() {
+    // Make sure we don't have built-in modules affecting the dependency graph.
+    return new AnalysisMock.Delegate(super.getAnalysisMock()) {
+      @Override
+      public ImmutableMap<SkyFunctionName, SkyFunction> getSkyFunctions(
+          BlazeDirectories directories) {
+        return ImmutableMap.<SkyFunctionName, SkyFunction>builder()
+            .putAll(
+                Maps.filterKeys(
+                    super.getSkyFunctions(directories),
+                    fnName -> !fnName.equals(SkyFunctions.MODULE_FILE)))
+            .put(
+                SkyFunctions.MODULE_FILE,
+                new ModuleFileFunction(
+                    FakeRegistry.DEFAULT_FACTORY, directories.getWorkspace(), ImmutableMap.of()))
+            .buildOrThrow();
+      }
+    };
+  }
+
+  @Before
+  public void enableBzlmod() throws Exception {
+    setBuildLanguageOptions("--enable_bzlmod");
+  }
+
+  /**
+   * Sets up a Bazel module bare_rule@1.0, which provides a bare_binary rule that passes along
+   * runfiles in the data attribute, and does nothing else.
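+   *
+   * <p>For example (as used by the tests below), {@code bare_binary(name='aaa',data=['@bbb'])}
+   * produces an executable whose runfiles include the files and default runfiles of {@code @bbb}.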
+ */ + @Before + public void setupBareBinaryRule() throws Exception { + registry.addModule( + createModuleKey("bare_rule", "1.0"), "module(name='bare_rule',version='1.0')"); + scratch.overwriteFile(moduleRoot.getRelative("bare_rule~1.0/WORKSPACE").getPathString()); + scratch.overwriteFile( + moduleRoot.getRelative("bare_rule~1.0/defs.bzl").getPathString(), + "def _bare_binary_impl(ctx):", + " exe = ctx.actions.declare_file(ctx.label.name)", + " ctx.actions.write(exe, 'i got nothing', True)", + " runfiles = ctx.runfiles(files=ctx.files.data)", + " for data in ctx.attr.data:", + " runfiles.merge(data[DefaultInfo].default_runfiles)", + " return DefaultInfo(files=depset(direct=[exe]), executable=exe, runfiles=runfiles)", + "bare_binary=rule(", + " implementation=_bare_binary_impl,", + " attrs={'data':attr.label_list(allow_files=True)},", + " executable=True,", + ")"); + scratch.overwriteFile( + moduleRoot.getRelative("bare_rule~1.0/BUILD").getPathString(), + "load('//:defs.bzl', 'bare_binary')", + "bare_binary(name='bare1')", + "bare_binary(name='bare2')", + "bare_binary(name='bare1_with_data',data=[':bare1'])", + "bare_binary(name='bare2_with_data',data=[':bare2'])"); + } + + /** + * Sets up a Bazel module tooled_rule@1.0, which provides a tooled_binary rule that passes along + * runfiles in the data attribute. It also uses a toolchain (type //:toolchain_type, default + * toolchain //:tooled_toolchain; the macro //:defs.bzl%tooled_toolchain can be used to create + * more). It has a dependency on bare_rule to provide the binary needed for the toolchain. + */ + @Before + public void setupTooledBinaryRule() throws Exception { + registry.addModule( + createModuleKey("tooled_rule", "1.0"), + "module(name='tooled_rule',version='1.0')", + "bazel_dep(name='bare_rule',version='1.0')", + "register_toolchains('//:all')"); + scratch.overwriteFile(moduleRoot.getRelative("tooled_rule~1.0/WORKSPACE").getPathString()); + scratch.overwriteFile( + moduleRoot.getRelative("tooled_rule~1.0/defs.bzl").getPathString(), + "def _tooled_binary_impl(ctx):", + " exe = ctx.actions.declare_file(ctx.label.name)", + " ctx.actions.write(exe, ctx.toolchains['//:toolchain_type'].tooled_info, True)", + " runfiles = ctx.runfiles(files=ctx.files.data)", + " for data in ctx.attr.data:", + " runfiles.merge(data[DefaultInfo].default_runfiles)", + " return DefaultInfo(files=depset(direct=[exe]), executable=exe, runfiles=runfiles)", + "tooled_binary=rule(", + " implementation=_tooled_binary_impl,", + " attrs={'data':attr.label_list(allow_files=True)},", + " executable=True,", + " toolchains=['//:toolchain_type'],", + ")", + "", + "def _tooled_toolchain_rule_impl(ctx):", + " return [platform_common.ToolchainInfo(tooled_info = ctx.attr.string)]", + "tooled_toolchain_rule=rule(_tooled_toolchain_rule_impl, attrs={'string':attr.string()})", + "def tooled_toolchain(name, string):", + " tooled_toolchain_rule(name=name+'_impl',string=string)", + " native.toolchain(", + " name=name,", + " toolchain=':'+name+'_impl',", + " toolchain_type=Label('//:toolchain_type'),", + " )"); + scratch.overwriteFile( + moduleRoot.getRelative("tooled_rule~1.0/BUILD").getPathString(), + "load('//:defs.bzl', 'tooled_toolchain')", + "toolchain_type(name='toolchain_type')", + "tooled_toolchain(name='tooled_toolchain', string='tooled')"); + } + + private ImmutableList getRepoMappingManifestForTarget(String label) throws Exception { + Action action = getGeneratingAction(getRunfilesSupport(label).getRepoMappingManifest()); + 
+    assertThat(action).isInstanceOf(RepoMappingManifestAction.class);
+    return ((RepoMappingManifestAction) action)
+        .newDeterministicWriter(null)
+        .getBytes()
+        .toStringUtf8()
+        .lines()
+        .collect(toImmutableList());
+  }
+
+  @Test
+  public void diamond() throws Exception {
+    rewriteWorkspace("workspace(name='aaa_ws')");
+    scratch.overwriteFile(
+        "MODULE.bazel",
+        "module(name='aaa',version='1.0')",
+        "bazel_dep(name='bbb',version='1.0')",
+        "bazel_dep(name='ccc',version='2.0')",
+        "bazel_dep(name='bare_rule',version='1.0')");
+    registry.addModule(
+        createModuleKey("bbb", "1.0"),
+        "module(name='bbb',version='1.0')",
+        "bazel_dep(name='ddd',version='1.0')",
+        "bazel_dep(name='bare_rule',version='1.0')");
+    registry.addModule(
+        createModuleKey("ccc", "2.0"),
+        "module(name='ccc',version='2.0')",
+        "bazel_dep(name='ddd',version='2.0')",
+        "bazel_dep(name='bare_rule',version='1.0')");
+    registry.addModule(
+        createModuleKey("ddd", "1.0"),
+        "module(name='ddd',version='1.0')",
+        "bazel_dep(name='bare_rule',version='1.0')");
+    registry.addModule(
+        createModuleKey("ddd", "2.0"),
+        "module(name='ddd',version='2.0')",
+        "bazel_dep(name='bare_rule',version='1.0')");
+
+    scratch.overwriteFile(
+        "BUILD",
+        "load('@bare_rule//:defs.bzl', 'bare_binary')",
+        "bare_binary(name='aaa',data=['@bbb'])");
+    ImmutableMap<String, String> buildFiles =
+        ImmutableMap.of(
+            "bbb~1.0", "bare_binary(name='bbb',data=['@ddd'])",
+            "ccc~2.0", "bare_binary(name='ccc',data=['@ddd'])",
+            "ddd~1.0", "bare_binary(name='ddd')",
+            "ddd~2.0", "bare_binary(name='ddd')");
+    for (Entry<String, String> entry : buildFiles.entrySet()) {
+      scratch.overwriteFile(
+          moduleRoot.getRelative(entry.getKey()).getRelative("WORKSPACE").getPathString());
+      scratch.overwriteFile(
+          moduleRoot.getRelative(entry.getKey()).getRelative("BUILD").getPathString(),
+          "load('@bare_rule//:defs.bzl', 'bare_binary')",
+          entry.getValue());
+    }
+
+    assertThat(getRepoMappingManifestForTarget("//:aaa"))
+        .containsExactly(
+            ",aaa,aaa_ws",
+            ",aaa_ws,aaa_ws",
+            ",bbb,bbb~1.0",
+            "bbb~1.0,bbb,bbb~1.0",
+            "bbb~1.0,ddd,ddd~2.0",
+            "ddd~2.0,ddd,ddd~2.0")
+        .inOrder();
+    assertThat(getRepoMappingManifestForTarget("@@ccc~2.0//:ccc"))
+        .containsExactly("ccc~2.0,ccc,ccc~2.0", "ccc~2.0,ddd,ddd~2.0", "ddd~2.0,ddd,ddd~2.0")
+        .inOrder();
+  }
+
+  @Test
+  public void toolchainDep() throws Exception {
+    rewriteWorkspace("workspace(name='main')");
+    scratch.overwriteFile(
+        "MODULE.bazel",
+        "bazel_dep(name='tooled_rule',version='1.0')",
+        "bazel_dep(name='my_tooled_toolchain',version='1.0')",
+        "bazel_dep(name='unrelated_rule',version='1.0')",
+        "register_toolchains('@my_tooled_toolchain//:all', '@unrelated_rule//:all')");
+    registry.addModule(
+        createModuleKey("my_tooled_toolchain", "1.0"),
+        "module(name='my_tooled_toolchain',version='1.0')",
+        "bazel_dep(name='tooled_rule',version='1.0')");
+    registry.addModule(
+        createModuleKey("unrelated_rule", "1.0"), "module(name='unrelated_rule',version='1.0')");
+
+    scratch.overwriteFile(
+        "BUILD",
+        "load('@tooled_rule//:defs.bzl', 'tooled_binary')",
+        "load('@unrelated_rule//:defs.bzl', 'unrelated_binary')",
+        "tooled_binary(name='tooled')",
+        "unrelated_binary(name='unrelated')");
+    scratch.overwriteFile(
+        moduleRoot.getRelative("my_tooled_toolchain~1.0/WORKSPACE").getPathString());
+    scratch.overwriteFile(
+        moduleRoot.getRelative("my_tooled_toolchain~1.0/BUILD").getPathString(),
+        "load('@tooled_rule//:defs.bzl', 'tooled_toolchain')",
+        "tooled_toolchain(name='custom_toolchain',string='custom')");
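+    // unrelated_rule registers its own toolchain, but its packages must not end up in //:tooled's
+    // transitive packages, so no lines with source repo "unrelated_rule~1.0" should appear in the
+    // manifest asserted below.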
+    scratch.overwriteFile(moduleRoot.getRelative("unrelated_rule~1.0/WORKSPACE").getPathString());
+    scratch.overwriteFile(
+        moduleRoot.getRelative("unrelated_rule~1.0/defs.bzl").getPathString(),
+        "def _unrelated_binary_impl(ctx):",
+        "  exe = ctx.actions.declare_file(ctx.label.name)",
+        "  ctx.actions.write(exe, ctx.toolchains['//:toolchain_type'].unrelated_info, True)",
+        "  return DefaultInfo(files=depset(direct=[exe]), executable=exe)",
+        "unrelated_binary=rule(",
+        "  implementation=_unrelated_binary_impl,",
+        "  executable=True,",
+        "  toolchains=['//:toolchain_type'],",
+        ")",
+        "",
+        "def _unrelated_toolchain_impl(ctx):",
+        "  return [platform_common.ToolchainInfo(unrelated_info = '3')]",
+        "unrelated_toolchain=rule(_unrelated_toolchain_impl)");
+    scratch.overwriteFile(
+        moduleRoot.getRelative("unrelated_rule~1.0/BUILD").getPathString(),
+        "load('//:defs.bzl', 'unrelated_toolchain')",
+        "toolchain_type(name='toolchain_type')",
+        "unrelated_toolchain(name='unrelated')",
+        "toolchain(name='toolchain',toolchain=':unrelated',toolchain_type=':toolchain_type')");
+
+    // Very importantly, the transitive repos for //:tooled do not include "unrelated_rule".
+    assertThat(getRepoMappingManifestForTarget("//:tooled"))
+        .containsExactly(
+            ",main,main",
+            ",my_tooled_toolchain,my_tooled_toolchain~1.0",
+            ",tooled_rule,tooled_rule~1.0",
+            ",unrelated_rule,unrelated_rule~1.0",
+            "my_tooled_toolchain~1.0,my_tooled_toolchain,my_tooled_toolchain~1.0",
+            "my_tooled_toolchain~1.0,tooled_rule,tooled_rule~1.0")
+        .inOrder();
+  }
+}
diff --git a/src/test/shell/bazel/cc_integration_test.sh b/src/test/shell/bazel/cc_integration_test.sh
index 038fe3ce939151..c0b47b0389e43a 100755
--- a/src/test/shell/bazel/cc_integration_test.sh
+++ b/src/test/shell/bazel/cc_integration_test.sh
@@ -1106,7 +1106,7 @@ function test_execroot_sibling_layout_null_build_for_external_subpackages() {
   # Null build.
   bazel build --experimental_sibling_repository_layout //baz:binary &> "$TEST_log" \
     || fail "expected build success"
-  expect_log "INFO: 1 process: 1 internal"
+  expect_log "INFO: 2 processes: 2 internal"
 }
 
 function test_execroot_sibling_layout_header_scanning_in_external_subpackage() {