From 608ff89d5260428661b4a2c35884c4db8176b6e5 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 25 Apr 2023 08:38:55 -0700 Subject: [PATCH] Make Stardoc repository mapping aware By taking in .bzl files as runfiles rather than inputs, Stardoc can use the existing Java runfiles library to resolve apparent repository names in load labels into canonical repository names. Work towards #16124 Fixes #14140 Closes #16775. PiperOrigin-RevId: 526976752 Change-Id: I0848a5c7590348f778ad8c939fd37c89a53e55b2 --- .../com/google/devtools/build/skydoc/BUILD | 11 +- .../build/skydoc/FilesystemFileAccessor.java | 34 --- .../devtools/build/skydoc/SkydocMain.java | 89 +++---- .../devtools/build/skydoc/SkydocOptions.java | 9 - .../build/skydoc/StarlarkFileAccessor.java | 31 --- .../com/google/devtools/build/skydoc/BUILD | 7 +- .../devtools/build/skydoc/SkydocTest.java | 180 ++++++------- .../devtools/build/skydoc/private/BUILD | 11 + .../devtools/build/skydoc/private/stardoc.bzl | 160 +++++++++++ .../devtools/build/skydoc/skydoc_test.bzl | 3 +- .../google/devtools/build/skydoc/stardoc.bzl | 251 +++++------------- tools/java/runfiles/BUILD | 20 +- tools/java/runfiles/Runfiles.java | 15 +- tools/java/runfiles/RunfilesForStardoc.java | 32 +++ 14 files changed, 425 insertions(+), 428 deletions(-) delete mode 100644 src/main/java/com/google/devtools/build/skydoc/FilesystemFileAccessor.java delete mode 100644 src/main/java/com/google/devtools/build/skydoc/StarlarkFileAccessor.java create mode 100644 src/test/java/com/google/devtools/build/skydoc/private/BUILD create mode 100644 src/test/java/com/google/devtools/build/skydoc/private/stardoc.bzl create mode 100644 tools/java/runfiles/RunfilesForStardoc.java diff --git a/src/main/java/com/google/devtools/build/skydoc/BUILD b/src/main/java/com/google/devtools/build/skydoc/BUILD index e6f956f01b56c8..a670d76a4d2c9d 100644 --- a/src/main/java/com/google/devtools/build/skydoc/BUILD +++ b/src/main/java/com/google/devtools/build/skydoc/BUILD @@ -26,15 +26,6 @@ filegroup( java_binary( name = "skydoc", - jvm_flags = [ - # quiet warnings from com.google.protobuf.UnsafeUtil, - # see: https://github.com/google/protobuf/issues/3781 - # and: https://github.com/bazelbuild/bazel/issues/5599 - "--add-opens=java.base/java.nio=ALL-UNNAMED", - "--add-opens=java.base/java.lang=ALL-UNNAMED", - # ... but only on JDK >= 9 - "-XX:+IgnoreUnrecognizedVMOptions", - ], main_class = "com.google.devtools.build.skydoc.SkydocMain", visibility = ["//visibility:public"], runtime_deps = [ @@ -60,5 +51,7 @@ java_library( "//src/main/java/net/starlark/java/lib/json", "//src/main/java/net/starlark/java/syntax", "//third_party:guava", + "//tools/java/runfiles", + "//tools/java/runfiles:runfiles_for_stardoc", ], ) diff --git a/src/main/java/com/google/devtools/build/skydoc/FilesystemFileAccessor.java b/src/main/java/com/google/devtools/build/skydoc/FilesystemFileAccessor.java deleted file mode 100644 index 80d2fbe7c69eff..00000000000000 --- a/src/main/java/com/google/devtools/build/skydoc/FilesystemFileAccessor.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.skydoc; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import net.starlark.java.syntax.ParserInput; - -/** Implementation of {@link StarlarkFileAccessor} which uses the real filesystem. */ -public class FilesystemFileAccessor implements StarlarkFileAccessor { - - @Override - public ParserInput inputSource(String filename) throws IOException { - return ParserInput.fromLatin1(Files.readAllBytes(Paths.get(filename)), filename); - } - - @Override - public boolean fileExists(String pathString) { - return Files.exists(Paths.get(pathString)); - } -} diff --git a/src/main/java/com/google/devtools/build/skydoc/SkydocMain.java b/src/main/java/com/google/devtools/build/skydoc/SkydocMain.java index 1e0064b16264fe..1a7ae1119378b2 100644 --- a/src/main/java/com/google/devtools/build/skydoc/SkydocMain.java +++ b/src/main/java/com/google/devtools/build/skydoc/SkydocMain.java @@ -19,19 +19,21 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Functions; import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.Label.PackageContext; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; import com.google.devtools.build.lib.cmdline.RepositoryMapping; +import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.collect.nestedset.Depset; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions; +import com.google.devtools.build.runfiles.Runfiles; +import com.google.devtools.build.runfiles.RunfilesForStardoc; import com.google.devtools.build.skydoc.fakebuildapi.FakeApi; import com.google.devtools.build.skydoc.fakebuildapi.FakeDeepStructure; import com.google.devtools.build.skydoc.fakebuildapi.FakeProviderApi; @@ -48,6 +50,7 @@ import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.io.IOException; +import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.nio.file.Paths; @@ -105,21 +108,12 @@ public class SkydocMain { private final EventHandler eventHandler = new SystemOutEventHandler(); private final LinkedHashSet pending = new LinkedHashSet<>(); private final Map loaded = new HashMap<>(); - private final StarlarkFileAccessor fileAccessor; - private final List depRoots; private final String workspaceName; + private final Runfiles.Preloaded runfiles; - public SkydocMain( - StarlarkFileAccessor fileAccessor, String workspaceName, List depRoots) { - this.fileAccessor = fileAccessor; + public SkydocMain(String workspaceName, Runfiles.Preloaded runfiles) { this.workspaceName = 
workspaceName; - if (depRoots.isEmpty()) { - // For backwards compatibility, if no dep_roots are specified, use the current - // directory as the only root. - this.depRoots = ImmutableList.of("."); - } else { - this.depRoots = depRoots; - } + this.runfiles = runfiles; } public static void main(String[] args) @@ -135,7 +129,6 @@ public static void main(String[] args) String targetFileLabelString; String outputPath; - ImmutableList depRoots; if (Strings.isNullOrEmpty(skydocOptions.targetFileLabel) || Strings.isNullOrEmpty(skydocOptions.outputFilePath)) { @@ -144,7 +137,6 @@ public static void main(String[] args) targetFileLabelString = skydocOptions.targetFileLabel; outputPath = skydocOptions.outputFilePath; - depRoots = ImmutableList.copyOf(skydocOptions.depRoots); Label targetFileLabel = Label.parseCanonical(targetFileLabelString); @@ -156,7 +148,7 @@ public static void main(String[] args) Module module = null; try { module = - new SkydocMain(new FilesystemFileAccessor(), skydocOptions.workspaceName, depRoots) + new SkydocMain(skydocOptions.workspaceName, Runfiles.preload()) .eval( semanticsOptions.toStarlarkSemantics(), targetFileLabel, @@ -293,7 +285,17 @@ public Module eval( List aspectInfoList = new ArrayList<>(); - Module module = recursiveEval(semantics, label, ruleInfoList, providerInfoList, aspectInfoList); + // Resolve the label provided on the command line with the main repository's repository mapping. + // The stardoc rules always pass in a canonical label, so in this case the repository mapping + // is not used. + Module module = + recursiveEval( + semantics, + label, + RepositoryName.MAIN.getName(), + ruleInfoList, + providerInfoList, + aspectInfoList); Map ruleFunctions = ruleInfoList.stream() @@ -387,6 +389,7 @@ private static void putStructFields( * those files. 
* * @param label the label of the Starlark file to evaluate + * @param parentSourceRepository the canonical name of the Bazel repository that loads label * @param ruleInfoList a collection of all rule definitions made so far (using rule()); this * method will add to this list as it evaluates additional files * @throws InterruptedException if evaluation is interrupted @@ -394,15 +397,15 @@ private static void putStructFields( private Module recursiveEval( StarlarkSemantics semantics, Label label, + String parentSourceRepository, List ruleInfoList, List providerInfoList, List aspectInfoList) - throws InterruptedException, - IOException, - LabelSyntaxException, - StarlarkEvaluationException, - EvalException { - Path path = pathOfLabel(label, semantics); + throws InterruptedException, IOException, LabelSyntaxException, StarlarkEvaluationException { + Path path = pathOfLabel(label, parentSourceRepository); + String sourceRepository = + RunfilesForStardoc.getCanonicalRepositoryName( + runfiles.withSourceRepository(parentSourceRepository), label.getRepository().getName()); if (pending.contains(path)) { throw new StarlarkEvaluationException("cycle with " + path); @@ -436,7 +439,7 @@ private Module recursiveEval( }; // parse & compile (and get doc) - ParserInput input = getInputSource(path.toString()); + ParserInput input = ParserInput.fromLatin1(Files.readAllBytes(path), path.toString()); Program prog; try { StarlarkFile file = StarlarkFile.parse(input, FileOptions.DEFAULT); @@ -449,19 +452,25 @@ private Module recursiveEval( // process loads Map imports = new HashMap<>(); for (String load : prog.getLoads()) { - Label relativeLabel = + Label apparentLabel = Label.parseWithPackageContext( load, PackageContext.of(label.getPackageIdentifier(), RepositoryMapping.ALWAYS_FALLBACK)); try { Module loadedModule = - recursiveEval(semantics, relativeLabel, ruleInfoList, providerInfoList, aspectInfoList); + recursiveEval( + semantics, + apparentLabel, + sourceRepository, + ruleInfoList, + providerInfoList, + aspectInfoList); imports.put(load, loadedModule); } catch (NoSuchFileException noSuchFileException) { throw new StarlarkEvaluationException( String.format( - "File %s imported '%s', yet %s was not found, even at roots %s.", - path, load, pathOfLabel(relativeLabel, semantics), depRoots), + "File %s imported '%s', yet %s was not found.", + path, load, pathOfLabel(apparentLabel, sourceRepository)), noSuchFileException); } } @@ -491,25 +500,11 @@ path, load, pathOfLabel(relativeLabel, semantics), depRoots), return module; } - private Path pathOfLabel(Label label, StarlarkSemantics semantics) throws EvalException { - String workspacePrefix = ""; - if (!label.getWorkspaceRootForStarlarkOnly(semantics).isEmpty() - && !label.getWorkspaceName().equals(workspaceName)) { - workspacePrefix = label.getWorkspaceRootForStarlarkOnly(semantics) + "/"; - } - - return Paths.get(workspacePrefix + label.toPathFragment()); - } - - private ParserInput getInputSource(String bzlWorkspacePath) throws IOException { - for (String rootPath : depRoots) { - if (fileAccessor.fileExists(rootPath + "/" + bzlWorkspacePath)) { - return fileAccessor.inputSource(rootPath + "/" + bzlWorkspacePath); - } - } - - // All depRoots attempted and no valid file was found. - throw new NoSuchFileException(bzlWorkspacePath); + private Path pathOfLabel(Label label, String sourceRepository) { + String targetRepositoryApparentName = + label.getRepository().isMain() ? 
workspaceName : label.getRepository().getName(); + String rlocationPath = targetRepositoryApparentName + "/" + label.toPathFragment(); + return Paths.get(runfiles.withSourceRepository(sourceRepository).rlocation(rlocationPath)); } private static void addMorePredeclared(ImmutableMap.Builder env) { diff --git a/src/main/java/com/google/devtools/build/skydoc/SkydocOptions.java b/src/main/java/com/google/devtools/build/skydoc/SkydocOptions.java index d1635be2466779..d91c2b4c13a1c7 100644 --- a/src/main/java/com/google/devtools/build/skydoc/SkydocOptions.java +++ b/src/main/java/com/google/devtools/build/skydoc/SkydocOptions.java @@ -59,13 +59,4 @@ public class SkydocOptions extends OptionsBase { + " is empty, then documentation for all exported rule definitions will be" + " generated.") public List symbolNames; - - @Option( - name = "dep_roots", - allowMultiple = true, - defaultValue = "null", - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = OptionEffectTag.UNKNOWN, - help = "File path roots to search when resolving transitive bzl dependencies") - public List depRoots; } diff --git a/src/main/java/com/google/devtools/build/skydoc/StarlarkFileAccessor.java b/src/main/java/com/google/devtools/build/skydoc/StarlarkFileAccessor.java deleted file mode 100644 index 0542a6c6b7dd21..00000000000000 --- a/src/main/java/com/google/devtools/build/skydoc/StarlarkFileAccessor.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2018 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.skydoc; - -import java.io.IOException; -import net.starlark.java.syntax.ParserInput; - -/** - * Helper to handle Skydoc file I/O. This abstraction is useful for tests which don't involve - * actual file I/O. - */ -public interface StarlarkFileAccessor { - - /** Returns a {@link ParserInput} for accessing the content of the given absolute path string. */ - ParserInput inputSource(String pathString) throws IOException; - - /** Returns true if a file exists at the current path. 
*/ - boolean fileExists(String pathString); -} diff --git a/src/test/java/com/google/devtools/build/skydoc/BUILD b/src/test/java/com/google/devtools/build/skydoc/BUILD index d282595384ac59..bdd12bc044aa47 100644 --- a/src/test/java/com/google/devtools/build/skydoc/BUILD +++ b/src/test/java/com/google/devtools/build/skydoc/BUILD @@ -10,6 +10,7 @@ filegroup( name = "srcs", testonly = 0, srcs = glob(["**"]) + [ + "//src/test/java/com/google/devtools/build/skydoc/private:srcs", "//src/test/java/com/google/devtools/build/skydoc/testdata/same_level_file_test:srcs", ], visibility = ["//src:__pkg__"], @@ -23,19 +24,17 @@ java_test( deps = [ "//src/main/java/com/google/devtools/build/lib/cmdline", "//src/main/java/com/google/devtools/build/lib/packages/semantics", - "//src/main/java/com/google/devtools/build/lib/vfs", - "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", "//src/main/java/com/google/devtools/build/skydoc:skydoc_lib", "//src/main/java/com/google/devtools/build/skydoc/fakebuildapi", "//src/main/java/com/google/devtools/build/skydoc/rendering", "//src/main/java/com/google/devtools/build/skydoc/rendering:function_util", "//src/main/java/com/google/devtools/build/skydoc/rendering/proto:stardoc_output_java_proto", "//src/main/java/net/starlark/java/eval", - "//src/main/java/net/starlark/java/syntax", - "//src/test/java/com/google/devtools/build/lib/analysis/util", + "//src/test/java/com/google/devtools/build/lib/testutil:TestUtils", "//third_party:guava", "//third_party:junit4", "//third_party:truth", + "//tools/java/runfiles", ], ) diff --git a/src/test/java/com/google/devtools/build/skydoc/SkydocTest.java b/src/test/java/com/google/devtools/build/skydoc/SkydocTest.java index d2ae85f3dbfb55..086660cba3c8b6 100644 --- a/src/test/java/com/google/devtools/build/skydoc/SkydocTest.java +++ b/src/test/java/com/google/devtools/build/skydoc/SkydocTest.java @@ -22,12 +22,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.devtools.build.lib.analysis.util.BuildViewTestCase; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions; -import com.google.devtools.build.lib.vfs.FileSystemUtils; -import com.google.devtools.build.lib.vfs.Path; -import com.google.devtools.build.lib.vfs.PathFragment; +import com.google.devtools.build.lib.testutil.TestUtils; +import com.google.devtools.build.runfiles.Runfiles; import com.google.devtools.build.skydoc.SkydocMain.StarlarkEvaluationException; import com.google.devtools.build.skydoc.rendering.DocstringParseException; import com.google.devtools.build.skydoc.rendering.FunctionUtil; @@ -38,60 +36,45 @@ import com.google.devtools.build.skydoc.rendering.proto.StardocOutputProtos.ProviderInfo; import com.google.devtools.build.skydoc.rendering.proto.StardocOutputProtos.RuleInfo; import com.google.devtools.build.skydoc.rendering.proto.StardocOutputProtos.StarlarkFunctionInfo; +import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; import java.util.Map; import java.util.stream.Collectors; import net.starlark.java.eval.Module; import net.starlark.java.eval.StarlarkFunction; import net.starlark.java.eval.StarlarkSemantics; -import net.starlark.java.syntax.ParserInput; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import 
org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Java tests for Skydoc. */ @RunWith(JUnit4.class) -// TODO(adonovan): Skydoc's tests should not depend on the analysis phase of Blaze. -public final class SkydocTest extends BuildViewTestCase { +public final class SkydocTest { + + @Rule public TemporaryFolder runfilesDir = new TemporaryFolder(TestUtils.tmpDirFile()); private SkydocMain skydocMain; @Before public void setUp() throws IOException { - scratch.dir("/execroot/io_bazel"); - scratch.setWorkingDir("/execroot/io_bazel"); - skydocMain = new SkydocMain( - new StarlarkFileAccessor() { - - @Override - public ParserInput inputSource(String pathString) throws IOException { - if (!pathString.startsWith("/")) { - pathString = "/execroot/io_bazel/" + pathString; - } - Path path = fileSystem.getPath(pathString); - byte[] bytes = FileSystemUtils.asByteSource(path).read(); - return ParserInput.fromLatin1(bytes, path.toString()); - } - - @Override - public boolean fileExists(String pathString) { - if (!pathString.startsWith("/")) { - pathString = "/execroot/io_bazel/" + pathString; - } - return fileSystem.exists(PathFragment.create(pathString)); - } - }, "io_bazel", - ImmutableList.of("/other_root", ".")); + Runfiles.preload( + ImmutableMap.of("RUNFILES_DIR", runfilesDir.getRoot().getAbsolutePath()))); } @Test public void testStarlarkEvaluationError() throws Exception { - scratch.file( - "/execroot/io_bazel/test/a.bzl", // + scratchRunfile( + "io_bazel/test/a.bzl", // "def f(): 1//0", "f()"); StarlarkEvaluationException ex = @@ -114,8 +97,8 @@ public void testStarlarkEvaluationError() throws Exception { @Test public void testRuleInfoAttrs() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def rule_impl(ctx):", " return []", "", @@ -183,8 +166,8 @@ private static Iterable getAttrTypes(AspectInfo aspectInfo) { @Test public void testMultipleRuleNames() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def rule_impl(ctx):", " return []", "", @@ -224,8 +207,8 @@ public void testMultipleRuleNames() throws Exception { @Test public void testRuleWithMultipleExports() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def rule_impl(ctx):", " return []", "", @@ -252,8 +235,8 @@ public void testRuleWithMultipleExports() throws Exception { @Test public void testRuleExportedWithSpecifiedName() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def rule_impl(ctx):", " return []", "", @@ -281,8 +264,8 @@ public void testRuleExportedWithSpecifiedName() throws Exception { @Test public void testUnassignedRuleNotDocumented() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def rule_impl(ctx):", " return []", "", @@ -310,12 +293,12 @@ public void testUnassignedRuleNotDocumented() throws Exception { @Test public void testRulesAcrossMultipleFiles() throws Exception { - scratch.file("/execroot/io_bazel/lib/rule_impl.bzl", "def rule_impl(ctx):", " return []"); + scratchRunfile("io_bazel/lib/rule_impl.bzl", "def rule_impl(ctx):", " return []"); - scratch.file("/other_root/deps/foo/other_root.bzl", "doc_string = 'Dep rule'"); + scratchRunfile("io_bazel/deps/foo/other_root.bzl", "doc_string = 'Dep rule'"); - scratch.file( - 
"/execroot/io_bazel/deps/foo/dep_rule.bzl", + scratchRunfile( + "io_bazel/deps/foo/dep_rule.bzl", "load('//lib:rule_impl.bzl', 'rule_impl')", "load(':other_root.bzl', 'doc_string')", "", @@ -329,8 +312,8 @@ public void testRulesAcrossMultipleFiles() throws Exception { " implementation = rule_impl,", ")"); - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + "io_bazel/test/main.bzl", "load('//lib:rule_impl.bzl', 'rule_impl')", "load('//deps/foo:dep_rule.bzl', 'dep_rule')", "", @@ -358,15 +341,12 @@ public void testRulesAcrossMultipleFiles() throws Exception { @Test public void testRulesAcrossRepository() throws Exception { - scratch.file( - "/execroot/io_bazel/external/dep_repo/lib/rule_impl.bzl", - "def rule_impl(ctx):", - " return []"); + scratchRunfile("dep_repo/lib/rule_impl.bzl", "def rule_impl(ctx):", " return []"); - scratch.file("/execroot/io_bazel/deps/foo/docstring.bzl", "doc_string = 'Dep rule'"); + scratchRunfile("io_bazel/deps/foo/docstring.bzl", "doc_string = 'Dep rule'"); - scratch.file( - "/execroot/io_bazel/deps/foo/dep_rule.bzl", + scratchRunfile( + "io_bazel/deps/foo/dep_rule.bzl", "load('@dep_repo//lib:rule_impl.bzl', 'rule_impl')", "load(':docstring.bzl', 'doc_string')", "", @@ -380,8 +360,8 @@ public void testRulesAcrossRepository() throws Exception { " implementation = rule_impl,", ")"); - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + "io_bazel/test/main.bzl", "load('@dep_repo//lib:rule_impl.bzl', 'rule_impl')", "load('//deps/foo:dep_rule.bzl', 'dep_rule')", "", @@ -409,12 +389,12 @@ public void testRulesAcrossRepository() throws Exception { @Test public void testRulesAcrossRepositorySiblingRepositoryLayout() throws Exception { - scratch.file("/execroot/dep_repo/lib/rule_impl.bzl", "def rule_impl(ctx):", " return []"); + scratchRunfile("dep_repo/lib/rule_impl.bzl", "def rule_impl(ctx):", " return []"); - scratch.file("/execroot/io_bazel/deps/foo/docstring.bzl", "doc_string = 'Dep rule'"); + scratchRunfile("io_bazel/deps/foo/docstring.bzl", "doc_string = 'Dep rule'"); - scratch.file( - "/execroot/io_bazel/deps/foo/dep_rule.bzl", + scratchRunfile( + "io_bazel/deps/foo/dep_rule.bzl", "load('@dep_repo//lib:rule_impl.bzl', 'rule_impl')", "load(':docstring.bzl', 'doc_string')", "", @@ -428,8 +408,8 @@ public void testRulesAcrossRepositorySiblingRepositoryLayout() throws Exception " implementation = rule_impl,", ")"); - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + "io_bazel/test/main.bzl", "load('@dep_repo//lib:rule_impl.bzl', 'rule_impl')", "load('//deps/foo:dep_rule.bzl', 'dep_rule')", "", @@ -459,10 +439,10 @@ public void testRulesAcrossRepositorySiblingRepositoryLayout() throws Exception @Test public void testLoadOwnRepository() throws Exception { - scratch.file("/execroot/io_bazel/deps/foo/dep_rule.bzl", "def rule_impl(ctx):", " return []"); + scratchRunfile("io_bazel/deps/foo/dep_rule.bzl", "def rule_impl(ctx):", " return []"); - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + "io_bazel/test/main.bzl", "load('@io_bazel//deps/foo:dep_rule.bzl', 'rule_impl')", "", "main_rule = rule(", @@ -489,14 +469,14 @@ public void testLoadOwnRepository() throws Exception { @Test public void testSkydocCrashesOnCycle() throws Exception { - scratch.file( - "/execroot/io_bazel/dep/dep.bzl", + scratchRunfile( + "io_bazel/dep/dep.bzl", "load('//test:main.bzl', 'some_var')", "def rule_impl(ctx):", " return []"); - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + 
"io_bazel/test/main.bzl", "load('//dep:dep.bzl', 'rule_impl')", "", "some_var = 1", @@ -518,13 +498,15 @@ public void testSkydocCrashesOnCycle() throws Exception { ImmutableMap.builder(), ImmutableMap.builder())); - assertThat(expected).hasMessageThat().contains("cycle with test/main.bzl"); + assertThat(expected) + .hasMessageThat() + .contains("cycle with " + runfilesDir.getRoot() + "/io_bazel/test/main.bzl"); } @Test public void testMalformedFunctionDocstring() throws Exception { - scratch.file( - "/execroot/io_bazel/test/main.bzl", + scratchRunfile( + "io_bazel/test/main.bzl", "def check_sources(name,", " required_param,", " bool_param = True,", @@ -561,7 +543,9 @@ public void testMalformedFunctionDocstring() throws Exception { .hasMessageThat() .contains( "Unable to generate documentation for function check_sources " - + "(defined at /execroot/io_bazel/test/main.bzl:1:5) " + + "(defined at " + + runfilesDir.getRoot() + + "/io_bazel/test/main.bzl:1:5) " + "due to malformed docstring. Parse errors:"); assertThat(expected) .hasMessageThat() @@ -572,8 +556,8 @@ public void testMalformedFunctionDocstring() throws Exception { @Test public void testFuncInfoParams() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def check_function(foo, bar, baz):", " \"\"\"Runs some checks on the given function parameter.", " ", @@ -616,8 +600,8 @@ private static Iterable getParamNames(StarlarkFunctionInfo funcInfo) { @Test public void testProviderInfo() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "MyExampleInfo = provider(", " doc = 'Stores information about example.',", " fields = {", @@ -666,8 +650,8 @@ private static Iterable getFieldDocString(ProviderInfo providerInfo) { @Test public void testAspectInfo() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def my_aspect_impl(ctx):\n" + " return []\n" + "\n" @@ -709,8 +693,8 @@ public void testAspectInfo() throws Exception { @Test public void testModuleDocstring() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "\"\"\"Input file to test module docstring\"\"\"", "def check_function(foo):", " \"\"\"Runs some checks on the given function parameter.", @@ -734,8 +718,8 @@ public void testModuleDocstring() throws Exception { @Test public void testnoModuleDoc() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "def check_function(foo):", " \"\"\"Runs some checks input file with no module docstring.", " ", @@ -767,8 +751,8 @@ public void testnoModuleDoc() throws Exception { @Test public void testMultipleLineModuleDoc() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", + scratchRunfile( + "io_bazel/test/test.bzl", "\"\"\"Input file to test", "multiple lines module docstring\"\"\"", "def check_function(foo):", @@ -802,13 +786,13 @@ public void testMultipleLineModuleDoc() throws Exception { @Test public void testModuleDocAcrossFiles() throws Exception { - scratch.file( - "/execroot/io_bazel/test/othertest.bzl", // + scratchRunfile( + "io_bazel/test/othertest.bzl", // "\"\"\"Should be displayed.\"\"\"", "load(':test.bzl', 'check_function')", "pass"); - scratch.file( - "/execroot/io_bazel/test/test.bzl", // + scratchRunfile( + "io_bazel/test/test.bzl", // "\"\"\"Should not be displayed.\"\"\"", 
"def check_function():", " pass"); @@ -836,8 +820,8 @@ public void testModuleDocAcrossFiles() throws Exception { @Test public void testDefaultSymbolFilter() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", // + scratchRunfile( + "io_bazel/test/test.bzl", // "def foo():", " pass", "def bar():", @@ -872,8 +856,8 @@ public void testDefaultSymbolFilter() throws Exception { @Test public void testCustomSymbolFilter() throws Exception { - scratch.file( - "/execroot/io_bazel/test/test.bzl", // + scratchRunfile( + "io_bazel/test/test.bzl", // "def foo():", " pass", "def bar():", @@ -905,4 +889,10 @@ public void testCustomSymbolFilter() throws Exception { .collect(toImmutableList()); assertThat(documentedFunctions).containsExactly("bar", "_baz"); } + + private void scratchRunfile(String path, String... lines) throws Exception { + Path file = runfilesDir.getRoot().toPath().resolve(path.replace('/', File.separatorChar)); + Files.createDirectories(file.getParent()); + Files.write(file, Arrays.asList(lines), StandardCharsets.UTF_8); + } } diff --git a/src/test/java/com/google/devtools/build/skydoc/private/BUILD b/src/test/java/com/google/devtools/build/skydoc/private/BUILD new file mode 100644 index 00000000000000..0adccbb47977d4 --- /dev/null +++ b/src/test/java/com/google/devtools/build/skydoc/private/BUILD @@ -0,0 +1,11 @@ +package( + default_applicable_licenses = ["//:license"], + default_visibility = ["//src:__subpackages__"], +) + +filegroup( + name = "srcs", + testonly = 0, + srcs = glob(["**"]), + visibility = ["//src:__subpackages__"], +) diff --git a/src/test/java/com/google/devtools/build/skydoc/private/stardoc.bzl b/src/test/java/com/google/devtools/build/skydoc/private/stardoc.bzl new file mode 100644 index 00000000000000..204af387d834fd --- /dev/null +++ b/src/test/java/com/google/devtools/build/skydoc/private/stardoc.bzl @@ -0,0 +1,160 @@ +# Copyright 2018 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Starlark rule for stardoc: a documentation generator tool written in Java.""" + +def _stardoc_impl(ctx): + """Implementation of the stardoc rule.""" + for semantic_flag in ctx.attr.semantic_flags: + if not semantic_flag.startswith("--"): + fail("semantic_flags entry '%s' must start with '--'" % semantic_flag) + out_file = ctx.outputs.out + stardoc_args = ctx.actions.args() + stardoc_args.add("--input=" + str(ctx.file.input.owner)) + stardoc_args.add("--workspace_name=" + ctx.workspace_name) + stardoc_args.add_all( + ctx.attr.symbol_names, + format_each = "--symbols=%s", + omit_if_empty = True, + ) + stardoc_args.add_all(ctx.attr.semantic_flags) + stardoc = ctx.executable.stardoc + + if ctx.attr.format == "proto": + stardoc_args.add("--output=" + out_file.path) + ctx.actions.run( + outputs = [out_file], + executable = stardoc, + arguments = [stardoc_args], + mnemonic = "Stardoc", + progress_message = ("Generating Starlark doc for %s" % + (ctx.label.name)), + ) + elif ctx.attr.format == "markdown": + proto_file = ctx.actions.declare_file(ctx.label.name + ".raw", sibling = out_file) + stardoc_args.add("--output=" + proto_file.path) + ctx.actions.run( + outputs = [proto_file], + executable = stardoc, + arguments = [stardoc_args], + mnemonic = "Stardoc", + progress_message = ("Generating proto for Starlark doc for %s" % + (ctx.label.name)), + ) + renderer_args = ctx.actions.args() + renderer_args.add("--input=" + str(proto_file.path)) + renderer_args.add("--output=" + str(ctx.outputs.out.path)) + renderer_args.add("--aspect_template=" + str(ctx.file.aspect_template.path)) + renderer_args.add("--header_template=" + str(ctx.file.header_template.path)) + renderer_args.add("--func_template=" + str(ctx.file.func_template.path)) + renderer_args.add("--provider_template=" + str(ctx.file.provider_template.path)) + renderer_args.add("--rule_template=" + str(ctx.file.rule_template.path)) + renderer = ctx.executable.renderer + ctx.actions.run( + outputs = [out_file], + inputs = [proto_file, ctx.file.aspect_template, ctx.file.header_template, ctx.file.func_template, ctx.file.provider_template, ctx.file.rule_template], + executable = renderer, + arguments = [renderer_args], + mnemonic = "Renderer", + progress_message = ("Converting proto format of %s to markdown format" % + (ctx.label.name)), + ) + + # Work around default outputs not getting captured by sh_binary: + # https://github.com/bazelbuild/bazel/issues/15043. + # See discussion in https://github.com/bazelbuild/stardoc/pull/139. + outputs = [out_file] + return [DefaultInfo(files = depset(outputs), runfiles = ctx.runfiles(files = outputs))] + +stardoc = rule( + _stardoc_impl, + doc = """ +Generates documentation for starlark skylark rule definitions in a target starlark file. +""", + attrs = { + "input": attr.label( + doc = "The starlark file to generate documentation for.", + allow_single_file = [".bzl"], + mandatory = True, + ), + "out": attr.output( + doc = "The (markdown) file to which documentation will be output.", + mandatory = True, + ), + "format": attr.string( + doc = "The format of the output file. Valid values: 'markdown' or 'proto'.", + values = ["markdown", "proto"], + mandatory = True, + ), + "symbol_names": attr.string_list( + doc = """ +A list of symbol names to generate documentation for. These should correspond to +the names of rule definitions in the input file. If this list is empty, then +documentation for all exported rule definitions will be generated. 
+""", + mandatory = True, + ), + "semantic_flags": attr.string_list( + doc = """ +A list of canonical flags to affect Starlark semantics for the Starlark interpretter +during documentation generation. This should only be used to maintain compatibility with +non-default semantic flags required to use the given Starlark symbols. + +For example, if `//foo:bar.bzl` does not build except when a user would specify +`--incompatible_foo_semantic=false`, then this attribute should contain +"--incompatible_foo_semantic=false". +""", + mandatory = True, + ), + "stardoc": attr.label( + doc = "The location of the stardoc tool.", + allow_files = True, + cfg = "exec", + executable = True, + mandatory = True, + ), + "renderer": attr.label( + doc = "The location of the renderer tool.", + allow_files = True, + cfg = "exec", + executable = True, + mandatory = True, + ), + "aspect_template": attr.label( + doc = "The input file template for generating documentation of aspects.", + allow_single_file = [".vm"], + mandatory = True, + ), + "header_template": attr.label( + doc = "The input file template for the header of the output documentation.", + allow_single_file = [".vm"], + mandatory = True, + ), + "func_template": attr.label( + doc = "The input file template for generating documentation of functions.", + allow_single_file = [".vm"], + mandatory = True, + ), + "provider_template": attr.label( + doc = "The input file template for generating documentation of providers.", + allow_single_file = [".vm"], + mandatory = True, + ), + "rule_template": attr.label( + doc = "The input file template for generating documentation of rules.", + allow_single_file = [".vm"], + mandatory = True, + ), + }, +) diff --git a/src/test/java/com/google/devtools/build/skydoc/skydoc_test.bzl b/src/test/java/com/google/devtools/build/skydoc/skydoc_test.bzl index 9a2ea2d5a66bb3..642cd56256d9fb 100644 --- a/src/test/java/com/google/devtools/build/skydoc/skydoc_test.bzl +++ b/src/test/java/com/google/devtools/build/skydoc/skydoc_test.bzl @@ -96,7 +96,8 @@ def skydoc_test( input = input_file, deps = ["%s_lib" % name], renderer = Label("//src/main/java/com/google/devtools/build/skydoc/renderer:renderer"), - stardoc = Label("//src/main/java/com/google/devtools/build/skydoc:skydoc"), + stardoc = Label("//src/main/java/com/google/devtools/build/skydoc:skydoc_deploy.jar"), format = format, + testonly = True, **kwargs ) diff --git a/src/test/java/com/google/devtools/build/skydoc/stardoc.bzl b/src/test/java/com/google/devtools/build/skydoc/stardoc.bzl index 396c5797257615..8e5ed6c4187d4b 100644 --- a/src/test/java/com/google/devtools/build/skydoc/stardoc.bzl +++ b/src/test/java/com/google/devtools/build/skydoc/stardoc.bzl @@ -14,190 +14,79 @@ """Starlark rule for stardoc: a documentation generator tool written in Java.""" -load("@bazel_skylib//:bzl_library.bzl", "StarlarkLibraryInfo") +load("//src/test/java/com/google/devtools/build/skydoc/private:stardoc.bzl", _stardoc = "stardoc") -def _root_from_file(f): - """Given a file, returns the root path of that file.""" - return f.root.path or "." 
+def stardoc( + *, + name, + input, + out, + deps = [], + format = "markdown", + symbol_names = [], + semantic_flags = [], + stardoc = Label("//src/main/java/com/google/devtools/build/skydoc:skydoc_deploy.jar"), + renderer = Label("//src/main/java/com/google/devtools/build/skydoc/renderer"), + aspect_template = Label(":test_templates/markdown_tables/aspect.vm"), + func_template = Label(":test_templates/markdown_tables/func.vm"), + header_template = Label(":test_templates/markdown_tables/header.vm"), + provider_template = Label(":test_templates/markdown_tables/provider.vm"), + rule_template = Label(":test_templates/markdown_tables/rule.vm"), + **kwargs): + """Generates documentation for exported starlark rule definitions in a target starlark file. -def _stardoc_impl(ctx): - """Implementation of the stardoc rule.""" - for semantic_flag in ctx.attr.semantic_flags: - if not semantic_flag.startswith("--"): - fail("semantic_flags entry '%s' must start with '--'" % semantic_flag) - out_file = ctx.outputs.out - input_files = depset(direct = [ctx.file.input], transitive = [ - dep[StarlarkLibraryInfo].transitive_srcs - for dep in ctx.attr.deps - ]) - stardoc_args = ctx.actions.args() - stardoc_args.add("--input=" + str(ctx.file.input.owner)) - stardoc_args.add("--workspace_name=" + ctx.workspace_name) - stardoc_args.add_all( - ctx.attr.symbol_names, - format_each = "--symbols=%s", - omit_if_empty = True, - ) - - # TODO(cparsons): Note that use of dep_roots alone does not guarantee - # the correct file is loaded. If two files exist under the same path - # but are under different roots, it is possible that Stardoc loads the - # one that is not explicitly an input to this action (if sandboxing is - # disabled). The correct way to resolve this is to explicitly specify - # the full set of transitive dependency Starlark files as action args - # (maybe using a param file), but this requires some work. - stardoc_args.add_all( - input_files, - format_each = "--dep_roots=%s", - map_each = _root_from_file, - omit_if_empty = True, - uniquify = True, - ) - - # Needed in case some files are referenced across local repository - # namespace. For example, consider a file under a nested local repository @bar - # rooted under ./foo/bar/WORKSPACE. Consider a stardoc target 'lib_doc' under - # foo/bar/BUILD to document foo/bar/lib.bzl. - # The stardoc target references @bar//third_party/stardoc:lib.bzl (which appears just as :lib.bzl), but the - # actual build is taking place in the root repository, thus the source file - # is present under external/bar/lib.bzl. 
- stardoc_args.add( - "--dep_roots=external/" + ctx.workspace_name, - ) - stardoc_args.add_all(ctx.attr.semantic_flags) - stardoc = ctx.executable.stardoc - - if ctx.attr.format == "proto": - stardoc_args.add("--output=" + out_file.path) - ctx.actions.run( - outputs = [out_file], - inputs = input_files, - executable = stardoc, - arguments = [stardoc_args], - mnemonic = "Stardoc", - progress_message = ("Generating Starlark doc for %s" % - (ctx.label.name)), - ) - elif ctx.attr.format == "markdown": - proto_file = ctx.actions.declare_file(ctx.label.name + ".raw", sibling = out_file) - stardoc_args.add("--output=" + proto_file.path) - ctx.actions.run( - outputs = [proto_file], - inputs = input_files, - executable = stardoc, - arguments = [stardoc_args], - mnemonic = "Stardoc", - progress_message = ("Generating proto for Starlark doc for %s" % - (ctx.label.name)), - ) - renderer_args = ctx.actions.args() - renderer_args.add("--input=" + str(proto_file.path)) - renderer_args.add("--output=" + str(ctx.outputs.out.path)) - renderer_args.add("--aspect_template=" + str(ctx.file.aspect_template.path)) - renderer_args.add("--header_template=" + str(ctx.file.header_template.path)) - renderer_args.add("--func_template=" + str(ctx.file.func_template.path)) - renderer_args.add("--provider_template=" + str(ctx.file.provider_template.path)) - renderer_args.add("--rule_template=" + str(ctx.file.rule_template.path)) - renderer = ctx.executable.renderer - ctx.actions.run( - outputs = [out_file], - inputs = [proto_file, ctx.file.aspect_template, ctx.file.header_template, ctx.file.func_template, ctx.file.provider_template, ctx.file.rule_template], - executable = renderer, - arguments = [renderer_args], - mnemonic = "Renderer", - progress_message = ("Converting proto format of %s to markdown format" % - (ctx.label.name)), - ) + Args: + name: The name of the stardoc target. + input: The starlark file to generate documentation for (mandatory). + out: The file to which documentation will be output (mandatory). + deps: A list of bzl_library dependencies which the input depends on. + format: The format of the output file. Valid values: 'markdown' or 'proto'. + symbol_names: A list of symbol names to generate documentation for. These should correspond to the names of rule + definitions in the input file. If this list is empty, then documentation for all exported rule definitions will + be generated. + semantic_flags: A list of canonical flags to affect Starlark semantics for the Starlark interpreter during + documentation generation. This should only be used to maintain compatibility with non-default semantic flags + required to use the given Starlark symbols. - # Work around default outputs not getting captured by sh_binary: - # https://github.com/bazelbuild/bazel/issues/15043. - # See discussion in https://github.com/bazelbuild/stardoc/pull/139. - outputs = [out_file] - return [DefaultInfo(files = depset(outputs), runfiles = ctx.runfiles(files = outputs))] + For example, if `//foo:bar.bzl` does not build except when a user would specify + `--incompatible_foo_semantic=false`, then this attribute should contain + "--incompatible_foo_semantic=false". + stardoc: The location of the stardoc tool. + renderer: The location of the renderer tool. + aspect_template: The input file template for generating documentation of aspects + header_template: The input file template for the header of the output documentation. + func_template: The input file template for generating documentation of functions. 
+ provider_template: The input file template for generating documentation of providers. + rule_template: The input file template for generating documentation of rules. + **kwargs: Further arguments to pass to stardoc. + """ -stardoc = rule( - _stardoc_impl, - doc = """ -Generates documentation for exported skylark rule definitions in a target starlark file. + stardoc_with_runfiles_name = name + "_stardoc" -This rule is an experimental replacement for the existing skylark_doc rule. -""", - attrs = { - "input": attr.label( - doc = "The starlark file to generate documentation for.", - allow_single_file = [".bzl"], - ), - "deps": attr.label_list( - doc = "A list of bzl_library dependencies which the input depends on.", - providers = [StarlarkLibraryInfo], - ), - "format": attr.string( - doc = "The format of the output file. Valid values: 'markdown' or 'proto'.", - default = "markdown", - values = ["markdown", "proto"], - ), - "out": attr.output( - doc = "The (markdown) file to which documentation will be output.", - mandatory = True, - ), - "symbol_names": attr.string_list( - doc = """ -A list of symbol names to generate documentation for. These should correspond to -the names of rule definitions in the input file. If this list is empty, then -documentation for all exported rule definitions will be generated. -""", - default = [], - ), - "semantic_flags": attr.string_list( - doc = """ -A list of canonical flags to affect Starlark semantics for the Starlark interpretter -during documentation generation. This should only be used to maintain compatibility with -non-default semantic flags required to use the given Starlark symbols. + testonly = {"testonly": kwargs["testonly"]} if "testonly" in kwargs else {} + native.java_binary( + name = stardoc_with_runfiles_name, + main_class = "com.google.devtools.build.skydoc.SkydocMain", + runtime_deps = [stardoc], + data = [input] + deps, + tags = ["manual"], + visibility = ["//visibility:private"], + **testonly + ) -For example, if `//foo:bar.bzl` does not build except when a user would specify -`--incompatible_foo_semantic=false`, then this attribute should contain -"--incompatible_foo_semantic=false". 
-""", - default = [], - ), - "stardoc": attr.label( - doc = "The location of the stardoc tool.", - allow_files = True, - default = Label("//src/main/java/com/google/devtools/build/skydoc"), - cfg = "exec", - executable = True, - ), - "renderer": attr.label( - doc = "The location of the renderer tool.", - allow_files = True, - default = Label("//src/main/java/com/google/devtools/build/skydoc/renderer"), - cfg = "exec", - executable = True, - ), - "aspect_template": attr.label( - doc = "The input file template for generating documentation of aspects.", - allow_single_file = [".vm"], - default = Label(":test_templates/markdown_tables/aspect.vm"), - ), - "header_template": attr.label( - doc = "The input file template for the header of the output documentation.", - allow_single_file = [".vm"], - default = Label(":test_templates/markdown_tables/header.vm"), - ), - "func_template": attr.label( - doc = "The input file template for generating documentation of functions.", - allow_single_file = [".vm"], - default = Label(":test_templates/markdown_tables/func.vm"), - ), - "provider_template": attr.label( - doc = "The input file template for generating documentation of providers.", - allow_single_file = [".vm"], - default = Label(":test_templates/markdown_tables/provider.vm"), - ), - "rule_template": attr.label( - doc = "The input file template for generating documentation of rules.", - allow_single_file = [".vm"], - default = Label(":test_templates/markdown_tables/rule.vm"), - ), - }, -) + _stardoc( + name = name, + input = input, + out = out, + format = format, + symbol_names = symbol_names, + semantic_flags = semantic_flags, + stardoc = stardoc_with_runfiles_name, + renderer = renderer, + aspect_template = aspect_template, + func_template = func_template, + header_template = header_template, + provider_template = provider_template, + rule_template = rule_template, + **kwargs + ) diff --git a/tools/java/runfiles/BUILD b/tools/java/runfiles/BUILD index e0487a301761e0..b7d99f89334ba8 100644 --- a/tools/java/runfiles/BUILD +++ b/tools/java/runfiles/BUILD @@ -39,18 +39,16 @@ java_library( "Runfiles.java", "Util.java", ], - exported_plugins = [":auto_bazel_repository_processor"], - visibility = ["//tools/java/runfiles/testing:__pkg__"], - exports = [":auto_bazel_repository"], + visibility = [ + "//src/main/java/com/google/devtools/build/skydoc:__pkg__", + "//src/test/java/com/google/devtools/build/skydoc:__pkg__", + "//tools/java/runfiles/testing:__pkg__", + ], ) java_library( - name = "auto_bazel_repository", - srcs = ["AutoBazelRepository.java"], -) - -java_plugin( - name = "auto_bazel_repository_processor", - srcs = ["AutoBazelRepositoryProcessor.java"], - processor_class = "com.google.devtools.build.runfiles.AutoBazelRepositoryProcessor", + name = "runfiles_for_stardoc", + srcs = ["RunfilesForStardoc.java"], + visibility = ["//src/main/java/com/google/devtools/build/skydoc:__pkg__"], + deps = [":runfiles"], ) diff --git a/tools/java/runfiles/Runfiles.java b/tools/java/runfiles/Runfiles.java index c705e1db48bd94..23d7f6ac54a4ad 100644 --- a/tools/java/runfiles/Runfiles.java +++ b/tools/java/runfiles/Runfiles.java @@ -356,12 +356,7 @@ public String rlocation(String path) { if (apparentTargetAndRemainder.length < 2) { return preloadedRunfiles.rlocationChecked(path); } - String targetCanonical = - preloadedRunfiles - .getRepoMapping() - .getOrDefault( - new Preloaded.RepoMappingKey(sourceRepository, apparentTargetAndRemainder[0]), - apparentTargetAndRemainder[0]); + String targetCanonical = 
getCanonicalRepositoryName(apparentTargetAndRemainder[0]); return preloadedRunfiles.rlocationChecked( targetCanonical + "/" + apparentTargetAndRemainder[1]); } @@ -376,6 +371,14 @@ public Map getEnvVars() { return preloadedRunfiles.getEnvVars(); } + String getCanonicalRepositoryName(String apparentRepositoryName) { + return preloadedRunfiles + .getRepoMapping() + .getOrDefault( + new Preloaded.RepoMappingKey(sourceRepository, apparentRepositoryName), + apparentRepositoryName); + } + /** Returns true if the platform supports runfiles only via manifests. */ private static boolean isManifestOnly(Map env) { return "1".equals(env.get("RUNFILES_MANIFEST_ONLY")); diff --git a/tools/java/runfiles/RunfilesForStardoc.java b/tools/java/runfiles/RunfilesForStardoc.java new file mode 100644 index 00000000000000..84c3a62c75be89 --- /dev/null +++ b/tools/java/runfiles/RunfilesForStardoc.java @@ -0,0 +1,32 @@ +// Copyright 2022 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.runfiles; + +/** Additional runfiles functions only meant to be used by Stardoc. */ +public final class RunfilesForStardoc { + + /** + * Returns the canonical repository name. + * + * @param runfiles the {@link Runfiles} instance whose repo mapping should be used + * @param apparentRepositoryName the apparent repository name to resolve to a canonical one + */ + public static String getCanonicalRepositoryName( + Runfiles runfiles, String apparentRepositoryName) { + return runfiles.getCanonicalRepositoryName(apparentRepositoryName); + } + + private RunfilesForStardoc() {} +}
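
The commit message summarizes the mechanism — load labels now go through the Java runfiles library's repository mapping instead of `--dep_roots` probing — and the `SkydocMain.recursiveEval`/`pathOfLabel` hunks above implement it. The following is a minimal, hypothetical sketch (not part of the patch) of that resolution flow, using only the APIs visible in the diff (`Runfiles.preload`, `Preloaded.withSourceRepository`, `Runfiles.rlocation`, `RunfilesForStardoc.getCanonicalRepositoryName`); the repository name `rules_foo` and the label `//lib:defs.bzl` are made up for illustration.

```java
import com.google.devtools.build.runfiles.Runfiles;
import com.google.devtools.build.runfiles.RunfilesForStardoc;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

final class LoadResolutionSketch {
  public static void main(String[] args) throws IOException {
    // Parse RUNFILES_DIR / the runfiles manifest once, as SkydocMain's constructor does.
    Runfiles.Preloaded preloaded = Runfiles.preload();

    // Evaluation starts in the main repository, whose canonical name is the empty
    // string (SkydocMain passes RepositoryName.MAIN.getName()).
    String parentSourceRepository = "";

    // Suppose the file being evaluated contains load("@rules_foo//lib:defs.bzl", ...).
    // rlocation() maps the apparent name "rules_foo" through the repository mapping of
    // parentSourceRepository before looking the path up in the runfiles tree.
    Runfiles runfiles = preloaded.withSourceRepository(parentSourceRepository);
    Path loadedBzl = Paths.get(runfiles.rlocation("rules_foo/lib/defs.bzl"));

    // The canonical name of @rules_foo then becomes the source repository for resolving
    // any load() statements inside defs.bzl itself (the recursion in recursiveEval).
    String sourceRepository =
        RunfilesForStardoc.getCanonicalRepositoryName(runfiles, "rules_foo");

    System.out.println(loadedBzl + " lives in repository '" + sourceRepository + "'");
  }
}
```

This is why the `stardoc` macro above wraps the input and its `deps` into a `java_binary`'s `data`: the `.bzl` files must be present in the runfiles tree, together with its repo mapping, for `rlocation` to find them.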
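
The rewritten `SkydocTest` relies on the same machinery by pointing the runfiles library at a scratch directory via `RUNFILES_DIR`. A short, hypothetical sketch of that test-side pattern, assuming the directory layout used by `scratchRunfile()`; since the scratch tree has no `_repo_mapping` file, apparent repository names fall through unchanged (the `getOrDefault` fallback visible in the `Runfiles.java` hunk above).

```java
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.runfiles.Runfiles;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

final class ScratchRunfilesSketch {
  public static void main(String[] args) throws IOException {
    // A throwaway directory stands in for the runfiles tree; the tests use a JUnit
    // TemporaryFolder for the same purpose.
    Path runfilesDir = Files.createTempDirectory("stardoc-runfiles");

    // Files are written under "<workspace name>/<package>/<file>", matching the
    // "io_bazel/..." paths in scratchRunfile().
    Path bzl = runfilesDir.resolve("io_bazel/test/test.bzl");
    Files.createDirectories(bzl.getParent());
    Files.write(bzl, "def rule_impl(ctx):\n    return []\n".getBytes(StandardCharsets.UTF_8));

    // Directory-based runfiles lookup rooted at the scratch directory, the same
    // environment SkydocTest.setUp() now passes to SkydocMain.
    Runfiles.Preloaded preloaded =
        Runfiles.preload(ImmutableMap.of("RUNFILES_DIR", runfilesDir.toString()));
    System.out.println(preloaded.withSourceRepository("").rlocation("io_bazel/test/test.bzl"));
  }
}
```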