Clean up compute() function of RepositoryDelegatorFunction
A pure refactoring with no new functionality: handler lookup, fetching, and override setup are extracted into helper methods (getHandler, fetchRepository, setupOverride), marker-file bookkeeping moves into a nested DigestWriter class, and RepositoryName gains a small stripName helper used when reporting cache hits for a failed repository.
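The new helper simply drops a leading "@" if one is present; a minimal sketch of its behavior (the repository names below are made up for illustration):

    RepositoryName.stripName("@my_repo")  // -> "my_repo"
    RepositoryName.stripName("my_repo")   // -> "my_repo" (no leading '@', returned unchanged)
    RepositoryName.stripName("")          // -> ""        (the default repository)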
Closes #7727.
PiperOrigin-RevId: 238625657
diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java
index bda4c58..7d33c99 100644
--- a/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java
@@ -15,6 +15,7 @@
import com.google.common.eventbus.Subscribe;
import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCacheHitEvent;
+import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.repository.RepositoryFailedEvent;
@@ -56,7 +57,7 @@
@Subscribe
public void failed(RepositoryFailedEvent event) {
- String repo = event.getRepo();
+ String repo = RepositoryName.stripName(event.getRepo());
Set<Pair<String, URL>> cacheHits = cacheHitsByRepo.get(repo);
if (cacheHits != null && !cacheHits.isEmpty()) {
StringBuilder info = new StringBuilder();
diff --git a/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryName.java b/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryName.java
index eedd2c6..af92bdf 100644
--- a/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryName.java
+++ b/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryName.java
@@ -196,6 +196,14 @@
}
/**
+ * Returns the repository name without the leading "{@literal @}". For the default repository,
+ * returns "".
+ */
+ public static String stripName(String repoName) {
+ return repoName.startsWith("@") ? repoName.substring(1) : repoName;
+ }
+
+ /**
* Returns if this is the default repository, that is, {@link #name} is "".
*/
public boolean isDefault() {
diff --git a/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorFunction.java b/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorFunction.java
index fd1e8e0..a9dc2f5 100644
--- a/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorFunction.java
+++ b/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorFunction.java
@@ -16,8 +16,9 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
-import com.google.devtools.build.lib.actions.FileValue;
+import com.google.common.collect.Maps;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.events.Event;
@@ -81,9 +82,6 @@
// when we decide to update the format.
private static final int MARKER_FILE_VERSION = 3;
- // A special repository delegate used to handle Skylark remote repositories if present.
- public static final String SKYLARK_DELEGATE_NAME = "$skylark";
-
// Mapping of rule class name to RepositoryFunction.
private final ImmutableMap<String, RepositoryFunction> handlers;
@@ -114,7 +112,7 @@
private void setupRepositoryRoot(Path repoRoot) throws RepositoryFunctionException {
try {
FileSystemUtils.deleteTree(repoRoot);
- FileSystemUtils.createDirectoryAndParents(repoRoot.getParentDirectory());
+ Preconditions.checkNotNull(repoRoot.getParentDirectory()).createDirectoryAndParents();
} catch (IOException e) {
throw new RepositoryFunctionException(e, Transience.TRANSIENT);
}
@@ -124,21 +122,16 @@
public SkyValue compute(SkyKey skyKey, Environment env)
throws SkyFunctionException, InterruptedException {
RepositoryName repositoryName = (RepositoryName) skyKey.argument();
+
Map<RepositoryName, PathFragment> overrides = REPOSITORY_OVERRIDES.get(env);
- if (env.valuesMissing()) {
- return null;
- }
- String fetchUnconditionally = DEPENDENCY_FOR_UNCONDITIONAL_FETCHING.get(env);
- if (env.valuesMissing()) {
- return null;
- }
+ boolean doNotFetchUnconditionally =
+ DONT_FETCH_UNCONDITIONALLY.equals(DEPENDENCY_FOR_UNCONDITIONAL_FETCHING.get(env));
Path repoRoot = RepositoryFunction.getExternalRepositoryDirectory(directories)
.getRelative(repositoryName.strippedName());
- Path markerPath = getMarkerPath(directories, repositoryName.strippedName());
- if (overrides.containsKey(repositoryName)) {
- return setupOverride(
- repositoryName, overrides.get(repositoryName), env, repoRoot, markerPath);
+
+ if (Preconditions.checkNotNull(overrides).containsKey(repositoryName)) {
+ return setupOverride(overrides.get(repositoryName), env, repoRoot);
}
Rule rule;
@@ -151,91 +144,60 @@
return null;
}
- RepositoryFunction handler;
- if (rule.getRuleClassObject().isSkylark()) {
- handler = skylarkHandler;
- } else {
- handler = handlers.get(rule.getRuleClass());
- }
+ RepositoryFunction handler = getHandler(rule);
if (handler == null) {
// If we refer to a non repository rule then the repository does not exist.
return RepositoryDirectoryValue.NO_SUCH_REPOSITORY_VALUE;
}
- handler.setClientEnvironment(clientEnvironmentSupplier.get());
-
byte[] ruleSpecificData = handler.getRuleSpecificMarkerData(rule, env);
- if (ruleSpecificData == null) {
- return null;
- }
- String ruleKey = computeRuleKey(rule, ruleSpecificData);
- Map<String, String> markerData = new TreeMap<>();
- if (isFetch.get() && handler.isLocal(rule)) {
- // Local repositories are fetched regardless of the marker file because the operation is
- // generally fast and they do not depend on non-local data, so it does not make much sense to
- // try to cache from across server instances.
- setupRepositoryRoot(repoRoot);
- env.getListener().post(new RepositoryFetching(repositoryName.getName(), false));
- RepositoryDirectoryValue.Builder localRepo =
- handler.fetch(rule, repoRoot, directories, env, markerData, skyKey);
- if (localRepo == null) {
- return null;
- } else {
- // We write the marker file for local repository essentially for getting the digest and
- // injecting it in the RepositoryDirectoryValue.
- byte[] digest = writeMarkerFile(markerPath, markerData, ruleKey);
- env.getListener().post(new RepositoryFetching(repositoryName.getName(), true));
- return localRepo.setDigest(digest).build();
- }
- }
-
- // We check the repository root for existence here, but we can't depend on the FileValue,
- // because it's possible that we eventually create that directory in which case the FileValue
- // and the state of the file system would be inconsistent.
-
- byte[] markerHash = isFilesystemUpToDate(markerPath, rule, ruleKey, handler, env);
if (env.valuesMissing()) {
return null;
}
- if (DONT_FETCH_UNCONDITIONALLY.equals(fetchUnconditionally)
- && markerHash != null
- && repoRoot.exists()) {
- // Now that we know that it exists and that we should not fetch unconditionally, we can
- // declare a Skyframe dependency on the repository root.
- RepositoryFunction.getRepositoryDirectory(repoRoot, env);
- if (env.valuesMissing()) {
- return null;
- }
+ DigestWriter digestWriter =
+ new DigestWriter(
+ directories, repositoryName, rule, Preconditions.checkNotNull(ruleSpecificData));
- return RepositoryDirectoryValue.builder().setPath(repoRoot).setDigest(markerHash).build();
+ // Local repositories are fetched regardless of the marker file because the operation is
+ // generally fast and they do not depend on non-local data, so it does not make much sense to
+ // try to cache them across server instances.
+ boolean fetchLocalRepositoryAlways = isFetch.get() && handler.isLocal(rule);
+ if (!fetchLocalRepositoryAlways) {
+ // For the non-local repositories, check if they are already up-to-date:
+ // 1) unconditional fetching is not enabled, AND
+ // 2) repository directory exists, AND
+ // 3) marker file correctly describes the current repository state
+ if (doNotFetchUnconditionally && repoRoot.exists()) {
+ byte[] markerHash = digestWriter.areRepositoryAndMarkerFileConsistent(handler, env);
+ if (env.valuesMissing()) {
+ return null;
+ }
+ if (markerHash != null) {
+ // Now that we know that it exists and that we should not fetch unconditionally, we can
+ // declare a Skyframe dependency on the repository root.
+ RepositoryFunction.getRepositoryDirectory(repoRoot, env);
+ if (env.valuesMissing()) {
+ return null;
+ }
+ return RepositoryDirectoryValue.builder().setPath(repoRoot).setDigest(markerHash).build();
+ }
+ }
}
if (isFetch.get()) {
// Fetching enabled, go ahead.
- env.getListener().post(new RepositoryFetching(repositoryName.getName(), false));
- setupRepositoryRoot(repoRoot);
- RepositoryDirectoryValue.Builder result = null;
- try {
- result = handler.fetch(rule, repoRoot, directories, env, markerData, skyKey);
- } catch (SkyFunctionException e) {
- // Upon an exceptional exit, the fetching of that repository is over as well.
- env.getListener().post(new RepositoryFetching(repositoryName.getName(), true));
- env.getListener().post(new RepositoryFailedEvent(repositoryName.strippedName()));
- throw e;
- }
- if (env.valuesMissing()) {
- env.getListener()
- .post(new RepositoryFetching(repositoryName.getName(), false, "Restarting."));
+ RepositoryDirectoryValue.Builder builder =
+ fetchRepository(skyKey, repoRoot, env, digestWriter.getMarkerData(), handler, rule);
+ if (builder == null) {
return null;
}
- env.getListener().post(new RepositoryFetching(repositoryName.getName(), true));
// No new Skyframe dependencies must be added between calling the repository implementation
// and writing the marker file because if they aren't computed, it would cause a Skyframe
// restart thus calling the possibly very slow (networking, decompression...) fetch()
// operation again. So we write the marker file here immediately.
- byte[] digest = writeMarkerFile(markerPath, markerData, ruleKey);
- return result.setDigest(digest).build();
+ byte[] digest = digestWriter.writeMarkerFile();
+ return builder.setDigest(digest).build();
}
if (!repoRoot.exists()) {
@@ -248,7 +210,7 @@
// Declare a Skyframe dependency so that this is re-evaluated when something happens to the
// directory.
- FileValue repoRootValue = RepositoryFunction.getRepositoryDirectory(repoRoot, env);
+ RepositoryFunction.getRepositoryDirectory(repoRoot, env);
if (env.valuesMissing()) {
return null;
}
@@ -261,8 +223,53 @@
+ "run the build without the '--nofetch' command line option.",
rule.getName())));
- return RepositoryDirectoryValue.builder().setPath(repoRootValue.realRootedPath().asPath())
- .setFetchingDelayed().build();
+ return RepositoryDirectoryValue.builder().setPath(repoRoot).setFetchingDelayed().build();
+ }
+
+ private RepositoryFunction getHandler(Rule rule) {
+ RepositoryFunction handler;
+ if (rule.getRuleClassObject().isSkylark()) {
+ handler = skylarkHandler;
+ } else {
+ handler = handlers.get(rule.getRuleClass());
+ }
+ if (handler != null) {
+ handler.setClientEnvironment(clientEnvironmentSupplier.get());
+ }
+
+ return handler;
+ }
+
+ private RepositoryDirectoryValue.Builder fetchRepository(
+ SkyKey skyKey,
+ Path repoRoot,
+ Environment env,
+ Map<String, String> markerData,
+ RepositoryFunction handler,
+ Rule rule)
+ throws SkyFunctionException, InterruptedException {
+
+ setupRepositoryRoot(repoRoot);
+
+ String repositoryName = ((RepositoryName) skyKey.argument()).getName();
+ env.getListener().post(new RepositoryFetching(repositoryName, false));
+
+ RepositoryDirectoryValue.Builder repoBuilder;
+ try {
+ repoBuilder = handler.fetch(rule, repoRoot, directories, env, markerData, skyKey);
+ } catch (SkyFunctionException e) {
+ // Upon an exceptional exit, the fetching of that repository is over as well.
+ env.getListener().post(new RepositoryFetching(repositoryName, true));
+ env.getListener().post(new RepositoryFailedEvent(repositoryName));
+ throw e;
+ }
+
+ if (env.valuesMissing()) {
+ env.getListener().post(new RepositoryFetching(repositoryName, false, "Restarting."));
+ return null;
+ }
+ env.getListener().post(new RepositoryFetching(repositoryName, true));
+ return Preconditions.checkNotNull(repoBuilder);
}
/**
@@ -277,73 +284,22 @@
return ExternalPackageUtil.getRuleByName(repositoryName.strippedName(), env);
}
- private String computeRuleKey(Rule rule, byte[] ruleSpecificData) {
- return new Fingerprint().addBytes(RuleFormatter.serializeRule(rule).build().toByteArray())
- .addBytes(ruleSpecificData)
- .addInt(MARKER_FILE_VERSION).hexDigestAndReset();
+ @Override
+ public String extractTag(SkyKey skyKey) {
+ return null;
}
- /**
- * Checks if the state of the repository in the file system is consistent with the rule in the
- * WORKSPACE file.
- *
- * <p>
- * Deletes the marker file if not so that no matter what happens after, the state of the file
- * system stays consistent.
- *
- * <p>
- * Returns null if the file system is not up to date and a hash of the marker file if the file
- * system is up to date.
- */
- @Nullable
- private byte[] isFilesystemUpToDate(Path markerPath, Rule rule, String ruleKey,
- RepositoryFunction handler, Environment env)
+ private RepositoryDirectoryValue setupOverride(
+ PathFragment sourcePath, Environment env, Path repoRoot)
throws RepositoryFunctionException, InterruptedException {
- try {
- if (!markerPath.exists()) {
- return null;
- }
-
- String content = FileSystemUtils.readContent(markerPath, StandardCharsets.UTF_8);
-
- String[] lines = content.split("\n");
- Map<String, String> markerData = new TreeMap<>();
- String markerRuleKey = "";
- boolean firstLine = true;
- for (String line : lines) {
- if (firstLine) {
- markerRuleKey = line;
- firstLine = false;
- } else {
- int sChar = line.indexOf(' ');
- String key = line;
- String value = "";
- if (sChar > 0) {
- key = unescape(line.substring(0, sChar));
- value = unescape(line.substring(sChar + 1));
- }
- markerData.put(key, value);
- }
- }
- boolean result = false;
- if (markerRuleKey.equals(ruleKey)) {
- result = handler.verifyMarkerData(rule, markerData, env);
- if (env.valuesMissing()) {
- return null;
- }
- }
-
- if (result) {
- return new Fingerprint().addString(content).digestAndReset();
- } else {
- // So that we are in a consistent state if something happens while fetching the repository
- markerPath.delete();
- return null;
- }
-
- } catch (IOException e) {
- throw new RepositoryFunctionException(e, Transience.TRANSIENT);
+ setupRepositoryRoot(repoRoot);
+ RepositoryDirectoryValue.Builder directoryValue =
+ LocalRepositoryFunction.symlink(repoRoot, sourcePath, env);
+ if (directoryValue == null) {
+ return null;
}
+ byte[] digest = new byte[] {};
+ return directoryValue.setDigest(digest).build();
}
// Escape a value for the marker file
@@ -358,7 +314,7 @@
if (str.equals("\\0")) {
return null; // \0 == null string
}
- StringBuffer result = new StringBuffer();
+ StringBuilder result = new StringBuilder();
boolean escaped = false;
for (int i = 0; i < str.length(); i++) {
char c = str.charAt(i);
@@ -380,10 +336,24 @@
return result.toString();
}
- private byte[] writeMarkerFile(
- Path markerPath, Map<String, String> markerData, String ruleKey)
- throws RepositoryFunctionException {
- try {
+ private static class DigestWriter {
+ private final Path markerPath;
+ private final Rule rule;
+ private final Map<String, String> markerData;
+ private final String ruleKey;
+
+ DigestWriter(
+ BlazeDirectories directories,
+ RepositoryName repositoryName,
+ Rule rule,
+ byte[] ruleSpecificData) {
+ ruleKey = computeRuleKey(rule, ruleSpecificData);
+ markerPath = getMarkerPath(directories, repositoryName.strippedName());
+ this.rule = rule;
+ markerData = Maps.newHashMap();
+ }
+
+ byte[] writeMarkerFile() throws RepositoryFunctionException {
StringBuilder builder = new StringBuilder();
builder.append(ruleKey).append("\n");
for (Map.Entry<String, String> data : markerData.entrySet()) {
@@ -392,38 +362,99 @@
builder.append(escape(key)).append(" ").append(escape(value)).append("\n");
}
String content = builder.toString();
- FileSystemUtils.writeContent(markerPath, StandardCharsets.UTF_8, content);
+ try {
+ FileSystemUtils.writeContent(markerPath, StandardCharsets.UTF_8, content);
+ } catch (IOException e) {
+ throw new RepositoryFunctionException(e, Transience.TRANSIENT);
+ }
return new Fingerprint().addString(content).digestAndReset();
- } catch (IOException e) {
- throw new RepositoryFunctionException(e, Transience.TRANSIENT);
}
- }
- private static Path getMarkerPath(BlazeDirectories directories, String ruleName) {
- return RepositoryFunction.getExternalRepositoryDirectory(directories)
- .getChild("@" + ruleName + ".marker");
- }
+ /**
+ * Checks if the state of the repository in the file system is consistent with the rule in the
+ * WORKSPACE file.
+ *
+ * <p>Deletes the marker file if it is not, so that no matter what happens afterwards, the state of the file
+ * system stays consistent.
+ *
+ * <p>Returns null if the file system is not up to date and a hash of the marker file if the
+ * file system is up to date.
+ *
+ * <p>We check the repository root for existence here, but we can't depend on the FileValue,
+ * because it's possible that we eventually create that directory in which case the FileValue
+ * and the state of the file system would be inconsistent.
+ */
+ byte[] areRepositoryAndMarkerFileConsistent(RepositoryFunction handler, Environment env)
+ throws RepositoryFunctionException, InterruptedException {
+ if (!markerPath.exists()) {
+ return null;
+ }
- @Override
- public String extractTag(SkyKey skyKey) {
- return null;
- }
+ Map<String, String> markerData = new TreeMap<>();
+ String content;
+ try {
+ content = FileSystemUtils.readContent(markerPath, StandardCharsets.UTF_8);
+ String markerRuleKey = readMarkerFile(content, markerData);
+ boolean verified = false;
+ if (Preconditions.checkNotNull(ruleKey).equals(markerRuleKey)) {
+ verified = handler.verifyMarkerData(rule, markerData, env);
+ if (env.valuesMissing()) {
+ return null;
+ }
+ }
- private RepositoryDirectoryValue setupOverride(
- RepositoryName repositoryName, PathFragment sourcePath, Environment env, Path repoRoot,
- Path markerPath)
- throws RepositoryFunctionException, InterruptedException {
- setupRepositoryRoot(repoRoot);
- RepositoryDirectoryValue.Builder directoryValue = LocalRepositoryFunction.symlink(
- repoRoot, sourcePath, env);
- if (directoryValue == null) {
- return null;
+ if (verified) {
+ return new Fingerprint().addString(content).digestAndReset();
+ } else {
+ // So that we are in a consistent state if something happens while fetching the repository
+ markerPath.delete();
+ return null;
+ }
+ } catch (IOException e) {
+ throw new RepositoryFunctionException(e, Transience.TRANSIENT);
+ }
}
- String ruleKey = new Fingerprint().addBytes(repositoryName.strippedName().getBytes())
- .addBytes(repoRoot.getFileSystem().getPath(sourcePath).getPathString().getBytes())
- .addInt(MARKER_FILE_VERSION).hexDigestAndReset();
- byte[] digest = writeMarkerFile(markerPath, new TreeMap<String, String>(), ruleKey);
- return directoryValue.setDigest(digest).build();
+
+ Map<String, String> getMarkerData() {
+ return markerData;
+ }
+
+ @Nullable
+ private String readMarkerFile(String content, Map<String, String> markerData) {
+ String markerRuleKey = null;
+ String[] lines = content.split("\n");
+
+ boolean firstLine = true;
+ for (String line : lines) {
+ if (firstLine) {
+ markerRuleKey = line;
+ firstLine = false;
+ } else {
+ int sChar = line.indexOf(' ');
+ String key = line;
+ String value = "";
+ if (sChar > 0) {
+ key = unescape(line.substring(0, sChar));
+ value = unescape(line.substring(sChar + 1));
+ }
+ markerData.put(key, value);
+ }
+ }
+ return markerRuleKey;
+ }
+
+ private String computeRuleKey(Rule rule, byte[] ruleSpecificData) {
+ return new Fingerprint()
+ .addBytes(RuleFormatter.serializeRule(rule).build().toByteArray())
+ .addBytes(ruleSpecificData)
+ .addInt(MARKER_FILE_VERSION)
+ .hexDigestAndReset();
+ }
+
+ private static Path getMarkerPath(BlazeDirectories directories, String ruleName) {
+ return RepositoryFunction.getExternalRepositoryDirectory(directories)
+ .getChild("@" + ruleName + ".marker");
+ }
}
private class RepositoryFetching implements FetchProgress {
diff --git a/src/test/shell/bazel/external_integration_test.sh b/src/test/shell/bazel/external_integration_test.sh
index 52ab719..a9adee8 100755
--- a/src/test/shell/bazel/external_integration_test.sh
+++ b/src/test/shell/bazel/external_integration_test.sh
@@ -1450,6 +1450,9 @@
cmd = "cp $< $@"
)
EOF
+
+ echo "Build #1"
+
# build to fill the cache
bazel build //:it || fail "Expected success"
@@ -1457,6 +1460,8 @@
bazel clean --expunge
rm "${WRKDIR}/ext-1.1.zip"
+ echo "Build #2"
+
# We still should be able to build, as the file is in cache
bazel build //:it > "${TEST_log}" 2>&1 || fail "Expected success"
# As a cache hit is a perfectly normal thing, we don't expect it to be
@@ -1471,6 +1476,9 @@
w
q
EOI
+
+ echo "Build #3"
+
# The build should fail, as the prefix is not found. The available prefix
# should be reported as well as the information that the file was taken
# from cache.
@@ -1495,6 +1503,9 @@
w
q
EOI
+
+ echo "Build #4"
+
bazel build //:it > "${TEST_log}" 2>&1 && fail "Should not succeed" || :
expect_not_log 'prefix'
expect_log 'cp ext-1.2/foo.txt ext-1.2/BUILD'