Fix treatment of symlink directories that exist above output files. This condition never arises in a clean build, but may arise on incremental builds in which output artifacts from the first build become part of the directory hierarchy of an output file in the second build.
The existing checks were insufficient for two primary reasons:
1. The fast path that creates output directories is not sensitive to symlinks. If a/b/c exists, where b is a symlink, the check happily reports that "a/b/c" is a directory.
2. The recovery path previously did the walk upwards from the output directory, stopping when it found a directory. Again, this fails to handle upward symlinks.
A cache is introduced to mitigate the additional system calls here.
An alternative approach would be to perform the symlink checks along with the artifact prefix conflict checks. These checks are done only when necessary (e.g., when any configured targets are analyzed / re-analyzed). It would be nice to limit the cases in which we do the new symlink checks, but this is unsound in the face of external edits to the output tree. Blaze is sound in all other such mutations to the output tree, so I've attempted to maintain our correctness contract here.
RELNOTES: None
PiperOrigin-RevId: 284052424
diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java
index f74d260..ed071e4 100644
--- a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java
+++ b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java
@@ -14,11 +14,13 @@
package com.google.devtools.build.lib.buildtool;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.cache.CacheBuilderSpec;
import com.google.devtools.build.lib.actions.LocalHostCapacity;
import com.google.devtools.build.lib.util.OptionsUtils;
import com.google.devtools.build.lib.util.ResourceConverter;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converters;
+import com.google.devtools.common.options.Converters.CacheBuilderSpecConverter;
import com.google.devtools.common.options.Converters.RangeConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDefinition;
@@ -239,6 +241,17 @@
public boolean finalizeActions;
@Option(
+ name = "directory_creation_cache",
+ defaultValue = "maximumSize=100000",
+ documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
+ effectTags = {OptionEffectTag.EXECUTION},
+ converter = CacheBuilderSpecConverter.class,
+ help =
+ "Describes the cache used to store known regular directories as they're created. Parent"
+ + " directories of output files are created on-demand during action execution.")
+ public CacheBuilderSpec directoryCreationCacheSpec;
+
+ @Option(
name = "aspects",
converter = Converters.CommaSeparatedOptionListConverter.class,
defaultValue = "",
diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeActionExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeActionExecutor.java
index 37b8466..dd893ee 100644
--- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeActionExecutor.java
+++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeActionExecutor.java
@@ -15,6 +15,8 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
@@ -95,6 +97,7 @@
import com.google.devtools.build.lib.skyframe.ActionExecutionState.SharedActionCallback;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.io.FileOutErr;
+import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.OutputService;
import com.google.devtools.build.lib.vfs.OutputService.ActionFileSystemType;
@@ -109,6 +112,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -189,6 +193,10 @@
// at the start of its next attempt,
private ConcurrentMap<OwnerlessArtifactWrapper, ImmutableList<SkyKey>> lostDiscoveredInputsMap;
+ // Directories which are known to be created as regular directories within this invocation. This
+ // implies parent directories are also regular directories.
+ private Set<PathFragment> knownRegularDirectories;
+
// Errors found when examining all actions in the graph are stored here, so that they can be
// thrown when execution of the action is requested. This field is set during each call to
// findAndStoreArtifactConflicts, and is preserved across builds otherwise.
@@ -429,6 +437,12 @@
this.outputService = outputService;
RemoteOptions remoteOptions = options.getOptions(RemoteOptions.class);
this.bazelRemoteExecutionEnabled = remoteOptions != null && remoteOptions.isRemoteEnabled();
+
+ Cache<PathFragment, Boolean> cache =
+ CacheBuilder.from(options.getOptions(BuildRequestOptions.class).directoryCreationCacheSpec)
+ .concurrencyLevel(Runtime.getRuntime().availableProcessors())
+ .build();
+ this.knownRegularDirectories = Collections.newSetFromMap(cache.asMap());
}
public void setActionLogBufferPathGenerator(
@@ -488,6 +502,7 @@
this.lostDiscoveredInputsMap = null;
this.actionCacheChecker = null;
this.topDownActionCache = null;
+ this.knownRegularDirectories = null;
}
/**
@@ -990,7 +1005,11 @@
// that the output directories for stdout and stderr exist.
setupActionFsFileOutErr(actionExecutionContext.getFileOutErr(), action);
}
- createOutputDirectories(action, actionExecutionContext);
+ if (actionFileSystemType().inMemoryFileSystem()) {
+ createActionFsOutputDirectories(action, actionExecutionContext);
+ } else {
+ createOutputDirectories(action, actionExecutionContext);
+ }
} catch (ActionExecutionException e) {
// This try-catch block cannot trigger rewinding, so it is safe to notify the status
// reporter and also post the ActionCompletionEvent.
@@ -1229,6 +1248,63 @@
}
}
+ /**
+ * Create output directories for an ActionFS. The action-local filesystem starts empty, so we
+ * expect the output directory creation to always succeed. There can be no interference from state
+ * left behind by prior builds or other actions intra-build.
+ */
+ private void createActionFsOutputDirectories(Action action, ActionExecutionContext context)
+ throws ActionExecutionException {
+ try {
+ Set<Path> done = new HashSet<>(); // avoid redundant calls for the same directory.
+ for (Artifact outputFile : action.getOutputs()) {
+ Path outputDir;
+ if (outputFile.isTreeArtifact()) {
+ outputDir = context.getPathResolver().toPath(outputFile);
+ } else {
+ outputDir = context.getPathResolver().toPath(outputFile).getParentDirectory();
+ }
+
+ if (done.add(outputDir)) {
+ outputDir.createDirectoryAndParents();
+ }
+ }
+ } catch (IOException e) {
+ ActionExecutionException ex =
+ new ActionExecutionException(
+ "failed to create output directory: " + e.getMessage(), e, action, false);
+ printError(ex.getMessage(), action, null);
+ throw ex;
+ }
+ }
+
+ /**
+ * Ensure that no symlinks exist between the output root and the output file. These are all
+ * expected to be regular directories. Violations of this expectation can only come from state
+ * left behind by previous invocations or external filesystem mutation.
+ */
+ private void symlinkCheck(
+ final Path dir, final Artifact outputFile, ActionExecutionContext context)
+ throws IOException {
+ PathFragment root = outputFile.getRoot().getRoot().asPath().asFragment();
+ Path curDir = context.getPathResolver().convertPath(dir);
+ Set<PathFragment> checkDirs = new HashSet<>();
+ while (!curDir.asFragment().equals(root)) {
+ // Fast path: Somebody already checked that this is a regular directory this invocation.
+ if (knownRegularDirectories.contains(curDir.asFragment())) {
+ return;
+ }
+ if (!curDir.isDirectory(Symlinks.NOFOLLOW)) {
+ throw new IOException(curDir + " is not a regular directory");
+ }
+ checkDirs.add(curDir.asFragment());
+ curDir = curDir.getParentDirectory();
+ }
+
+ // Defer adding to known regular directories until we've checked all parent directories.
+ knownRegularDirectories.addAll(checkDirs);
+ }
+
private void createOutputDirectories(Action action, ActionExecutionContext context)
throws ActionExecutionException {
try {
@@ -1243,23 +1319,30 @@
if (done.add(outputDir)) {
try {
- outputDir.createDirectoryAndParents();
+ if (!knownRegularDirectories.contains(outputDir.asFragment())) {
+ outputDir.createDirectoryAndParents();
+ symlinkCheck(outputDir, outputFile, context);
+ }
continue;
} catch (IOException e) {
/* Fall through to plan B. */
}
// Possibly some direct ancestors are not directories. In that case, we traverse the
- // ancestors upward, deleting any non-directories, until we reach a directory, then try
- // again. This handles the case where a file becomes a directory, either from one build to
- // another, or within a single build.
+ // ancestors downward, deleting any non-directories. This handles the case where a file
+ // becomes a directory. The traversal is done downward because otherwise we may delete
+ // files through a symlink in a parent directory. Since Blaze never creates such
+ // directories within a build, we have no idea where on disk we're actually deleting.
//
// Symlinks should not be followed so in order to clean up symlinks pointing to Fileset
// outputs from previous builds. See bug [incremental build of Fileset fails if
// Fileset.out was changed to be a subdirectory of the old value].
try {
- Path p = outputDir;
- while (true) {
+ Path p =
+ context.getPathResolver().transformRoot(outputFile.getRoot().getRoot()).asPath();
+ PathFragment relativePath = outputDir.relativeTo(p);
+ for (int i = 0; i < relativePath.segmentCount(); i++) {
+ p = p.getRelative(relativePath.getSegment(i));
// This lock ensures that the only thread that observes a filesystem transition in
// which the path p first exists and then does not is the thread that calls
@@ -1282,21 +1365,23 @@
Lock lock = outputDirectoryDeletionLock.get(p);
lock.lock();
try {
- if (p.exists(Symlinks.NOFOLLOW)) {
- boolean isDirectory = p.isDirectory(Symlinks.NOFOLLOW);
- if (isDirectory) {
- // If this directory used to be a tree artifact it won't be writable
- p.setWritable(true);
- break;
- }
- // p may be a file or dangling symlink, or a symlink to an old Fileset output
+ FileStatus stat = p.statIfFound(Symlinks.NOFOLLOW);
+ if (stat == null) {
+ // Missing entry: Break out and create expected directories.
+ break;
+ }
+ if (stat.isDirectory()) {
+ // If this directory used to be a tree artifact it won't be writable.
+ p.setWritable(true);
+ knownRegularDirectories.add(p.asFragment());
+ } else {
+ // p may be a file or symlink (possibly from a Fileset in a previous build).
p.delete(); // throws IOException
+ break;
}
} finally {
lock.unlock();
}
-
- p = p.getParentDirectory();
}
outputDir.createDirectoryAndParents();
} catch (IOException e) {
diff --git a/src/main/java/com/google/devtools/common/options/Converters.java b/src/main/java/com/google/devtools/common/options/Converters.java
index f4e5e55..e6884fd 100644
--- a/src/main/java/com/google/devtools/common/options/Converters.java
+++ b/src/main/java/com/google/devtools/common/options/Converters.java
@@ -14,6 +14,8 @@
package com.google.devtools.common.options;
import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.cache.CacheBuilderSpec;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
@@ -612,4 +614,24 @@
super(0, 100);
}
}
+
+ /**
+ * A {@link Converter} for {@link CacheBuilderSpec}. The spec may be empty, in which case this
+ * converter returns null.
+ */
+ public static class CacheBuilderSpecConverter implements Converter<CacheBuilderSpec> {
+ @Override
+ public CacheBuilderSpec convert(String spec) throws OptionsParsingException {
+ try {
+ return Strings.isNullOrEmpty(spec) ? null : CacheBuilderSpec.parse(spec);
+ } catch (IllegalArgumentException e) {
+ throw new OptionsParsingException("Failed to parse CacheBuilderSpec: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public String getTypeDescription() {
+ return "Converts to a CacheBuilderSpec, or null if the input is empty";
+ }
+ }
}