Add experimental reuse of non-worker sandboxes.
This uses the same machinery as worker sandboxing to reuse existing sandbox directories. Whereas worker sandboxes stay in place between actions, non-worker sandboxes are moved aside (stashed) after use and then moved back to the new sandbox location when we want to reuse them. When reusing a sandbox, we traverse the reused directory and remove anything that doesn't match what the new action needs.
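
To make that flow concrete, here is a minimal, self-contained sketch of the stash/reuse cycle using plain java.nio rather than Bazel's vfs types; the class and member names (SandboxStashSketch, stashBase, COUNTER) are illustrative only, and the actual implementation is in the diff below.

  // Illustrative sketch of the stash/reuse idea; not part of this change.
  import java.io.IOException;
  import java.nio.file.DirectoryStream;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import java.nio.file.StandardCopyOption;
  import java.util.concurrent.atomic.AtomicInteger;

  final class SandboxStashSketch {
    // Incrementing counter so that stashed sandboxes get unique names.
    private static final AtomicInteger COUNTER = new AtomicInteger();
    private final Path stashBase; // e.g. <sandboxBase>/sandbox_stash/<mnemonic>

    SandboxStashSketch(Path stashBase) throws IOException {
      this.stashBase = Files.createDirectories(stashBase);
    }

    /** After an action finishes, move its sandbox aside instead of deleting it. */
    void stash(Path sandboxPath) throws IOException {
      Path target = stashBase.resolve(Integer.toString(COUNTER.incrementAndGet()));
      Files.move(sandboxPath, target, StandardCopyOption.ATOMIC_MOVE);
    }

    /**
     * Before setting up a new sandbox, try to move a stashed one into place. Returns true on
     * success; the caller then walks the reused tree and deletes anything the new action does
     * not need (cleanExisting in the change below).
     */
    boolean reuse(Path sandboxPath) throws IOException {
      try (DirectoryStream<Path> stashes = Files.newDirectoryStream(stashBase)) {
        for (Path stashed : stashes) {
          try {
            // The freshly created sandbox root is still empty, so remove it before renaming.
            Files.deleteIfExists(sandboxPath);
            Files.move(stashed, sandboxPath, StandardCopyOption.ATOMIC_MOVE);
            return true;
          } catch (IOException e) {
            // Another thread may have grabbed this stash; try the next one.
          }
        }
      }
      // No stash could be used; the caller recreates the (now missing) sandbox root.
      return false;
    }
  }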
Timing on Mac of degenerate case from GitHub issue #8230, last 20 of 40 builds:
- `--strategy=standalone`: Avg. 8.45s
- `--strategy=sandbox`: Avg. 56.09s (+563%)
- `--strategy=sandbox --experimental_reuse_sandbox_directories`: Avg. 10.72s (+27%)
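
For local experimentation, the flag can also be turned on via .bazelrc rather than on every command line, e.g.:

  build --experimental_reuse_sandbox_directories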
RELNOTES: Adds --experimental_reuse_sandbox_directories flag to reuse already-created non-worker sandboxes with cleanup.
PiperOrigin-RevId: 379686176
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/AbstractContainerizingSandboxedSpawn.java b/src/main/java/com/google/devtools/build/lib/sandbox/AbstractContainerizingSandboxedSpawn.java
index cdc5bb8..5829417 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/AbstractContainerizingSandboxedSpawn.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/AbstractContainerizingSandboxedSpawn.java
@@ -14,10 +14,9 @@
package com.google.devtools.build.lib.sandbox;
-import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Preconditions;
-import com.google.common.collect.Iterables;
+import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.exec.TreeDeleter;
import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxInputs;
import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxOutputs;
@@ -26,9 +25,11 @@
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.HashSet;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.stream.Collectors;
import javax.annotation.Nullable;
/**
@@ -37,12 +38,12 @@
*/
public abstract class AbstractContainerizingSandboxedSpawn implements SandboxedSpawn {
- private final Path sandboxPath;
- private final Path sandboxExecRoot;
+ final Path sandboxPath;
+ final Path sandboxExecRoot;
private final List<String> arguments;
private final Map<String, String> environment;
- private final SandboxInputs inputs;
- private final SandboxOutputs outputs;
+ final SandboxInputs inputs;
+ final SandboxOutputs outputs;
private final Set<Path> writableDirs;
private final TreeDeleter treeDeleter;
private final Path statisticsPath;
@@ -91,13 +92,35 @@
@Override
public void createFileSystem() throws IOException {
- createDirectories();
- createInputs(inputs);
+ // First compute all the inputs and directories that we need. This is based only on
+ // `workerFiles`, `inputs` and `outputs` and won't do any I/O.
+ Set<PathFragment> inputsToCreate = new LinkedHashSet<>();
+ LinkedHashSet<PathFragment> dirsToCreate = new LinkedHashSet<>();
+ Set<PathFragment> writableSandboxDirs =
+ writableDirs.stream()
+ .filter(p -> p.startsWith(sandboxExecRoot))
+ .map(p -> p.relativeTo(sandboxExecRoot))
+ .collect(Collectors.toSet());
+ SandboxHelpers.populateInputsAndDirsToCreate(
+ inputs, ImmutableSet.of(), outputs, writableSandboxDirs, inputsToCreate, dirsToCreate);
+
+ // Allow subclasses to filter out inputs and dirs that don't need to be created.
+ filterInputsAndDirsToCreate(inputsToCreate, dirsToCreate);
+
+ // Finally create what needs creating.
+ createDirectories(dirsToCreate);
+ createInputs(inputsToCreate, inputs);
inputs.materializeVirtualInputs(sandboxExecRoot);
}
+ protected void filterInputsAndDirsToCreate(
+ Set<PathFragment> inputsToCreate, LinkedHashSet<PathFragment> dirsToCreate)
+ throws IOException {}
+
/**
- * No input can be a child of another input, because otherwise we might try to create a symlink
+ * Creates all directories needed for the sandbox.
+ *
+ * <p>No input can be a child of another input, because otherwise we might try to create a symlink
* below another symlink we created earlier - which means we'd actually end up writing somewhere
* in the workspace.
*
@@ -108,19 +131,15 @@
* directories, too, because we'll get an IOException with EEXIST if inputs happen to be nested
* once we start creating the symlinks for all inputs.
*/
- private void createDirectories() throws IOException {
+ void createDirectories(Iterable<PathFragment> dirsToCreate) throws IOException {
Set<Path> knownDirectories = new HashSet<>();
// Add sandboxExecRoot and it's parent -- all paths must fall under the parent of
- // sandboxExecRoot and we know that sandboxExecRoot exists.
+ // sandboxExecRoot and we know that sandboxExecRoot exists. This stops the recursion in
+ // createDirectoryAndParentsInSandboxRoot.
knownDirectories.add(sandboxExecRoot);
knownDirectories.add(sandboxExecRoot.getParentDirectory());
- Iterable<PathFragment> files =
- Iterables.concat(
- inputs.getFiles().keySet(), inputs.getSymlinks().keySet(), outputs.files());
- for (PathFragment path :
- Iterables.concat(
- Iterables.transform(files, PathFragment::getParentDirectory), outputs.dirs())) {
+ for (PathFragment path : dirsToCreate) {
Preconditions.checkArgument(!path.isAbsolute(), path);
if (path.containsUplevelReferences() && path.isMultiSegment()) {
// Allow a single up-level reference to allow inputs from the siblings of the main
@@ -132,56 +151,36 @@
path);
}
- createDirectoryAndParentsInSandboxRoot(sandboxExecRoot.getRelative(path), knownDirectories);
- }
-
- for (Path dir : writableDirs) {
- if (dir.startsWith(sandboxExecRoot)) {
- createDirectoryAndParentsInSandboxRoot(dir, knownDirectories);
- }
+ SandboxHelpers.createDirectoryAndParentsInSandboxRoot(
+ sandboxExecRoot.getRelative(path), knownDirectories, sandboxExecRoot);
}
}
/**
- * Creates directory and all ancestors for it at a given path.
+ * Creates all inputs needed for this spawn's sandbox.
*
- * <p>This method uses (and updates) the set of already known directories in order to minimize the
- * IO involved with creating directories. For example a path of {@code 1/2/3/4} created after
- * {@code 1/2/3/5} only calls for creating {@code 1/2/3/5}. We can use the set of known
- * directories to discover that {@code 1/2/3} already exists instead of deferring to the
- * filesystem for it.
+ * @param inputsToCreate The inputs that actually need to be created. Some inputs may already
+ * exist if we're reusing a previously existing sandbox.
+ * @param inputs All the inputs for this spawn.
*/
- private void createDirectoryAndParentsInSandboxRoot(Path path, Set<Path> knownDirectories)
+ void createInputs(Iterable<PathFragment> inputsToCreate, SandboxInputs inputs)
throws IOException {
- if (knownDirectories.contains(path)) {
- return;
- }
- createDirectoryAndParentsInSandboxRoot(
- checkNotNull(
- path.getParentDirectory(),
- "All paths should be under/siblings of sandboxExecRoot: %s",
- sandboxExecRoot),
- knownDirectories);
- path.createDirectory();
- knownDirectories.add(path);
- }
-
- protected void createInputs(SandboxInputs inputs) throws IOException {
- // All input files are relative to the execroot.
- for (Map.Entry<PathFragment, Path> entry : inputs.getFiles().entrySet()) {
- Path key = sandboxExecRoot.getRelative(entry.getKey());
- // A null value means that we're supposed to create an empty file as the input.
- if (entry.getValue() != null) {
- copyFile(entry.getValue(), key);
- } else {
- FileSystemUtils.createEmptyFile(key);
+ for (PathFragment fragment : inputsToCreate) {
+ Path key = sandboxExecRoot.getRelative(fragment);
+ if (inputs.getFiles().containsKey(fragment)) {
+ Path fileDest = inputs.getFiles().get(fragment);
+ if (fileDest != null) {
+ copyFile(fileDest, key);
+ } else {
+ FileSystemUtils.createEmptyFile(key);
+ }
+ } else if (inputs.getSymlinks().containsKey(fragment)) {
+ PathFragment symlinkDest = inputs.getSymlinks().get(fragment);
+ if (symlinkDest != null) {
+ key.createSymbolicLink(symlinkDest);
+ }
}
}
-
- for (Map.Entry<PathFragment, PathFragment> entry : inputs.getSymlinks().entrySet()) {
- Path key = sandboxExecRoot.getRelative(entry.getKey());
- key.createSymbolicLink(entry.getValue());
- }
}
protected abstract void copyFile(Path source, Path target) throws IOException;
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/BUILD b/src/main/java/com/google/devtools/build/lib/sandbox/BUILD
index f0d7105..0dffde4 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/BUILD
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/BUILD
@@ -23,6 +23,7 @@
"//src/main/java/com/google/devtools/build/lib/analysis:blaze_directories",
"//src/main/java/com/google/devtools/build/lib/analysis:test/test_configuration",
"//src/main/java/com/google/devtools/build/lib/analysis/platform:platform_utils",
+ "//src/main/java/com/google/devtools/build/lib/cmdline",
"//src/main/java/com/google/devtools/build/lib/events",
"//src/main/java/com/google/devtools/build/lib/exec:abstract_spawn_strategy",
"//src/main/java/com/google/devtools/build/lib/exec:bin_tools",
@@ -40,7 +41,6 @@
"//src/main/java/com/google/devtools/build/lib/util:abrupt_exit_exception",
"//src/main/java/com/google/devtools/build/lib/util:command",
"//src/main/java/com/google/devtools/build/lib/util:detailed_exit_code",
- "//src/main/java/com/google/devtools/build/lib/util:exit_code",
"//src/main/java/com/google/devtools/build/lib/util:os",
"//src/main/java/com/google/devtools/build/lib/util:process",
"//src/main/java/com/google/devtools/build/lib/util:resource_converter",
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/DarwinSandboxedSpawnRunner.java b/src/main/java/com/google/devtools/build/lib/sandbox/DarwinSandboxedSpawnRunner.java
index f510d35..4148b9c 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/DarwinSandboxedSpawnRunner.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/DarwinSandboxedSpawnRunner.java
@@ -307,7 +307,10 @@
outputs,
writableDirs,
treeDeleter,
- statisticsPath) {
+ statisticsPath,
+ getSandboxOptions().reuseSandboxDirectories,
+ sandboxBase,
+ spawn.getMnemonic()) {
@Override
public void createFileSystem() throws IOException {
super.createFileSystem();
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java b/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java
index 071426f..be23fb0 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java
@@ -239,7 +239,10 @@
outputs,
writableDirs,
treeDeleter,
- statisticsPath);
+ statisticsPath,
+ getSandboxOptions().reuseSandboxDirectories,
+ sandboxBase,
+ spawn.getMnemonic());
}
}
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/ProcessWrapperSandboxedSpawnRunner.java b/src/main/java/com/google/devtools/build/lib/sandbox/ProcessWrapperSandboxedSpawnRunner.java
index 356fc67..79b625e 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/ProcessWrapperSandboxedSpawnRunner.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/ProcessWrapperSandboxedSpawnRunner.java
@@ -140,7 +140,10 @@
outputs,
getWritableDirs(sandboxExecRoot, environment),
treeDeleter,
- statisticsPath);
+ statisticsPath,
+ getSandboxOptions().reuseSandboxDirectories,
+ sandboxBase,
+ spawn.getMnemonic());
}
}
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java
index 5cd3080..0c811c5 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java
@@ -14,6 +14,10 @@
package com.google.devtools.build.lib.sandbox;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.devtools.build.lib.vfs.Dirent.Type.DIRECTORY;
+import static com.google.devtools.build.lib.vfs.Dirent.Type.SYMLINK;
+
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
@@ -25,17 +29,22 @@
import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
import com.google.devtools.build.lib.actions.cache.VirtualActionInput.EmptyActionInput;
import com.google.devtools.build.lib.analysis.test.TestConfiguration;
+import com.google.devtools.build.lib.cmdline.LabelConstants;
+import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.FileSystemUtils.MoveResult;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
+import com.google.devtools.build.lib.vfs.Symlinks;
import com.google.devtools.common.options.OptionsParsingResult;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -150,6 +159,150 @@
}
}
+ /**
+ * Cleans the existing sandbox at {@code root} to match the {@code inputs}, updating {@code
+ * inputsToCreate} and {@code dirsToCreate} to not contain already-existing inputs and dirs. Existing
+ * directories or files that are either not needed as {@code inputs} or don't have the right
+ * content or symlink destination are removed.
+ */
+ public static void cleanExisting(
+ Path root,
+ SandboxInputs inputs,
+ Set<PathFragment> inputsToCreate,
+ Set<PathFragment> dirsToCreate,
+ Path workDir)
+ throws IOException {
+ // To avoid excessive scanning of dirsToCreate for prefix dirs, we prepopulate this set of
+ // prefixes.
+ Set<PathFragment> prefixDirs = new HashSet<>();
+ for (PathFragment dir : dirsToCreate) {
+ PathFragment parent = dir.getParentDirectory();
+ while (parent != null && !prefixDirs.contains(parent)) {
+ prefixDirs.add(parent);
+ parent = parent.getParentDirectory();
+ }
+ }
+
+ cleanRecursively(root, inputs, inputsToCreate, dirsToCreate, workDir, prefixDirs);
+ }
+
+ /**
+ * Deletes unnecessary files/directories and updates the sets if something on disk is already
+ * correct and doesn't need any changes.
+ */
+ private static void cleanRecursively(
+ Path root,
+ SandboxInputs inputs,
+ Set<PathFragment> inputsToCreate,
+ Set<PathFragment> dirsToCreate,
+ Path workDir,
+ Set<PathFragment> prefixDirs)
+ throws IOException {
+ Path execroot = workDir.getParentDirectory();
+ for (Dirent dirent : root.readdir(Symlinks.NOFOLLOW)) {
+ Path absPath = root.getChild(dirent.getName());
+ PathFragment pathRelativeToWorkDir;
+ if (absPath.startsWith(workDir)) {
+ // path is under workDir, i.e. execroot/<workspace name>. Simply get the relative path.
+ pathRelativeToWorkDir = absPath.relativeTo(workDir);
+ } else {
+ // path is not under workDir, which means it belongs to one of the external repositories
+ // symlinked directly under execroot. Get the path relative to execroot and prepend the
+ // designated prefix, '../', so that it's still a valid relative path to workDir.
+ pathRelativeToWorkDir =
+ LabelConstants.EXPERIMENTAL_EXTERNAL_PATH_PREFIX.getRelative(
+ absPath.relativeTo(execroot));
+ }
+ Optional<PathFragment> destination =
+ getExpectedSymlinkDestination(pathRelativeToWorkDir, inputs);
+ if (destination.isPresent()) {
+ if (SYMLINK.equals(dirent.getType())
+ && absPath.readSymbolicLink().equals(destination.get())) {
+ inputsToCreate.remove(pathRelativeToWorkDir);
+ } else {
+ absPath.delete();
+ }
+ } else if (DIRECTORY.equals(dirent.getType())) {
+ if (dirsToCreate.contains(pathRelativeToWorkDir)
+ || prefixDirs.contains(pathRelativeToWorkDir)) {
+ cleanRecursively(absPath, inputs, inputsToCreate, dirsToCreate, workDir, prefixDirs);
+ dirsToCreate.remove(pathRelativeToWorkDir);
+ } else {
+ absPath.deleteTree();
+ }
+ } else if (!inputsToCreate.contains(pathRelativeToWorkDir)) {
+ absPath.delete();
+ }
+ }
+ }
+
+ /**
+ * Returns what the destination of the symlink {@code fragment} should be, according to {@code
+ * inputs}.
+ */
+ static Optional<PathFragment> getExpectedSymlinkDestination(
+ PathFragment fragment, SandboxInputs inputs) {
+ Path file = inputs.getFiles().get(fragment);
+ if (file != null) {
+ return Optional.of(file.asFragment());
+ }
+ return Optional.ofNullable(inputs.getSymlinks().get(fragment));
+ }
+
+ /** Populates the provided sets with the inputs and directories that need to be created. */
+ public static void populateInputsAndDirsToCreate(
+ SandboxInputs inputs,
+ Set<PathFragment> workerFiles,
+ SandboxOutputs outputs,
+ Set<PathFragment> writableDirs,
+ Set<PathFragment> inputsToCreate,
+ LinkedHashSet<PathFragment> dirsToCreate) {
+ // Add all worker files, input files, and their parent directories.
+ for (PathFragment input :
+ Iterables.concat(workerFiles, inputs.getFiles().keySet(), inputs.getSymlinks().keySet())) {
+ inputsToCreate.add(input);
+ dirsToCreate.add(input.getParentDirectory());
+ }
+
+ // And all parent directories of output files. Note that we don't add the files themselves --
+ // any pre-existing files that have the same path as an output should get deleted.
+ for (PathFragment file : outputs.files()) {
+ dirsToCreate.add(file.getParentDirectory());
+ }
+
+ // Add all output directories.
+ dirsToCreate.addAll(outputs.dirs());
+
+ // Add the directories that should be writable, and thus must exist.
+ dirsToCreate.addAll(writableDirs);
+ }
+
+ /**
+ * Creates a directory and all its ancestors at the given path.
+ *
+ * <p>This method uses (and updates) the set of already known directories in order to minimize
+ * the I/O involved with creating directories. For example, a path of {@code 1/2/3/4} created
+ * after {@code 1/2/3/5} only calls for creating {@code 4}. We can use the set of known
+ * directories to discover that {@code 1/2/3} already exists instead of deferring to the
+ * filesystem for it.
+ */
+ public static void createDirectoryAndParentsInSandboxRoot(
+ Path path, Set<Path> knownDirectories, Path sandboxExecRoot) throws IOException {
+ if (knownDirectories.contains(path)) {
+ return;
+ }
+ createDirectoryAndParentsInSandboxRoot(
+ checkNotNull(
+ path.getParentDirectory(),
+ "Path %s is not under/siblings of sandboxExecRoot: %s",
+ path,
+ sandboxExecRoot),
+ knownDirectories,
+ sandboxExecRoot);
+ path.createDirectory();
+ knownDirectories.add(path);
+ }
+
/** Wrapper class for the inputs of a sandbox. */
public static final class SandboxInputs {
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxOptions.java b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxOptions.java
index 75b937b..1d4248c 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxOptions.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxOptions.java
@@ -361,6 +361,16 @@
+ " instead.")
public boolean legacyLocalFallback;
+ @Option(
+ name = "experimental_reuse_sandbox_directories",
+ defaultValue = "false",
+ documentationCategory = OptionDocumentationCategory.EXECUTION_STRATEGY,
+ effectTags = {OptionEffectTag.EXECUTION},
+ help =
+ "If set to true, directories used by sandboxed non-worker execution may be reused to"
+ + " avoid unnecessary setup costs.")
+ public boolean reuseSandboxDirectories;
+
/** Converter for the number of threads used for asynchronous tree deletion. */
public static final class AsyncTreeDeletesConverter extends ResourceConverter {
public AsyncTreeDeletesConverter() {
diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SymlinkedSandboxedSpawn.java b/src/main/java/com/google/devtools/build/lib/sandbox/SymlinkedSandboxedSpawn.java
index df8fb18..b9304ef 100644
--- a/src/main/java/com/google/devtools/build/lib/sandbox/SymlinkedSandboxedSpawn.java
+++ b/src/main/java/com/google/devtools/build/lib/sandbox/SymlinkedSandboxedSpawn.java
@@ -14,14 +14,23 @@
package com.google.devtools.build.lib.sandbox;
+import static com.google.common.base.Strings.isNullOrEmpty;
+
+import com.google.common.flogger.GoogleLogger;
import com.google.devtools.build.lib.exec.TreeDeleter;
import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxInputs;
import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxOutputs;
import com.google.devtools.build.lib.vfs.Path;
+import com.google.devtools.build.lib.vfs.PathFragment;
+import java.io.FileNotFoundException;
import java.io.IOException;
+import java.util.Collection;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
/**
@@ -29,6 +38,22 @@
* destination.
*/
public class SymlinkedSandboxedSpawn extends AbstractContainerizingSandboxedSpawn {
+ private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
+
+ /** If true, we have already warned about an error causing us to turn off reuse. */
+ private static final AtomicBoolean warnedAboutTurningOffReuse = new AtomicBoolean();
+
+ /** Base for the entire sandbox system, needed for stashing reusable sandboxes. */
+ private final Path sandboxBase;
+
+ /**
+ * Whether to attempt to reuse previously-created sandboxes. Not final because we may turn it off
+ * in case of errors.
+ */
+ private boolean reuseSandboxDirectories;
+
+ /** Mnemonic of the action running in this spawn. */
+ private final String mnemonic;
public SymlinkedSandboxedSpawn(
Path sandboxPath,
@@ -39,7 +64,10 @@
SandboxOutputs outputs,
Set<Path> writableDirs,
TreeDeleter treeDeleter,
- @Nullable Path statisticsPath) {
+ @Nullable Path statisticsPath,
+ boolean reuseSandboxDirectories,
+ Path sandboxBase,
+ String mnemonic) {
super(
sandboxPath,
sandboxExecRoot,
@@ -50,10 +78,167 @@
writableDirs,
treeDeleter,
statisticsPath);
+ this.sandboxBase = sandboxBase;
+ this.reuseSandboxDirectories = reuseSandboxDirectories;
+ this.mnemonic = isNullOrEmpty(mnemonic) ? "_NoMnemonic_" : mnemonic;
+ }
+
+ @Override
+ public void filterInputsAndDirsToCreate(
+ Set<PathFragment> inputsToCreate, LinkedHashSet<PathFragment> dirsToCreate)
+ throws IOException {
+ if (reuseSandboxDirectories && takeStashedSandbox()) {
+ // When reusing an old sandbox, we do a full traversal of the parent directory of
+ // `sandboxExecRoot`. This will use what we computed above, delete anything unnecessary, and
+ // update `inputsToCreate`/`dirsToCreate` if something can be left without changes (e.g., a,
+ // symlink that already points to the right destination). We're traversing from
+ // sandboxExecRoot's parent directory because external repositories can now be symlinked as
+ // siblings of sandboxExecRoot when --experimental_sibling_repository_layout is set.
+ SandboxHelpers.cleanExisting(
+ sandboxExecRoot.getParentDirectory(),
+ inputs,
+ inputsToCreate,
+ dirsToCreate,
+ sandboxExecRoot);
+ }
+ }
+
+ /**
+ * Attempts to take an existing stashed sandbox for reuse. Returns true if it succeeds. On certain
+ * errors we disable sandbox reuse entirely, since it evidently does not work in this setup.
+ */
+ private boolean takeStashedSandbox() {
+ Path sandboxes = getSandboxStashDir();
+ if (sandboxes == null) {
+ return false;
+ }
+ try {
+ Collection<Path> stashes = sandboxes.getDirectoryEntries();
+ // We have to remove the sandbox root to move a stash there, but it is currently empty
+ // and we reinstate it if we don't get a sandbox.
+ sandboxPath.deleteTree();
+ for (Path stash : stashes) {
+ try {
+ stash.renameTo(sandboxPath);
+ return true;
+ } catch (FileNotFoundException e) {
+ // Try the next one, somebody else took this one.
+ } catch (IOException e) {
+ turnOffReuse("Error renaming sandbox stash %s to %s: %s\n", stash, sandboxPath, e);
+ return false;
+ }
+ }
+ } catch (IOException e) {
+ turnOffReuse("Failed to prepare for reusing stashed sandbox for %s: %s", sandboxPath, e);
+ return false;
+ } finally {
+ if (!sandboxPath.exists()) {
+ try {
+ // If we failed somehow, recreate the empty sandbox.
+ sandboxExecRoot.createDirectoryAndParents();
+ } catch (IOException e) {
+ System.err.printf("Failed to re-establish sandbox %s: %s\n", sandboxPath, e);
+ }
+ }
+ }
+ return false;
+ }
+
+ /** An incrementing count of stashes to avoid filename clashes. */
+ static final AtomicInteger stash = new AtomicInteger(0);
+
+ /** Atomically moves the sandboxPath directory aside for later reuse. */
+ private boolean stashSandbox(Path path) {
+ Path sandboxes = getSandboxStashDir();
+ if (sandboxes == null) {
+ return false;
+ }
+ String stashName;
+ synchronized (stash) {
+ stashName = Integer.toString(stash.incrementAndGet());
+ }
+ Path stashPath = sandboxes.getChild(stashName);
+ if (!path.exists()) {
+ return false;
+ }
+ try {
+ path.renameTo(stashPath);
+ } catch (IOException e) {
+ // Since stash names are unique, this IOException indicates some other problem with stashing,
+ // so we turn it off.
+ turnOffReuse("Error stashing sandbox at %s: %s", stashPath, e);
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Returns the sandbox stashing directory appropriate for this spawn. In order to maximize reuse,
+ * we keep stashed sandboxes separated by mnemonic. May return null if there are errors, in which
+ * case sandbox reuse also gets turned off.
+ */
+ private Path getSandboxStashDir() {
+ Path stashDir = sandboxBase.getChild("sandbox_stash");
+ try {
+ stashDir.createDirectory();
+ if (!maybeClearExistingStash(stashDir)) {
+ return null;
+ }
+ } catch (IOException e) {
+ turnOffReuse(
+ "Error creating sandbox stash dir %s, disabling sandbox reuse: %s\n",
+ stashDir, e.getMessage());
+ return null;
+ }
+ Path mnemonicStashDir = stashDir.getChild(mnemonic);
+ try {
+ mnemonicStashDir.createDirectory();
+ return mnemonicStashDir;
+ } catch (IOException e) {
+ turnOffReuse("Error creating mnemonic stash dir %s: %s\n", mnemonicStashDir, e.getMessage());
+ return null;
+ }
+ }
+
+ /**
+ * Clears away any existing stash if this is the first access to the stash in this Blaze server
+ * instance.
+ *
+ * @param stashPath Path of the stashes.
+ * @return True unless there was an error deleting sandbox stashes.
+ */
+ private boolean maybeClearExistingStash(Path stashPath) {
+ synchronized (stash) {
+ if (stash.getAndIncrement() == 0) {
+ try {
+ for (Path directoryEntry : stashPath.getDirectoryEntries()) {
+ directoryEntry.deleteTree();
+ }
+ } catch (IOException e) {
+ turnOffReuse("Unable to clear old sandbox stash %s: %s\n", stashPath, e.getMessage());
+ return false;
+ }
+ }
+ }
+ return true;
}
@Override
protected void copyFile(Path source, Path target) throws IOException {
target.createSymbolicLink(source);
}
+
+ @Override
+ public void delete() {
+ if (!reuseSandboxDirectories || !stashSandbox(sandboxPath)) {
+ super.delete();
+ }
+ }
+
+ private void turnOffReuse(String fmt, Object... args) {
+ reuseSandboxDirectories = false;
+ if (warnedAboutTurningOffReuse.compareAndSet(false, true)) {
+ logger.atWarning().logVarargs("Turning off sandbox reuse: " + fmt, args);
+ }
+ }
}