Move outputDirectoryListings into OutputStore.

RELNOTES: None.
PiperOrigin-RevId: 214538112
diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/ActionMetadataHandler.java b/src/main/java/com/google/devtools/build/lib/skyframe/ActionMetadataHandler.java
index ad49d6c..c41c893 100644
--- a/src/main/java/com/google/devtools/build/lib/skyframe/ActionMetadataHandler.java
+++ b/src/main/java/com/google/devtools/build/lib/skyframe/ActionMetadataHandler.java
@@ -46,8 +46,6 @@
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 import javax.annotation.Nullable;
 
@@ -83,16 +81,6 @@
    */
   private final ActionInputMap inputArtifactData;
 
-  /**
-   * Maps output TreeArtifacts to their contents. These maps are either injected or read
-   * directly from the filesystem.
-   * If the value is null, this means nothing was injected, and the output TreeArtifact
-   * is to have its values read from disk instead.
-   */
-  // TODO(b/115361150): Move this to OutputStore.
-  private final ConcurrentMap<Artifact, Set<TreeFileArtifact>> outputDirectoryListings =
-      new ConcurrentHashMap<>();
-
   /** Outputs that are to be omitted. */
   private final Set<Artifact> omittedOutputs = Sets.newConcurrentHashSet();
 
@@ -253,11 +241,6 @@
         artifact, FileArtifactValue.createProxy(md5Digest.getDigestBytesUnsafe()));
   }
 
-  private Set<TreeFileArtifact> getTreeArtifactContents(Artifact artifact) {
-    Preconditions.checkArgument(artifact.isTreeArtifact(), artifact);
-    return outputDirectoryListings.computeIfAbsent(artifact, unused -> Sets.newConcurrentHashSet());
-  }
-
   private TreeArtifactValue getTreeArtifactValue(SpecialArtifact artifact) throws IOException {
     TreeArtifactValue value = store.getTreeArtifactData(artifact);
     if (value != null) {
@@ -271,7 +254,7 @@
       setTreeReadOnlyAndExecutable(artifact, PathFragment.EMPTY_FRAGMENT);
     }
 
-    Set<TreeFileArtifact> registeredContents = outputDirectoryListings.get(artifact);
+    Set<TreeFileArtifact> registeredContents = store.getTreeArtifactContents(artifact);
     if (registeredContents != null) {
       // Check that our registered outputs matches on-disk outputs. Only perform this check
       // when contents were explicitly registered.
@@ -363,26 +346,24 @@
 
     Set<PathFragment> paths =
         TreeArtifactValue.explodeDirectory(artifactPathResolver.toPath(artifact));
-    // If you're reading tree artifacts from disk while outputDirectoryListings are being injected,
+    // If you're reading tree artifacts from disk while tree artifact contents are being injected,
     // something has gone terribly wrong.
-    Object previousDirectoryListing =
-        outputDirectoryListings.put(artifact, Sets.newConcurrentHashSet());
-    Preconditions.checkState(previousDirectoryListing == null,
-        "Race condition while constructing TreeArtifactValue: %s, %s",
-        artifact, previousDirectoryListing);
+    Object previousContents = store.getTreeArtifactContents(artifact);
+    Preconditions.checkState(
+        previousContents == null,
+        "Race condition while constructing TreeArtifactValue: %s, %s", artifact, previousContents);
     return constructTreeArtifactValue(ActionInputHelper.asTreeFileArtifacts(artifact, paths));
   }
 
   @Override
   public void addExpandedTreeOutput(TreeFileArtifact output) {
     Preconditions.checkState(executionMode.get());
-    Set<TreeFileArtifact> values = getTreeArtifactContents(output.getParent());
-    values.add(output);
+    store.addTreeArtifactContents(output.getParent(), output);
   }
 
   @Override
   public Iterable<TreeFileArtifact> getExpandedOutputs(Artifact artifact) {
-    return ImmutableSet.copyOf(getTreeArtifactContents(artifact));
+    return ImmutableSet.copyOf(store.getOrCreateTreeArtifactContents(artifact));
   }
 
   @Override
@@ -478,7 +459,6 @@
         "Files cannot be injected before action execution: %s", store.injectedFiles());
     Preconditions.checkState(omittedOutputs.isEmpty(),
         "Artifacts cannot be marked omitted before action execution: %s", omittedOutputs);
-    outputDirectoryListings.clear();
     store.clear();
   }
 
diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/OutputStore.java b/src/main/java/com/google/devtools/build/lib/skyframe/OutputStore.java
index 342b564..95c34df 100644
--- a/src/main/java/com/google/devtools/build/lib/skyframe/OutputStore.java
+++ b/src/main/java/com/google/devtools/build/lib/skyframe/OutputStore.java
@@ -13,9 +13,11 @@
 // limitations under the License.
 package com.google.devtools.build.lib.skyframe;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
 import com.google.devtools.build.lib.actions.Artifact;
+import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
 import com.google.devtools.build.lib.actions.ArtifactFileMetadata;
 import com.google.devtools.build.lib.actions.FileArtifactValue;
 import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
@@ -27,12 +29,13 @@
 /**
  * Storage layer for data associated with outputs of an action.
  *
- * <p>Data is mainly stored in three maps, {@link #artifactData}, {@link #treeArtifactData}, and
- * {@link #additionalOutputData}, all of which are keyed on an {@link Artifact}. For each of these
- * maps, this class exposes {@code get}, {@code put}, and {@code getAll} methods.
+ * <p>Data is mainly stored in four maps, {@link #artifactData}, {@link #treeArtifactData}, {@link
+ * #additionalOutputData}, and {@link #treeArtifactContents}, all of which are keyed on an {@link
+ * Artifact}. For each of these maps, this class exposes standard methods such as {@code get},
+ * {@code put}, {@code add}, and {@code getAll}.
  *
- * <p>This implementation aggresively stores all data. Subclasses may override the {@code put}
- * methods to avoid storing unnecessary data.
+ * <p>This implementation aggressively stores all data. Subclasses may override mutating methods to
+ * avoid storing unnecessary data.
  */
 @ThreadSafe
 class OutputStore {
@@ -46,6 +49,9 @@
   private final ConcurrentMap<Artifact, FileArtifactValue> additionalOutputData =
       new ConcurrentHashMap<>();
 
+  private final ConcurrentMap<Artifact, Set<TreeFileArtifact>> treeArtifactContents =
+      new ConcurrentHashMap<>();
+
   private final Set<Artifact> injectedFiles = Sets.newConcurrentHashSet();
 
   @Nullable
@@ -129,6 +135,30 @@
     return ImmutableMap.copyOf(additionalOutputData);
   }
 
+  /**
+   * Returns a set of the given tree artifact's contents.
+   *
+   * <p>If the return value is {@code null}, this means nothing was injected, and the output
+   * TreeArtifact is to have its values read from disk instead.
+   */
+  @Nullable
+  final Set<TreeFileArtifact> getTreeArtifactContents(Artifact artifact) {
+    return treeArtifactContents.get(artifact);
+  }
+
+  /**
+   * Same as {@link #getTreeArtifactContents} except that, if no contents are present for the given
+   * artifact, a new empty set is created and stored instead of returning {@code null}.
+   */
+  Set<TreeFileArtifact> getOrCreateTreeArtifactContents(Artifact artifact) {
+    Preconditions.checkArgument(artifact.isTreeArtifact(), artifact);
+    return treeArtifactContents.computeIfAbsent(artifact, unused -> Sets.newConcurrentHashSet());
+  }
+
+  void addTreeArtifactContents(Artifact artifact, TreeFileArtifact contents) {
+    getOrCreateTreeArtifactContents(artifact).add(contents);
+  }
+
   void injectRemoteFile(Artifact output, byte[] digest, long size, int locationIndex) {
     // TODO(shahan): there are a couple of things that could reduce memory usage
     // 1. We might be able to skip creating an entry in `outputArtifactData` and only create
@@ -159,6 +189,7 @@
     artifactData.clear();
     treeArtifactData.clear();
     additionalOutputData.clear();
+    treeArtifactContents.clear();
     injectedFiles.clear();
   }
 }