Simplify DigestUtils.getDigestOrFail():

Don't validate the result of getFastDigest(); there is no reason to
suspect the filesystem of returning malformed digests.  (And a bug is
far more likely to produce the wrong *digest* than the wrong *type* of
digest.)

Don't check a condition that is statically verifiable from the local
control flow (namely, that key is set).

Don't bother marking globalCache as volatile.  Surely any user of this
code already has a happens-before edge between its call to
configureCache() and any later call to getDigestOrFail().
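
For illustration only, a minimal plain-Java sketch of that reasoning
(hypothetical code, not part of Bazel or of this change; a
ConcurrentHashMap stands in for the Guava cache, and the class name is
invented): a plain, non-volatile static field written before
Thread#start() is visible to the started thread, because Thread.start()
establishes a happens-before edge (JLS 17.4.5).

  import java.util.Map;
  import java.util.concurrent.ConcurrentHashMap;

  public final class HappensBeforeSketch {
    // Plays the role of DigestUtils.globalCache: deliberately not volatile.
    private static Map<String, byte[]> cache = null;

    public static void main(String[] args) throws InterruptedException {
      // Plays the role of configureCache(): runs on the main thread before
      // any worker thread is started.
      cache = new ConcurrentHashMap<>();

      Thread worker =
          new Thread(
              () -> {
                // Plays the role of getDigestOrFail(): the write above
                // happens-before everything in this thread via Thread#start(),
                // so the non-null cache is visible here without volatile.
                cache.put("/some/file", new byte[] {1, 2, 3});
              });
      worker.start();
      worker.join();
      System.out.println("cached entries: " + cache.size());
    }
  }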

RELNOTES: None.
PiperOrigin-RevId: 209984140
diff --git a/src/main/java/com/google/devtools/build/lib/actions/cache/DigestUtils.java b/src/main/java/com/google/devtools/build/lib/actions/cache/DigestUtils.java
index e45995c..38408bc 100644
--- a/src/main/java/com/google/devtools/build/lib/actions/cache/DigestUtils.java
+++ b/src/main/java/com/google/devtools/build/lib/actions/cache/DigestUtils.java
@@ -18,14 +18,12 @@
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheStats;
-import com.google.common.io.BaseEncoding;
 import com.google.common.primitives.Longs;
 import com.google.devtools.build.lib.actions.FileArtifactValue;
 import com.google.devtools.build.lib.clock.BlazeClock;
 import com.google.devtools.build.lib.profiler.Profiler;
 import com.google.devtools.build.lib.profiler.ProfilerTask;
 import com.google.devtools.build.lib.util.Fingerprint;
-import com.google.devtools.build.lib.util.LoggingUtil;
 import com.google.devtools.build.lib.util.VarInt;
 import com.google.devtools.build.lib.vfs.FileStatus;
 import com.google.devtools.build.lib.vfs.Path;
@@ -35,7 +33,6 @@
 import java.nio.ByteBuffer;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
 
 /**
  * Utility class for getting md5 digests of files.
@@ -140,7 +137,7 @@
    * represent the paths as strings, not as {@link Path} instances. As a result, the loading
    * function cannot actually compute the digests of the files so we have to handle this externally.
    */
-  private static volatile Cache<CacheKey, byte[]> globalCache = null;
+  private static Cache<CacheKey, byte[]> globalCache = null;
 
   /** Private constructor to prevent instantiation of utility class. */
   private DigestUtils() {}
@@ -226,30 +223,22 @@
   public static byte[] getDigestOrFail(Path path, long fileSize)
       throws IOException {
     byte[] digest = path.getFastDigest();
-
-    if (digest != null && !path.isValidDigest(digest)) {
-      // Fail-soft in cases where md5bin is non-null, but not a valid digest.
-      String msg = String.format("Malformed digest '%s' for file %s",
-                                 BaseEncoding.base16().lowerCase().encode(digest),
-                                 path);
-      LoggingUtil.logToRemote(Level.SEVERE, msg, new IllegalStateException(msg));
-      digest = null;
-    }
-
-    // At this point, either we could not get a fast digest or the fast digest we got is corrupt.
-    // Attempt a cache lookup if the cache is enabled and return the cached digest if found.
-    Cache<CacheKey, byte[]> cache = globalCache;
-    CacheKey key = null;
-    if (cache != null && digest == null) {
-      key = new CacheKey(path, path.stat());
-      digest = cache.getIfPresent(key);
-    }
     if (digest != null) {
       return digest;
     }
 
-    // All right, we have neither a fast nor a cached digest. Let's go through the costly process of
-    // computing it from the file contents.
+    // Attempt a cache lookup if the cache is enabled.
+    Cache<CacheKey, byte[]> cache = globalCache;
+    CacheKey key = null;
+    if (cache != null) {
+      key = new CacheKey(path, path.stat());
+      digest = cache.getIfPresent(key);
+      if (digest != null) {
+        return digest;
+      }
+    }
+
+    // Compute digest from the file contents.
     if (fileSize > MULTI_THREADED_DIGEST_MAX_FILE_SIZE && !MULTI_THREADED_DIGEST.get()) {
       // We'll have to read file content in order to calculate the digest.
       // We avoid overlapping this process for multiple large files, as
@@ -260,16 +249,8 @@
       digest = getDigestInternal(path);
     }
 
-    Preconditions.checkNotNull(
-        digest,
-        "We should have gotten a digest for %s at this point but we still don't have one",
-        path);
+    Preconditions.checkNotNull(digest);
     if (cache != null) {
-      Preconditions.checkNotNull(
-          key,
-          "We should have computed a cache key earlier for %s because the cache is enabled and we"
-              + " did not get a fast digest for this file, but we don't have a key here",
-          path);
       cache.put(key, digest);
     }
     return digest;
diff --git a/src/test/java/com/google/devtools/build/lib/actions/DigestUtilsTest.java b/src/test/java/com/google/devtools/build/lib/actions/DigestUtilsTest.java
index fc6376e..512483c 100644
--- a/src/test/java/com/google/devtools/build/lib/actions/DigestUtilsTest.java
+++ b/src/test/java/com/google/devtools/build/lib/actions/DigestUtilsTest.java
@@ -14,7 +14,6 @@
 package com.google.devtools.build.lib.actions;
 
 import static com.google.common.truth.Truth.assertThat;
-import static org.junit.Assert.fail;
 
 import com.google.common.base.Strings;
 import com.google.common.cache.CacheStats;
@@ -130,56 +129,6 @@
     }
   }
 
-  public void assertRecoverFromMalformedDigest(DigestHashFunction... hashFunctions)
-      throws Exception {
-    for (DigestHashFunction hf : hashFunctions) {
-      final byte[] malformed = {0, 0, 0};
-      FileSystem myFS =
-          new InMemoryFileSystem(BlazeClock.instance(), hf) {
-            @Override
-            protected byte[] getFastDigest(Path path) throws IOException {
-              // Digest functions have more than 3 bytes, usually at least 16.
-              return malformed;
-            }
-          };
-      Path path = myFS.getPath("/file");
-      FileSystemUtils.writeContentAsLatin1(path, "a");
-      byte[] result = DigestUtils.getDigestOrFail(path, 1);
-      assertThat(result).isEqualTo(path.getDigest());
-      assertThat(result).isNotSameAs(malformed);
-      assertThat(path.isValidDigest(result)).isTrue();
-    }
-  }
-
-  @Test
-  public void testRecoverFromMalformedDigestWithoutCache() throws Exception {
-    try {
-      DigestUtils.getCacheStats();
-      fail("Digests cache should remain disabled until configureCache is called");
-    } catch (NullPointerException expected) {
-    }
-    assertRecoverFromMalformedDigest(DigestHashFunction.MD5, DigestHashFunction.SHA1);
-    try {
-      DigestUtils.getCacheStats();
-      fail("Digests cache was unexpectedly enabled through the test");
-    } catch (NullPointerException expected) {
-    }
-  }
-
-  @Test
-  public void testRecoverFromMalformedDigestWithCache() throws Exception {
-    DigestUtils.configureCache(10);
-    assertThat(DigestUtils.getCacheStats()).isNotNull(); // Ensure the cache is enabled.
-
-    // When using the cache, we cannot run our test using different hash functions because the
-    // hash function is not part of the cache key. This is intentional: the hash function is
-    // essentially final and can only be changed for tests. Therefore, just test the same hash
-    // function twice to further exercise the cache code.
-    assertRecoverFromMalformedDigest(DigestHashFunction.MD5, DigestHashFunction.MD5);
-
-    assertThat(DigestUtils.getCacheStats()).isNotNull(); // Ensure the cache remains enabled.
-  }
-
   /** Helper class to assert the cache statistics. */
   private static class CacheStatsChecker {
     /** Cache statistics, grabbed at construction time. */