// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.rules.cpp;

import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.ArtifactResolver;
import com.google.devtools.build.lib.cmdline.LabelConstants;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

/**
* HeaderDiscovery checks whether all header files that a compile action uses are actually declared
* as inputs.
*
* <p>Tree artifacts: a tree artifact with path P causes any header file prefixed by P to be
* accepted. If a used header file is a direct child of a tree artifact (the common case), the
* check is a single hash lookup of its parent directory; otherwise a linear search over the tree
* artifacts is performed.
*/
final class HeaderDiscovery {

/** Indicates whether a compile should perform dotd pruning. */
public enum DotdPruningMode {
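/** Prune the action's inputs using the .d file produced by the compile. */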
USE,
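/** Do not perform dotd pruning. */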
DO_NOT_USE
}

private HeaderDiscovery() {}

/**
* Returns a collection with additional input artifacts relevant to the action by reading the
* dynamically-discovered dependency information from the parsed dependency set after the action
* has run.
*
* <p>Artifacts are considered inputs but not "mandatory" inputs.
*
* @throws ActionExecutionException iff the .d file is missing (when required), malformed, or has
* unresolvable included artifacts.
*/
static NestedSet<Artifact> discoverInputsFromDependencies(
Action action,
Artifact sourceFile,
boolean shouldValidateInclusions,
Collection<Path> dependencies,
List<Path> permittedSystemIncludePrefixes,
NestedSet<Artifact> allowedDerivedInputs,
Path execRoot,
ArtifactResolver artifactResolver,
boolean siblingRepositoryLayout)
throws ActionExecutionException {
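// Index the allowed derived inputs by exec path so that discovered dependency paths can be
// matched with hash lookups.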
Map<PathFragment, Artifact> regularDerivedArtifacts = new HashMap<>();
Map<PathFragment, SpecialArtifact> treeArtifacts = new HashMap<>();
for (Artifact a : allowedDerivedInputs.toList()) {
if (a.isSourceArtifact()) {
continue;
}
// We may encounter duplicate keys in the derived inputs if two artifacts have different
// owners. Just use the first one. The two artifacts must be generated by equivalent
// (shareable) actions in order to have not generated a conflict in Bazel. If on an
// incremental build one changes without the other one changing, then if their paths remain
// the same, that will trigger an action conflict and fail the build. If one path changes,
// then this action will be re-analyzed and will execute in Skyframe. It can legitimately get an
// action cache hit in that case: even if the action previously depended on the artifact whose
// path changed, the action cache does not take that into account, and the remaining un-renamed
// artifact still yields the cache hit.
if (a.isTreeArtifact()) {
treeArtifacts.putIfAbsent(a.getExecPath(), (SpecialArtifact) a);
} else {
regularDerivedArtifacts.putIfAbsent(a.getExecPath(), a);
}
}
return runDiscovery(
action,
sourceFile,
shouldValidateInclusions,
dependencies,
permittedSystemIncludePrefixes,
regularDerivedArtifacts,
treeArtifacts,
execRoot,
artifactResolver,
siblingRepositoryLayout);
}
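
/**
 * Maps the parsed dependency paths to artifacts and collects them as discovered inputs, recording
 * absolute and unresolvable paths as problems and failing the action for them when inclusion
 * validation is enabled.
 */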
private static NestedSet<Artifact> runDiscovery(
Action action,
Artifact sourceFile,
boolean shouldValidateInclusions,
Collection<Path> dependencies,
List<Path> permittedSystemIncludePrefixes,
Map<PathFragment, Artifact> regularDerivedArtifacts,
Map<PathFragment, SpecialArtifact> treeArtifacts,
Path execRoot,
ArtifactResolver artifactResolver,
boolean siblingRepositoryLayout)
throws ActionExecutionException {
NestedSetBuilder<Artifact> inputs = NestedSetBuilder.stableOrder();
// Check inclusions.
IncludeProblems absolutePathProblems = new IncludeProblems();
IncludeProblems unresolvablePathProblems = new IncludeProblems();
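// Map each discovered dependency path to an artifact, first normalizing absolute paths against
// the exec root.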
for (Path execPath : dependencies) {
PathFragment execPathFragment = execPath.asFragment();
if (execPathFragment.isAbsolute()) {
// Absolute includes from system paths are ignored.
if (FileSystemUtils.startsWithAny(execPath, permittedSystemIncludePrefixes)) {
continue;
}
// Since gcc is given only relative paths on the command line, non-builtin include paths
// here should never be absolute. If they are, it's probably due to a non-hermetic #include,
// and we should stop the build with an error.
if (execPath.startsWith(execRoot)) {
execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path
} else if (siblingRepositoryLayout && execPath.startsWith(execRoot.getParentDirectory())) {
// for --experimental_sibling_repository_layout
execPathFragment =
LabelConstants.EXPERIMENTAL_EXTERNAL_PATH_PREFIX.getRelative(
execPath.relativeTo(execRoot.getParentDirectory()));
} else {
absolutePathProblems.add(execPathFragment.getPathString());
continue;
}
}
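// Prefer a declared derived artifact at this exec path; otherwise try to resolve the path as a
// source artifact in the repository it maps to.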
Artifact artifact = regularDerivedArtifacts.get(execPathFragment);
if (artifact == null) {
RepositoryName repository =
PackageIdentifier.discoverFromExecPath(execPathFragment, false, siblingRepositoryLayout)
.getRepository();
artifact = artifactResolver.resolveSourceArtifact(execPathFragment, repository);
}
if (artifact != null) {
// We don't need to add the sourceFile itself as it is a mandatory input.
if (!artifact.equals(sourceFile)) {
inputs.add(artifact);
}
continue;
}
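// Neither a derived nor a resolvable source artifact; check whether the path falls under a
// declared tree artifact.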
SpecialArtifact treeArtifact = findOwningTreeArtifact(execPathFragment, treeArtifacts);
if (treeArtifact != null) {
inputs.add(treeArtifact);
} else {
// Record a problem if we see files that we can't resolve, likely caused by undeclared
// includes or illegal include constructs.
unresolvablePathProblems.add(execPathFragment.getPathString());
}
}
if (shouldValidateInclusions) {
absolutePathProblems.assertProblemFree(
"absolute path inclusion(s) found in rule '"
+ action.getOwner().getLabel()
+ "':\n"
+ "the source file '"
+ sourceFile.prettyPrint()
+ "' includes the following non-builtin files with absolute paths "
+ "(if these are builtin files, make sure these paths are in your toolchain):",
action);
unresolvablePathProblems.assertProblemFree(
"undeclared inclusion(s) in rule '"
+ action.getOwner().getLabel()
+ "':\n"
+ "this rule is missing dependency declarations for the following files "
+ "included by '"
+ sourceFile.prettyPrint()
+ "':",
action);
}
return inputs.build();
}
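
/**
 * Returns a tree artifact from {@code treeArtifacts} whose exec path contains {@code execPath},
 * or {@code null} if there is none.
 */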
@Nullable
private static SpecialArtifact findOwningTreeArtifact(
PathFragment execPath, Map<PathFragment, SpecialArtifact> treeArtifacts) {
// Check the map for the exec path's parent directory first. If the exec path matches a direct
// child of a tree artifact (a common case), we can skip the full iteration below.
PathFragment dir = execPath.getParentDirectory();
SpecialArtifact tree = treeArtifacts.get(dir);
if (tree != null) {
return tree;
}
// Search for any tree artifact that encloses the exec path.
return treeArtifacts.values().stream()
.filter(a -> dir.startsWith(a.getExecPath()))
.findAny()
.orElse(null);
}
}