Mirror of https://github.com/bkaradzic/bgfx.git, synced 2026-02-20 22:03:12 +01:00
Updated spirv-tools.
3rdparty/spirv-tools/README.md (vendored, 2 changed lines)
@@ -480,7 +480,7 @@ assembly and binary files with suffix `.spvasm` and `.spv`, respectively.

The assembler reads the assembly language text, and emits the binary form.

The standalone assembler is the exectuable called `spirv-as`, and is located in
The standalone assembler is the executable called `spirv-as`, and is located in
`<spirv-build-dir>/tools/spirv-as`. The functionality of the assembler is implemented
by the `spvTextToBinary` library function.
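As background on the `spvTextToBinary` function named in the README excerpt above: it is part of the public C API declared in `spirv-tools/libspirv.h`. Below is a minimal sketch of driving it directly; the `SPV_ENV_UNIVERSAL_1_5` target environment and the two-instruction assembly string are illustrative assumptions, not taken from this commit.

#include <cstring>
#include <iostream>

#include "spirv-tools/libspirv.h"

int main() {
  // A tiny SPIR-V module in assembly form (illustrative only).
  const char* text =
      "OpCapability Shader\n"
      "OpMemoryModel Logical GLSL450\n";

  spv_context context = spvContextCreate(SPV_ENV_UNIVERSAL_1_5);
  spv_binary binary = nullptr;
  spv_diagnostic diagnostic = nullptr;

  // Assemble the text into a binary module, which is what `spirv-as` does.
  spv_result_t result =
      spvTextToBinary(context, text, strlen(text), &binary, &diagnostic);
  if (result == SPV_SUCCESS) {
    std::cout << "Assembled " << binary->wordCount << " words\n";
  } else if (diagnostic != nullptr) {
    spvDiagnosticPrint(diagnostic);
  }

  spvDiagnosticDestroy(diagnostic);
  spvBinaryDestroy(binary);
  spvContextDestroy(context);
  return 0;
}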
@@ -1 +1 @@
"v2020.3-dev", "SPIRV-Tools v2020.3-dev 0778398f58d93ce8a861aeeb7a6a60324383525e"
"v2020.3-dev", "SPIRV-Tools v2020.3-dev ad8e2138c256f6fb0b38e883fe1ae0e2a59c7620"
@@ -245,17 +245,17 @@ static const spv_operand_desc_t pygen_variable_KernelProfilingInfoEntries[] = {
};

static const spv_operand_desc_t pygen_variable_RayFlagsEntries[] = {
{"NoneKHR", 0x0000, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"OpaqueKHR", 0x0001, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"NoOpaqueKHR", 0x0002, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"TerminateOnFirstHitKHR", 0x0004, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"SkipClosestHitShaderKHR", 0x0008, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"CullBackFacingTrianglesKHR", 0x0010, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"CullFrontFacingTrianglesKHR", 0x0020, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"CullOpaqueKHR", 0x0040, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"CullNoOpaqueKHR", 0x0080, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"SkipTrianglesKHR", 0x0100, 1, pygen_variable_caps_RayTraversalPrimitiveCullingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"SkipAABBsKHR", 0x0200, 1, pygen_variable_caps_RayTraversalPrimitiveCullingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu}
{"NoneKHR", 0x0000, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"OpaqueKHR", 0x0001, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"NoOpaqueKHR", 0x0002, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"TerminateOnFirstHitKHR", 0x0004, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"SkipClosestHitShaderKHR", 0x0008, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"CullBackFacingTrianglesKHR", 0x0010, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"CullFrontFacingTrianglesKHR", 0x0020, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"CullOpaqueKHR", 0x0040, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"CullNoOpaqueKHR", 0x0080, 2, pygen_variable_caps_RayQueryProvisionalKHRRayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"SkipTrianglesKHR", 0x0100, 1, pygen_variable_caps_RayTraversalPrimitiveCullingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"SkipAABBsKHR", 0x0200, 1, pygen_variable_caps_RayTraversalPrimitiveCullingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu}
};

static const spv_operand_desc_t pygen_variable_SourceLanguageEntries[] = {

@@ -734,7 +734,7 @@ static const spv_operand_desc_t pygen_variable_ScopeEntries[] = {
{"Invocation", 4, 0, nullptr, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"QueueFamily", 5, 1, pygen_variable_caps_VulkanMemoryModel, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1,5), 0xffffffffu},
{"QueueFamilyKHR", 5, 1, pygen_variable_caps_VulkanMemoryModel, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1,5), 0xffffffffu},
{"ShaderCallKHR", 6, 1, pygen_variable_caps_RayTracingProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu}
{"ShaderCallKHR", 6, 1, pygen_variable_caps_RayTracingProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu}
};

static const spv_operand_desc_t pygen_variable_GroupOperationEntries[] = {

@@ -919,19 +919,19 @@ static const spv_operand_desc_t pygen_variable_CapabilityEntries[] = {
};

static const spv_operand_desc_t pygen_variable_RayQueryIntersectionEntries[] = {
{"RayQueryCandidateIntersectionKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"RayQueryCommittedIntersectionKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu}
{"RayQueryCandidateIntersectionKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"RayQueryCommittedIntersectionKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu}
};

static const spv_operand_desc_t pygen_variable_RayQueryCommittedIntersectionTypeEntries[] = {
{"RayQueryCommittedIntersectionNoneKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"RayQueryCommittedIntersectionTriangleKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"RayQueryCommittedIntersectionGeneratedKHR", 2, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu}
{"RayQueryCommittedIntersectionNoneKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"RayQueryCommittedIntersectionTriangleKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"RayQueryCommittedIntersectionGeneratedKHR", 2, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu}
};

static const spv_operand_desc_t pygen_variable_RayQueryCandidateIntersectionTypeEntries[] = {
{"RayQueryCandidateIntersectionTriangleKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu},
{"RayQueryCandidateIntersectionAABBKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, SPV_SPIRV_VERSION_WORD(1, 0), 0xffffffffu}
{"RayQueryCandidateIntersectionTriangleKHR", 0, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu},
{"RayQueryCandidateIntersectionAABBKHR", 1, 1, pygen_variable_caps_RayQueryProvisionalKHR, 0, nullptr, {}, 0xffffffffu, 0xffffffffu}
};

static const spv_operand_desc_t pygen_variable_DebugInfoFlagsEntries[] = {
@@ -100,6 +100,7 @@ if(SPIRV_BUILD_FUZZER)
transformation_add_type_vector.h
transformation_composite_construct.h
transformation_composite_extract.h
transformation_compute_data_synonym_fact_closure.h
transformation_context.h
transformation_copy_object.h
transformation_equation_instruction.h

@@ -193,6 +194,7 @@ if(SPIRV_BUILD_FUZZER)
transformation_add_type_vector.cpp
transformation_composite_construct.cpp
transformation_composite_extract.cpp
transformation_compute_data_synonym_fact_closure.cpp
transformation_context.cpp
transformation_copy_object.cpp
transformation_equation_instruction.cpp
3rdparty/spirv-tools/source/fuzz/fact_manager.cpp (vendored, 137 changed lines)
@@ -416,17 +416,18 @@ class FactManager::DataSynonymAndIdEquationFacts {
// See method in FactManager which delegates to this method.
std::vector<const protobufs::DataDescriptor*> GetSynonymsForDataDescriptor(
const protobufs::DataDescriptor& data_descriptor,
opt::IRContext* context) const;
const protobufs::DataDescriptor& data_descriptor) const;

// See method in FactManager which delegates to this method.
std::vector<uint32_t> GetIdsForWhichSynonymsAreKnown(
opt::IRContext* context) const;
std::vector<uint32_t> GetIdsForWhichSynonymsAreKnown() const;

// See method in FactManager which delegates to this method.
bool IsSynonymous(const protobufs::DataDescriptor& data_descriptor1,
const protobufs::DataDescriptor& data_descriptor2,
opt::IRContext* context) const;
const protobufs::DataDescriptor& data_descriptor2) const;

// See method in FactManager which delegates to this method.
void ComputeClosureOfFacts(opt::IRContext* context,
uint32_t maximum_equivalence_class_size);

private:
// Adds the synonym |dd1| = |dd2| to the set of managed facts, and recurses

@@ -436,23 +437,10 @@ class FactManager::DataSynonymAndIdEquationFacts {
const protobufs::DataDescriptor& dd2,
opt::IRContext* context);

// Inspects all known facts and adds corollary facts; e.g. if we know that
// a.x == b.x and a.y == b.y, where a and b have vec2 type, we can record
// that a == b holds.
//
// This method is expensive, and is thus called on demand: rather than
// computing the closure of facts each time a data synonym fact is added, we
// compute the closure only when a data synonym fact is *queried*.
void ComputeClosureOfFacts(opt::IRContext* context) const;

// Records the fact that |dd1| and |dd2| are equivalent, and merges the sets
// of equations that are known about them.
//
// This is a const method, despite the fact that it mutates the (mutable)
// set of facts about data descriptors because it is invoked in a lazy fashion
// when querying facts.
void MakeEquivalent(const protobufs::DataDescriptor& dd1,
const protobufs::DataDescriptor& dd2) const;
const protobufs::DataDescriptor& dd2);

// Returns true if and only if |dd1| and |dd2| are valid data descriptors
// whose associated data have the same type (modulo integer signedness).

@@ -473,28 +461,17 @@ class FactManager::DataSynonymAndIdEquationFacts {
// The data descriptors that are known to be synonymous with one another are
// captured by this equivalence relation.
//
// This member is mutable in order to allow the closure of facts captured by
// the relation to be computed lazily when a question about data synonym
// facts is asked.
mutable EquivalenceRelation<protobufs::DataDescriptor, DataDescriptorHash,
DataDescriptorEquals>
EquivalenceRelation<protobufs::DataDescriptor, DataDescriptorHash,
DataDescriptorEquals>
synonymous_;

// When a new synonym fact is added, it may be possible to deduce further
// synonym facts by computing a closure of all known facts. However, there is
// no point computing this closure until a question regarding synonym facts is
// actually asked: if several facts are added in succession with no questions
// asked in between, we can avoid computing fact closures multiple times.
//
// This boolean tracks whether a closure computation is required - i.e.,
// whether a new fact has been added since the last time such a computation
// was performed.
//
// It is mutable to facilitate having const methods, that provide answers to
// questions about data synonym facts, triggering closure computation on
// demand.
mutable bool closure_computation_required_ = false;
// synonym facts by computing a closure of all known facts. However, this is
// an expensive operation, so it should be performed sparingly and only there
// is some chance of new facts being deduced. This boolean tracks whether a
// closure computation is required - i.e., whether a new fact has been added
// since the last time such a computation was performed.
bool closure_computation_required_ = false;

// Represents a set of equations on data descriptors as a map indexed by
// left-hand-side, mapping a left-hand-side to a set of operations, each of

@@ -503,12 +480,7 @@ class FactManager::DataSynonymAndIdEquationFacts {
// All data descriptors occurring in equations are required to be present in
// the |synonymous_| equivalence relation, and to be their own representatives
// in that relation.
//
// It is mutable because a closure computation can be triggered from a const
// method, and when a closure computation detects that two data descriptors
// are equivalent it is necessary to merge the equation facts for those data
// descriptors.
mutable std::unordered_map<
std::unordered_map<
const protobufs::DataDescriptor*,
std::unordered_set<Operation, OperationHash, OperationEquals>>
id_equations_;

@@ -770,7 +742,17 @@ void FactManager::DataSynonymAndIdEquationFacts::AddDataSynonymFactRecursive(
// obj_1[a_1, ..., a_m] == obj_2[b_1, ..., b_n]
// then for each composite index i, we add a fact of the form:
// obj_1[a_1, ..., a_m, i] == obj_2[b_1, ..., b_n, i]
for (uint32_t i = 0; i < num_composite_elements; i++) {
//
// However, to avoid adding a large number of synonym facts e.g. in the case
// of arrays, we bound the number of composite elements to which this is
// applied. Nevertheless, we always add a synonym fact for the final
// components, as this may be an interesting edge case.

// The bound on the number of indices of the composite pair to note as being
// synonymous.
const uint32_t kCompositeElementBound = 10;

for (uint32_t i = 0; i < num_composite_elements;) {
std::vector<uint32_t> extended_indices1 =
fuzzerutil::RepeatedFieldToVector(dd1.index());
extended_indices1.push_back(i);

@@ -781,11 +763,21 @@ void FactManager::DataSynonymAndIdEquationFacts::AddDataSynonymFactRecursive(
MakeDataDescriptor(dd1.object(), std::move(extended_indices1)),
MakeDataDescriptor(dd2.object(), std::move(extended_indices2)),
context);

if (i < kCompositeElementBound - 1 || i == num_composite_elements - 1) {
// We have not reached the bound yet, or have already skipped ahead to the
// last element, so increment the loop counter as standard.
i++;
} else {
// We have reached the bound, so skip ahead to the last element.
assert(i == kCompositeElementBound - 1);
i = num_composite_elements - 1;
}
}
}

void FactManager::DataSynonymAndIdEquationFacts::ComputeClosureOfFacts(
opt::IRContext* context) const {
opt::IRContext* context, uint32_t maximum_equivalence_class_size) {
// Suppose that obj_1[a_1, ..., a_m] and obj_2[b_1, ..., b_n] are distinct
// data descriptors that describe objects of the same composite type, and that
// the composite type is comprised of k components.
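The loop shown above visits the first kCompositeElementBound component indices and then jumps straight to the final one. The following standalone illustration is not library code; it only mirrors that control flow to show which indices end up receiving synonym facts (for a 100-element composite: 0 through 9, then 99).

#include <cstdint>
#include <iostream>
#include <vector>

// Mirrors the bounded iteration in AddDataSynonymFactRecursive: the real code
// adds a synonym fact where this sketch merely records the index.
std::vector<uint32_t> VisitedIndices(uint32_t num_composite_elements) {
  const uint32_t kCompositeElementBound = 10;
  std::vector<uint32_t> visited;
  for (uint32_t i = 0; i < num_composite_elements;) {
    visited.push_back(i);
    if (i < kCompositeElementBound - 1 || i == num_composite_elements - 1) {
      // Below the bound, or already at the last element: step normally.
      i++;
    } else {
      // Bound reached: skip ahead to the final element.
      i = num_composite_elements - 1;
    }
  }
  return visited;
}

int main() {
  for (uint32_t index : VisitedIndices(100)) {
    std::cout << index << " ";  // Prints 0 1 2 3 4 5 6 7 8 9 99
  }
  std::cout << "\n";
  return 0;
}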
@@ -871,6 +863,13 @@ void FactManager::DataSynonymAndIdEquationFacts::ComputeClosureOfFacts(
synonymous_.GetEquivalenceClassRepresentatives()) {
auto equivalence_class = synonymous_.GetEquivalenceClass(*representative);

if (equivalence_class.size() > maximum_equivalence_class_size) {
// This equivalence class is larger than the maximum size we are willing
// to consider, so we skip it. This potentially leads to missed fact
// deductions, but avoids excessive runtime for closure computation.
continue;
}

// Consider every data descriptor in the equivalence class.
for (auto dd1_it = equivalence_class.begin();
dd1_it != equivalence_class.end(); ++dd1_it) {

@@ -1059,7 +1058,7 @@ void FactManager::DataSynonymAndIdEquationFacts::ComputeClosureOfFacts(
void FactManager::DataSynonymAndIdEquationFacts::MakeEquivalent(
const protobufs::DataDescriptor& dd1,
const protobufs::DataDescriptor& dd2) const {
const protobufs::DataDescriptor& dd2) {
// Register the data descriptors if they are not already known to the
// equivalence relation.
for (const auto& dd : {dd1, dd2}) {

@@ -1185,9 +1184,7 @@ bool FactManager::DataSynonymAndIdEquationFacts::
std::vector<const protobufs::DataDescriptor*>
FactManager::DataSynonymAndIdEquationFacts::GetSynonymsForDataDescriptor(
const protobufs::DataDescriptor& data_descriptor,
opt::IRContext* context) const {
ComputeClosureOfFacts(context);
const protobufs::DataDescriptor& data_descriptor) const {
if (synonymous_.Exists(data_descriptor)) {
return synonymous_.GetEquivalenceClass(data_descriptor);
}

@@ -1195,9 +1192,8 @@ FactManager::DataSynonymAndIdEquationFacts::GetSynonymsForDataDescriptor(
}

std::vector<uint32_t>
FactManager::DataSynonymAndIdEquationFacts ::GetIdsForWhichSynonymsAreKnown(
opt::IRContext* context) const {
ComputeClosureOfFacts(context);
FactManager::DataSynonymAndIdEquationFacts::GetIdsForWhichSynonymsAreKnown()
const {
std::vector<uint32_t> result;
for (auto& data_descriptor : synonymous_.GetAllKnownValues()) {
if (data_descriptor->index().empty()) {

@@ -1209,10 +1205,7 @@ FactManager::DataSynonymAndIdEquationFacts ::GetIdsForWhichSynonymsAreKnown(
bool FactManager::DataSynonymAndIdEquationFacts::IsSynonymous(
const protobufs::DataDescriptor& data_descriptor1,
const protobufs::DataDescriptor& data_descriptor2,
opt::IRContext* context) const {
const_cast<FactManager::DataSynonymAndIdEquationFacts*>(this)
->ComputeClosureOfFacts(context);
const protobufs::DataDescriptor& data_descriptor2) const {
return synonymous_.Exists(data_descriptor1) &&
synonymous_.Exists(data_descriptor2) &&
synonymous_.IsEquivalent(data_descriptor1, data_descriptor2);

@@ -1394,31 +1387,27 @@ FactManager::GetConstantUniformFactsAndTypes() const {
return uniform_constant_facts_->GetConstantUniformFactsAndTypes();
}

std::vector<uint32_t> FactManager::GetIdsForWhichSynonymsAreKnown(
opt::IRContext* context) const {
return data_synonym_and_id_equation_facts_->GetIdsForWhichSynonymsAreKnown(
context);
std::vector<uint32_t> FactManager::GetIdsForWhichSynonymsAreKnown() const {
return data_synonym_and_id_equation_facts_->GetIdsForWhichSynonymsAreKnown();
}

std::vector<const protobufs::DataDescriptor*>
FactManager::GetSynonymsForDataDescriptor(
const protobufs::DataDescriptor& data_descriptor,
opt::IRContext* context) const {
const protobufs::DataDescriptor& data_descriptor) const {
return data_synonym_and_id_equation_facts_->GetSynonymsForDataDescriptor(
data_descriptor, context);
data_descriptor);
}

std::vector<const protobufs::DataDescriptor*> FactManager::GetSynonymsForId(
uint32_t id, opt::IRContext* context) const {
return GetSynonymsForDataDescriptor(MakeDataDescriptor(id, {}), context);
uint32_t id) const {
return GetSynonymsForDataDescriptor(MakeDataDescriptor(id, {}));
}

bool FactManager::IsSynonymous(
const protobufs::DataDescriptor& data_descriptor1,
const protobufs::DataDescriptor& data_descriptor2,
opt::IRContext* context) const {
return data_synonym_and_id_equation_facts_->IsSynonymous(
data_descriptor1, data_descriptor2, context);
const protobufs::DataDescriptor& data_descriptor2) const {
return data_synonym_and_id_equation_facts_->IsSynonymous(data_descriptor1,
data_descriptor2);
}

bool FactManager::BlockIsDead(uint32_t block_id) const {

@@ -1463,5 +1452,11 @@ void FactManager::AddFactIdEquation(uint32_t lhs_id, SpvOp opcode,
data_synonym_and_id_equation_facts_->AddFact(fact, context);
}

void FactManager::ComputeClosureOfFacts(
opt::IRContext* ir_context, uint32_t maximum_equivalence_class_size) {
data_synonym_and_id_equation_facts_->ComputeClosureOfFacts(
ir_context, maximum_equivalence_class_size);
}

} // namespace fuzz
} // namespace spvtools
3rdparty/spirv-tools/source/fuzz/fact_manager.h (vendored, 26 changed lines)
@@ -76,6 +76,21 @@ class FactManager {
const std::vector<uint32_t>& rhs_id,
opt::IRContext* context);

// Inspects all known facts and adds corollary facts; e.g. if we know that
// a.x == b.x and a.y == b.y, where a and b have vec2 type, we can record
// that a == b holds.
//
// This method is expensive, and should only be called (by applying a
// transformation) at the start of a fuzzer pass that depends on data
// synonym facts, rather than calling it every time a new data synonym fact
// is added.
//
// The parameter |maximum_equivalence_class_size| specifies the size beyond
// which equivalence classes should not be mined for new facts, to avoid
// excessively-long closure computations.
void ComputeClosureOfFacts(opt::IRContext* ir_context,
uint32_t maximum_equivalence_class_size);

// The fact manager is responsible for managing a few distinct categories of
// facts. In principle there could be different fact managers for each kind
// of fact, but in practice providing one 'go to' place for facts is

@@ -125,25 +140,22 @@ class FactManager {
// Returns every id for which a fact of the form "this id is synonymous with
// this piece of data" is known.
std::vector<uint32_t> GetIdsForWhichSynonymsAreKnown(
opt::IRContext* context) const;
std::vector<uint32_t> GetIdsForWhichSynonymsAreKnown() const;

// Returns the equivalence class of all known synonyms of |id|, or an empty
// set if no synonyms are known.
std::vector<const protobufs::DataDescriptor*> GetSynonymsForId(
uint32_t id, opt::IRContext* context) const;
uint32_t id) const;

// Returns the equivalence class of all known synonyms of |data_descriptor|,
// or empty if no synonyms are known.
std::vector<const protobufs::DataDescriptor*> GetSynonymsForDataDescriptor(
const protobufs::DataDescriptor& data_descriptor,
opt::IRContext* context) const;
const protobufs::DataDescriptor& data_descriptor) const;

// Returns true if and ony if |data_descriptor1| and |data_descriptor2| are
// known to be synonymous.
bool IsSynonymous(const protobufs::DataDescriptor& data_descriptor1,
const protobufs::DataDescriptor& data_descriptor2,
opt::IRContext* context) const;
const protobufs::DataDescriptor& data_descriptor2) const;

// End of id synonym facts
//==============================
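With the reshaped interface above, a caller is expected to compute the closure explicitly and may then issue queries that no longer need an IRContext. A hedged sketch of such a caller follows; the helper name ReportKnownSynonyms is hypothetical, and the bound of 1000 simply mirrors the default that FuzzerContext adopts in this commit.

#include <iostream>

#include "source/fuzz/fact_manager.h"
#include "source/opt/ir_context.h"

namespace example {

// Hypothetical helper: deduce corollary facts once, then query synonym facts.
void ReportKnownSynonyms(spvtools::fuzz::FactManager* fact_manager,
                         spvtools::opt::IRContext* ir_context) {
  fact_manager->ComputeClosureOfFacts(ir_context, 1000);

  for (uint32_t id : fact_manager->GetIdsForWhichSynonymsAreKnown()) {
    std::cout << "id " << id << " has "
              << fact_manager->GetSynonymsForId(id).size()
              << " known synonyms\n";
  }
}

}  // namespace example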
@@ -68,6 +68,7 @@ const std::pair<uint32_t, uint32_t> kChanceOfTogglingAccessChainInstruction = {
// Default limits for various quantities that are chosen during fuzzing.
// Keep them in alphabetical order.
const uint32_t kDefaultMaxEquivalenceClassSizeForDataSynonymFactClosure = 1000;
const uint32_t kDefaultMaxLoopControlPartialCount = 100;
const uint32_t kDefaultMaxLoopControlPeelCount = 100;
const uint32_t kDefaultMaxLoopLimit = 20;

@@ -89,6 +90,8 @@ FuzzerContext::FuzzerContext(RandomGenerator* random_generator,
uint32_t min_fresh_id)
: random_generator_(random_generator),
next_fresh_id_(min_fresh_id),
max_equivalence_class_size_for_data_synonym_fact_closure_(
kDefaultMaxEquivalenceClassSizeForDataSynonymFactClosure),
max_loop_control_partial_count_(kDefaultMaxLoopControlPartialCount),
max_loop_control_peel_count_(kDefaultMaxLoopControlPeelCount),
max_loop_limit_(kDefaultMaxLoopLimit),

@@ -183,12 +183,21 @@ class FuzzerContext {
uint32_t GetChanceOfTogglingAccessChainInstruction() {
return chance_of_toggling_access_chain_instruction_;
}
uint32_t GetRandomLoopControlPeelCount() {
return random_generator_->RandomUint32(max_loop_control_peel_count_);

// Other functions to control transformations. Keep them in alphabetical
// order.
uint32_t GetMaximumEquivalenceClassSizeForDataSynonymFactClosure() {
return max_equivalence_class_size_for_data_synonym_fact_closure_;
}
uint32_t GetRandomIndexForAccessChain(uint32_t composite_size_bound) {
return random_generator_->RandomUint32(composite_size_bound);
}
uint32_t GetRandomLoopControlPartialCount() {
return random_generator_->RandomUint32(max_loop_control_partial_count_);
}
uint32_t GetRandomLoopControlPeelCount() {
return random_generator_->RandomUint32(max_loop_control_peel_count_);
}
uint32_t GetRandomLoopLimit() {
return random_generator_->RandomUint32(max_loop_limit_);
}

@@ -196,12 +205,6 @@ class FuzzerContext {
// Ensure that the array size is non-zero.
return random_generator_->RandomUint32(max_new_array_size_limit_ - 1) + 1;
}

// Other functions to control transformations. Keep them in alphabetical
// order.
uint32_t GetRandomIndexForAccessChain(uint32_t composite_size_bound) {
return random_generator_->RandomUint32(composite_size_bound);
}
bool GoDeeperInConstantObfuscation(uint32_t depth) {
return go_deeper_in_constant_obfuscation_(depth, random_generator_);
}

@@ -251,6 +254,7 @@ class FuzzerContext {
// Limits associated with various quantities for which random values are
// chosen during fuzzing.
// Keep them in alphabetical order.
uint32_t max_equivalence_class_size_for_data_synonym_fact_closure_;
uint32_t max_loop_control_partial_count_;
uint32_t max_loop_control_peel_count_;
uint32_t max_loop_limit_;
@@ -19,6 +19,7 @@
#include "source/fuzz/id_use_descriptor.h"
#include "source/fuzz/instruction_descriptor.h"
#include "source/fuzz/transformation_composite_extract.h"
#include "source/fuzz/transformation_compute_data_synonym_fact_closure.h"
#include "source/fuzz/transformation_replace_id_with_synonym.h"

namespace spvtools {

@@ -34,10 +35,15 @@ FuzzerPassApplyIdSynonyms::FuzzerPassApplyIdSynonyms(
FuzzerPassApplyIdSynonyms::~FuzzerPassApplyIdSynonyms() = default;

void FuzzerPassApplyIdSynonyms::Apply() {
for (auto id_with_known_synonyms :
GetTransformationContext()
->GetFactManager()
->GetIdsForWhichSynonymsAreKnown(GetIRContext())) {
// Compute a closure of data synonym facts, to enrich the pool of synonyms
// that are available.
ApplyTransformation(TransformationComputeDataSynonymFactClosure(
GetFuzzerContext()
->GetMaximumEquivalenceClassSizeForDataSynonymFactClosure()));

for (auto id_with_known_synonyms : GetTransformationContext()
->GetFactManager()
->GetIdsForWhichSynonymsAreKnown()) {
// Gather up all uses of |id_with_known_synonym| as a regular id, and
// subsequently iterate over these uses. We use this separation because,
// when considering a given use, we might apply a transformation that will

@@ -79,7 +85,7 @@ void FuzzerPassApplyIdSynonyms::Apply() {
std::vector<const protobufs::DataDescriptor*> synonyms_to_try;
for (auto& data_descriptor :
GetTransformationContext()->GetFactManager()->GetSynonymsForId(
id_with_known_synonyms, GetIRContext())) {
id_with_known_synonyms)) {
protobufs::DataDescriptor descriptor_for_this_id =
MakeDataDescriptor(id_with_known_synonyms, {});
if (DataDescriptorEquals()(data_descriptor, &descriptor_for_this_id)) {
@@ -373,6 +373,7 @@ message Transformation {
TransformationPermuteFunctionParameters permute_function_parameters = 42;
TransformationToggleAccessChainInstruction toggle_access_chain_instruction = 43;
TransformationAddConstantNull add_constant_null = 44;
TransformationComputeDataSynonymFactClosure compute_data_synonym_fact_closure = 45;
// Add additional option using the next available number.
}
}

@@ -781,6 +782,19 @@ message TransformationCompositeExtract {
}

message TransformationComputeDataSynonymFactClosure {

// A transformation that impacts the fact manager only, forcing a computation
// of the closure of data synonym facts, so that e.g. if the components of
// vectors v and w are known to be pairwise synonymous, it is deduced that v
// and w are themselves synonymous.

// When searching equivalence classes for implied facts, equivalence classes
// larger than this size will be skipped.
uint32 maximum_equivalence_class_size = 1;

}

message TransformationCopyObject {

// A transformation that introduces an OpCopyObject instruction to make a
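Populating the new message by hand follows the usual generated-protobuf pattern; a small sketch, where the helper name MakeClosureMessage is hypothetical:

#include <cstdint>

#include "source/fuzz/protobufs/spirvfuzz_protobufs.h"

// Builds a Transformation message whose oneof is set to the new
// compute_data_synonym_fact_closure alternative.
spvtools::fuzz::protobufs::Transformation MakeClosureMessage(uint32_t bound) {
  spvtools::fuzz::protobufs::Transformation result;
  result.mutable_compute_data_synonym_fact_closure()
      ->set_maximum_equivalence_class_size(bound);
  return result;
}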
@@ -41,6 +41,7 @@
#include "source/fuzz/transformation_add_type_vector.h"
#include "source/fuzz/transformation_composite_construct.h"
#include "source/fuzz/transformation_composite_extract.h"
#include "source/fuzz/transformation_compute_data_synonym_fact_closure.h"
#include "source/fuzz/transformation_copy_object.h"
#include "source/fuzz/transformation_equation_instruction.h"
#include "source/fuzz/transformation_function_call.h"

@@ -134,6 +135,10 @@ std::unique_ptr<Transformation> Transformation::FromMessage(
case protobufs::Transformation::TransformationCase::kCompositeExtract:
return MakeUnique<TransformationCompositeExtract>(
message.composite_extract());
case protobufs::Transformation::TransformationCase::
kComputeDataSynonymFactClosure:
return MakeUnique<TransformationComputeDataSynonymFactClosure>(
message.compute_data_synonym_fact_closure());
case protobufs::Transformation::TransformationCase::kCopyObject:
return MakeUnique<TransformationCopyObject>(message.copy_object());
case protobufs::Transformation::TransformationCase::kEquationInstruction:
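With the dispatch case above in place, the transformation round-trips through its protobuf form like the others. A sketch of that round trip; the bound of 50 is arbitrary and the main wrapper is only for illustration.

#include <cassert>
#include <memory>

#include "source/fuzz/transformation.h"
#include "source/fuzz/transformation_compute_data_synonym_fact_closure.h"

int main() {
  using namespace spvtools::fuzz;

  // Serialize the transformation to its protobuf form...
  TransformationComputeDataSynonymFactClosure transformation(50);
  protobufs::Transformation message = transformation.ToMessage();
  assert(message.compute_data_synonym_fact_closure()
             .maximum_equivalence_class_size() == 50);

  // ...and rebuild it through the dispatch added in this commit.
  std::unique_ptr<Transformation> round_tripped =
      Transformation::FromMessage(message);
  assert(round_tripped != nullptr);
  return 0;
}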
3rdparty/spirv-tools/source/fuzz/transformation_compute_data_synonym_fact_closure.cpp (vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
// Copyright (c) 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/fuzz/transformation_compute_data_synonym_fact_closure.h"

namespace spvtools {
namespace fuzz {

TransformationComputeDataSynonymFactClosure::
TransformationComputeDataSynonymFactClosure(
const spvtools::fuzz::protobufs::
TransformationComputeDataSynonymFactClosure& message)
: message_(message) {}

TransformationComputeDataSynonymFactClosure::
TransformationComputeDataSynonymFactClosure(
uint32_t maximum_equivalence_class_size) {
message_.set_maximum_equivalence_class_size(maximum_equivalence_class_size);
}

bool TransformationComputeDataSynonymFactClosure::IsApplicable(
opt::IRContext* /*unused*/, const TransformationContext& /*unused*/) const {
return true;
}

void TransformationComputeDataSynonymFactClosure::Apply(
opt::IRContext* ir_context,
TransformationContext* transformation_context) const {
transformation_context->GetFactManager()->ComputeClosureOfFacts(
ir_context, message_.maximum_equivalence_class_size());
}

protobufs::Transformation
TransformationComputeDataSynonymFactClosure::ToMessage() const {
protobufs::Transformation result;
*result.mutable_compute_data_synonym_fact_closure() = message_;
return result;
}

} // namespace fuzz
} // namespace spvtools
3rdparty/spirv-tools/source/fuzz/transformation_compute_data_synonym_fact_closure.h (vendored, new file, 53 lines)
@@ -0,0 +1,53 @@
// Copyright (c) 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef SOURCE_FUZZ_TRANSFORMATION_COMPUTE_DATA_SYNONYM_FACT_CLOSURE_H_
#define SOURCE_FUZZ_TRANSFORMATION_COMPUTE_DATA_SYNONYM_FACT_CLOSURE_H_

#include "source/fuzz/protobufs/spirvfuzz_protobufs.h"
#include "source/fuzz/transformation.h"
#include "source/fuzz/transformation_context.h"
#include "source/opt/ir_context.h"

namespace spvtools {
namespace fuzz {

class TransformationComputeDataSynonymFactClosure : public Transformation {
public:
explicit TransformationComputeDataSynonymFactClosure(
const protobufs::TransformationComputeDataSynonymFactClosure& message);

explicit TransformationComputeDataSynonymFactClosure(
uint32_t maximum_equivalence_class_size);

// This transformation is trivially applicable.
bool IsApplicable(
opt::IRContext* ir_context,
const TransformationContext& transformation_context) const override;

// Forces the fact manager to compute a closure of data synonym facts, so that
// facts implied by existing facts are deduced.
void Apply(opt::IRContext* ir_context,
TransformationContext* transformation_context) const override;

protobufs::Transformation ToMessage() const override;

private:
protobufs::TransformationComputeDataSynonymFactClosure message_;
};

} // namespace fuzz
} // namespace spvtools

#endif // SOURCE_FUZZ_TRANSFORMATION_COMPUTE_DATA_SYNONYM_FACT_CLOSURE_H_
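Used outside of a fuzzer pass, the new transformation follows the usual IsApplicable/Apply protocol declared above. A hedged sketch; the helper name ForceSynonymFactClosure and the bound of 100 are illustrative, and the caller is assumed to already own the IR context and transformation context.

#include "source/fuzz/transformation_compute_data_synonym_fact_closure.h"
#include "source/fuzz/transformation_context.h"
#include "source/opt/ir_context.h"

namespace example {

// Hypothetical helper: force a closure computation over the fact manager
// owned by |transformation_context|.
void ForceSynonymFactClosure(
    spvtools::opt::IRContext* ir_context,
    spvtools::fuzz::TransformationContext* transformation_context) {
  // Equivalence classes larger than 100 elements are skipped (arbitrary bound).
  spvtools::fuzz::TransformationComputeDataSynonymFactClosure transformation(
      100);
  if (transformation.IsApplicable(ir_context, *transformation_context)) {
    // Always true for this transformation, but kept to mirror how fuzzer
    // passes apply transformations.
    transformation.Apply(ir_context, transformation_context);
  }
}

}  // namespace example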
@@ -46,7 +46,7 @@ bool TransformationReplaceIdWithSynonym::IsApplicable(
MakeDataDescriptor(message_.synonymous_id(), {});
if (!transformation_context.GetFactManager()->IsSynonymous(
MakeDataDescriptor(id_of_interest, {}),
data_descriptor_for_synonymous_id, ir_context)) {
data_descriptor_for_synonymous_id)) {
return false;
}