Add call graph fixtures for various languages and scenarios
- Introduced `all-edge-reasons.json` to exercise edge resolution reasons in .NET.
- Added `all-visibility-levels.json` to validate method visibility levels in .NET.
- Created `dotnet-aspnetcore-minimal.json` for a minimal ASP.NET Core application.
- Included `go-gin-api.json` for a Go Gin API application structure.
- Added `java-spring-boot.json` for the Spring PetClinic application in Java.
- Introduced `legacy-no-schema.json` for a legacy application structure without a schema.
- Created `node-express-api.json` for an Express.js API application structure.
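For orientation, the following is a purely illustrative sketch of the kind of call-graph fixture these files contain, using the .NET edge-reason case as an example. Every field name below (`schemaVersion`, `nodes`, `edges`, `visibility`, `reason`) is an assumption for illustration, not the actual fixture schema.

```json
{
  "schemaVersion": "1.0",
  "language": "dotnet",
  "nodes": [
    { "id": "App.Program::Main", "visibility": "public" },
    { "id": "App.Services.Greeter::Hello", "visibility": "internal" }
  ],
  "edges": [
    { "from": "App.Program::Main", "to": "App.Services.Greeter::Hello", "reason": "direct-call" }
  ]
}
```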
@@ -0,0 +1,163 @@
using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Determinism.Calculators;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.Determinism;

public sealed class BitwiseFidelityCalculatorTests
{
    private readonly BitwiseFidelityCalculator _calculator = new();

    [Fact]
    public void Calculate_WithEmptyReplays_ReturnsFullScore()
    {
        var baseline = new Dictionary<string, string>
        {
            ["file1.json"] = "hash1",
            ["file2.json"] = "hash2"
        };
        var replays = Array.Empty<IReadOnlyDictionary<string, string>>();

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(1.0, score);
        Assert.Equal(0, identicalCount);
        Assert.Empty(mismatches);
    }

    [Fact]
    public void Calculate_WithIdenticalReplays_ReturnsFullScore()
    {
        var baseline = new Dictionary<string, string>
        {
            ["sbom.json"] = "sha256:abc",
            ["findings.ndjson"] = "sha256:def"
        };
        var replays = new List<IReadOnlyDictionary<string, string>>
        {
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sha256:abc",
                ["findings.ndjson"] = "sha256:def"
            },
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sha256:abc",
                ["findings.ndjson"] = "sha256:def"
            }
        };

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(1.0, score);
        Assert.Equal(2, identicalCount);
        Assert.Empty(mismatches);
    }

    [Fact]
    public void Calculate_WithPartialMismatch_ReturnsPartialScore()
    {
        var baseline = new Dictionary<string, string>
        {
            ["sbom.json"] = "sha256:abc",
            ["findings.ndjson"] = "sha256:def"
        };
        var replays = new List<IReadOnlyDictionary<string, string>>
        {
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sha256:abc",
                ["findings.ndjson"] = "sha256:def"
            },
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sha256:abc",
                ["findings.ndjson"] = "sha256:DIFFERENT" // Mismatch
            },
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sha256:abc",
                ["findings.ndjson"] = "sha256:def"
            }
        };

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(2.0 / 3, score, precision: 4);
        Assert.Equal(2, identicalCount);
        Assert.Single(mismatches);
        Assert.Equal(1, mismatches[0].RunIndex);
        Assert.Equal(FidelityMismatchType.BitwiseOnly, mismatches[0].Type);
        Assert.Contains("findings.ndjson", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithMissingArtifact_DetectsMismatch()
    {
        var baseline = new Dictionary<string, string>
        {
            ["file1.json"] = "hash1",
            ["file2.json"] = "hash2"
        };
        var replays = new List<IReadOnlyDictionary<string, string>>
        {
            new Dictionary<string, string>
            {
                ["file1.json"] = "hash1"
                // file2.json missing
            }
        };

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Equal(0, identicalCount);
        Assert.Single(mismatches);
        Assert.Contains("file2.json", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithExtraArtifact_DetectsMismatch()
    {
        var baseline = new Dictionary<string, string>
        {
            ["file1.json"] = "hash1"
        };
        var replays = new List<IReadOnlyDictionary<string, string>>
        {
            new Dictionary<string, string>
            {
                ["file1.json"] = "hash1",
                ["extra.json"] = "extra_hash" // Extra artifact
            }
        };

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Single(mismatches);
        Assert.Contains("extra.json", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_IsCaseInsensitiveForHashes()
    {
        var baseline = new Dictionary<string, string>
        {
            ["file.json"] = "SHA256:ABCDEF"
        };
        var replays = new List<IReadOnlyDictionary<string, string>>
        {
            new Dictionary<string, string>
            {
                ["file.json"] = "sha256:abcdef" // Different case
            }
        };

        var (score, identicalCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(1.0, score);
        Assert.Equal(1, identicalCount);
        Assert.Empty(mismatches);
    }
}
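These assertions pin down the bitwise fidelity contract: an empty replay set scores 1.0, the score is the fraction of replays whose artifact hashes match the baseline exactly, hash comparison ignores case, and a missing or extra artifact marks a run as mismatched. Below is a minimal sketch of a calculator that would satisfy these tests; the `FidelityMismatch` and `FidelityMismatchType` shapes and the whole implementation are inferred from the assertions, not taken from the actual StellaOps.Scanner.Worker.Determinism sources.

```csharp
using System;
using System.Collections.Generic;

// Hypothetical stand-ins for the types the tests reference; the real definitions may differ.
// Only BitwiseOnly is asserted above; Semantic is assumed for the semantic calculator.
public enum FidelityMismatchType { BitwiseOnly, Semantic }

public sealed record FidelityMismatch(
    int RunIndex,
    FidelityMismatchType Type,
    IReadOnlyList<string>? AffectedArtifacts);

public sealed class BitwiseFidelityCalculatorSketch
{
    public (double Score, int IdenticalCount, IReadOnlyList<FidelityMismatch> Mismatches) Calculate(
        IReadOnlyDictionary<string, string> baseline,
        IReadOnlyList<IReadOnlyDictionary<string, string>> replays)
    {
        // With no replays there is nothing to contradict the baseline: full score.
        if (replays.Count == 0)
        {
            return (1.0, 0, Array.Empty<FidelityMismatch>());
        }

        var identical = 0;
        var mismatches = new List<FidelityMismatch>();

        for (var runIndex = 0; runIndex < replays.Count; runIndex++)
        {
            var replay = replays[runIndex];
            var affected = new List<string>();

            // Baseline artifacts missing from the replay or with a different hash
            // (hash comparison is case-insensitive, per the last test above).
            foreach (var (artifact, hash) in baseline)
            {
                if (!replay.TryGetValue(artifact, out var replayHash) ||
                    !string.Equals(hash, replayHash, StringComparison.OrdinalIgnoreCase))
                {
                    affected.Add(artifact);
                }
            }

            // Artifacts the replay produced that the baseline does not contain.
            foreach (var artifact in replay.Keys)
            {
                if (!baseline.ContainsKey(artifact))
                {
                    affected.Add(artifact);
                }
            }

            if (affected.Count == 0)
            {
                identical++;
            }
            else
            {
                mismatches.Add(new FidelityMismatch(runIndex, FidelityMismatchType.BitwiseOnly, affected));
            }
        }

        // Score is the fraction of replays that reproduced the baseline bit-for-bit.
        return ((double)identical / replays.Count, identical, mismatches);
    }
}
```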
@@ -0,0 +1,174 @@
using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Determinism.Calculators;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.Determinism;

public sealed class SemanticFidelityCalculatorTests
{
    private readonly SemanticFidelityCalculator _calculator = new();

    [Fact]
    public void Calculate_WithEmptyReplays_ReturnsFullScore()
    {
        var baseline = CreateBaseline();
        var replays = Array.Empty<NormalizedFindings>();

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(1.0, score);
        Assert.Equal(0, matchCount);
        Assert.Empty(mismatches);
    }

    [Fact]
    public void Calculate_WithIdenticalFindings_ReturnsFullScore()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            CreateBaseline(),
            CreateBaseline()
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(1.0, score);
        Assert.Equal(2, matchCount);
        Assert.Empty(mismatches);
    }

    [Fact]
    public void Calculate_WithDifferentPackages_DetectsMismatch()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            new NormalizedFindings
            {
                Packages = new List<NormalizedPackage>
                {
                    new("pkg:npm/lodash@4.17.21", "4.17.21"),
                    new("pkg:npm/extra@1.0.0", "1.0.0") // Extra package
                },
                Cves = new HashSet<string> { "CVE-2021-23337" },
                SeverityCounts = new Dictionary<string, int> { ["HIGH"] = 1 },
                Verdicts = new Dictionary<string, string> { ["overall"] = "fail" }
            }
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Equal(0, matchCount);
        Assert.Single(mismatches);
        Assert.Contains("packages", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithDifferentCves_DetectsMismatch()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            new NormalizedFindings
            {
                Packages = new List<NormalizedPackage>
                {
                    new("pkg:npm/lodash@4.17.21", "4.17.21")
                },
                Cves = new HashSet<string> { "CVE-2021-23337", "CVE-2022-12345" }, // Extra CVE
                SeverityCounts = new Dictionary<string, int> { ["HIGH"] = 1 },
                Verdicts = new Dictionary<string, string> { ["overall"] = "fail" }
            }
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Contains("cves", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithDifferentSeverities_DetectsMismatch()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            new NormalizedFindings
            {
                Packages = new List<NormalizedPackage>
                {
                    new("pkg:npm/lodash@4.17.21", "4.17.21")
                },
                Cves = new HashSet<string> { "CVE-2021-23337" },
                SeverityCounts = new Dictionary<string, int> { ["CRITICAL"] = 1 }, // Different severity
                Verdicts = new Dictionary<string, string> { ["overall"] = "fail" }
            }
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Contains("severities", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithDifferentVerdicts_DetectsMismatch()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            new NormalizedFindings
            {
                Packages = new List<NormalizedPackage>
                {
                    new("pkg:npm/lodash@4.17.21", "4.17.21")
                },
                Cves = new HashSet<string> { "CVE-2021-23337" },
                SeverityCounts = new Dictionary<string, int> { ["HIGH"] = 1 },
                Verdicts = new Dictionary<string, string> { ["overall"] = "pass" } // Different verdict
            }
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(0.0, score);
        Assert.Contains("verdicts", mismatches[0].AffectedArtifacts!);
    }

    [Fact]
    public void Calculate_WithPartialMatches_ReturnsCorrectScore()
    {
        var baseline = CreateBaseline();
        var replays = new List<NormalizedFindings>
        {
            CreateBaseline(), // Match
            new NormalizedFindings // Mismatch
            {
                Packages = new List<NormalizedPackage>(),
                Cves = new HashSet<string>(),
                SeverityCounts = new Dictionary<string, int>(),
                Verdicts = new Dictionary<string, string>()
            },
            CreateBaseline() // Match
        };

        var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);

        Assert.Equal(2.0 / 3, score, precision: 4);
        Assert.Equal(2, matchCount);
        Assert.Single(mismatches);
    }

    private static NormalizedFindings CreateBaseline() => new()
    {
        Packages = new List<NormalizedPackage>
        {
            new("pkg:npm/lodash@4.17.21", "4.17.21")
        },
        Cves = new HashSet<string> { "CVE-2021-23337" },
        SeverityCounts = new Dictionary<string, int> { ["HIGH"] = 1 },
        Verdicts = new Dictionary<string, string> { ["overall"] = "fail" }
    };
}
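The semantic calculator works on normalised findings rather than raw artifact hashes: packages, CVE sets, severity counts, and verdicts are compared section by section, and the score is again the fraction of replays that match the baseline. A sketch under the same caveat follows; `NormalizedPackage` and `NormalizedFindings` are hypothetical shapes reconstructed from the test setup, and the `FidelityMismatch` types are reused from the previous sketch, so none of this should be read as the actual implementation.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical shapes for the normalised findings the tests construct; the real records may carry more fields.
public sealed record NormalizedPackage(string Purl, string Version);

public sealed class NormalizedFindings
{
    public IReadOnlyList<NormalizedPackage> Packages { get; init; } = Array.Empty<NormalizedPackage>();
    public IReadOnlySet<string> Cves { get; init; } = new HashSet<string>();
    public IReadOnlyDictionary<string, int> SeverityCounts { get; init; } = new Dictionary<string, int>();
    public IReadOnlyDictionary<string, string> Verdicts { get; init; } = new Dictionary<string, string>();
}

public sealed class SemanticFidelityCalculatorSketch
{
    // Reuses the hypothetical FidelityMismatch/FidelityMismatchType from the earlier sketch.
    public (double Score, int MatchCount, IReadOnlyList<FidelityMismatch> Mismatches) Calculate(
        NormalizedFindings baseline,
        IReadOnlyList<NormalizedFindings> replays)
    {
        if (replays.Count == 0)
        {
            return (1.0, 0, Array.Empty<FidelityMismatch>());
        }

        var matchCount = 0;
        var mismatches = new List<FidelityMismatch>();

        for (var runIndex = 0; runIndex < replays.Count; runIndex++)
        {
            var replay = replays[runIndex];

            // Collect the names of the normalised sections that diverge; the labels
            // mirror the ones asserted in the tests above.
            var affected = new List<string>();

            if (!baseline.Packages.OrderBy(p => p.Purl, StringComparer.Ordinal)
                    .SequenceEqual(replay.Packages.OrderBy(p => p.Purl, StringComparer.Ordinal)))
            {
                affected.Add("packages");
            }

            if (!baseline.Cves.SetEquals(replay.Cves))
            {
                affected.Add("cves");
            }

            if (!DictionariesEqual(baseline.SeverityCounts, replay.SeverityCounts))
            {
                affected.Add("severities");
            }

            if (!DictionariesEqual(baseline.Verdicts, replay.Verdicts))
            {
                affected.Add("verdicts");
            }

            if (affected.Count == 0)
            {
                matchCount++;
            }
            else
            {
                mismatches.Add(new FidelityMismatch(runIndex, FidelityMismatchType.Semantic, affected));
            }
        }

        return ((double)matchCount / replays.Count, matchCount, mismatches);
    }

    private static bool DictionariesEqual<TValue>(
        IReadOnlyDictionary<string, TValue> left,
        IReadOnlyDictionary<string, TValue> right)
        => left.Count == right.Count
           && left.All(pair => right.TryGetValue(pair.Key, out var value)
                               && EqualityComparer<TValue>.Default.Equals(pair.Value, value));
}
```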