Add comprehensive security tests for OWASP A02, A05, A07, and A08 categories
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
- Implemented tests for Cryptographic Failures (A02): proper handling of sensitive data, secure algorithms, and key management.
- Added tests for Security Misconfiguration (A05): production configurations, security headers, CORS settings, and feature management.
- Developed tests for Authentication Failures (A07): strong password policies, rate limiting, session management, and MFA support.
- Created tests for Software and Data Integrity Failures (A08): artifact signatures, SBOM integrity, attestation chains, and feed updates.
This commit is contained in:
@@ -185,10 +185,4 @@ public enum VexFormat
|
||||
Unknown
|
||||
}
|
||||
|
||||
/// <summary>
/// Source precedence ranking for evidence documents.
/// Lower numeric values appear to indicate higher-priority sources
/// (Vendor=1 ... Unknown=99) — confirm against SourcePrecedenceLattice.cs,
/// which the note below identifies as the canonical definition site.
/// </summary>
public enum SourcePrecedence
{
    /// <summary>Statement issued by the product vendor.</summary>
    Vendor = 1,

    /// <summary>Statement issued by the package maintainer.</summary>
    Maintainer = 2,

    /// <summary>Statement issued by an unaffiliated third party.</summary>
    ThirdParty = 3,

    /// <summary>Source could not be classified.</summary>
    Unknown = 99
}
|
||||
// Note: SourcePrecedence is defined in SourcePrecedenceLattice.cs
|
||||
|
||||
@@ -0,0 +1,326 @@
|
||||
// =============================================================================
|
||||
// AttestationCollector.cs
|
||||
// Attestation evidence collector for reconciliation workflow
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// Integrated with DsseVerifier (Task T7)
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Collects attestation evidence from an evidence directory and populates the artifact index.
|
||||
/// Integrates with DsseVerifier for signature validation.
|
||||
/// </summary>
|
||||
/// <summary>
/// Collects attestation evidence from an evidence directory and populates the artifact index.
/// Integrates with DsseVerifier for signature validation when trust roots are configured.
/// </summary>
public sealed class AttestationCollector
{
    private readonly IAttestationParser _parser;
    private readonly DsseVerifier? _dsseVerifier;
    private readonly ILogger<AttestationCollector> _logger;

    /// <summary>
    /// Creates a collector. All dependencies are optional: the parser defaults to
    /// <see cref="DsseAttestationParser"/> and the logger to a no-op instance.
    /// When <paramref name="dsseVerifier"/> is null, signatures are never verified.
    /// </summary>
    public AttestationCollector(
        IAttestationParser? parser = null,
        DsseVerifier? dsseVerifier = null,
        ILogger<AttestationCollector>? logger = null)
    {
        _parser = parser ?? new DsseAttestationParser();
        _dsseVerifier = dsseVerifier;
        _logger = logger ?? NullLogger<AttestationCollector>.Instance;
    }

    /// <summary>
    /// Collects attestation evidence from the attestations directory.
    /// </summary>
    /// <param name="attestationsDirectory">Path to the attestations directory.</param>
    /// <param name="index">Artifact index to populate.</param>
    /// <param name="options">Collection options; defaults to <see cref="AttestationCollectionOptions.Default"/>.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Collection result with statistics.</returns>
    /// <exception cref="OperationCanceledException">When cancellation is requested.</exception>
    public async Task<AttestationCollectionResult> CollectAsync(
        string attestationsDirectory,
        ArtifactIndex index,
        AttestationCollectionOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(attestationsDirectory);
        ArgumentNullException.ThrowIfNull(index);

        options ??= AttestationCollectionOptions.Default;
        var result = new AttestationCollectionResult();

        if (!Directory.Exists(attestationsDirectory))
        {
            // A missing directory is not an error: there is simply no evidence to collect.
            _logger.LogDebug("Attestation directory does not exist: {Directory}", attestationsDirectory);
            return result;
        }

        // Find all potential attestation files, ordered deterministically by
        // normalized relative path so repeated runs produce identical results.
        var files = Directory.EnumerateFiles(attestationsDirectory, "*.*", SearchOption.AllDirectories)
            .Where(_parser.IsAttestation)
            .OrderBy(f => NormalizeRelativePath(Path.GetRelativePath(attestationsDirectory, f)), StringComparer.Ordinal)
            .ToList();

        _logger.LogDebug("Found {Count} potential attestation files in {Directory}", files.Count, attestationsDirectory);

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                await ProcessAttestationFileAsync(file, attestationsDirectory, index, options, result, cancellationToken);
            }
            catch (OperationCanceledException)
            {
                // FIX: cancellation raised inside per-file processing must propagate.
                // Previously it fell into the general handler below and was recorded
                // as a per-file failure, silently completing a cancelled run.
                throw;
            }
            catch (Exception ex)
            {
                // One bad file must not abort the whole collection run.
                _logger.LogWarning(ex, "Failed to process attestation file: {File}", file);
                result.FailedFiles.Add((file, ex.Message));
            }
        }

        return result;
    }

    /// <summary>
    /// Parses a single attestation file, verifies its DSSE signature when possible,
    /// indexes each subject, and records statistics on <paramref name="result"/>.
    /// </summary>
    private async Task ProcessAttestationFileAsync(
        string filePath,
        string baseDirectory,
        ArtifactIndex index,
        AttestationCollectionOptions options,
        AttestationCollectionResult result,
        CancellationToken cancellationToken)
    {
        // Compute content hash for the attestation file itself.
        var contentHash = await ComputeFileHashAsync(filePath, cancellationToken);
        var relativePath = NormalizeRelativePath(Path.GetRelativePath(baseDirectory, filePath));

        // Parse the attestation (DSSE envelope + in-toto statement).
        var parseResult = await _parser.ParseAsync(filePath, cancellationToken);

        if (!parseResult.IsSuccess)
        {
            _logger.LogWarning("Failed to parse attestation {File}: {Error}", filePath, parseResult.ErrorMessage);
            result.FailedFiles.Add((filePath, parseResult.ErrorMessage ?? "Unknown error"));
            return;
        }

        result.ParsedFiles++;

        var statement = parseResult.Statement!;
        var envelope = parseResult.Envelope!;

        // Track how many attestations of each predicate type we have seen.
        if (!result.PredicateTypeCounts.TryGetValue(statement.PredicateType, out var count))
        {
            count = 0;
        }
        result.PredicateTypeCounts[statement.PredicateType] = count + 1;

        // Verify signature using DsseVerifier (T7 integration).
        // NOTE(review): tlogVerified/rekorUuid are never set to non-default values
        // here (Rekor verification appears not yet wired in), and
        // options.VerifySignatures is not consulted — verification runs whenever
        // trust roots and a verifier are available. Confirm that is intended.
        bool signatureVerified = false;
        bool tlogVerified = false;
        string? rekorUuid = null;

        if (options.TrustRoots is not null && _dsseVerifier is not null)
        {
            var verifyResult = _dsseVerifier.Verify(envelope, options.TrustRoots, _logger);
            signatureVerified = verifyResult.IsValid;

            if (signatureVerified)
            {
                result.VerifiedSignatures++;
                _logger.LogDebug("DSSE signature verified for attestation: {File}", relativePath);
            }
            else
            {
                _logger.LogWarning(
                    "DSSE signature verification failed for attestation: {File}, reason={Reason}",
                    relativePath,
                    verifyResult.ErrorCode);
            }
        }
        else if (options.MarkAsUnverified)
        {
            // Mark all attestations as unverified when no trust roots are configured.
            signatureVerified = false;
            tlogVerified = false;
        }

        // Collect all SHA-256 subject digests carried by this attestation.
        var subjectDigests = statement.Subjects
            .Select(s => s.GetSha256Digest())
            .Where(d => d is not null)
            .Cast<string>()
            .ToList();

        // Create a single attestation reference shared by all indexed subjects.
        var attestationRef = new AttestationReference(
            ContentHash: contentHash,
            FilePath: relativePath,
            PredicateType: statement.PredicateType,
            Subjects: subjectDigests,
            SignatureVerified: signatureVerified,
            TlogVerified: tlogVerified,
            RekorUuid: rekorUuid);

        // Add to the index once per subject; subjects without a SHA-256 digest
        // cannot be keyed and are skipped.
        foreach (var subject in statement.Subjects)
        {
            var digest = subject.GetSha256Digest();
            if (digest is null)
            {
                continue;
            }

            var entry = new ArtifactEntry(
                Digest: digest,
                Name: subject.Name,
                Sboms: [],
                Attestations: [attestationRef],
                VexDocuments: []);

            index.AddOrUpdate(entry);
            result.IndexedSubjects++;
        }

        // VEX-bearing attestations additionally contribute VEX references.
        if (IsVexAttestation(statement.PredicateType))
        {
            result.VexAttestationCount++;
            await CollectVexFromAttestationAsync(
                statement,
                relativePath,
                contentHash,
                index,
                result,
                cancellationToken);
        }

        _logger.LogDebug(
            "Parsed attestation: {File}, predicateType={PredicateType}, {SubjectCount} subjects",
            relativePath,
            statement.PredicateType,
            statement.Subjects.Count);
    }

    /// <summary>
    /// Records a VEX reference for each digest-bearing subject of a VEX attestation.
    /// VEX attestations contain VEX documents in their predicate; for now they are
    /// only tracked — actual predicate parsing will be enhanced later.
    /// </summary>
    private async Task CollectVexFromAttestationAsync(
        InTotoStatement statement,
        string filePath,
        string contentHash,
        ArtifactIndex index,
        AttestationCollectionResult result,
        CancellationToken cancellationToken)
    {
        // Placeholder await keeps the async signature stable for the future
        // predicate-parsing implementation.
        await Task.CompletedTask;

        foreach (var subject in statement.Subjects)
        {
            var digest = subject.GetSha256Digest();
            if (digest is null)
            {
                continue;
            }

            // Format/precedence defaults are placeholders until predicate parsing lands.
            var vexRef = new VexReference(
                ContentHash: contentHash,
                FilePath: filePath,
                Format: VexFormat.OpenVex,
                Precedence: SourcePrecedence.Unknown,
                Timestamp: null);

            var entry = new ArtifactEntry(
                Digest: digest,
                Name: subject.Name,
                Sboms: [],
                Attestations: [],
                VexDocuments: [vexRef]);

            index.AddOrUpdate(entry);
        }
    }

    /// <summary>
    /// Heuristic: a predicate type is VEX-related if it mentions "vex"/"csaf"
    /// or exactly matches the known OpenVEX/CSAF predicate type constants.
    /// </summary>
    private static bool IsVexAttestation(string predicateType)
    {
        return predicateType.Contains("vex", StringComparison.OrdinalIgnoreCase) ||
               predicateType.Contains("csaf", StringComparison.OrdinalIgnoreCase) ||
               predicateType.Equals(PredicateTypes.OpenVex, StringComparison.OrdinalIgnoreCase) ||
               predicateType.Equals(PredicateTypes.Csaf, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>Normalizes path separators to '/' for platform-stable ordering and keys.</summary>
    private static string NormalizeRelativePath(string path) =>
        path.Replace('\\', '/');

    /// <summary>Computes the SHA-256 of a file, formatted as "sha256:&lt;lowercase hex&gt;".</summary>
    private static async Task<string> ComputeFileHashAsync(string filePath, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for attestation collection.
|
||||
/// </summary>
|
||||
/// <summary>
/// Options for attestation collection.
/// </summary>
public sealed record AttestationCollectionOptions
{
    // Shared default instance; safe to share because records with init-only
    // properties are immutable after construction.
    public static readonly AttestationCollectionOptions Default = new();

    /// <summary>
    /// Mark all attestations as unverified (skip signature verification).
    /// Only consulted by the collector when no trust roots / verifier are available.
    /// </summary>
    public bool MarkAsUnverified { get; init; } = true;

    /// <summary>
    /// Whether to verify DSSE signatures.
    /// NOTE(review): the AttestationCollector in this file never reads this flag —
    /// it verifies whenever TrustRoots and a DsseVerifier are present. Confirm intent.
    /// </summary>
    public bool VerifySignatures { get; init; } = false;

    /// <summary>
    /// Whether to verify Rekor inclusion proofs.
    /// NOTE(review): not consulted by the collector visible in this file; presumably
    /// reserved for future Rekor integration — confirm.
    /// </summary>
    public bool VerifyRekorProofs { get; init; } = false;

    /// <summary>
    /// Trust roots configuration for DSSE signature verification.
    /// Required when VerifySignatures is true.
    /// </summary>
    public TrustRootConfig? TrustRoots { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of attestation collection operation.
|
||||
/// </summary>
|
||||
/// <summary>
/// Mutable statistics accumulated over a single attestation collection run.
/// </summary>
public sealed class AttestationCollectionResult
{
    /// <summary>Number of attestation files successfully parsed.</summary>
    public int ParsedFiles { get; set; }

    /// <summary>Number of subjects indexed.</summary>
    public int IndexedSubjects { get; set; }

    /// <summary>Number of VEX attestations found.</summary>
    public int VexAttestationCount { get; set; }

    /// <summary>Number of attestations with verified DSSE signatures.</summary>
    public int VerifiedSignatures { get; set; }

    /// <summary>Count of attestations by predicate type (ordinal key comparison).</summary>
    public Dictionary<string, int> PredicateTypeCounts { get; } =
        new Dictionary<string, int>(StringComparer.Ordinal);

    /// <summary>Files that failed to parse, paired with their error messages.</summary>
    public List<(string FilePath, string Error)> FailedFiles { get; } =
        new List<(string FilePath, string Error)>();
}
|
||||
@@ -0,0 +1,336 @@
|
||||
// =============================================================================
|
||||
// CycloneDxParser.cs
|
||||
// CycloneDX SBOM parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for CycloneDX SBOM format (JSON).
|
||||
/// Supports CycloneDX 1.4, 1.5, and 1.6 schemas.
|
||||
/// </summary>
|
||||
/// <summary>
/// Parser for CycloneDX SBOM format (JSON).
/// Supports CycloneDX 1.4, 1.5, and 1.6 schemas.
/// </summary>
public sealed class CycloneDxParser : ISbomParser
{
    // NOTE: the original declared a private JsonSerializerOptions field here, but
    // it was never used (JsonDocument.ParseAsync takes no serializer options), so
    // the dead field has been removed.

    /// <summary>
    /// Detects whether <paramref name="filePath"/> looks like a CycloneDX SBOM,
    /// first by extension, then by sniffing the first 1 KiB of content.
    /// </summary>
    public SbomFormat DetectFormat(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        // CycloneDX files typically end with .cdx.json or .bom.json.
        if (filePath.EndsWith(".cdx.json", StringComparison.OrdinalIgnoreCase) ||
            filePath.EndsWith(".bom.json", StringComparison.OrdinalIgnoreCase))
        {
            return SbomFormat.CycloneDx;
        }

        // Fall back to content sniffing.
        if (File.Exists(filePath))
        {
            try
            {
                using var stream = File.OpenRead(filePath);
                using var reader = new StreamReader(stream);
                var buffer = new char[1024];
                var read = reader.Read(buffer, 0, buffer.Length);
                var content = new string(buffer, 0, read);

                // Parentheses added for clarity; this preserves the original
                // operator precedence: "bomFormat" alone is enough, OR a
                // "$schema" that mentions cyclonedx.
                if (content.Contains("\"bomFormat\"", StringComparison.OrdinalIgnoreCase) ||
                    (content.Contains("\"$schema\"", StringComparison.OrdinalIgnoreCase) &&
                     content.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)))
                {
                    return SbomFormat.CycloneDx;
                }
            }
            catch
            {
                // Best-effort detection: unreadable files are simply not detected.
            }
        }

        return SbomFormat.Unknown;
    }

    /// <summary>
    /// Parses a CycloneDX SBOM file from disk.
    /// </summary>
    /// <exception cref="OperationCanceledException">When cancellation is requested.</exception>
    public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        if (!File.Exists(filePath))
        {
            return SbomParseResult.Failure($"File not found: {filePath}", SbomFormat.CycloneDx);
        }

        try
        {
            await using var stream = File.OpenRead(filePath);
            return await ParseAsync(stream, SbomFormat.CycloneDx, cancellationToken);
        }
        catch (OperationCanceledException)
        {
            // FIX: propagate cancellation instead of misreporting it as a parse failure.
            throw;
        }
        catch (Exception ex)
        {
            return SbomParseResult.Failure($"Failed to parse CycloneDX file: {ex.Message}", SbomFormat.CycloneDx);
        }
    }

    /// <summary>
    /// Parses a CycloneDX document from a stream. The <paramref name="format"/>
    /// argument is accepted for interface compatibility; output is always CycloneDX.
    /// </summary>
    public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);

        try
        {
            using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
            var root = document.RootElement;

            // Validate bomFormat.
            // FIX: the original used `!x?.Equals(...) == true`, whose lifted
            // boolean logic treated a present-but-null bomFormat as valid; the
            // `!= true` form routes null through the $schema fallback instead.
            if (!root.TryGetProperty("bomFormat", out var bomFormatProp) ||
                bomFormatProp.GetString()?.Equals("CycloneDX", StringComparison.OrdinalIgnoreCase) != true)
            {
                // Alternative detection via the $schema URL.
                if (!root.TryGetProperty("$schema", out var schemaProp) ||
                    schemaProp.GetString()?.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase) != true)
                {
                    return SbomParseResult.Failure("Not a valid CycloneDX document", SbomFormat.CycloneDx);
                }
            }

            // Extract spec version.
            string? specVersion = null;
            if (root.TryGetProperty("specVersion", out var specProp))
            {
                specVersion = specProp.GetString();
            }

            // Extract serial number.
            string? serialNumber = null;
            if (root.TryGetProperty("serialNumber", out var serialProp))
            {
                serialNumber = serialProp.GetString();
            }

            // Extract metadata-derived fields in one pass (the original read the
            // "metadata" property three separate times).
            DateTimeOffset? createdAt = null;
            string? generatorTool = null;
            SbomSubject? primarySubject = null;

            if (root.TryGetProperty("metadata", out var metadata))
            {
                if (metadata.TryGetProperty("timestamp", out var timestampProp) &&
                    DateTimeOffset.TryParse(timestampProp.GetString(), out var parsedTimestamp))
                {
                    createdAt = parsedTimestamp;
                }

                if (metadata.TryGetProperty("tools", out var toolsProp))
                {
                    generatorTool = ExtractToolInfo(toolsProp);
                }

                // Primary component is metadata.component.
                if (metadata.TryGetProperty("component", out var primaryComponent))
                {
                    primarySubject = ParseComponent(primaryComponent);
                }
            }

            // Extract all components. Components without any digest are counted
            // in totalComponentCount but not indexed (ParseComponent returns null).
            var subjects = new List<SbomSubject>();
            int totalComponentCount = 0;

            if (root.TryGetProperty("components", out var componentsProp) &&
                componentsProp.ValueKind == JsonValueKind.Array)
            {
                foreach (var component in componentsProp.EnumerateArray())
                {
                    totalComponentCount++;
                    var subject = ParseComponent(component);
                    if (subject is not null)
                    {
                        subjects.Add(subject);
                    }
                }
            }

            // Add primary subject if it has a digest and isn't already in the list.
            if (primarySubject is not null &&
                !subjects.Any(s => s.Digest.Equals(primarySubject.Digest, StringComparison.OrdinalIgnoreCase)))
            {
                subjects.Insert(0, primarySubject);
            }

            // Sort subjects for deterministic ordering across runs.
            subjects = subjects
                .OrderBy(s => s.Digest, StringComparer.Ordinal)
                .ThenBy(s => s.Name ?? string.Empty, StringComparer.Ordinal)
                .ToList();

            return SbomParseResult.Success(
                format: SbomFormat.CycloneDx,
                subjects: subjects,
                specVersion: specVersion,
                serialNumber: serialNumber,
                createdAt: createdAt,
                generatorTool: generatorTool,
                primarySubject: primarySubject,
                totalComponentCount: totalComponentCount);
        }
        catch (JsonException ex)
        {
            return SbomParseResult.Failure($"JSON parsing error: {ex.Message}", SbomFormat.CycloneDx);
        }
    }

    /// <summary>
    /// Converts one CycloneDX component into an <see cref="SbomSubject"/>.
    /// Returns null when the component carries no usable hash, since such a
    /// component cannot be indexed by digest.
    /// </summary>
    private static SbomSubject? ParseComponent(JsonElement component)
    {
        // Extract hashes as alg -> content (algorithm names compared case-insensitively).
        var hashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        if (component.TryGetProperty("hashes", out var hashesProp) &&
            hashesProp.ValueKind == JsonValueKind.Array)
        {
            foreach (var hash in hashesProp.EnumerateArray())
            {
                if (hash.TryGetProperty("alg", out var algProp) &&
                    hash.TryGetProperty("content", out var contentProp))
                {
                    var alg = algProp.GetString();
                    var content = contentProp.GetString();
                    if (!string.IsNullOrEmpty(alg) && !string.IsNullOrEmpty(content))
                    {
                        hashes[alg] = content;
                    }
                }
            }
        }

        // Determine primary digest, preferring SHA-256 in either spelling.
        string? digest = null;
        if (hashes.TryGetValue("SHA-256", out var sha256) ||
            hashes.TryGetValue("SHA256", out sha256))
        {
            digest = NormalizeDigest("sha256:" + sha256);
        }
        else if (hashes.Count > 0)
        {
            // Fall back to the first available hash, lower-casing the algorithm
            // name and stripping dashes (e.g. "SHA-512" -> "sha512").
            var first = hashes.First();
            digest = NormalizeDigest($"{first.Key.ToLowerInvariant().Replace("-", "")}:{first.Value}");
        }

        if (string.IsNullOrEmpty(digest))
        {
            return null;
        }

        return new SbomSubject
        {
            Digest = digest,
            Name = GetOptionalString(component, "name"),
            Version = GetOptionalString(component, "version"),
            Purl = GetOptionalString(component, "purl"),
            Type = GetOptionalString(component, "type"),
            BomRef = GetOptionalString(component, "bom-ref"),
            Hashes = hashes
        };
    }

    /// <summary>Reads an optional string property, returning null when absent.</summary>
    private static string? GetOptionalString(JsonElement element, string propertyName) =>
        element.TryGetProperty(propertyName, out var prop) ? prop.GetString() : null;

    /// <summary>
    /// Extracts a "name@version, ..." summary of the generator tools.
    /// CycloneDX 1.5+ nests tools under tools.components; 1.4 and earlier uses
    /// a bare tools array. Both shapes share the same per-entry format, so the
    /// duplicated loops from the original are folded into one helper.
    /// </summary>
    private static string? ExtractToolInfo(JsonElement tools)
    {
        if (tools.TryGetProperty("components", out var components) &&
            components.ValueKind == JsonValueKind.Array)
        {
            return BuildToolList(components);
        }

        if (tools.ValueKind == JsonValueKind.Array)
        {
            return BuildToolList(tools);
        }

        return null;
    }

    /// <summary>Formats an array of tool entries as "name@version, ..."; null when empty.</summary>
    private static string? BuildToolList(JsonElement toolArray)
    {
        var toolList = new List<string>();
        foreach (var tool in toolArray.EnumerateArray())
        {
            if (!tool.TryGetProperty("name", out var name))
            {
                continue;
            }

            var toolName = name.GetString();
            if (string.IsNullOrEmpty(toolName))
            {
                continue;
            }

            if (tool.TryGetProperty("version", out var version))
            {
                toolName += $"@{version.GetString()}";
            }

            toolList.Add(toolName);
        }

        return toolList.Count > 0 ? string.Join(", ", toolList) : null;
    }

    /// <summary>Delegates digest normalization to the shared ArtifactIndex rules.</summary>
    private static string NormalizeDigest(string digest)
    {
        return ArtifactIndex.NormalizeDigest(digest);
    }
}
|
||||
@@ -0,0 +1,301 @@
|
||||
// =============================================================================
|
||||
// DsseAttestationParser.cs
|
||||
// DSSE attestation parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for DSSE-wrapped in-toto attestations.
|
||||
/// </summary>
|
||||
public sealed class DsseAttestationParser : IAttestationParser
|
||||
{
|
||||
// Lenient serializer settings (case-insensitive properties, trailing commas
// allowed, comments skipped).
// NOTE(review): not referenced by any method visible in this file —
// JsonDocument.ParseAsync below takes no serializer options. Confirm whether
// this field is dead code before removing.
private static readonly JsonSerializerOptions JsonOptions = new()
{
    PropertyNameCaseInsensitive = true,
    AllowTrailingCommas = true,
    ReadCommentHandling = JsonCommentHandling.Skip
};
|
||||
|
||||
/// <summary>
/// Heuristically decides whether a file is a DSSE attestation: first by known
/// file extensions, then by sniffing the first 512 characters for DSSE
/// envelope markers (payloadType, payload, signatures).
/// </summary>
public bool IsAttestation(string filePath)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

    // Common attestation file extensions, compared case-insensitively.
    // (Previously the whole path was lowered via ToLowerInvariant(); the
    // StringComparison overloads avoid that allocation.)
    if (filePath.EndsWith(".intoto.jsonl", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".intoto.json", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".dsig", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".dsse", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".att", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".attestation", StringComparison.OrdinalIgnoreCase))
    {
        return true;
    }

    // Fall back to content sniffing.
    if (File.Exists(filePath))
    {
        try
        {
            using var stream = File.OpenRead(filePath);
            using var reader = new StreamReader(stream);
            var buffer = new char[512];
            var read = reader.Read(buffer, 0, buffer.Length);
            var content = new string(buffer, 0, read);

            // All three DSSE envelope markers must appear.
            if (content.Contains("\"payloadType\"", StringComparison.OrdinalIgnoreCase) &&
                content.Contains("\"payload\"", StringComparison.OrdinalIgnoreCase) &&
                content.Contains("\"signatures\"", StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }
        catch
        {
            // Best-effort detection: unreadable files are simply not detected.
        }
    }

    return false;
}
|
||||
|
||||
/// <summary>
/// Parses a DSSE attestation file from disk.
/// </summary>
/// <param name="filePath">Path to the attestation file.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A success result with envelope and statement, or a failure with a message.</returns>
/// <exception cref="OperationCanceledException">When cancellation is requested.</exception>
public async Task<AttestationParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

    if (!File.Exists(filePath))
    {
        return AttestationParseResult.Failure($"File not found: {filePath}");
    }

    try
    {
        await using var stream = File.OpenRead(filePath);
        return await ParseAsync(stream, cancellationToken);
    }
    catch (OperationCanceledException)
    {
        // FIX: cancellation must propagate to the caller instead of being
        // swallowed by the general handler and reported as a parse failure.
        throw;
    }
    catch (Exception ex)
    {
        return AttestationParseResult.Failure($"Failed to parse attestation file: {ex.Message}");
    }
}
|
||||
|
||||
/// <summary>
/// Parses a DSSE envelope and its embedded in-toto statement from a stream.
/// Returns a failure result (never throws) for structurally invalid envelopes
/// or undecodable payloads; JSON syntax errors are also mapped to failures.
/// </summary>
public async Task<AttestationParseResult> ParseAsync(Stream stream, CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(stream);

    try
    {
        using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);

        var envelope = ParseEnvelope(document.RootElement);
        if (envelope is null)
        {
            return AttestationParseResult.Failure("Invalid DSSE envelope structure");
        }

        var statement = DecodeAndParseStatement(envelope);
        return statement is null
            ? AttestationParseResult.Failure("Failed to decode or parse in-toto statement")
            : AttestationParseResult.Success(envelope, statement);
    }
    catch (JsonException ex)
    {
        return AttestationParseResult.Failure($"JSON parsing error: {ex.Message}");
    }
}
|
||||
|
||||
/// <summary>
/// Extracts a DSSE envelope from a JSON element. Returns null when any of
/// the three mandatory fields (payloadType, payload, signatures) is missing
/// or when payloadType/payload are empty. Malformed individual signatures
/// are skipped rather than failing the whole envelope.
/// </summary>
private static DsseEnvelope? ParseEnvelope(JsonElement root)
{
    // All three DSSE fields must be present.
    if (!root.TryGetProperty("payloadType", out var typeProp) ||
        !root.TryGetProperty("payload", out var payloadElement) ||
        !root.TryGetProperty("signatures", out var signatureArray))
    {
        return null;
    }

    var payloadType = typeProp.GetString();
    var payload = payloadElement.GetString();
    if (string.IsNullOrEmpty(payloadType) || string.IsNullOrEmpty(payload))
    {
        return null;
    }

    var signatures = new List<DsseSignature>();
    if (signatureArray.ValueKind == JsonValueKind.Array)
    {
        foreach (var element in signatureArray.EnumerateArray())
        {
            if (ParseSignature(element) is { } parsed)
            {
                signatures.Add(parsed);
            }
        }
    }

    return new DsseEnvelope
    {
        PayloadType = payloadType,
        Payload = payload,
        Signatures = signatures
    };
}
|
||||
|
||||
/// <summary>
/// Extracts one DSSE signature entry. "sig" is mandatory (non-empty);
/// "keyid" and "cert" are optional. Returns null for unusable entries.
/// </summary>
private static DsseSignature? ParseSignature(JsonElement element)
{
    if (!element.TryGetProperty("sig", out var sigProp) ||
        sigProp.GetString() is not { Length: > 0 } sig)
    {
        return null;
    }

    var keyId = element.TryGetProperty("keyid", out var keyIdProp)
        ? keyIdProp.GetString()
        : null;

    var cert = element.TryGetProperty("cert", out var certProp)
        ? certProp.GetString()
        : null;

    return new DsseSignature
    {
        Sig = sig,
        KeyId = keyId,
        Cert = cert
    };
}
|
||||
|
||||
/// <summary>
/// Base64-decodes the envelope payload and parses it as an in-toto statement.
/// Returns null on any decode/parse problem; the caller converts that into a
/// parse-failure result.
/// </summary>
private static InTotoStatement? DecodeAndParseStatement(DsseEnvelope envelope)
{
    try
    {
        var payloadJson = Encoding.UTF8.GetString(Convert.FromBase64String(envelope.Payload));
        using var document = JsonDocument.Parse(payloadJson);
        var root = document.RootElement;

        // Statement type: prefer "_type" (in-toto spec), fall back to "type",
        // and default to the v1 statement URI when absent or empty.
        var statementType = root.TryGetProperty("_type", out var typeProp) ||
                            root.TryGetProperty("type", out typeProp)
            ? typeProp.GetString()
            : null;
        if (string.IsNullOrEmpty(statementType))
        {
            statementType = "https://in-toto.io/Statement/v1";
        }

        // Predicate type is mandatory; bail out when missing or empty.
        if (!root.TryGetProperty("predicateType", out var predicateTypeProp) ||
            predicateTypeProp.GetString() is not { Length: > 0 } predicateType)
        {
            return null;
        }

        // Subjects: individual unusable entries are skipped, not fatal.
        var subjects = new List<InTotoSubject>();
        if (root.TryGetProperty("subject", out var subjectArray) &&
            subjectArray.ValueKind == JsonValueKind.Array)
        {
            foreach (var subjectElement in subjectArray.EnumerateArray())
            {
                if (ParseSubject(subjectElement) is { } subject)
                {
                    subjects.Add(subject);
                }
            }
        }

        // Carry the raw predicate JSON along for downstream processing.
        var predicateJson = root.TryGetProperty("predicate", out var predicateProp)
            ? predicateProp.GetRawText()
            : null;

        return new InTotoStatement
        {
            Type = statementType,
            PredicateType = predicateType,
            Subjects = subjects,
            PredicateJson = predicateJson
        };
    }
    catch
    {
        // Deliberate best-effort decode: bad base64 or malformed payload JSON
        // yields null, which the caller reports as a parse failure.
        return null;
    }
}
|
||||
|
||||
private static InTotoSubject? ParseSubject(JsonElement element)
|
||||
{
|
||||
string? name = null;
|
||||
if (element.TryGetProperty("name", out var nameProp))
|
||||
{
|
||||
name = nameProp.GetString();
|
||||
}
|
||||
|
||||
var digest = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (element.TryGetProperty("digest", out var digestProp) &&
|
||||
digestProp.ValueKind == JsonValueKind.Object)
|
||||
{
|
||||
foreach (var prop in digestProp.EnumerateObject())
|
||||
{
|
||||
var value = prop.Value.GetString();
|
||||
if (!string.IsNullOrEmpty(value))
|
||||
{
|
||||
digest[prop.Name] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (digest.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new InTotoSubject
|
||||
{
|
||||
Name = name,
|
||||
Digest = digest
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,199 @@
|
||||
// =============================================================================
|
||||
// IAttestationParser.cs
|
||||
// Attestation parsing abstraction for DSSE/in-toto attestations
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
/// Interface for parsing DSSE-wrapped in-toto attestations.
/// Implementations decode the envelope, its signatures, and the embedded
/// in-toto statement payload.
/// </summary>
public interface IAttestationParser
{
    /// <summary>
    /// Parses a DSSE envelope from the given file path.
    /// </summary>
    /// <param name="filePath">Path to the attestation file.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Parsed attestation result; failures are reported via
    /// <see cref="AttestationParseResult.IsSuccess"/>, not exceptions.</returns>
    Task<AttestationParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Parses a DSSE envelope from a stream.
    /// </summary>
    /// <param name="stream">Stream containing the attestation content.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Parsed attestation result.</returns>
    Task<AttestationParseResult> ParseAsync(Stream stream, CancellationToken cancellationToken = default);

    /// <summary>
    /// Detects if a file is a DSSE attestation.
    /// </summary>
    /// <param name="filePath">Path to the file.</param>
    /// <returns>True if the file appears to be a DSSE attestation.</returns>
    bool IsAttestation(string filePath);
}
|
||||
|
||||
/// <summary>
/// Outcome of parsing an attestation document: either a successfully decoded
/// envelope plus in-toto statement, or an error description.
/// </summary>
public sealed record AttestationParseResult
{
    /// <summary>Whether parsing was successful.</summary>
    public bool IsSuccess { get; init; }

    /// <summary>Error message if parsing failed; null on success.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>The parsed DSSE envelope (populated on success).</summary>
    public DsseEnvelope? Envelope { get; init; }

    /// <summary>The parsed in-toto statement payload (populated on success).</summary>
    public InTotoStatement? Statement { get; init; }

    /// <summary>Builds a successful result carrying the envelope and its statement.</summary>
    public static AttestationParseResult Success(DsseEnvelope envelope, InTotoStatement statement) =>
        new()
        {
            IsSuccess = true,
            Envelope = envelope,
            Statement = statement
        };

    /// <summary>Builds a failed result carrying only the error description.</summary>
    public static AttestationParseResult Failure(string errorMessage) =>
        new()
        {
            IsSuccess = false,
            ErrorMessage = errorMessage
        };
}
|
||||
|
||||
/// <summary>
/// Represents a DSSE (Dead Simple Signing Envelope): a payload type, a
/// base64-encoded payload, and zero or more signatures over the payload.
/// </summary>
public sealed record DsseEnvelope
{
    /// <summary>
    /// Payload type (typically "application/vnd.in-toto+json").
    /// </summary>
    public required string PayloadType { get; init; }

    /// <summary>
    /// Base64-encoded payload. For in-toto attestations this decodes to the
    /// statement JSON consumed by <c>DecodeAndParseStatement</c>.
    /// </summary>
    public required string Payload { get; init; }

    /// <summary>
    /// Signatures on the envelope. May be empty for unsigned envelopes.
    /// </summary>
    public IReadOnlyList<DsseSignature> Signatures { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// One signature entry inside a DSSE envelope. Only the signature bytes are
/// mandatory; key identification and certificate material are optional.
/// </summary>
public sealed record DsseSignature
{
    /// <summary>Base64-encoded signature bytes.</summary>
    public required string Sig { get; init; }

    /// <summary>Key identifier (e.g., key ID or certificate fingerprint), when present.</summary>
    public string? KeyId { get; init; }

    /// <summary>Certificate chain, when present.</summary>
    public string? Cert { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents an in-toto statement (the decoded DSSE attestation payload):
/// a statement type, a predicate type, the subjects the statement covers,
/// and the raw predicate body.
/// </summary>
public sealed record InTotoStatement
{
    /// <summary>
    /// Statement type (typically "https://in-toto.io/Statement/v1").
    /// </summary>
    public required string Type { get; init; }

    /// <summary>
    /// Predicate type URI (e.g., "https://slsa.dev/provenance/v1").
    /// See <c>PredicateTypes</c> for well-known values.
    /// </summary>
    public required string PredicateType { get; init; }

    /// <summary>
    /// Subjects (artifacts) this statement applies to. May be empty.
    /// </summary>
    public IReadOnlyList<InTotoSubject> Subjects { get; init; } = [];

    /// <summary>
    /// Raw predicate JSON, preserved verbatim so consumers can parse it
    /// according to <see cref="PredicateType"/>.
    /// </summary>
    public string? PredicateJson { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents a subject in an in-toto statement: an optional name plus a map
/// of digest algorithm to hash value.
/// </summary>
public sealed record InTotoSubject
{
    /// <summary>
    /// Subject name (typically a file path or artifact reference).
    /// </summary>
    public string? Name { get; init; }

    /// <summary>
    /// Subject digests (algorithm -> hash).
    /// </summary>
    public IReadOnlyDictionary<string, string> Digest { get; init; } = new Dictionary<string, string>();

    /// <summary>
    /// Gets the normalized "sha256:hex" digest if a SHA-256 entry is present.
    /// The hash portion is lowercased for stable comparison.
    /// </summary>
    public string? GetSha256Digest()
    {
        // Fast path: exact-case key (and case-insensitive when the supplied
        // dictionary uses an ignore-case comparer, as the parser's does).
        if (Digest.TryGetValue("sha256", out var hash))
        {
            return "sha256:" + hash.ToLowerInvariant();
        }

        // Fallback: callers may supply an ordinary case-sensitive dictionary,
        // and producers emit "SHA256"/"Sha256" variants; scan ignoring case so
        // the lookup does not silently miss those entries.
        foreach (var pair in Digest)
        {
            if (string.Equals(pair.Key, "sha256", StringComparison.OrdinalIgnoreCase))
            {
                return "sha256:" + pair.Value.ToLowerInvariant();
            }
        }

        return null;
    }
}
|
||||
|
||||
/// <summary>
/// Well-known predicate type URIs for attestations, used to classify the
/// payload of an in-toto statement.
/// </summary>
public static class PredicateTypes
{
    /// <summary>SLSA build provenance, v1.</summary>
    public const string SlsaProvenanceV1 = "https://slsa.dev/provenance/v1";
    /// <summary>SLSA build provenance, v0.2 (legacy).</summary>
    public const string SlsaProvenanceV02 = "https://slsa.dev/provenance/v0.2";
    /// <summary>in-toto link metadata.</summary>
    public const string InTotoLink = "https://in-toto.io/Link/v1";
    /// <summary>SPDX SBOM document.</summary>
    public const string Spdx = "https://spdx.dev/Document";
    /// <summary>CycloneDX SBOM document.</summary>
    public const string CycloneDx = "https://cyclonedx.org/bom";
    /// <summary>OpenVEX vulnerability exploitability statement.</summary>
    public const string OpenVex = "https://openvex.dev/ns/v0.2.0";
    /// <summary>CSAF security advisory.</summary>
    public const string Csaf = "https://docs.oasis-open.org/csaf/csaf/v2.0";
    /// <summary>OpenSSF Scorecard result.</summary>
    public const string ScorecardV2 = "https://ossf.github.io/scorecard/v2";
    /// <summary>Cosign vulnerability scan report.</summary>
    public const string VulnerabilityReport = "https://cosign.sigstore.dev/attestation/vuln/v1";
}
|
||||
@@ -0,0 +1,188 @@
|
||||
// =============================================================================
|
||||
// ISbomParser.cs
|
||||
// SBOM parsing abstraction for CycloneDX and SPDX formats
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
/// Interface for parsing SBOM documents into a normalized representation.
/// Supports CycloneDX and SPDX formats.
/// </summary>
public interface ISbomParser
{
    /// <summary>
    /// Parses an SBOM file from the given path.
    /// </summary>
    /// <param name="filePath">Path to the SBOM file.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Parsed SBOM result containing subjects and metadata; parse
    /// failures are reported via <see cref="SbomParseResult.IsSuccess"/>.</returns>
    Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Parses an SBOM from a stream.
    /// </summary>
    /// <param name="stream">Stream containing the SBOM content.</param>
    /// <param name="format">Expected SBOM format.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Parsed SBOM result containing subjects and metadata.</returns>
    Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default);

    /// <summary>
    /// Detects the SBOM format from file extension or content.
    /// </summary>
    /// <param name="filePath">Path to the SBOM file.</param>
    /// <returns>Detected SBOM format.</returns>
    SbomFormat DetectFormat(string filePath);
}
|
||||
|
||||
/// <summary>
/// Outcome of parsing an SBOM document: format metadata plus the subjects
/// (components/packages) that carry digests, or an error description.
/// </summary>
public sealed record SbomParseResult
{
    /// <summary>Whether parsing was successful.</summary>
    public bool IsSuccess { get; init; }

    /// <summary>Error message if parsing failed; null on success.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Detected or specified SBOM format.</summary>
    public SbomFormat Format { get; init; }

    /// <summary>SBOM specification version (e.g., "1.6" for CycloneDX, "2.3" for SPDX).</summary>
    public string? SpecVersion { get; init; }

    /// <summary>SBOM serial number or document namespace.</summary>
    public string? SerialNumber { get; init; }

    /// <summary>Timestamp when the SBOM was created.</summary>
    public DateTimeOffset? CreatedAt { get; init; }

    /// <summary>Tool that generated the SBOM.</summary>
    public string? GeneratorTool { get; init; }

    /// <summary>Primary component (for CycloneDX) or main package (for SPDX).</summary>
    public SbomSubject? PrimarySubject { get; init; }

    /// <summary>All subjects (components/packages) in the SBOM that have digests.</summary>
    public IReadOnlyList<SbomSubject> Subjects { get; init; } = [];

    /// <summary>Total number of components/packages in the SBOM.</summary>
    public int TotalComponentCount { get; init; }

    /// <summary>Raw normalized JSON content for hashing.</summary>
    public string? NormalizedContent { get; init; }

    /// <summary>Builds a successful result from the parsed subjects and metadata.</summary>
    public static SbomParseResult Success(
        SbomFormat format,
        IReadOnlyList<SbomSubject> subjects,
        string? specVersion = null,
        string? serialNumber = null,
        DateTimeOffset? createdAt = null,
        string? generatorTool = null,
        SbomSubject? primarySubject = null,
        int totalComponentCount = 0,
        string? normalizedContent = null) =>
        new()
        {
            IsSuccess = true,
            Format = format,
            Subjects = subjects,
            SpecVersion = specVersion,
            SerialNumber = serialNumber,
            CreatedAt = createdAt,
            GeneratorTool = generatorTool,
            PrimarySubject = primarySubject,
            TotalComponentCount = totalComponentCount,
            NormalizedContent = normalizedContent
        };

    /// <summary>Builds a failed result carrying the error description.</summary>
    public static SbomParseResult Failure(string errorMessage, SbomFormat format = SbomFormat.Unknown) =>
        new()
        {
            IsSuccess = false,
            ErrorMessage = errorMessage,
            Format = format,
            Subjects = []
        };
}
|
||||
|
||||
/// <summary>
/// A single artifact (component or package) described by an SBOM.
/// </summary>
public sealed record SbomSubject
{
    /// <summary>Artifact digest in normalized format (sha256:hex).</summary>
    public required string Digest { get; init; }

    /// <summary>Human-readable name of the artifact, when present.</summary>
    public string? Name { get; init; }

    /// <summary>Package URL (purl), when available.</summary>
    public string? Purl { get; init; }

    /// <summary>Version string, when present.</summary>
    public string? Version { get; init; }

    /// <summary>Component type (application, library, container, etc.).</summary>
    public string? Type { get; init; }

    /// <summary>BOM reference identifier (for CycloneDX).</summary>
    public string? BomRef { get; init; }

    /// <summary>SPDX identifier (for SPDX).</summary>
    public string? SpdxId { get; init; }

    /// <summary>Every hash recorded for the subject (algorithm -> value).</summary>
    public IReadOnlyDictionary<string, string> Hashes { get; init; } = new Dictionary<string, string>();
}
|
||||
@@ -0,0 +1,173 @@
|
||||
// =============================================================================
|
||||
// SbomCollector.cs
|
||||
// SBOM evidence collector for reconciliation workflow
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
/// Collects SBOM evidence from an evidence directory and populates the artifact index.
/// Files are processed in deterministic (ordinal path) order; individual file
/// failures are recorded and do not abort the collection.
/// </summary>
public sealed class SbomCollector
{
    private readonly ISbomParser _parser;
    private readonly ILogger<SbomCollector> _logger;

    public SbomCollector(ISbomParser? parser = null, ILogger<SbomCollector>? logger = null)
    {
        _parser = parser ?? new SbomParserFactory();
        _logger = logger ?? NullLogger<SbomCollector>.Instance;
    }

    /// <summary>
    /// Collects SBOM evidence from the sboms directory.
    /// </summary>
    /// <param name="sbomsDirectory">Path to the sboms directory.</param>
    /// <param name="index">Artifact index to populate.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Collection result with statistics; a missing directory yields an empty result.</returns>
    public async Task<SbomCollectionResult> CollectAsync(
        string sbomsDirectory,
        ArtifactIndex index,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sbomsDirectory);
        ArgumentNullException.ThrowIfNull(index);

        var result = new SbomCollectionResult();

        if (!Directory.Exists(sbomsDirectory))
        {
            _logger.LogDebug("SBOM directory does not exist: {Directory}", sbomsDirectory);
            return result;
        }

        // Find all potential SBOM files, ordered deterministically by their
        // normalized relative path so results are reproducible across runs.
        var files = Directory.EnumerateFiles(sbomsDirectory, "*.*", SearchOption.AllDirectories)
            .Where(IsSbomFile)
            .OrderBy(f => NormalizeRelativePath(Path.GetRelativePath(sbomsDirectory, f)), StringComparer.Ordinal)
            .ToList();

        _logger.LogDebug("Found {Count} potential SBOM files in {Directory}", files.Count, sbomsDirectory);

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                await ProcessSbomFileAsync(file, sbomsDirectory, index, result, cancellationToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                // Cancellation is not a per-file failure; let it propagate so
                // the caller sees the collection was aborted.
                throw;
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to process SBOM file: {File}", file);
                result.FailedFiles.Add((file, ex.Message));
            }
        }

        return result;
    }

    /// <summary>
    /// Parses one SBOM file, records statistics, and indexes every subject it
    /// describes under a reference to the SBOM file's content hash.
    /// </summary>
    private async Task ProcessSbomFileAsync(
        string filePath,
        string baseDirectory,
        ArtifactIndex index,
        SbomCollectionResult result,
        CancellationToken cancellationToken)
    {
        // Compute content hash for the SBOM file itself.
        var contentHash = await ComputeFileHashAsync(filePath, cancellationToken).ConfigureAwait(false);
        var relativePath = NormalizeRelativePath(Path.GetRelativePath(baseDirectory, filePath));

        // Parse the SBOM; parser failures are recorded, not thrown.
        var parseResult = await _parser.ParseAsync(filePath, cancellationToken).ConfigureAwait(false);

        if (!parseResult.IsSuccess)
        {
            _logger.LogWarning("Failed to parse SBOM {File}: {Error}", filePath, parseResult.ErrorMessage);
            result.FailedFiles.Add((filePath, parseResult.ErrorMessage ?? "Unknown error"));
            return;
        }

        result.ParsedFiles++;
        result.TotalSubjects += parseResult.Subjects.Count;

        // Create SBOM reference shared by every subject in this file.
        var sbomRef = new SbomReference(
            ContentHash: contentHash,
            FilePath: relativePath,
            Format: parseResult.Format,
            CreatedAt: parseResult.CreatedAt);

        // Add each subject to the index.
        foreach (var subject in parseResult.Subjects)
        {
            var entry = new ArtifactEntry(
                Digest: subject.Digest,
                Name: subject.Name,
                Sboms: [sbomRef],
                Attestations: [],
                VexDocuments: []);

            index.AddOrUpdate(entry);
            result.IndexedSubjects++;
        }

        _logger.LogDebug(
            "Parsed {Format} SBOM: {File}, {SubjectCount} subjects indexed",
            parseResult.Format,
            relativePath,
            parseResult.Subjects.Count);
    }

    /// <summary>
    /// Heuristic SBOM filename check. Uses ordinal-ignore-case suffix matching
    /// instead of lowercasing the whole path (no allocation, no culture issues).
    /// </summary>
    private static bool IsSbomFile(string filePath) =>
        filePath.EndsWith(".cdx.json", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".bom.json", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith(".spdx.json", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith("sbom.json", StringComparison.OrdinalIgnoreCase) ||
        filePath.EndsWith("bom.json", StringComparison.OrdinalIgnoreCase);

    /// <summary>Normalizes path separators so relative paths sort and compare identically on all platforms.</summary>
    private static string NormalizeRelativePath(string path) =>
        path.Replace('\\', '/');

    /// <summary>Streams the file through SHA-256 and returns the "sha256:hex" (lowercase) digest.</summary>
    private static async Task<string> ComputeFileHashAsync(string filePath, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
/// Result of SBOM collection operation. Mutable accumulator populated by
/// <c>SbomCollector.CollectAsync</c>.
/// </summary>
public sealed class SbomCollectionResult
{
    /// <summary>
    /// Number of SBOM files successfully parsed.
    /// </summary>
    public int ParsedFiles { get; set; }

    /// <summary>
    /// Total number of subjects found across all SBOMs (before indexing).
    /// </summary>
    public int TotalSubjects { get; set; }

    /// <summary>
    /// Number of subjects indexed (with valid digests).
    /// </summary>
    public int IndexedSubjects { get; set; }

    /// <summary>
    /// Files that failed to parse or process, with error messages.
    /// </summary>
    public List<(string FilePath, string Error)> FailedFiles { get; } = [];
}
|
||||
@@ -0,0 +1,490 @@
|
||||
// =============================================================================
|
||||
// SbomNormalizer.cs
|
||||
// Canonical SBOM transformer for deterministic reconciliation
|
||||
// Part of Step 3: Normalization (Task T13)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Transforms SBOMs into a canonical form for deterministic hashing and comparison.
|
||||
/// Applies normalization rules per advisory §5 step 3.
|
||||
/// </summary>
|
||||
public sealed class SbomNormalizer
|
||||
{
|
||||
private readonly NormalizationOptions _options;
|
||||
|
||||
    /// <summary>
    /// Creates a normalizer with the supplied options, falling back to
    /// <c>NormalizationOptions.Default</c> when none are given.
    /// </summary>
    public SbomNormalizer(NormalizationOptions? options = null)
    {
        _options = options ?? NormalizationOptions.Default;
    }
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an SBOM JSON document to canonical form.
|
||||
/// </summary>
|
||||
/// <param name="sbomJson">Raw SBOM JSON content.</param>
|
||||
/// <param name="format">SBOM format (CycloneDX or SPDX).</param>
|
||||
/// <returns>Normalized JSON string.</returns>
|
||||
public string Normalize(string sbomJson, SbomFormat format)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sbomJson);
|
||||
|
||||
var node = JsonNode.Parse(sbomJson);
|
||||
if (node is null)
|
||||
{
|
||||
return "null";
|
||||
}
|
||||
|
||||
var normalized = format switch
|
||||
{
|
||||
SbomFormat.CycloneDx => NormalizeCycloneDx(node),
|
||||
SbomFormat.Spdx => NormalizeSpdx(node),
|
||||
_ => NormalizeGeneric(node)
|
||||
};
|
||||
|
||||
return SerializeCanonical(normalized);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes a CycloneDX SBOM.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeCycloneDx(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return node;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
// Process in deterministic key order
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripCycloneDxField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
if (value is null) continue;
|
||||
|
||||
var normalizedValue = key switch
|
||||
{
|
||||
"components" => NormalizeComponents(value.DeepClone()),
|
||||
"metadata" => NormalizeCycloneDxMetadata(value.DeepClone()),
|
||||
"dependencies" => NormalizeDependencies(value.DeepClone()),
|
||||
"vulnerabilities" => NormalizeVulnerabilities(value.DeepClone()),
|
||||
_ => NormalizeNode(value.DeepClone())
|
||||
};
|
||||
|
||||
normalized[key] = normalizedValue;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an SPDX SBOM.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdx(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return node;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripSpdxField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
if (value is null) continue;
|
||||
|
||||
var normalizedValue = key switch
|
||||
{
|
||||
"packages" => NormalizeSpdxPackages(value.DeepClone()),
|
||||
"relationships" => NormalizeSpdxRelationships(value.DeepClone()),
|
||||
"files" => NormalizeSpdxFiles(value.DeepClone()),
|
||||
"creationInfo" => NormalizeSpdxCreationInfo(value.DeepClone()),
|
||||
_ => NormalizeNode(value.DeepClone())
|
||||
};
|
||||
|
||||
normalized[key] = normalizedValue;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generic normalization for unknown formats.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeGeneric(JsonNode node)
|
||||
{
|
||||
return NormalizeNode(node);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Recursively normalizes a JSON node.
|
||||
/// </summary>
|
||||
private JsonNode? NormalizeNode(JsonNode? node)
|
||||
{
|
||||
return node switch
|
||||
{
|
||||
JsonObject obj => NormalizeObject(obj),
|
||||
JsonArray arr => NormalizeArray(arr),
|
||||
JsonValue val => NormalizeValue(val),
|
||||
_ => node
|
||||
};
|
||||
}
|
||||
|
||||
private JsonObject NormalizeObject(JsonObject obj)
|
||||
{
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripTimestampField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
normalized[key] = NormalizeNode(value?.DeepClone());
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Recursively normalizes a JSON array. When array sorting is enabled and
    /// every element is an object, elements are reordered by a deterministic
    /// sort key; mixed or scalar arrays keep their input order.
    /// </summary>
    private JsonArray NormalizeArray(JsonArray arr)
    {
        var normalized = new JsonArray();

        var elements = arr
            .Select(n => NormalizeNode(n?.DeepClone()))
            .ToList();

        // Sort arrays of objects by a deterministic key.
        // NOTE: All() is vacuously true for an empty list, so an empty array
        // takes this branch harmlessly. A JSON null element (null JsonNode)
        // fails the `is JsonObject` check, which also disables sorting.
        if (_options.SortArrays && elements.All(e => e is JsonObject))
        {
            // OrderBy is a stable sort, so equal keys keep their input order —
            // required for deterministic output.
            elements = elements
                .Cast<JsonObject>()
                .OrderBy(o => GetSortKey(o), StringComparer.Ordinal)
                .Cast<JsonNode?>()
                .ToList();
        }

        foreach (var element in elements)
        {
            normalized.Add(element);
        }

        return normalized;
    }
|
||||
|
||||
private JsonValue NormalizeValue(JsonValue val)
|
||||
{
|
||||
var value = val.GetValue<object>();
|
||||
|
||||
if (value is string str)
|
||||
{
|
||||
// Lowercase URIs
|
||||
if (_options.LowercaseUris && IsUri(str))
|
||||
{
|
||||
str = str.ToLowerInvariant();
|
||||
}
|
||||
|
||||
return JsonValue.Create(str)!;
|
||||
}
|
||||
|
||||
return val.DeepClone().AsValue();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX components array.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeComponents(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var components = arr
|
||||
.Select(c => NormalizeObject((c as JsonObject)!))
|
||||
.OrderBy(c => GetComponentSortKey(c), StringComparer.Ordinal);
|
||||
|
||||
foreach (var component in components)
|
||||
{
|
||||
normalized.Add(component);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX metadata.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeCycloneDxMetadata(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => _options.StripTimestamps ? key != "timestamp" : true)
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
normalized[key] = NormalizeNode(value?.DeepClone());
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Canonicalizes the CycloneDX "dependencies" array: entries are ordered by
    /// their "ref" value, and each entry's "dependsOn" list is sorted ordinally.
    /// </summary>
    private JsonNode NormalizeDependencies(JsonNode node)
    {
        if (node is not JsonArray arr)
        {
            return NormalizeNode(node)!;
        }

        var normalized = new JsonArray();
        // NOTE(review): the `(d as JsonObject)!` cast assumes every entry is an
        // object; a non-object entry would fault inside NormalizeObject —
        // confirm inputs are schema-validated upstream.
        var deps = arr
            .Select(d => NormalizeObject((d as JsonObject)!))
            .OrderBy(d => d["ref"]?.GetValue<string>() ?? "", StringComparer.Ordinal);

        foreach (var dep in deps)
        {
            // Also sort dependsOn arrays so the dependency list itself is
            // order-independent.
            if (dep["dependsOn"] is JsonArray dependsOn)
            {
                var sortedDeps = new JsonArray();
                // DeepClone is required: a JsonNode cannot have two parents, so
                // each item must be detached before joining the new array.
                foreach (var item in dependsOn.OrderBy(x => x?.GetValue<string>() ?? "", StringComparer.Ordinal))
                {
                    sortedDeps.Add(item?.DeepClone());
                }
                dep["dependsOn"] = sortedDeps;
            }
            normalized.Add(dep);
        }

        return normalized;
    }
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX vulnerabilities.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeVulnerabilities(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var vulns = arr
|
||||
.Select(v => NormalizeObject((v as JsonObject)!))
|
||||
.OrderBy(v => v["id"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var vuln in vulns)
|
||||
{
|
||||
normalized.Add(vuln);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes SPDX packages.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdxPackages(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var packages = arr
|
||||
.Select(p => NormalizeObject((p as JsonObject)!))
|
||||
.OrderBy(p => p["SPDXID"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var pkg in packages)
|
||||
{
|
||||
normalized.Add(pkg);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Canonicalizes the SPDX "relationships" array. Relationships are ordered
    /// by (spdxElementId, relatedSpdxElement, relationshipType) so the full
    /// triple — not just the source element — determines the order.
    /// </summary>
    private JsonNode NormalizeSpdxRelationships(JsonNode node)
    {
        if (node is not JsonArray arr)
        {
            return NormalizeNode(node)!;
        }

        var normalized = new JsonArray();
        // NOTE(review): the `(r as JsonObject)!` cast assumes every entry is an
        // object; a non-object entry would fault inside NormalizeObject.
        var rels = arr
            .Select(r => NormalizeObject((r as JsonObject)!))
            .OrderBy(r => r["spdxElementId"]?.GetValue<string>() ?? "", StringComparer.Ordinal)
            .ThenBy(r => r["relatedSpdxElement"]?.GetValue<string>() ?? "", StringComparer.Ordinal)
            .ThenBy(r => r["relationshipType"]?.GetValue<string>() ?? "", StringComparer.Ordinal);

        foreach (var rel in rels)
        {
            normalized.Add(rel);
        }

        return normalized;
    }
|
||||
|
||||
/// <summary>
/// Normalizes SPDX files: each file object is normalized and the array is
/// sorted ordinally by SPDXID for deterministic output.
/// </summary>
private JsonNode NormalizeSpdxFiles(JsonNode node)
{
    if (node is not JsonArray fileArray)
    {
        return NormalizeNode(node)!;
    }

    var result = new JsonArray();
    foreach (var file in fileArray
                 .Select(f => NormalizeObject((f as JsonObject)!))
                 .OrderBy(f => f["SPDXID"]?.GetValue<string>() ?? "", StringComparer.Ordinal)
                 .ToArray())
    {
        result.Add(file);
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Normalizes SPDX creation info: keys are sorted ordinally and, when the
/// StripTimestamps option is enabled, the "created" field is omitted.
/// </summary>
private JsonNode NormalizeSpdxCreationInfo(JsonNode node)
{
    if (node is not JsonObject creationInfo)
    {
        return NormalizeNode(node)!;
    }

    // "created" is dropped only when timestamp stripping is on; everything
    // else is kept and emitted in ordinal key order.
    var orderedKeys = creationInfo
        .Select(entry => entry.Key)
        .Where(key => !_options.StripTimestamps || key != "created")
        .OrderBy(key => key, StringComparer.Ordinal);

    var result = new JsonObject();
    foreach (var key in orderedKeys)
    {
        result[key] = NormalizeNode(creationInfo[key]?.DeepClone());
    }

    return result;
}
|
||||
|
||||
private static string GetComponentSortKey(JsonObject obj)
{
    // Prefer bom-ref, then purl, then a name@version composite.
    if (obj.TryGetPropertyValue("bom-ref", out var bomRef) && bomRef is JsonValue refValue)
    {
        return refValue.GetValue<string>() ?? "";
    }

    if (obj.TryGetPropertyValue("purl", out var purl) && purl is JsonValue purlValue)
    {
        return purlValue.GetValue<string>() ?? "";
    }

    return $"{obj["name"]?.GetValue<string>() ?? ""}@{obj["version"]?.GetValue<string>() ?? ""}";
}
|
||||
|
||||
/// <summary>
/// Builds a deterministic sort key for a generic JSON object by probing a
/// fixed priority list of identifier-like properties; falls back to the
/// object's full JSON text when none yields a string.
/// </summary>
private static string GetSortKey(JsonObject obj)
{
    // Identifier properties in priority order.
    var keyPriority = new[] { "id", "@id", "bom-ref", "SPDXID", "name", "digest", "uri", "ref" };

    foreach (var key in keyPriority)
    {
        // TryGetValue (rather than GetValue) so a non-string identifier
        // (e.g. a numeric "id") falls through to the next candidate
        // instead of throwing InvalidOperationException.
        if (obj.TryGetPropertyValue(key, out var value) &&
            value is JsonValue jv &&
            jv.TryGetValue<string>(out var text))
        {
            return text;
        }
    }

    // No usable identifier: the serialized object itself is a stable key.
    return obj.ToJsonString();
}
|
||||
|
||||
private static bool ShouldStripCycloneDxField(string key)
{
    // Only the "$schema" field is dropped for canonical form.
    return string.Equals(key, "$schema", StringComparison.Ordinal);
}
|
||||
|
||||
// No SPDX fields are stripped for canonical form today; the hook is kept
// for symmetry with the CycloneDX field-stripping path.
private static bool ShouldStripSpdxField(string key) => false;
|
||||
|
||||
// Timestamp-like field names removed when StripTimestamps is enabled.
// Hoisted to a static field so the array is not re-allocated on every call.
private static readonly string[] TimestampFieldNames =
{
    "timestamp", "created", "modified", "updated", "createdAt", "updatedAt",
    "modifiedAt", "date", "time", "datetime", "lastModified", "generated"
};

/// <summary>
/// Determines whether <paramref name="key"/> names a timestamp field that
/// should be stripped for canonical output. Always false when the
/// StripTimestamps option is disabled; matching is case-insensitive.
/// </summary>
private bool ShouldStripTimestampField(string key)
{
    return _options.StripTimestamps &&
           TimestampFieldNames.Any(f => key.Equals(f, StringComparison.OrdinalIgnoreCase));
}
|
||||
|
||||
/// <summary>
/// Heuristically determines whether a string value is a URI-like identifier
/// (http/https URL, URN, or package URL), checked case-insensitively.
/// </summary>
private static bool IsUri(string value)
{
    string[] prefixes = { "http://", "https://", "urn:", "pkg:" };
    foreach (var prefix in prefixes)
    {
        if (value.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }

    return false;
}
|
||||
|
||||
// Cached serializer options: compact output, original property names, and
// null-valued properties omitted. Hoisted to a static field so a new
// JsonSerializerOptions instance is not allocated on every call (CA1869).
private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
{
    WriteIndented = false,
    PropertyNamingPolicy = null,
    DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};

/// <summary>
/// Serializes a node to its canonical (compact) JSON string form.
/// </summary>
private static string SerializeCanonical(JsonNode node)
{
    return node.ToJsonString(CanonicalJsonOptions);
}
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
// =============================================================================
|
||||
// SbomParserFactory.cs
|
||||
// Factory for creating and selecting SBOM parsers
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
/// Factory for creating SBOM parsers and detecting SBOM formats.
/// </summary>
public sealed class SbomParserFactory : ISbomParser
{
    private readonly CycloneDxParser _cycloneDxParser = new();
    private readonly SpdxParser _spdxParser = new();

    /// <summary>
    /// Detects the SBOM format from file extension or content.
    /// CycloneDX detection is attempted first, then SPDX.
    /// </summary>
    public SbomFormat DetectFormat(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        var cycloneDx = _cycloneDxParser.DetectFormat(filePath);
        return cycloneDx != SbomFormat.Unknown
            ? cycloneDx
            : _spdxParser.DetectFormat(filePath);
    }

    /// <summary>
    /// Parses an SBOM file using auto-detected format.
    /// Returns a failure result when the format cannot be determined.
    /// </summary>
    public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        var format = DetectFormat(filePath);
        if (format == SbomFormat.Unknown)
        {
            return SbomParseResult.Failure($"Unknown SBOM format for file: {filePath}", SbomFormat.Unknown);
        }

        return await GetParser(format).ParseAsync(filePath, cancellationToken);
    }

    /// <summary>
    /// Parses an SBOM from a stream using the specified format.
    /// Returns a failure result for unsupported formats.
    /// </summary>
    public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);

        if (format is not (SbomFormat.CycloneDx or SbomFormat.Spdx))
        {
            return SbomParseResult.Failure($"Unknown SBOM format: {format}", format);
        }

        return await GetParser(format).ParseAsync(stream, format, cancellationToken);
    }

    /// <summary>
    /// Gets a parser for the specified format.
    /// </summary>
    /// <exception cref="ArgumentException">No parser exists for <paramref name="format"/>.</exception>
    public ISbomParser GetParser(SbomFormat format) => format switch
    {
        SbomFormat.CycloneDx => _cycloneDxParser,
        SbomFormat.Spdx => _spdxParser,
        _ => throw new ArgumentException($"No parser available for format: {format}", nameof(format))
    };
}
|
||||
@@ -0,0 +1,305 @@
|
||||
// =============================================================================
|
||||
// SpdxParser.cs
|
||||
// SPDX SBOM parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
/// Parser for SPDX SBOM format (JSON).
/// Supports SPDX 2.2 and 2.3 schemas.
/// </summary>
public sealed class SpdxParser : ISbomParser
{
    // Digest algorithm preference when selecting a package's primary digest,
    // strongest first.
    private static readonly string[] PreferredHashAlgorithms = { "SHA256", "SHA512", "SHA1" };

    /// <summary>
    /// Detects whether <paramref name="filePath"/> points at an SPDX document,
    /// first by extension (.spdx.json) and then by sniffing the first 1 KiB of
    /// content for SPDX-specific property names.
    /// </summary>
    public SbomFormat DetectFormat(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        // SPDX files conventionally end with .spdx.json.
        if (filePath.EndsWith(".spdx.json", StringComparison.OrdinalIgnoreCase))
        {
            return SbomFormat.Spdx;
        }

        // Fall back to content sniffing.
        if (File.Exists(filePath))
        {
            try
            {
                using var stream = File.OpenRead(filePath);
                using var reader = new StreamReader(stream);
                var buffer = new char[1024];
                var read = reader.Read(buffer, 0, buffer.Length);
                var head = new string(buffer, 0, read);

                if (head.Contains("\"spdxVersion\"", StringComparison.OrdinalIgnoreCase) ||
                    head.Contains("\"SPDXID\"", StringComparison.OrdinalIgnoreCase))
                {
                    return SbomFormat.Spdx;
                }
            }
            catch
            {
                // Detection is best-effort; unreadable files are simply "unknown".
            }
        }

        return SbomFormat.Unknown;
    }

    /// <summary>
    /// Parses an SPDX JSON file from disk.
    /// Returns a failure result (rather than throwing) for missing or
    /// unreadable files.
    /// </summary>
    public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        if (!File.Exists(filePath))
        {
            return SbomParseResult.Failure($"File not found: {filePath}", SbomFormat.Spdx);
        }

        try
        {
            await using var stream = File.OpenRead(filePath);
            return await ParseAsync(stream, SbomFormat.Spdx, cancellationToken);
        }
        catch (Exception ex)
        {
            return SbomParseResult.Failure($"Failed to parse SPDX file: {ex.Message}", SbomFormat.Spdx);
        }
    }

    /// <summary>
    /// Parses an SPDX JSON document from a stream. The <paramref name="format"/>
    /// argument is accepted for interface symmetry; parsing is always SPDX.
    /// </summary>
    public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);

        try
        {
            using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
            var root = document.RootElement;

            // Validate spdxVersion (e.g. "SPDX-2.3").
            if (!root.TryGetProperty("spdxVersion", out var versionProp))
            {
                return SbomParseResult.Failure("Not a valid SPDX document: missing spdxVersion", SbomFormat.Spdx);
            }

            var specVersion = versionProp.GetString();
            if (string.IsNullOrEmpty(specVersion) ||
                !specVersion.StartsWith("SPDX-", StringComparison.OrdinalIgnoreCase))
            {
                return SbomParseResult.Failure("Not a valid SPDX document: invalid spdxVersion", SbomFormat.Spdx);
            }

            // Strip the prefix: "SPDX-2.3" -> "2.3".
            specVersion = specVersion[5..];

            // The document namespace serves as the serial number.
            string? serialNumber = null;
            if (root.TryGetProperty("documentNamespace", out var namespaceProp))
            {
                serialNumber = namespaceProp.GetString();
            }

            // creationInfo carries both the timestamp and the creator tools;
            // look it up once and extract both.
            DateTimeOffset? createdAt = null;
            string? generatorTool = null;
            if (root.TryGetProperty("creationInfo", out var creationInfo))
            {
                // SPDX "created" is an ISO 8601 timestamp; parse with the
                // invariant culture so results do not depend on machine locale.
                if (creationInfo.TryGetProperty("created", out var createdProp) &&
                    DateTimeOffset.TryParse(
                        createdProp.GetString(),
                        System.Globalization.CultureInfo.InvariantCulture,
                        System.Globalization.DateTimeStyles.None,
                        out var parsed))
                {
                    createdAt = parsed;
                }

                // Creators of the form "Tool: <name>" identify generator tools.
                if (creationInfo.TryGetProperty("creators", out var creatorsProp) &&
                    creatorsProp.ValueKind == JsonValueKind.Array)
                {
                    var tools = new List<string>();
                    foreach (var creator in creatorsProp.EnumerateArray())
                    {
                        // Skip malformed non-string entries instead of throwing.
                        if (creator.ValueKind != JsonValueKind.String)
                        {
                            continue;
                        }

                        var creatorStr = creator.GetString();
                        if (creatorStr?.StartsWith("Tool:", StringComparison.OrdinalIgnoreCase) == true)
                        {
                            tools.Add(creatorStr[5..].Trim());
                        }
                    }

                    generatorTool = tools.Count > 0 ? string.Join(", ", tools) : null;
                }
            }

            // documentDescribes lists the SPDXIDs of primary packages.
            SbomSubject? primarySubject = null;
            var describedIds = new HashSet<string>(StringComparer.Ordinal);

            if (root.TryGetProperty("documentDescribes", out var describesProp) &&
                describesProp.ValueKind == JsonValueKind.Array)
            {
                foreach (var id in describesProp.EnumerateArray())
                {
                    // Skip malformed non-string entries instead of throwing.
                    if (id.ValueKind != JsonValueKind.String)
                    {
                        continue;
                    }

                    var spdxId = id.GetString();
                    if (!string.IsNullOrEmpty(spdxId))
                    {
                        describedIds.Add(spdxId);
                    }
                }
            }

            // Extract all packages.
            var subjects = new List<SbomSubject>();
            int totalComponentCount = 0;

            if (root.TryGetProperty("packages", out var packagesProp) &&
                packagesProp.ValueKind == JsonValueKind.Array)
            {
                foreach (var package in packagesProp.EnumerateArray())
                {
                    totalComponentCount++;
                    var subject = ParsePackage(package);
                    if (subject is not null)
                    {
                        subjects.Add(subject);

                        // First described package encountered wins as primary.
                        if (subject.SpdxId is not null && describedIds.Contains(subject.SpdxId))
                        {
                            primarySubject ??= subject;
                        }
                    }
                }
            }

            // Sort subjects for deterministic ordering: digest, then name.
            subjects = subjects
                .OrderBy(s => s.Digest, StringComparer.Ordinal)
                .ThenBy(s => s.Name ?? string.Empty, StringComparer.Ordinal)
                .ToList();

            return SbomParseResult.Success(
                format: SbomFormat.Spdx,
                subjects: subjects,
                specVersion: specVersion,
                serialNumber: serialNumber,
                createdAt: createdAt,
                generatorTool: generatorTool,
                primarySubject: primarySubject,
                totalComponentCount: totalComponentCount);
        }
        catch (JsonException ex)
        {
            return SbomParseResult.Failure($"JSON parsing error: {ex.Message}", SbomFormat.Spdx);
        }
    }

    /// <summary>
    /// Converts one SPDX package element into an <see cref="SbomSubject"/>.
    /// Returns null when the package carries no checksum, since such a
    /// package cannot be indexed by digest.
    /// </summary>
    private static SbomSubject? ParsePackage(JsonElement package)
    {
        // Collect checksums keyed by algorithm name (case-insensitive).
        var hashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        if (package.TryGetProperty("checksums", out var checksumsProp) &&
            checksumsProp.ValueKind == JsonValueKind.Array)
        {
            foreach (var checksum in checksumsProp.EnumerateArray())
            {
                if (checksum.TryGetProperty("algorithm", out var algProp) &&
                    checksum.TryGetProperty("checksumValue", out var valueProp))
                {
                    var alg = algProp.GetString();
                    var value = valueProp.GetString();
                    if (!string.IsNullOrEmpty(alg) && !string.IsNullOrEmpty(value))
                    {
                        hashes[alg] = value;
                    }
                }
            }
        }

        // Primary digest: prefer stronger algorithms; otherwise fall back to
        // the ordinally-smallest algorithm name so the choice is deterministic
        // (dictionary enumeration order is unspecified).
        string? digest = null;
        foreach (var alg in PreferredHashAlgorithms)
        {
            if (hashes.TryGetValue(alg, out var preferred))
            {
                digest = NormalizeDigest($"{alg.ToLowerInvariant()}:{preferred}");
                break;
            }
        }

        if (digest is null && hashes.Count > 0)
        {
            var fallback = hashes.OrderBy(kv => kv.Key, StringComparer.OrdinalIgnoreCase).First();
            digest = NormalizeDigest($"{fallback.Key.ToLowerInvariant()}:{fallback.Value}");
        }

        // Without a digest the package cannot be indexed.
        if (string.IsNullOrEmpty(digest))
        {
            return null;
        }

        string? spdxId = null;
        if (package.TryGetProperty("SPDXID", out var spdxIdProp))
        {
            spdxId = spdxIdProp.GetString();
        }

        string? name = null;
        if (package.TryGetProperty("name", out var nameProp))
        {
            name = nameProp.GetString();
        }

        string? version = null;
        if (package.TryGetProperty("versionInfo", out var versionProp))
        {
            version = versionProp.GetString();
        }

        // SPDX expresses purls as external references of type "purl".
        string? purl = null;
        if (package.TryGetProperty("externalRefs", out var refsProp) &&
            refsProp.ValueKind == JsonValueKind.Array)
        {
            foreach (var extRef in refsProp.EnumerateArray())
            {
                if (extRef.TryGetProperty("referenceType", out var refTypeProp) &&
                    refTypeProp.GetString()?.Equals("purl", StringComparison.OrdinalIgnoreCase) == true &&
                    extRef.TryGetProperty("referenceLocator", out var locatorProp))
                {
                    purl = locatorProp.GetString();
                    break;
                }
            }
        }

        // SPDX has no direct component type; primaryPackagePurpose is closest.
        string? type = null;
        if (package.TryGetProperty("primaryPackagePurpose", out var purposeProp))
        {
            type = purposeProp.GetString();
        }

        return new SbomSubject
        {
            Digest = digest,
            Name = name,
            Version = version,
            Purl = purl,
            Type = type,
            SpdxId = spdxId,
            Hashes = hashes
        };
    }

    /// <summary>
    /// Normalizes a digest string via the shared <see cref="ArtifactIndex"/> rules.
    /// </summary>
    private static string NormalizeDigest(string digest)
    {
        return ArtifactIndex.NormalizeDigest(digest);
    }
}
|
||||
Reference in New Issue
Block a user