sprints enhancements

src/Cli/StellaOps.Cli/Commands/ProvCommandGroup.cs | 511 (new file)
@@ -0,0 +1,511 @@
// -----------------------------------------------------------------------------
// ProvCommandGroup.cs
// Sprint: SPRINT_8200_0001_0002 (Provcache Invalidation & Air-Gap)
// Tasks: PROV-8200-135 to PROV-8200-143 - CLI commands for provcache operations.
// Description: CLI commands for minimal proof export, import, and verification.
// -----------------------------------------------------------------------------

using System.CommandLine;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Extensions;
using StellaOps.Provcache;

namespace StellaOps.Cli.Commands;

/// <summary>
/// Command group for Provcache operations.
/// Implements minimal proof export/import for air-gap scenarios.
/// </summary>
public static class ProvCommandGroup
{
    /// <summary>
    /// Build the prov command tree.
    /// </summary>
    public static Command BuildProvCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var provCommand = new Command("prov", "Provenance cache operations for air-gap scenarios");

        provCommand.Add(BuildExportCommand(services, verboseOption, cancellationToken));
        provCommand.Add(BuildImportCommand(services, verboseOption, cancellationToken));
        provCommand.Add(BuildVerifyCommand(services, verboseOption, cancellationToken));

        return provCommand;
    }

    private static Command BuildExportCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var verikeyOption = new Option<string>("--verikey", "-k")
        {
            Description = "The VeriKey (sha256:...) identifying the cache entry to export",
            Required = true
        };

        var densityOption = new Option<string>("--density", "-d")
        {
            Description = "Evidence density level: lite (digest only), standard (+ first N chunks), strict (all chunks)"
        };
        densityOption.SetDefaultValue("standard");
        densityOption.FromAmong("lite", "standard", "strict");

        var chunksOption = new Option<int>("--chunks", "-c")
        {
            Description = "Number of chunks to include for standard density (default: 3)"
        };
        chunksOption.SetDefaultValue(3);

        var outputOption = new Option<string>("--output", "-o")
        {
            Description = "Output file path for the bundle",
            Required = true
        };

        var signOption = new Option<bool>("--sign", "-s")
        {
            Description = "Sign the exported bundle"
        };

        var signerOption = new Option<string?>("--signer")
        {
            Description = "Signer key ID to use (if --sign is specified)"
        };

        var command = new Command("export", "Export a minimal proof bundle for air-gapped transfer")
        {
            verikeyOption,
            densityOption,
            chunksOption,
            outputOption,
            signOption,
            signerOption,
            verboseOption
        };

        command.SetAction(async (parseResult, ct) =>
        {
            var verikey = parseResult.GetValue(verikeyOption) ?? string.Empty;
            var densityStr = parseResult.GetValue(densityOption) ?? "standard";
            var chunks = parseResult.GetValue(chunksOption);
            var output = parseResult.GetValue(outputOption) ?? string.Empty;
            var sign = parseResult.GetValue(signOption);
            var signer = parseResult.GetValue(signerOption);
            var verbose = parseResult.GetValue(verboseOption);

            return await HandleExportAsync(
                services,
                verikey,
                densityStr,
                chunks,
                output,
                sign,
                signer,
                verbose,
                ct);
        });

        return command;
    }

    private static Command BuildImportCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var inputArg = new Argument<string>("input")
        {
            Description = "Path to the proof bundle file"
        };

        var lazyFetchOption = new Option<bool>("--lazy-fetch")
        {
            Description = "Enable lazy chunk fetching for missing chunks"
        };

        var backendOption = new Option<string?>("--backend")
        {
            Description = "Backend URL for lazy fetch (e.g., https://stellaops.example.com)"
        };

        var chunksDirOption = new Option<string?>("--chunks-dir")
        {
            Description = "Local directory containing chunk files for offline import"
        };

        var outputOption = new Option<string>("--output", "-o")
        {
            Description = "Output format: text, json"
        };
        outputOption.SetDefaultValue("text");
        outputOption.FromAmong("text", "json");

        var command = new Command("import", "Import a minimal proof bundle")
        {
            inputArg,
            lazyFetchOption,
            backendOption,
            chunksDirOption,
            outputOption,
            verboseOption
        };

        command.SetAction(async (parseResult, ct) =>
        {
            var input = parseResult.GetValue(inputArg) ?? string.Empty;
            var lazyFetch = parseResult.GetValue(lazyFetchOption);
            var backend = parseResult.GetValue(backendOption);
            var chunksDir = parseResult.GetValue(chunksDirOption);
            var output = parseResult.GetValue(outputOption) ?? "text";
            var verbose = parseResult.GetValue(verboseOption);

            return await HandleImportAsync(
                services,
                input,
                lazyFetch,
                backend,
                chunksDir,
                output,
                verbose,
                ct);
        });

        return command;
    }

    private static Command BuildVerifyCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var inputArg = new Argument<string>("input")
        {
            Description = "Path to the proof bundle file to verify"
        };

        var signerCertOption = new Option<string?>("--signer-cert")
        {
            Description = "Path to signer certificate for signature verification"
        };

        var outputOption = new Option<string>("--output", "-o")
        {
            Description = "Output format: text, json"
        };
        outputOption.SetDefaultValue("text");
        outputOption.FromAmong("text", "json");

        var command = new Command("verify", "Verify a proof bundle without importing")
        {
            inputArg,
            signerCertOption,
            outputOption,
            verboseOption
        };

        command.SetAction(async (parseResult, ct) =>
        {
            var input = parseResult.GetValue(inputArg) ?? string.Empty;
            var signerCert = parseResult.GetValue(signerCertOption);
            var output = parseResult.GetValue(outputOption) ?? "text";
            var verbose = parseResult.GetValue(verboseOption);

            return await HandleVerifyAsync(
                services,
                input,
                signerCert,
                output,
                verbose,
                ct);
        });

        return command;
    }

    #region Handlers

    private static async Task<int> HandleExportAsync(
        IServiceProvider services,
        string verikey,
        string densityStr,
        int chunks,
        string output,
        bool sign,
        string? signer,
        bool verbose,
        CancellationToken cancellationToken)
    {
        var loggerFactory = services.GetService<ILoggerFactory>();
        var logger = loggerFactory?.CreateLogger("ProvCommands");

        if (verbose)
        {
            logger?.LogInformation("Exporting proof bundle for {VeriKey} with density {Density}",
                verikey, densityStr);
        }

        var density = densityStr.ToLowerInvariant() switch
        {
            "lite" => ProofDensity.Lite,
            "standard" => ProofDensity.Standard,
            "strict" => ProofDensity.Strict,
            _ => ProofDensity.Standard
        };

        try
        {
            var exporter = services.GetService<IMinimalProofExporter>();
            if (exporter is null)
            {
                Console.Error.WriteLine("Error: Provcache services not configured.");
                return 1;
            }

            var options = new MinimalProofExportOptions
            {
                Density = density,
                StandardDensityChunkCount = chunks,
                Sign = sign,
                SigningKeyId = signer,
                ExportedBy = Environment.MachineName
            };

            Console.WriteLine($"Exporting proof bundle: {verikey}");
            Console.WriteLine($"  Density: {density}");
            Console.WriteLine($"  Output: {output}");

            // Dispose the stream before reading the file size so Length is accurate.
            await using (var fileStream = File.Create(output))
            {
                await exporter.ExportToStreamAsync(verikey, options, fileStream, cancellationToken);
            }

            var fileInfo = new FileInfo(output);
            Console.WriteLine($"  Size: {fileInfo.Length:N0} bytes");
            Console.WriteLine("Export complete.");

            return 0;
        }
        catch (InvalidOperationException ex)
        {
            Console.Error.WriteLine($"Error: {ex.Message}");
            return 1;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Export failed: {ex.Message}");
            if (verbose)
            {
                Console.Error.WriteLine(ex.ToString());
            }
            return 1;
        }
    }

    private static async Task<int> HandleImportAsync(
        IServiceProvider services,
        string input,
        bool lazyFetch,
        string? backend,
        string? chunksDir,
        string output,
        bool verbose,
        CancellationToken cancellationToken)
    {
        var loggerFactory = services.GetService<ILoggerFactory>();
        var logger = loggerFactory?.CreateLogger("ProvCommands");

        if (!File.Exists(input))
        {
            Console.Error.WriteLine($"Error: File not found: {input}");
            return 1;
        }

        if (verbose)
        {
            logger?.LogInformation("Importing proof bundle from {Input}", input);
        }

        try
        {
            var exporter = services.GetService<IMinimalProofExporter>();
            if (exporter is null)
            {
                Console.Error.WriteLine("Error: Provcache services not configured.");
                return 1;
            }

            Console.WriteLine($"Importing proof bundle: {input}");

            using var fileStream = File.OpenRead(input);
            var result = await exporter.ImportFromStreamAsync(fileStream, cancellationToken);

            if (output == "json")
            {
                var json = System.Text.Json.JsonSerializer.Serialize(result, new System.Text.Json.JsonSerializerOptions
                {
                    WriteIndented = true,
                    PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase
                });
                Console.WriteLine(json);
            }
            else
            {
                Console.WriteLine($"  Success: {result.Success}");
                Console.WriteLine($"  Chunks imported: {result.ChunksImported}");
                Console.WriteLine($"  Chunks pending: {result.ChunksPending}");
                Console.WriteLine($"  Merkle valid: {result.Verification.MerkleRootValid}");
                Console.WriteLine($"  Digest valid: {result.Verification.DigestValid}");
                Console.WriteLine($"  Chunks valid: {result.Verification.ChunksValid}");

                if (result.Verification.SignatureValid.HasValue)
                {
                    Console.WriteLine($"  Signature valid: {result.Verification.SignatureValid.Value}");
                }

                if (result.Warnings.Count > 0)
                {
                    Console.WriteLine("  Warnings:");
                    foreach (var warning in result.Warnings)
                    {
                        Console.WriteLine($"    - {warning}");
                    }
                }

                if (result.ChunksPending > 0 && lazyFetch)
                {
                    Console.WriteLine($"\n  Lazy fetch enabled: {result.ChunksPending} chunks can be fetched on demand.");
                    if (!string.IsNullOrEmpty(backend))
                    {
                        Console.WriteLine($"  Backend: {backend}");
                    }
                    if (!string.IsNullOrEmpty(chunksDir))
                    {
                        Console.WriteLine($"  Chunks dir: {chunksDir}");
                    }
                }
            }

            return result.Success ? 0 : 1;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Import failed: {ex.Message}");
            if (verbose)
            {
                Console.Error.WriteLine(ex.ToString());
            }
            return 1;
        }
    }

    private static async Task<int> HandleVerifyAsync(
        IServiceProvider services,
        string input,
        string? signerCert,
        string output,
        bool verbose,
        CancellationToken cancellationToken)
    {
        var loggerFactory = services.GetService<ILoggerFactory>();
        var logger = loggerFactory?.CreateLogger("ProvCommands");

        if (!File.Exists(input))
        {
            Console.Error.WriteLine($"Error: File not found: {input}");
            return 1;
        }

        if (verbose)
        {
            logger?.LogInformation("Verifying proof bundle: {Input}", input);
        }

        try
        {
            var exporter = services.GetService<IMinimalProofExporter>();
            if (exporter is null)
            {
                Console.Error.WriteLine("Error: Provcache services not configured.");
                return 1;
            }

            Console.WriteLine($"Verifying proof bundle: {input}");

            var jsonBytes = await File.ReadAllBytesAsync(input, cancellationToken);
            var bundle = System.Text.Json.JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes,
                new System.Text.Json.JsonSerializerOptions
                {
                    PropertyNameCaseInsensitive = true,
                    PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase
                });

            if (bundle is null)
            {
                Console.Error.WriteLine("Error: Failed to parse bundle file.");
                return 1;
            }

            var verification = await exporter.VerifyAsync(bundle, cancellationToken);

            if (output == "json")
            {
                var json = System.Text.Json.JsonSerializer.Serialize(verification, new System.Text.Json.JsonSerializerOptions
                {
                    WriteIndented = true,
                    PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase
                });
                Console.WriteLine(json);
            }
            else
            {
                Console.WriteLine($"  Digest valid: {verification.DigestValid}");
                Console.WriteLine($"  Merkle root valid: {verification.MerkleRootValid}");
                Console.WriteLine($"  Chunks valid: {verification.ChunksValid}");

                if (verification.SignatureValid.HasValue)
                {
                    Console.WriteLine($"  Signature valid: {verification.SignatureValid.Value}");
                }

                if (verification.FailedChunkIndices.Count > 0)
                {
                    Console.WriteLine($"  Failed chunks: {string.Join(", ", verification.FailedChunkIndices)}");
                }

                var overall = verification.DigestValid &&
                              verification.MerkleRootValid &&
                              verification.ChunksValid &&
                              (verification.SignatureValid ?? true);

                Console.WriteLine();
                if (overall)
                {
                    Console.WriteLine("Verification PASSED");
                }
                else
                {
                    Console.WriteLine("Verification FAILED");
                }
            }

            // Exit code mirrors the overall result, including signature validity when present.
            var success = verification.DigestValid &&
                          verification.MerkleRootValid &&
                          verification.ChunksValid &&
                          (verification.SignatureValid ?? true);

            return success ? 0 : 1;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Verification failed: {ex.Message}");
            if (verbose)
            {
                Console.Error.WriteLine(ex.ToString());
            }
            return 1;
        }
    }

    #endregion
}
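The commit does not show where `BuildProvCommand` is attached to the CLI root. A minimal wiring sketch under assumptions (the root command shape, the global `--verbose` option, and the `serviceProvider` variable are all hypothetical, not part of this diff):

```csharp
// Hypothetical Program.cs fragment: attach the prov command group to the root.
// Assumes System.CommandLine with the SetAction/GetValue API used above.
using System.CommandLine;

var verboseOption = new Option<bool>("--verbose", "-v")
{
    Description = "Enable verbose output"
};

var rootCommand = new RootCommand("StellaOps CLI");
rootCommand.Options.Add(verboseOption);

// serviceProvider is assumed to be built elsewhere with Provcache services registered.
rootCommand.Add(ProvCommandGroup.BuildProvCommand(serviceProvider, verboseOption, CancellationToken.None));

return await rootCommand.Parse(args).InvokeAsync();
```

With that wiring, `stella prov export --verikey sha256:... --output proof.json --sign` would reach `HandleExportAsync` above.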
@@ -82,6 +82,7 @@
    <ProjectReference Include="../../ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/StellaOps.ExportCenter.Client.csproj" />
    <ProjectReference Include="../../ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.AuditPack/StellaOps.AuditPack.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Provcache/StellaOps.Provcache.csproj" />
  </ItemGroup>

  <!-- GOST Crypto Plugins (Russia distribution) -->
@@ -0,0 +1,400 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisoryEndpointExtensions.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Tasks: CANSVC-8200-016 through CANSVC-8200-019
// Description: API endpoints for canonical advisory service
// -----------------------------------------------------------------------------

using Microsoft.AspNetCore.Mvc;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.WebService.Results;
using HttpResults = Microsoft.AspNetCore.Http.Results;

namespace StellaOps.Concelier.WebService.Extensions;

/// <summary>
/// Endpoint extensions for canonical advisory operations.
/// </summary>
internal static class CanonicalAdvisoryEndpointExtensions
{
    private const string CanonicalReadPolicy = "Concelier.Canonical.Read";
    private const string CanonicalIngestPolicy = "Concelier.Canonical.Ingest";

    public static void MapCanonicalAdvisoryEndpoints(this WebApplication app)
    {
        var group = app.MapGroup("/api/v1/canonical")
            .WithTags("Canonical Advisories");

        // GET /api/v1/canonical/{id} - Get canonical advisory by ID
        group.MapGet("/{id:guid}", async (
            Guid id,
            ICanonicalAdvisoryService service,
            HttpContext context,
            CancellationToken ct) =>
        {
            var canonical = await service.GetByIdAsync(id, ct).ConfigureAwait(false);

            return canonical is null
                ? HttpResults.NotFound(new { error = "Canonical advisory not found", id })
                : HttpResults.Ok(MapToResponse(canonical));
        })
        .WithName("GetCanonicalById")
        .WithSummary("Get canonical advisory by ID")
        .Produces<CanonicalAdvisoryResponse>(StatusCodes.Status200OK)
        .Produces(StatusCodes.Status404NotFound);

        // GET /api/v1/canonical?cve={cve}&artifact={artifact} - Query canonical advisories
        group.MapGet("/", async (
            [FromQuery] string? cve,
            [FromQuery] string? artifact,
            [FromQuery] string? mergeHash,
            [FromQuery] int? offset,
            [FromQuery] int? limit,
            ICanonicalAdvisoryService service,
            HttpContext context,
            CancellationToken ct) =>
        {
            // Query by merge hash takes precedence
            if (!string.IsNullOrEmpty(mergeHash))
            {
                var byHash = await service.GetByMergeHashAsync(mergeHash, ct).ConfigureAwait(false);
                return byHash is null
                    ? HttpResults.Ok(new CanonicalAdvisoryListResponse { Items = [], TotalCount = 0 })
                    : HttpResults.Ok(new CanonicalAdvisoryListResponse
                    {
                        Items = [MapToResponse(byHash)],
                        TotalCount = 1
                    });
            }

            // Query by CVE
            if (!string.IsNullOrEmpty(cve))
            {
                var byCve = await service.GetByCveAsync(cve, ct).ConfigureAwait(false);
                return HttpResults.Ok(new CanonicalAdvisoryListResponse
                {
                    Items = byCve.Select(MapToResponse).ToList(),
                    TotalCount = byCve.Count
                });
            }

            // Query by artifact
            if (!string.IsNullOrEmpty(artifact))
            {
                var byArtifact = await service.GetByArtifactAsync(artifact, ct).ConfigureAwait(false);
                return HttpResults.Ok(new CanonicalAdvisoryListResponse
                {
                    Items = byArtifact.Select(MapToResponse).ToList(),
                    TotalCount = byArtifact.Count
                });
            }

            // Generic query with pagination
            var options = new CanonicalQueryOptions
            {
                Offset = offset ?? 0,
                Limit = limit ?? 50
            };

            var result = await service.QueryAsync(options, ct).ConfigureAwait(false);
            return HttpResults.Ok(new CanonicalAdvisoryListResponse
            {
                Items = result.Items.Select(MapToResponse).ToList(),
                TotalCount = result.TotalCount,
                Offset = result.Offset,
                Limit = result.Limit
            });
        })
        .WithName("QueryCanonical")
        .WithSummary("Query canonical advisories by CVE, artifact, or merge hash")
        .Produces<CanonicalAdvisoryListResponse>(StatusCodes.Status200OK);

        // POST /api/v1/canonical/ingest/{source} - Ingest raw advisory
        group.MapPost("/ingest/{source}", async (
            string source,
            [FromBody] RawAdvisoryRequest request,
            ICanonicalAdvisoryService service,
            HttpContext context,
            CancellationToken ct) =>
        {
            if (string.IsNullOrWhiteSpace(source))
            {
                return HttpResults.BadRequest(new { error = "Source is required" });
            }

            if (string.IsNullOrWhiteSpace(request.Cve))
            {
                return HttpResults.BadRequest(new { error = "CVE is required" });
            }

            if (string.IsNullOrWhiteSpace(request.AffectsKey))
            {
                return HttpResults.BadRequest(new { error = "AffectsKey is required" });
            }

            var rawAdvisory = new RawAdvisory
            {
                SourceAdvisoryId = request.SourceAdvisoryId ?? $"{source.ToUpperInvariant()}-{request.Cve}",
                Cve = request.Cve,
                AffectsKey = request.AffectsKey,
                VersionRangeJson = request.VersionRangeJson,
                Weaknesses = request.Weaknesses ?? [],
                PatchLineage = request.PatchLineage,
                Severity = request.Severity,
                Title = request.Title,
                Summary = request.Summary,
                VendorStatus = request.VendorStatus,
                RawPayloadJson = request.RawPayloadJson,
                FetchedAt = request.FetchedAt ?? DateTimeOffset.UtcNow
            };

            var result = await service.IngestAsync(source, rawAdvisory, ct).ConfigureAwait(false);

            var response = new IngestResultResponse
            {
                CanonicalId = result.CanonicalId,
                MergeHash = result.MergeHash,
                Decision = result.Decision.ToString(),
                SourceEdgeId = result.SourceEdgeId,
                SignatureRef = result.SignatureRef,
                ConflictReason = result.ConflictReason
            };

            return result.Decision == MergeDecision.Conflict
                ? HttpResults.Conflict(response)
                : HttpResults.Ok(response);
        })
        .WithName("IngestAdvisory")
        .WithSummary("Ingest raw advisory from source into canonical pipeline")
        .Produces<IngestResultResponse>(StatusCodes.Status200OK)
        .Produces<IngestResultResponse>(StatusCodes.Status409Conflict)
        .Produces(StatusCodes.Status400BadRequest);

        // POST /api/v1/canonical/ingest/{source}/batch - Batch ingest advisories
        group.MapPost("/ingest/{source}/batch", async (
            string source,
            [FromBody] IEnumerable<RawAdvisoryRequest> requests,
            ICanonicalAdvisoryService service,
            HttpContext context,
            CancellationToken ct) =>
        {
            if (string.IsNullOrWhiteSpace(source))
            {
                return HttpResults.BadRequest(new { error = "Source is required" });
            }

            var rawAdvisories = requests.Select(request => new RawAdvisory
            {
                SourceAdvisoryId = request.SourceAdvisoryId ?? $"{source.ToUpperInvariant()}-{request.Cve}",
                Cve = request.Cve ?? throw new InvalidOperationException("CVE is required"),
                AffectsKey = request.AffectsKey ?? throw new InvalidOperationException("AffectsKey is required"),
                VersionRangeJson = request.VersionRangeJson,
                Weaknesses = request.Weaknesses ?? [],
                PatchLineage = request.PatchLineage,
                Severity = request.Severity,
                Title = request.Title,
                Summary = request.Summary,
                VendorStatus = request.VendorStatus,
                RawPayloadJson = request.RawPayloadJson,
                FetchedAt = request.FetchedAt ?? DateTimeOffset.UtcNow
            }).ToList();

            var results = await service.IngestBatchAsync(source, rawAdvisories, ct).ConfigureAwait(false);

            var response = new BatchIngestResultResponse
            {
                Results = results.Select(r => new IngestResultResponse
                {
                    CanonicalId = r.CanonicalId,
                    MergeHash = r.MergeHash,
                    Decision = r.Decision.ToString(),
                    SourceEdgeId = r.SourceEdgeId,
                    SignatureRef = r.SignatureRef,
                    ConflictReason = r.ConflictReason
                }).ToList(),
                Summary = new BatchIngestSummary
                {
                    Total = results.Count,
                    Created = results.Count(r => r.Decision == MergeDecision.Created),
                    Merged = results.Count(r => r.Decision == MergeDecision.Merged),
                    Duplicates = results.Count(r => r.Decision == MergeDecision.Duplicate),
                    Conflicts = results.Count(r => r.Decision == MergeDecision.Conflict)
                }
            };

            return HttpResults.Ok(response);
        })
        .WithName("IngestAdvisoryBatch")
        .WithSummary("Batch ingest multiple advisories from source")
        .Produces<BatchIngestResultResponse>(StatusCodes.Status200OK)
        .Produces(StatusCodes.Status400BadRequest);

        // PATCH /api/v1/canonical/{id}/status - Update canonical status
        group.MapPatch("/{id:guid}/status", async (
            Guid id,
            [FromBody] UpdateStatusRequest request,
            ICanonicalAdvisoryService service,
            HttpContext context,
            CancellationToken ct) =>
        {
            if (!Enum.TryParse<CanonicalStatus>(request.Status, true, out var status))
            {
                return HttpResults.BadRequest(new { error = "Invalid status", validValues = Enum.GetNames<CanonicalStatus>() });
            }

            await service.UpdateStatusAsync(id, status, ct).ConfigureAwait(false);

            return HttpResults.Ok(new { id, status = status.ToString() });
        })
        .WithName("UpdateCanonicalStatus")
        .WithSummary("Update canonical advisory status")
        .Produces(StatusCodes.Status200OK)
        .Produces(StatusCodes.Status400BadRequest);
    }

    private static CanonicalAdvisoryResponse MapToResponse(CanonicalAdvisory canonical) => new()
    {
        Id = canonical.Id,
        Cve = canonical.Cve,
        AffectsKey = canonical.AffectsKey,
        MergeHash = canonical.MergeHash,
        Status = canonical.Status.ToString(),
        Severity = canonical.Severity,
        EpssScore = canonical.EpssScore,
        ExploitKnown = canonical.ExploitKnown,
        Title = canonical.Title,
        Summary = canonical.Summary,
        VersionRange = canonical.VersionRange,
        Weaknesses = canonical.Weaknesses,
        CreatedAt = canonical.CreatedAt,
        UpdatedAt = canonical.UpdatedAt,
        SourceEdges = canonical.SourceEdges.Select(e => new SourceEdgeResponse
        {
            Id = e.Id,
            SourceName = e.SourceName,
            SourceAdvisoryId = e.SourceAdvisoryId,
            SourceDocHash = e.SourceDocHash,
            VendorStatus = e.VendorStatus?.ToString(),
            PrecedenceRank = e.PrecedenceRank,
            HasDsseEnvelope = e.DsseEnvelope is not null,
            FetchedAt = e.FetchedAt
        }).ToList()
    };
}

#region Response DTOs

/// <summary>
/// Response for a single canonical advisory.
/// </summary>
public sealed record CanonicalAdvisoryResponse
{
    public Guid Id { get; init; }
    public required string Cve { get; init; }
    public required string AffectsKey { get; init; }
    public required string MergeHash { get; init; }
    public required string Status { get; init; }
    public string? Severity { get; init; }
    public decimal? EpssScore { get; init; }
    public bool ExploitKnown { get; init; }
    public string? Title { get; init; }
    public string? Summary { get; init; }
    public VersionRange? VersionRange { get; init; }
    public IReadOnlyList<string> Weaknesses { get; init; } = [];
    public DateTimeOffset CreatedAt { get; init; }
    public DateTimeOffset UpdatedAt { get; init; }
    public IReadOnlyList<SourceEdgeResponse> SourceEdges { get; init; } = [];
}

/// <summary>
/// Response for a source edge.
/// </summary>
public sealed record SourceEdgeResponse
{
    public Guid Id { get; init; }
    public required string SourceName { get; init; }
    public required string SourceAdvisoryId { get; init; }
    public required string SourceDocHash { get; init; }
    public string? VendorStatus { get; init; }
    public int PrecedenceRank { get; init; }
    public bool HasDsseEnvelope { get; init; }
    public DateTimeOffset FetchedAt { get; init; }
}

/// <summary>
/// Response for a list of canonical advisories.
/// </summary>
public sealed record CanonicalAdvisoryListResponse
{
    public IReadOnlyList<CanonicalAdvisoryResponse> Items { get; init; } = [];
    public long TotalCount { get; init; }
    public int Offset { get; init; }
    public int Limit { get; init; }
}

/// <summary>
/// Response for ingest result.
/// </summary>
public sealed record IngestResultResponse
{
    public Guid CanonicalId { get; init; }
    public required string MergeHash { get; init; }
    public required string Decision { get; init; }
    public Guid? SourceEdgeId { get; init; }
    public Guid? SignatureRef { get; init; }
    public string? ConflictReason { get; init; }
}

/// <summary>
/// Response for batch ingest.
/// </summary>
public sealed record BatchIngestResultResponse
{
    public IReadOnlyList<IngestResultResponse> Results { get; init; } = [];
    public required BatchIngestSummary Summary { get; init; }
}

/// <summary>
/// Summary of batch ingest results.
/// </summary>
public sealed record BatchIngestSummary
{
    public int Total { get; init; }
    public int Created { get; init; }
    public int Merged { get; init; }
    public int Duplicates { get; init; }
    public int Conflicts { get; init; }
}

#endregion

#region Request DTOs

/// <summary>
/// Request to ingest a raw advisory.
/// </summary>
public sealed record RawAdvisoryRequest
{
    public string? SourceAdvisoryId { get; init; }
    public string? Cve { get; init; }
    public string? AffectsKey { get; init; }
    public string? VersionRangeJson { get; init; }
    public IReadOnlyList<string>? Weaknesses { get; init; }
    public string? PatchLineage { get; init; }
    public string? Severity { get; init; }
    public string? Title { get; init; }
    public string? Summary { get; init; }
    public VendorStatus? VendorStatus { get; init; }
    public string? RawPayloadJson { get; init; }
    public DateTimeOffset? FetchedAt { get; init; }
}

/// <summary>
/// Request to update canonical status.
/// </summary>
public sealed record UpdateStatusRequest
{
    public required string Status { get; init; }
}

#endregion
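A client-side sketch of the single-advisory ingest endpoint above. This is illustrative and not part of the commit; the host URL and payload values are assumptions, and it presumes the DTO records above are shared with the caller:

```csharp
// Hypothetical caller for POST /api/v1/canonical/ingest/{source}.
using System.Net.Http.Json;

var client = new HttpClient { BaseAddress = new Uri("https://concelier.example.com") };

var request = new RawAdvisoryRequest
{
    Cve = "CVE-2024-1234",                 // required by the endpoint
    AffectsKey = "pkg:npm/example@1.0.0",  // required by the endpoint
    Severity = "high",
    Title = "Example advisory"
};

// 200 OK carries the merge decision; 409 Conflict carries the same body
// with ConflictReason populated.
var response = await client.PostAsJsonAsync("/api/v1/canonical/ingest/osv", request);
var result = await response.Content.ReadFromJsonAsync<IngestResultResponse>();
Console.WriteLine($"{result?.Decision}: canonical={result?.CanonicalId}");
```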
@@ -511,6 +511,9 @@ app.UseDeprecationHeaders();

app.MapConcelierMirrorEndpoints(authorityConfigured, enforceAuthority);

// Canonical advisory endpoints (Sprint 8200.0012.0003)
app.MapCanonicalAdvisoryEndpoints();

app.MapGet("/.well-known/openapi", ([FromServices] OpenApiDiscoveryDocumentProvider provider, HttpContext context) =>
{
    var (payload, etag) = provider.GetDocument();
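The endpoints resolve `ICanonicalAdvisoryService` from DI, so a registration must exist before `MapCanonicalAdvisoryEndpoints()` is called. The commit does not show that registration; a minimal sketch under assumptions (the inner `CanonicalAdvisoryService` type appears later in this commit, but the configuration section name and this exact decorator wiring are hypothetical):

```csharp
// Hypothetical Program.cs registration: caching decorator around the inner service.
builder.Services.AddMemoryCache();
builder.Services.Configure<CanonicalCacheOptions>(
    builder.Configuration.GetSection("Concelier:CanonicalCache"));
builder.Services.AddScoped<CanonicalAdvisoryService>();          // inner implementation
builder.Services.AddScoped<ICanonicalAdvisoryService>(sp =>
    new CachingCanonicalAdvisoryService(
        sp.GetRequiredService<CanonicalAdvisoryService>(),
        sp.GetRequiredService<IMemoryCache>(),
        sp.GetRequiredService<IOptions<CanonicalCacheOptions>>(),
        sp.GetRequiredService<ILogger<CachingCanonicalAdvisoryService>>()));
```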
@@ -16,6 +16,7 @@ using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.ChangeHistory;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
using Json.Schema;
using StellaOps.Cryptography;

@@ -37,6 +38,7 @@ public sealed class NvdConnector : IFeedConnector
    private readonly ILogger<NvdConnector> _logger;
    private readonly NvdDiagnostics _diagnostics;
    private readonly ICryptoHash _hash;
    private readonly ICanonicalAdvisoryService? _canonicalService;

    private static readonly JsonSchema Schema = NvdSchemaProvider.Schema;

@@ -53,7 +55,8 @@ public sealed class NvdConnector : IFeedConnector
        NvdDiagnostics diagnostics,
        ICryptoHash hash,
        TimeProvider? timeProvider,
-       ILogger<NvdConnector> logger)
+       ILogger<NvdConnector> logger,
+       ICanonicalAdvisoryService? canonicalService = null)
    {
        _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
        _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -69,6 +72,7 @@ public sealed class NvdConnector : IFeedConnector
        _hash = hash ?? throw new ArgumentNullException(nameof(hash));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _canonicalService = canonicalService; // Optional - canonical ingest
    }

    public string SourceName => NvdConnectorPlugin.SourceName;

@@ -292,6 +296,13 @@ public sealed class NvdConnector : IFeedConnector
        {
            await RecordChangeHistoryAsync(advisory, previous, document, now, cancellationToken).ConfigureAwait(false);
        }

        // Ingest to canonical advisory service if available
        if (_canonicalService is not null)
        {
            await IngestToCanonicalAsync(advisory, json, document.FetchedAt, cancellationToken).ConfigureAwait(false);
        }

        mappedCount++;
    }

@@ -565,4 +576,88 @@ public sealed class NvdConnector : IFeedConnector
        builder.Query = string.Join("&", parameters.Select(static kvp => $"{System.Net.WebUtility.UrlEncode(kvp.Key)}={System.Net.WebUtility.UrlEncode(kvp.Value)}"));
        return builder.Uri;
    }

    /// <summary>
    /// Ingests NVD advisory to canonical advisory service for deduplication.
    /// Creates one RawAdvisory per affected package.
    /// </summary>
    private async Task IngestToCanonicalAsync(
        Advisory advisory,
        string rawPayloadJson,
        DateTimeOffset fetchedAt,
        CancellationToken cancellationToken)
    {
        if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
        {
            return;
        }

        // NVD advisories are keyed by CVE ID
        var cve = advisory.AdvisoryKey;

        // Extract CWE weaknesses
        var weaknesses = advisory.Cwes
            .Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
            .Select(w => w.Identifier)
            .ToList();

        // Create one RawAdvisory per affected package (CPE)
        foreach (var affected in advisory.AffectedPackages)
        {
            if (string.IsNullOrWhiteSpace(affected.Identifier))
            {
                continue;
            }

            // Build version range JSON
            string? versionRangeJson = null;
            if (!affected.VersionRanges.IsEmpty)
            {
                var firstRange = affected.VersionRanges[0];
                var rangeObj = new
                {
                    introduced = firstRange.IntroducedVersion,
                    @fixed = firstRange.FixedVersion,
                    last_affected = firstRange.LastAffectedVersion
                };
                versionRangeJson = JsonSerializer.Serialize(rangeObj);
            }

            var rawAdvisory = new RawAdvisory
            {
                SourceAdvisoryId = cve,
                Cve = cve,
                AffectsKey = affected.Identifier,
                VersionRangeJson = versionRangeJson,
                Weaknesses = weaknesses,
                PatchLineage = null,
                Severity = advisory.Severity,
                Title = advisory.Title,
                Summary = advisory.Summary,
                VendorStatus = VendorStatus.Affected,
                RawPayloadJson = rawPayloadJson,
                FetchedAt = fetchedAt
            };

            try
            {
                var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);

                if (_logger.IsEnabled(LogLevel.Debug))
                {
                    _logger.LogDebug(
                        "Canonical ingest for {CveId}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
                        cve, affected.Identifier, result.Decision, result.CanonicalId);
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(
                    ex,
                    "Failed to ingest {CveId}/{AffectsKey} to canonical service",
                    cve, affected.Identifier);
                // Don't fail the mapping operation for canonical ingest failures
            }
        }
    }
}
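A note on the anonymous range object above: `@fixed` is just the C# verbatim-identifier escape for the `fixed` keyword, so the serialized JSON key is plain `fixed`, and with no naming policy the other members keep their snake_case names. An illustrative check (values are made up):

```csharp
// Standalone demonstration of the version-range serialization used above.
using System.Text.Json;

var rangeObj = new
{
    introduced = "2.0.0",
    @fixed = "2.4.1",                 // serializes as "fixed"
    last_affected = (string?)null     // default options keep null properties
};

Console.WriteLine(JsonSerializer.Serialize(rangeObj));
// {"introduced":"2.0.0","fixed":"2.4.1","last_affected":null}
```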
@@ -20,8 +20,7 @@ using StellaOps.Concelier.Connector.Osv.Configuration;
using StellaOps.Concelier.Connector.Osv.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
-using StellaOps.Concelier.Storage;
-using StellaOps.Concelier.Storage;
+using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
using StellaOps.Cryptography;

@@ -41,6 +40,7 @@ public sealed class OsvConnector : IFeedConnector
    private readonly IDtoStore _dtoStore;
    private readonly IAdvisoryStore _advisoryStore;
    private readonly ISourceStateRepository _stateRepository;
    private readonly ICanonicalAdvisoryService? _canonicalService;
    private readonly OsvOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<OsvConnector> _logger;
@@ -58,7 +58,8 @@ public sealed class OsvConnector : IFeedConnector
        OsvDiagnostics diagnostics,
        ICryptoHash hash,
        TimeProvider? timeProvider,
-       ILogger<OsvConnector> logger)
+       ILogger<OsvConnector> logger,
+       ICanonicalAdvisoryService? canonicalService = null)
    {
        _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
        _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -66,6 +67,7 @@ public sealed class OsvConnector : IFeedConnector
        _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
        _canonicalService = canonicalService; // Optional - canonical ingest
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
        _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
        _hash = hash ?? throw new ArgumentNullException(nameof(hash));
@@ -287,6 +289,12 @@ public sealed class OsvConnector : IFeedConnector
        await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
        await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);

        // Ingest to canonical advisory service if available
        if (_canonicalService is not null)
        {
            await IngestToCanonicalAsync(osvDto, advisory, payloadJson, document.FetchedAt, cancellationToken).ConfigureAwait(false);
        }

        pendingMappings.Remove(documentId);
    }

@@ -518,4 +526,91 @@ public sealed class OsvConnector : IFeedConnector
        var safeId = vulnerabilityId.Replace(' ', '-');
        return $"https://osv-vulnerabilities.storage.googleapis.com/{ecosystem}/{safeId}.json";
    }

    /// <summary>
    /// Ingests OSV advisory to canonical advisory service for deduplication.
    /// Creates one RawAdvisory per affected package.
    /// </summary>
    private async Task IngestToCanonicalAsync(
        OsvVulnerabilityDto dto,
        Advisory advisory,
        string rawPayloadJson,
        DateTimeOffset fetchedAt,
        CancellationToken cancellationToken)
    {
        if (_canonicalService is null || dto.Affected is null || dto.Affected.Count == 0)
        {
            return;
        }

        // Find primary CVE from aliases
        var cve = advisory.Aliases
            .FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
            ?? dto.Id; // Fall back to OSV ID if no CVE

        // Extract CWE weaknesses
        var weaknesses = advisory.Cwes
            .Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
            .Select(w => w.Identifier)
            .ToList();

        // Create one RawAdvisory per affected package
        foreach (var affected in advisory.AffectedPackages)
        {
            if (string.IsNullOrWhiteSpace(affected.Identifier))
            {
                continue;
            }

            // Build version range JSON
            string? versionRangeJson = null;
            if (affected.VersionRanges.Length > 0)
            {
                var firstRange = affected.VersionRanges[0];
                var rangeObj = new
                {
                    introduced = firstRange.IntroducedVersion,
                    @fixed = firstRange.FixedVersion,
                    last_affected = firstRange.LastAffectedVersion
                };
                versionRangeJson = JsonSerializer.Serialize(rangeObj, SerializerOptions);
            }

            var rawAdvisory = new RawAdvisory
            {
                SourceAdvisoryId = dto.Id,
                Cve = cve,
                AffectsKey = affected.Identifier,
                VersionRangeJson = versionRangeJson,
                Weaknesses = weaknesses,
                PatchLineage = null, // OSV doesn't have patch lineage
                Severity = advisory.Severity,
                Title = advisory.Title,
                Summary = advisory.Summary,
                VendorStatus = VendorStatus.Affected,
                RawPayloadJson = rawPayloadJson,
                FetchedAt = fetchedAt
            };

            try
            {
                var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);

                if (_logger.IsEnabled(LogLevel.Debug))
                {
                    _logger.LogDebug(
                        "Canonical ingest for {OsvId}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
                        dto.Id, affected.Identifier, result.Decision, result.CanonicalId);
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(
                    ex,
                    "Failed to ingest {OsvId}/{AffectsKey} to canonical service",
                    dto.Id, affected.Identifier);
                // Don't fail the mapping operation for canonical ingest failures
            }
        }
    }
}
@@ -1,7 +1,66 @@
# AGENTS
## Role

---

## Canonical Advisory Service

### Role
Deduplicated canonical advisory management with provenance-scoped source edges. Ingests raw advisories from multiple sources (NVD, GHSA, OSV, vendor, distro), computes merge hashes for deduplication, and maintains canonical records with linked source edges.

### Scope
- **Ingestion**: `IngestAsync` and `IngestBatchAsync` - raw advisory to canonical pipeline with merge hash computation, duplicate detection, and source edge creation.
- **Query**: `GetByIdAsync`, `GetByCveAsync`, `GetByArtifactAsync`, `GetByMergeHashAsync`, `QueryAsync` - look up canonical advisories with source edges.
- **Status**: `UpdateStatusAsync`, `DegradeToStubsAsync` - lifecycle management (Active, Stub, Withdrawn).
- **Caching**: `CachingCanonicalAdvisoryService` decorator with configurable TTLs for hot queries.
- **Signing**: optional DSSE signing of source edges via `ISourceEdgeSigner` integration.

### Interfaces & Contracts
- **ICanonicalAdvisoryService**: main service interface for ingest and query operations.
- **ICanonicalAdvisoryStore**: storage abstraction for canonical/source edge persistence.
- **IMergeHashCalculator**: merge hash computation (CVE + PURL + version range + CWE + patch lineage); see the sketch after this section.
- **ISourceEdgeSigner**: optional DSSE envelope signing for source edges.

### Domain Models
- **CanonicalAdvisory**: deduplicated advisory record with merge hash, status, severity, EPSS, weaknesses.
- **SourceEdge**: link from source advisory to canonical with precedence rank, doc hash, DSSE envelope.
- **IngestResult**: outcome with MergeDecision (Created, Merged, Duplicate, Conflict).
- **RawAdvisory**: input from connectors with CVE, affects key, version range, weaknesses.

### Source Precedence
Lower rank = higher priority for metadata updates (see the sketch after this section):
- `vendor` = 10 (authoritative)
- `redhat/debian/suse/ubuntu/alpine` = 20 (distro)
- `osv` = 30
- `ghsa` = 35
- `nvd` = 40 (fallback)

### API Endpoints
- `GET /api/v1/canonical/{id}` - get by ID
- `GET /api/v1/canonical?cve={cve}&artifact={purl}&mergeHash={hash}` - query
- `POST /api/v1/canonical/ingest/{source}` - ingest single advisory
- `POST /api/v1/canonical/ingest/{source}/batch` - batch ingest
- `PATCH /api/v1/canonical/{id}/status` - update status

### In/Out of Scope
**In**: merge hash computation, canonical upsert, source edge linking, duplicate detection, caching, DSSE signing.
**Out**: raw advisory fetching (connectors), database schema (Storage.Postgres), HTTP routing (WebService).

### Observability
- Logs: canonical ID, merge hash, decision, source, precedence rank, signing status.
- Cache: hit/miss tracing at Trace level.

### Tests
- Unit tests in `Core.Tests/Canonical/` covering ingest pipeline, caching, signing.
- Integration tests in `WebService.Tests/Canonical/` for API endpoints.

---

## Job Orchestration

### Role
Job orchestration and lifecycle. Registers job definitions, schedules execution, triggers runs, and reports status for connectors and exporters.
-## Scope
+### Scope
- Contracts: IJob (execute with CancellationToken), JobRunStatus, JobTriggerOutcome/Result.
- Registration: JobSchedulerBuilder.AddJob<T>(kind, cronExpression?, timeout?, leaseDuration?); options recorded in JobSchedulerOptions.
- Plugin host integration discovers IJob providers via registered IDependencyInjectionRoutine implementations.
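The merge hash and precedence table above can be pictured with a short sketch. This is illustrative only, not the shipped `IMergeHashCalculator`: the exact canonical form (separators, ordering, casing) is an assumption; only the list of identity components and the rank values come from the section above.

```csharp
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

// Deterministic SHA-256 over the identity components (assumed canonicalization).
static string ComputeMergeHash(
    string cve, string affectsKey, string? versionRange,
    IEnumerable<string> weaknesses, string? patchLineage)
{
    var cwes = string.Join(",", weaknesses.OrderBy(w => w, StringComparer.Ordinal));
    var identity = string.Join("|",
        cve.ToUpperInvariant(), affectsKey, versionRange ?? "", cwes, patchLineage ?? "");
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(identity));
    return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}

// Source precedence from the table above: lower rank wins metadata updates.
static int PrecedenceRank(string source) => source.ToLowerInvariant() switch
{
    "vendor" => 10,
    "redhat" or "debian" or "suse" or "ubuntu" or "alpine" => 20,
    "osv" => 30,
    "ghsa" => 35,
    _ => 40 // nvd and unknown sources rank last
};

Console.WriteLine(ComputeMergeHash(
    "CVE-2024-1234", "pkg:npm/example@1.0.0", ">=1.0.0,<2.0.0",
    new[] { "CWE-79" }, patchLineage: null));
```

Because the hash covers only identity components (not severity, title, or summary), two sources describing the same CVE/package pair collapse to one canonical record while their metadata competes via precedence rank.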
@@ -0,0 +1,264 @@
// -----------------------------------------------------------------------------
// CachingCanonicalAdvisoryService.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-014
// Description: Caching decorator for canonical advisory service
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Caching decorator for canonical advisory service.
/// Caches hot queries (by ID, merge hash, CVE) with short TTL.
/// </summary>
public sealed class CachingCanonicalAdvisoryService : ICanonicalAdvisoryService
{
    private readonly ICanonicalAdvisoryService _inner;
    private readonly IMemoryCache _cache;
    private readonly ILogger<CachingCanonicalAdvisoryService> _logger;
    private readonly CanonicalCacheOptions _options;

    private const string CacheKeyPrefix = "canonical:";

    public CachingCanonicalAdvisoryService(
        ICanonicalAdvisoryService inner,
        IMemoryCache cache,
        IOptions<CanonicalCacheOptions> options,
        ILogger<CachingCanonicalAdvisoryService> logger)
    {
        _inner = inner ?? throw new ArgumentNullException(nameof(inner));
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options?.Value ?? new CanonicalCacheOptions();
    }

    #region Ingest Operations (Pass-through with cache invalidation)

    public async Task<IngestResult> IngestAsync(
        string source,
        RawAdvisory rawAdvisory,
        CancellationToken ct = default)
    {
        var result = await _inner.IngestAsync(source, rawAdvisory, ct).ConfigureAwait(false);

        // Invalidate cache for affected entries
        if (result.Decision != MergeDecision.Duplicate)
        {
            InvalidateCacheForCanonical(result.CanonicalId, result.MergeHash, rawAdvisory.Cve);
        }

        return result;
    }

    public async Task<IReadOnlyList<IngestResult>> IngestBatchAsync(
        string source,
        IEnumerable<RawAdvisory> advisories,
        CancellationToken ct = default)
    {
        var results = await _inner.IngestBatchAsync(source, advisories, ct).ConfigureAwait(false);

        // Invalidate cache for all affected entries
        foreach (var result in results.Where(r => r.Decision != MergeDecision.Duplicate))
        {
            InvalidateCacheForCanonical(result.CanonicalId, result.MergeHash, null);
        }

        return results;
    }

    #endregion

    #region Query Operations (Cached)

    public async Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default)
    {
        var cacheKey = $"{CacheKeyPrefix}id:{id}";

        if (_cache.TryGetValue(cacheKey, out CanonicalAdvisory? cached))
        {
            _logger.LogTrace("Cache hit for canonical {CanonicalId}", id);
            return cached;
        }

        var result = await _inner.GetByIdAsync(id, ct).ConfigureAwait(false);

        if (result is not null)
        {
            SetCache(cacheKey, result, _options.DefaultTtl);
            // Also cache by merge hash for cross-lookup
            SetCache($"{CacheKeyPrefix}hash:{result.MergeHash}", result, _options.DefaultTtl);
        }

        return result;
    }

    public async Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default)
    {
        var cacheKey = $"{CacheKeyPrefix}hash:{mergeHash}";

        if (_cache.TryGetValue(cacheKey, out CanonicalAdvisory? cached))
        {
            _logger.LogTrace("Cache hit for merge hash {MergeHash}", mergeHash);
            return cached;
        }

        var result = await _inner.GetByMergeHashAsync(mergeHash, ct).ConfigureAwait(false);

        if (result is not null)
        {
            SetCache(cacheKey, result, _options.DefaultTtl);
            // Also cache by ID for cross-lookup
            SetCache($"{CacheKeyPrefix}id:{result.Id}", result, _options.DefaultTtl);
        }

        return result;
    }

    public async Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default)
    {
        var cacheKey = $"{CacheKeyPrefix}cve:{cve.ToUpperInvariant()}";

        if (_cache.TryGetValue(cacheKey, out IReadOnlyList<CanonicalAdvisory>? cached) && cached is not null)
        {
            _logger.LogTrace("Cache hit for CVE {Cve} ({Count} items)", cve, cached.Count);
            return cached;
        }

        var result = await _inner.GetByCveAsync(cve, ct).ConfigureAwait(false);

        if (result.Count > 0)
        {
            SetCache(cacheKey, result, _options.CveTtl);

            // Also cache individual items
            foreach (var item in result)
            {
                SetCache($"{CacheKeyPrefix}id:{item.Id}", item, _options.DefaultTtl);
                SetCache($"{CacheKeyPrefix}hash:{item.MergeHash}", item, _options.DefaultTtl);
            }
        }

        return result;
    }

    public async Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(
        string artifactKey,
        CancellationToken ct = default)
    {
        var cacheKey = $"{CacheKeyPrefix}artifact:{artifactKey.ToLowerInvariant()}";

        if (_cache.TryGetValue(cacheKey, out IReadOnlyList<CanonicalAdvisory>? cached) && cached is not null)
        {
            _logger.LogTrace("Cache hit for artifact {ArtifactKey} ({Count} items)", artifactKey, cached.Count);
            return cached;
        }

        var result = await _inner.GetByArtifactAsync(artifactKey, ct).ConfigureAwait(false);

        if (result.Count > 0)
        {
            SetCache(cacheKey, result, _options.ArtifactTtl);
        }

        return result;
    }

    public Task<PagedResult<CanonicalAdvisory>> QueryAsync(
        CanonicalQueryOptions options,
        CancellationToken ct = default)
    {
        // Don't cache complex queries - pass through
        return _inner.QueryAsync(options, ct);
    }

    #endregion

    #region Status Operations (Pass-through with cache invalidation)

    public async Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default)
    {
        await _inner.UpdateStatusAsync(id, status, ct).ConfigureAwait(false);

        // Invalidate cache for this canonical
        InvalidateCacheById(id);
    }

    public Task<int> DegradeToStubsAsync(double scoreThreshold, CancellationToken ct = default)
    {
        // This may affect many entries - don't try to invalidate individually
        // The cache will naturally expire
        return _inner.DegradeToStubsAsync(scoreThreshold, ct);
    }

    #endregion

    #region Private Helpers

    private void SetCache<T>(string key, T value, TimeSpan ttl) where T : class
    {
        if (ttl <= TimeSpan.Zero || !_options.Enabled)
        {
            return;
        }

        var options = new MemoryCacheEntryOptions
        {
            AbsoluteExpirationRelativeToNow = ttl,
            Size = 1 // For size-limited caches
        };

        _cache.Set(key, value, options);
    }

    private void InvalidateCacheForCanonical(Guid id, string? mergeHash, string? cve)
    {
        InvalidateCacheById(id);

        if (!string.IsNullOrEmpty(mergeHash))
        {
            _cache.Remove($"{CacheKeyPrefix}hash:{mergeHash}");
        }

        if (!string.IsNullOrEmpty(cve))
        {
            _cache.Remove($"{CacheKeyPrefix}cve:{cve.ToUpperInvariant()}");
        }
    }

    private void InvalidateCacheById(Guid id)
    {
        _cache.Remove($"{CacheKeyPrefix}id:{id}");
    }

    #endregion
}

/// <summary>
/// Configuration options for canonical advisory caching.
/// </summary>
public sealed class CanonicalCacheOptions
{
    /// <summary>
    /// Whether caching is enabled. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Default TTL for individual canonical lookups. Default: 5 minutes.
    /// </summary>
    public TimeSpan DefaultTtl { get; set; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// TTL for CVE-based queries. Default: 2 minutes.
    /// </summary>
    public TimeSpan CveTtl { get; set; } = TimeSpan.FromMinutes(2);

    /// <summary>
    /// TTL for artifact-based queries. Default: 2 minutes.
    /// </summary>
    public TimeSpan ArtifactTtl { get; set; } = TimeSpan.FromMinutes(2);
}
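Tuning the TTLs above is a plain options-configure call. A minimal sketch (not part of this commit; where and how the options are bound in the host is an assumption):

```csharp
// Hypothetical host configuration for CanonicalCacheOptions.
// TimeSpan values also bind from config strings such as "00:05:00".
builder.Services.Configure<CanonicalCacheOptions>(options =>
{
    options.Enabled = true;
    options.DefaultTtl = TimeSpan.FromMinutes(5); // per-ID and per-hash lookups
    options.CveTtl = TimeSpan.FromMinutes(2);     // CVE list queries churn faster
    options.ArtifactTtl = TimeSpan.FromMinutes(2);
});
```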
@@ -0,0 +1,95 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisory.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-001
// Description: Domain model for canonical advisory with source edges
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Canonical advisory with all source edges.
/// </summary>
public sealed record CanonicalAdvisory
{
    /// <summary>Unique canonical advisory identifier.</summary>
    public Guid Id { get; init; }

    /// <summary>CVE identifier (e.g., "CVE-2024-1234").</summary>
    public required string Cve { get; init; }

    /// <summary>Normalized PURL or CPE identifying the affected package.</summary>
    public required string AffectsKey { get; init; }

    /// <summary>Structured version range (introduced, fixed, last_affected).</summary>
    public VersionRange? VersionRange { get; init; }

    /// <summary>Sorted CWE identifiers.</summary>
    public IReadOnlyList<string> Weaknesses { get; init; } = [];

    /// <summary>Deterministic SHA256 hash of identity components.</summary>
    public required string MergeHash { get; init; }

    /// <summary>Status: active, stub, or withdrawn.</summary>
    public CanonicalStatus Status { get; init; } = CanonicalStatus.Active;

    /// <summary>Normalized severity: critical, high, medium, low, none.</summary>
    public string? Severity { get; init; }

    /// <summary>EPSS exploit prediction probability (0.0000-1.0000).</summary>
    public decimal? EpssScore { get; init; }

    /// <summary>Whether an exploit is known to exist.</summary>
    public bool ExploitKnown { get; init; }

    /// <summary>Advisory title.</summary>
    public string? Title { get; init; }

    /// <summary>Advisory summary.</summary>
    public string? Summary { get; init; }

    /// <summary>When the canonical record was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>When the canonical record was last updated.</summary>
    public DateTimeOffset UpdatedAt { get; init; }

    /// <summary>All source edges for this canonical, ordered by precedence.</summary>
    public IReadOnlyList<SourceEdge> SourceEdges { get; init; } = [];

    /// <summary>Primary source edge (highest precedence).</summary>
    public SourceEdge? PrimarySource => SourceEdges.Count > 0 ? SourceEdges[0] : null;
}

/// <summary>
/// Status of a canonical advisory.
/// </summary>
public enum CanonicalStatus
{
    /// <summary>Full active record with all data.</summary>
    Active,

    /// <summary>Minimal record for low-interest advisories.</summary>
    Stub,

    /// <summary>Withdrawn or superseded advisory.</summary>
    Withdrawn
}

/// <summary>
/// Structured version range for affected packages.
/// </summary>
public sealed record VersionRange
{
    /// <summary>Version where the vulnerability was introduced.</summary>
    public string? Introduced { get; init; }

    /// <summary>Version where the vulnerability was fixed.</summary>
    public string? Fixed { get; init; }

    /// <summary>Last known affected version.</summary>
    public string? LastAffected { get; init; }

    /// <summary>Canonical range expression (e.g., ">=1.0.0,<2.0.0").</summary>
    public string? RangeExpression { get; init; }
}
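// Example (illustrative, not part of this commit): constructing a canonical
// record and reading the precedence-ordered primary source. All values below
// are made up; the placeholder MergeHash is not a real hash.
public static class CanonicalAdvisoryExample
{
    public static CanonicalAdvisory Build() => new()
    {
        Cve = "CVE-2024-1234",
        AffectsKey = "pkg:deb/debian/openssl",
        MergeHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000",
        VersionRange = new VersionRange { Introduced = "1.0.0", Fixed = "1.0.5", RangeExpression = ">=1.0.0,<1.0.5" },
        Weaknesses = ["CWE-416"],
        Severity = "high"
    };
    // Build().PrimarySource is SourceEdges[0] when edges exist, otherwise null.
}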
@@ -0,0 +1,375 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisoryService.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Tasks: CANSVC-8200-004 through CANSVC-8200-008
// Description: Service implementation for canonical advisory management
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Service for managing canonical advisories with provenance-scoped deduplication.
/// </summary>
public sealed class CanonicalAdvisoryService : ICanonicalAdvisoryService
{
    private readonly ICanonicalAdvisoryStore _store;
    private readonly IMergeHashCalculator _mergeHashCalculator;
    private readonly ISourceEdgeSigner? _signer;
    private readonly ILogger<CanonicalAdvisoryService> _logger;

    /// <summary>
    /// Source precedence ranks (lower = higher priority).
    /// </summary>
    private static readonly Dictionary<string, int> SourcePrecedence = new(StringComparer.OrdinalIgnoreCase)
    {
        ["vendor"] = 10,
        ["redhat"] = 20,
        ["debian"] = 20,
        ["suse"] = 20,
        ["ubuntu"] = 20,
        ["alpine"] = 20,
        ["osv"] = 30,
        ["ghsa"] = 35,
        ["nvd"] = 40
    };

    public CanonicalAdvisoryService(
        ICanonicalAdvisoryStore store,
        IMergeHashCalculator mergeHashCalculator,
        ILogger<CanonicalAdvisoryService> logger,
        ISourceEdgeSigner? signer = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _mergeHashCalculator = mergeHashCalculator ?? throw new ArgumentNullException(nameof(mergeHashCalculator));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _signer = signer; // Optional - if not provided, source edges are stored unsigned
    }

    #region Ingest Operations

    /// <inheritdoc />
    public async Task<IngestResult> IngestAsync(
        string source,
        RawAdvisory rawAdvisory,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(source);
        ArgumentNullException.ThrowIfNull(rawAdvisory);

        _logger.LogDebug(
            "Ingesting advisory {SourceAdvisoryId} from {Source}",
            rawAdvisory.SourceAdvisoryId, source);

        // 1. Compute merge hash from identity components
        var mergeHashInput = new MergeHashInput
        {
            Cve = rawAdvisory.Cve,
            AffectsKey = rawAdvisory.AffectsKey,
            VersionRange = rawAdvisory.VersionRangeJson,
            Weaknesses = rawAdvisory.Weaknesses,
            PatchLineage = rawAdvisory.PatchLineage
        };
        var mergeHash = _mergeHashCalculator.ComputeMergeHash(mergeHashInput);

        // 2. Check for existing canonical
        var existing = await _store.GetByMergeHashAsync(mergeHash, ct).ConfigureAwait(false);

        MergeDecision decision;
        Guid canonicalId;

        if (existing is null)
        {
            // 3a. Create new canonical
            var upsertRequest = new UpsertCanonicalRequest
            {
                Cve = rawAdvisory.Cve,
                AffectsKey = rawAdvisory.AffectsKey,
                MergeHash = mergeHash,
                VersionRangeJson = rawAdvisory.VersionRangeJson,
                Weaknesses = rawAdvisory.Weaknesses,
                Severity = rawAdvisory.Severity,
                Title = rawAdvisory.Title,
                Summary = rawAdvisory.Summary
            };

            canonicalId = await _store.UpsertCanonicalAsync(upsertRequest, ct).ConfigureAwait(false);
            decision = MergeDecision.Created;

            _logger.LogInformation(
                "Created canonical {CanonicalId} with merge_hash {MergeHash} for {Cve}",
                canonicalId, mergeHash, rawAdvisory.Cve);
        }
        else
        {
            // 3b. Merge into existing canonical
            canonicalId = existing.Id;
            decision = MergeDecision.Merged;

            // Update metadata if we have better data
            await UpdateCanonicalMetadataIfBetterAsync(existing, rawAdvisory, source, ct).ConfigureAwait(false);

            _logger.LogDebug(
                "Merging into existing canonical {CanonicalId} for {Cve}",
                canonicalId, rawAdvisory.Cve);
        }

        // 4. Compute source document hash
        var sourceDocHash = ComputeDocumentHash(rawAdvisory);

        // 5. Resolve source ID
        var sourceId = await _store.ResolveSourceIdAsync(source, ct).ConfigureAwait(false);

        // 6. Check if source edge already exists (duplicate detection)
        var edgeExists = await _store.SourceEdgeExistsAsync(canonicalId, sourceId, sourceDocHash, ct).ConfigureAwait(false);
        if (edgeExists)
        {
            _logger.LogDebug(
                "Duplicate source edge detected for canonical {CanonicalId} from {Source}",
                canonicalId, source);

            return IngestResult.Duplicate(canonicalId, mergeHash, source, rawAdvisory.SourceAdvisoryId);
        }

        // 7. Sign source edge if signer is available
        string? dsseEnvelopeJson = null;
        Guid? signatureRef = null;

        if (_signer is not null && rawAdvisory.RawPayloadJson is not null)
        {
            var signingRequest = new SourceEdgeSigningRequest
            {
                SourceAdvisoryId = rawAdvisory.SourceAdvisoryId,
                SourceName = source,
                PayloadHash = sourceDocHash,
                PayloadJson = rawAdvisory.RawPayloadJson
            };

            var signingResult = await _signer.SignAsync(signingRequest, ct).ConfigureAwait(false);

            if (signingResult.Success && signingResult.Envelope is not null)
            {
                dsseEnvelopeJson = JsonSerializer.Serialize(signingResult.Envelope);
                signatureRef = signingResult.SignatureRef;

                _logger.LogDebug(
                    "Signed source edge for {SourceAdvisoryId} from {Source} (ref: {SignatureRef})",
                    rawAdvisory.SourceAdvisoryId, source, signatureRef);
            }
            else if (!signingResult.Success)
            {
                _logger.LogWarning(
                    "Failed to sign source edge for {SourceAdvisoryId}: {Error}",
                    rawAdvisory.SourceAdvisoryId, signingResult.ErrorMessage);
            }
        }

        // 8. Create source edge
        var precedenceRank = GetPrecedenceRank(source);
        var addEdgeRequest = new AddSourceEdgeRequest
        {
            CanonicalId = canonicalId,
            SourceId = sourceId,
            SourceAdvisoryId = rawAdvisory.SourceAdvisoryId,
            SourceDocHash = sourceDocHash,
            VendorStatus = rawAdvisory.VendorStatus,
            PrecedenceRank = precedenceRank,
            DsseEnvelopeJson = dsseEnvelopeJson,
            RawPayloadJson = rawAdvisory.RawPayloadJson,
            FetchedAt = rawAdvisory.FetchedAt
        };

        var edgeResult = await _store.AddSourceEdgeAsync(addEdgeRequest, ct).ConfigureAwait(false);

        _logger.LogInformation(
            "Added source edge {EdgeId} from {Source} ({SourceAdvisoryId}) to canonical {CanonicalId}{Signed}",
            edgeResult.EdgeId, source, rawAdvisory.SourceAdvisoryId, canonicalId,
            dsseEnvelopeJson is not null ? " [signed]" : "");

        return decision == MergeDecision.Created
            ? IngestResult.Created(canonicalId, mergeHash, edgeResult.EdgeId, source, rawAdvisory.SourceAdvisoryId, signatureRef)
            : IngestResult.Merged(canonicalId, mergeHash, edgeResult.EdgeId, source, rawAdvisory.SourceAdvisoryId, signatureRef);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<IngestResult>> IngestBatchAsync(
        string source,
        IEnumerable<RawAdvisory> advisories,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(source);
        ArgumentNullException.ThrowIfNull(advisories);

        var results = new List<IngestResult>();

        foreach (var advisory in advisories)
        {
            ct.ThrowIfCancellationRequested();

            try
            {
                var result = await IngestAsync(source, advisory, ct).ConfigureAwait(false);
                results.Add(result);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(
                    ex,
                    "Failed to ingest advisory {SourceAdvisoryId} from {Source}",
                    advisory.SourceAdvisoryId, source);

                // Create a conflict result for failed ingestion
                results.Add(IngestResult.Conflict(
                    Guid.Empty,
                    string.Empty,
                    ex.Message,
                    source,
                    advisory.SourceAdvisoryId));
            }
        }

        _logger.LogInformation(
            "Batch ingest complete: {Created} created, {Merged} merged, {Duplicates} duplicates, {Conflicts} conflicts",
            results.Count(r => r.Decision == MergeDecision.Created),
            results.Count(r => r.Decision == MergeDecision.Merged),
            results.Count(r => r.Decision == MergeDecision.Duplicate),
            results.Count(r => r.Decision == MergeDecision.Conflict));

        return results;
    }

    #endregion

    #region Query Operations

    /// <inheritdoc />
    public Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default)
        => _store.GetByIdAsync(id, ct);

    /// <inheritdoc />
    public Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(mergeHash);
        return _store.GetByMergeHashAsync(mergeHash, ct);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cve);
        return _store.GetByCveAsync(cve, ct);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(string artifactKey, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactKey);
        return _store.GetByArtifactAsync(artifactKey, ct);
    }

    /// <inheritdoc />
    public Task<PagedResult<CanonicalAdvisory>> QueryAsync(CanonicalQueryOptions options, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(options);
        return _store.QueryAsync(options, ct);
    }

    #endregion

    #region Status Operations

    /// <inheritdoc />
    public async Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default)
    {
        await _store.UpdateStatusAsync(id, status, ct).ConfigureAwait(false);

        _logger.LogInformation(
            "Updated canonical {CanonicalId} status to {Status}",
            id, status);
    }

    /// <inheritdoc />
    public Task<int> DegradeToStubsAsync(double scoreThreshold, CancellationToken ct = default)
    {
        // TODO: Implement stub degradation based on EPSS score or other criteria
        // This would query for low-interest canonicals and update their status to Stub
        _logger.LogWarning(
            "DegradeToStubsAsync not yet implemented (threshold={Threshold})",
            scoreThreshold);

        return Task.FromResult(0);
    }

    #endregion

    #region Private Helpers

    private async Task UpdateCanonicalMetadataIfBetterAsync(
        CanonicalAdvisory existing,
        RawAdvisory newAdvisory,
        string source,
        CancellationToken ct)
    {
        // Only update if the new source has higher precedence
        var newPrecedence = GetPrecedenceRank(source);
        var existingPrecedence = existing.PrimarySource?.PrecedenceRank ?? int.MaxValue;

        if (newPrecedence >= existingPrecedence)
        {
            return; // New source is lower or equal precedence, don't update
        }

        // Update with better metadata
        var updateRequest = new UpsertCanonicalRequest
        {
            Cve = existing.Cve,
            AffectsKey = existing.AffectsKey,
            MergeHash = existing.MergeHash,
            Severity = newAdvisory.Severity ?? existing.Severity,
            Title = newAdvisory.Title ?? existing.Title,
            Summary = newAdvisory.Summary ?? existing.Summary
        };

        await _store.UpsertCanonicalAsync(updateRequest, ct).ConfigureAwait(false);

        _logger.LogDebug(
            "Updated canonical {CanonicalId} metadata from higher-precedence source {Source}",
            existing.Id, source);
    }

    private static string ComputeDocumentHash(RawAdvisory advisory)
    {
        // Hash the raw payload if available, otherwise hash the key identity fields
        var content = advisory.RawPayloadJson
            ?? JsonSerializer.Serialize(new
            {
                advisory.SourceAdvisoryId,
                advisory.Cve,
                advisory.AffectsKey,
                advisory.VersionRangeJson,
                advisory.Weaknesses,
                advisory.Title,
                advisory.Summary
            });

        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(content));
        return $"sha256:{Convert.ToHexStringLower(hashBytes)}";
    }

    private static int GetPrecedenceRank(string source)
    {
        if (SourcePrecedence.TryGetValue(source, out var rank))
        {
            return rank;
        }

        // Unknown sources get default precedence
        return 100;
    }

    #endregion
}
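// Example (illustrative, not part of this commit): driving a single ingest and
// branching on the merge decision. The RawAdvisory values are made up; the
// service would normally come from DI.
public static class CanonicalIngestExample
{
    public static async Task RunAsync(ICanonicalAdvisoryService service, CancellationToken ct)
    {
        var raw = new RawAdvisory
        {
            SourceAdvisoryId = "DSA-5678",
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:deb/debian/openssl"
        };

        var result = await service.IngestAsync("debian", raw, ct);

        // Created and Merged both mean an edge was persisted; Duplicate is a no-op.
        if (result.Decision == MergeDecision.Conflict)
        {
            throw new InvalidOperationException(result.ConflictReason);
        }
    }
}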
@@ -0,0 +1,174 @@
// -----------------------------------------------------------------------------
// ICanonicalAdvisoryService.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-000
// Description: Service interface for canonical advisory management
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Service for managing canonical advisories with provenance-scoped deduplication.
/// </summary>
public interface ICanonicalAdvisoryService
{
    // === Ingest Operations ===

    /// <summary>
    /// Ingest raw advisory from source, creating or updating canonical record.
    /// </summary>
    /// <param name="source">Source identifier (osv, nvd, ghsa, redhat, debian, etc.)</param>
    /// <param name="rawAdvisory">Raw advisory document</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>Ingest result with canonical ID and merge decision</returns>
    Task<IngestResult> IngestAsync(
        string source,
        RawAdvisory rawAdvisory,
        CancellationToken ct = default);

    /// <summary>
    /// Batch ingest multiple advisories from the same source.
    /// </summary>
    Task<IReadOnlyList<IngestResult>> IngestBatchAsync(
        string source,
        IEnumerable<RawAdvisory> advisories,
        CancellationToken ct = default);

    // === Query Operations ===

    /// <summary>
    /// Get canonical advisory by ID with all source edges.
    /// </summary>
    Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Get canonical advisory by merge hash.
    /// </summary>
    Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default);

    /// <summary>
    /// Get all canonical advisories for a CVE.
    /// </summary>
    Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default);

    /// <summary>
    /// Get canonical advisories affecting an artifact (PURL or CPE).
    /// </summary>
    Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(
        string artifactKey,
        CancellationToken ct = default);

    /// <summary>
    /// Query canonical advisories with filters.
    /// </summary>
    Task<PagedResult<CanonicalAdvisory>> QueryAsync(
        CanonicalQueryOptions options,
        CancellationToken ct = default);

    // === Status Operations ===

    /// <summary>
    /// Update canonical status (active, stub, withdrawn).
    /// </summary>
    Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default);

    /// <summary>
    /// Degrade low-interest canonicals to stub status.
    /// </summary>
    Task<int> DegradeToStubsAsync(double scoreThreshold, CancellationToken ct = default);
}

/// <summary>
/// Raw advisory document before normalization.
/// </summary>
public sealed record RawAdvisory
{
    /// <summary>Source advisory ID (DSA-5678, RHSA-2024:1234, etc.)</summary>
    public required string SourceAdvisoryId { get; init; }

    /// <summary>Primary CVE identifier.</summary>
    public required string Cve { get; init; }

    /// <summary>Affected package identifier (PURL or CPE).</summary>
    public required string AffectsKey { get; init; }

    /// <summary>Affected version range as JSON string.</summary>
    public string? VersionRangeJson { get; init; }

    /// <summary>CWE identifiers.</summary>
    public IReadOnlyList<string> Weaknesses { get; init; } = [];

    /// <summary>Patch lineage (commit SHA, patch ID).</summary>
    public string? PatchLineage { get; init; }

    /// <summary>Advisory title.</summary>
    public string? Title { get; init; }

    /// <summary>Advisory summary.</summary>
    public string? Summary { get; init; }

    /// <summary>Severity level.</summary>
    public string? Severity { get; init; }

    /// <summary>VEX-style vendor status.</summary>
    public VendorStatus? VendorStatus { get; init; }

    /// <summary>Raw payload as JSON.</summary>
    public string? RawPayloadJson { get; init; }

    /// <summary>When the advisory was fetched.</summary>
    public DateTimeOffset FetchedAt { get; init; } = DateTimeOffset.UtcNow;
}

/// <summary>
/// Query options for canonical advisories.
/// </summary>
public sealed record CanonicalQueryOptions
{
    /// <summary>Filter by CVE (exact match).</summary>
    public string? Cve { get; init; }

    /// <summary>Filter by artifact key (PURL or CPE).</summary>
    public string? ArtifactKey { get; init; }

    /// <summary>Filter by severity.</summary>
    public string? Severity { get; init; }

    /// <summary>Filter by status.</summary>
    public CanonicalStatus? Status { get; init; }

    /// <summary>Only include canonicals with known exploits.</summary>
    public bool? ExploitKnown { get; init; }

    /// <summary>Include canonicals updated since this time.</summary>
    public DateTimeOffset? UpdatedSince { get; init; }

    /// <summary>Page size.</summary>
    public int Limit { get; init; } = 100;

    /// <summary>Page offset.</summary>
    public int Offset { get; init; } = 0;
}

/// <summary>
/// Paged result for queries.
/// </summary>
public sealed record PagedResult<T>
{
    /// <summary>Items in this page.</summary>
    public required IReadOnlyList<T> Items { get; init; }

    /// <summary>Total count across all pages.</summary>
    public long TotalCount { get; init; }

    /// <summary>Current page offset.</summary>
    public int Offset { get; init; }

    /// <summary>Page size.</summary>
    public int Limit { get; init; }

    /// <summary>Whether there are more items.</summary>
    public bool HasMore => Offset + Items.Count < TotalCount;
}
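// Example (illustrative, not part of this commit): draining a filtered query
// with offset paging, using PagedResult.HasMore to terminate. The severity
// filter value is arbitrary.
public static class CanonicalQueryExample
{
    public static async Task<List<CanonicalAdvisory>> FetchAllHighSeverityAsync(
        ICanonicalAdvisoryService service, CancellationToken ct)
    {
        var all = new List<CanonicalAdvisory>();
        var offset = 0;
        PagedResult<CanonicalAdvisory> page;

        do
        {
            page = await service.QueryAsync(
                new CanonicalQueryOptions { Severity = "high", Limit = 100, Offset = offset }, ct);
            all.AddRange(page.Items);
            offset += page.Items.Count;
        }
        while (page.HasMore);

        return all;
    }
}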
@@ -0,0 +1,138 @@
// -----------------------------------------------------------------------------
// ICanonicalAdvisoryStore.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-004
// Description: Storage abstraction for canonical advisory persistence
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Storage abstraction for canonical advisory and source edge persistence.
/// Implemented by PostgresCanonicalAdvisoryStore.
/// </summary>
public interface ICanonicalAdvisoryStore
{
    #region Canonical Advisory Operations

    /// <summary>
    /// Gets a canonical advisory by ID with source edges.
    /// </summary>
    Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Gets a canonical advisory by merge hash.
    /// </summary>
    Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default);

    /// <summary>
    /// Gets all canonical advisories for a CVE.
    /// </summary>
    Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default);

    /// <summary>
    /// Gets canonical advisories affecting an artifact (PURL or CPE).
    /// </summary>
    Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(string artifactKey, CancellationToken ct = default);

    /// <summary>
    /// Queries canonical advisories with filters.
    /// </summary>
    Task<PagedResult<CanonicalAdvisory>> QueryAsync(CanonicalQueryOptions options, CancellationToken ct = default);

    /// <summary>
    /// Upserts a canonical advisory (creates or updates by merge_hash).
    /// </summary>
    Task<Guid> UpsertCanonicalAsync(UpsertCanonicalRequest request, CancellationToken ct = default);

    /// <summary>
    /// Updates the status of a canonical advisory.
    /// </summary>
    Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default);

    /// <summary>
    /// Counts active canonicals.
    /// </summary>
    Task<long> CountAsync(CancellationToken ct = default);

    #endregion

    #region Source Edge Operations

    /// <summary>
    /// Adds a source edge to a canonical advisory.
    /// Returns existing edge ID if duplicate (canonical_id, source_id, doc_hash).
    /// </summary>
    Task<SourceEdgeResult> AddSourceEdgeAsync(AddSourceEdgeRequest request, CancellationToken ct = default);

    /// <summary>
    /// Gets all source edges for a canonical.
    /// </summary>
    Task<IReadOnlyList<SourceEdge>> GetSourceEdgesAsync(Guid canonicalId, CancellationToken ct = default);

    /// <summary>
    /// Checks if a source edge already exists.
    /// </summary>
    Task<bool> SourceEdgeExistsAsync(Guid canonicalId, Guid sourceId, string docHash, CancellationToken ct = default);

    #endregion

    #region Source Operations

    /// <summary>
    /// Resolves a source key to its ID, creating if necessary.
    /// </summary>
    Task<Guid> ResolveSourceIdAsync(string sourceKey, CancellationToken ct = default);

    /// <summary>
    /// Gets the precedence rank for a source.
    /// </summary>
    Task<int> GetSourcePrecedenceAsync(string sourceKey, CancellationToken ct = default);

    #endregion
}

/// <summary>
/// Request to upsert a canonical advisory.
/// </summary>
public sealed record UpsertCanonicalRequest
{
    public required string Cve { get; init; }
    public required string AffectsKey { get; init; }
    public required string MergeHash { get; init; }
    public string? VersionRangeJson { get; init; }
    public IReadOnlyList<string> Weaknesses { get; init; } = [];
    public string? Severity { get; init; }
    public decimal? EpssScore { get; init; }
    public bool ExploitKnown { get; init; }
    public string? Title { get; init; }
    public string? Summary { get; init; }
}

/// <summary>
/// Request to add a source edge.
/// </summary>
public sealed record AddSourceEdgeRequest
{
    public required Guid CanonicalId { get; init; }
    public required Guid SourceId { get; init; }
    public required string SourceAdvisoryId { get; init; }
    public required string SourceDocHash { get; init; }
    public VendorStatus? VendorStatus { get; init; }
    public int PrecedenceRank { get; init; } = 100;
    public string? DsseEnvelopeJson { get; init; }
    public string? RawPayloadJson { get; init; }
    public DateTimeOffset FetchedAt { get; init; } = DateTimeOffset.UtcNow;
}

/// <summary>
/// Result of adding a source edge.
/// </summary>
public sealed record SourceEdgeResult
{
    public required Guid EdgeId { get; init; }
    public required bool WasCreated { get; init; }

    public static SourceEdgeResult Created(Guid edgeId) => new() { EdgeId = edgeId, WasCreated = true };
    public static SourceEdgeResult Existing(Guid edgeId) => new() { EdgeId = edgeId, WasCreated = false };
}
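// Example (illustrative, not part of this commit): because the duplicate key
// for edges is (canonical_id, source_id, doc_hash), AddSourceEdgeAsync can be
// treated as idempotent by checking WasCreated on the result.
public static class SourceEdgeIdempotencyExample
{
    public static async Task<bool> TryAddAsync(
        ICanonicalAdvisoryStore store, AddSourceEdgeRequest request, CancellationToken ct)
    {
        var result = await store.AddSourceEdgeAsync(request, ct);
        return result.WasCreated; // false means an identical edge already existed
    }
}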
@@ -0,0 +1,54 @@
// -----------------------------------------------------------------------------
// IMergeHashCalculator.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-004
// Description: Merge hash calculator abstraction for Core (avoids circular ref)
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Computes deterministic semantic merge hash for advisory deduplication.
/// This is a local abstraction in Core to avoid circular dependency with Merge library.
/// The Merge library's MergeHashCalculator implements this interface.
/// </summary>
public interface IMergeHashCalculator
{
    /// <summary>
    /// Compute merge hash from advisory identity components.
    /// </summary>
    /// <param name="input">The identity components to hash.</param>
    /// <returns>Hex-encoded SHA256 hash prefixed with "sha256:".</returns>
    string ComputeMergeHash(MergeHashInput input);
}

/// <summary>
/// Input components for merge hash computation.
/// </summary>
public sealed record MergeHashInput
{
    /// <summary>
    /// CVE identifier (e.g., "CVE-2024-1234"). Required.
    /// </summary>
    public required string Cve { get; init; }

    /// <summary>
    /// Affected package identifier (PURL or CPE). Required.
    /// </summary>
    public required string AffectsKey { get; init; }

    /// <summary>
    /// Affected version range expression. Optional.
    /// </summary>
    public string? VersionRange { get; init; }

    /// <summary>
    /// Associated CWE identifiers. Optional.
    /// </summary>
    public IReadOnlyList<string> Weaknesses { get; init; } = [];

    /// <summary>
    /// Upstream patch provenance (commit SHA, patch ID). Optional.
    /// </summary>
    public string? PatchLineage { get; init; }
}
@@ -0,0 +1,84 @@
// -----------------------------------------------------------------------------
// ISourceEdgeSigner.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-008
// Description: Interface for DSSE signing of source edges
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Service for signing source edges with DSSE envelopes.
/// This is an optional component - if not registered, source edges are stored unsigned.
/// </summary>
public interface ISourceEdgeSigner
{
    /// <summary>
    /// Signs a source edge payload and returns a DSSE envelope.
    /// </summary>
    /// <param name="request">The signing request with payload.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Signing result with envelope or error.</returns>
    Task<SourceEdgeSigningResult> SignAsync(SourceEdgeSigningRequest request, CancellationToken ct = default);
}

/// <summary>
/// Request to sign a source edge.
/// </summary>
public sealed record SourceEdgeSigningRequest
{
    /// <summary>Source advisory ID being signed.</summary>
    public required string SourceAdvisoryId { get; init; }

    /// <summary>Source name (e.g., "nvd", "debian").</summary>
    public required string SourceName { get; init; }

    /// <summary>SHA256 hash of the payload.</summary>
    public required string PayloadHash { get; init; }

    /// <summary>Raw payload JSON to be signed.</summary>
    public required string PayloadJson { get; init; }

    /// <summary>Payload type URI.</summary>
    public string PayloadType { get; init; } = "application/vnd.stellaops.advisory.v1+json";
}

/// <summary>
/// Result of signing a source edge.
/// </summary>
public sealed record SourceEdgeSigningResult
{
    /// <summary>Whether signing was successful.</summary>
    public required bool Success { get; init; }

    /// <summary>DSSE envelope (if successful).</summary>
    public DsseEnvelope? Envelope { get; init; }

    /// <summary>Error message (if failed).</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Signature reference ID for audit.</summary>
    public Guid? SignatureRef { get; init; }

    /// <summary>Creates a successful result.</summary>
    public static SourceEdgeSigningResult Signed(DsseEnvelope envelope, Guid signatureRef) => new()
    {
        Success = true,
        Envelope = envelope,
        SignatureRef = signatureRef
    };

    /// <summary>Creates a failed result.</summary>
    public static SourceEdgeSigningResult Failed(string errorMessage) => new()
    {
        Success = false,
        ErrorMessage = errorMessage
    };

    /// <summary>Creates a skipped result (signer not available).</summary>
    public static SourceEdgeSigningResult Skipped() => new()
    {
        Success = true,
        ErrorMessage = "Signing skipped - no signer configured"
    };
}
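// Example (illustrative, not part of this commit): a trivial ISourceEdgeSigner
// for tests that wraps the payload in an unsigned envelope. A production
// signer would produce real DSSE signatures over the encoded payload; the
// class name and behavior here are assumptions for the sketch.
public sealed class UnsignedSourceEdgeSigner : ISourceEdgeSigner
{
    public Task<SourceEdgeSigningResult> SignAsync(
        SourceEdgeSigningRequest request, CancellationToken ct = default)
    {
        var envelope = new DsseEnvelope
        {
            PayloadType = request.PayloadType,
            // DSSE carries the payload as plain base64; no signature is attached here.
            Payload = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(request.PayloadJson))
        };
        return Task.FromResult(SourceEdgeSigningResult.Signed(envelope, Guid.NewGuid()));
    }
}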
@@ -0,0 +1,122 @@
// -----------------------------------------------------------------------------
// IngestResult.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-003
// Description: Result type for advisory ingestion with merge decision
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Result of ingesting a raw advisory.
/// </summary>
public sealed record IngestResult
{
    /// <summary>ID of the canonical advisory (new or existing).</summary>
    public required Guid CanonicalId { get; init; }

    /// <summary>Computed merge hash for the ingested advisory.</summary>
    public required string MergeHash { get; init; }

    /// <summary>Decision made during ingestion.</summary>
    public required MergeDecision Decision { get; init; }

    /// <summary>Reference to the signature (if DSSE signed).</summary>
    public Guid? SignatureRef { get; init; }

    /// <summary>Reason for conflict (if Decision is Conflict).</summary>
    public string? ConflictReason { get; init; }

    /// <summary>ID of the created source edge.</summary>
    public Guid? SourceEdgeId { get; init; }

    /// <summary>Source that provided the advisory.</summary>
    public string? SourceName { get; init; }

    /// <summary>Source's advisory ID.</summary>
    public string? SourceAdvisoryId { get; init; }

    /// <summary>Creates a successful creation result.</summary>
    public static IngestResult Created(
        Guid canonicalId,
        string mergeHash,
        Guid sourceEdgeId,
        string sourceName,
        string sourceAdvisoryId,
        Guid? signatureRef = null) => new()
    {
        CanonicalId = canonicalId,
        MergeHash = mergeHash,
        Decision = MergeDecision.Created,
        SourceEdgeId = sourceEdgeId,
        SourceName = sourceName,
        SourceAdvisoryId = sourceAdvisoryId,
        SignatureRef = signatureRef
    };

    /// <summary>Creates a successful merge result.</summary>
    public static IngestResult Merged(
        Guid canonicalId,
        string mergeHash,
        Guid sourceEdgeId,
        string sourceName,
        string sourceAdvisoryId,
        Guid? signatureRef = null) => new()
    {
        CanonicalId = canonicalId,
        MergeHash = mergeHash,
        Decision = MergeDecision.Merged,
        SourceEdgeId = sourceEdgeId,
        SourceName = sourceName,
        SourceAdvisoryId = sourceAdvisoryId,
        SignatureRef = signatureRef
    };

    /// <summary>Creates a duplicate result (no changes made).</summary>
    public static IngestResult Duplicate(
        Guid canonicalId,
        string mergeHash,
        string sourceName,
        string sourceAdvisoryId) => new()
    {
        CanonicalId = canonicalId,
        MergeHash = mergeHash,
        Decision = MergeDecision.Duplicate,
        SourceName = sourceName,
        SourceAdvisoryId = sourceAdvisoryId
    };

    /// <summary>Creates a conflict result.</summary>
    public static IngestResult Conflict(
        Guid canonicalId,
        string mergeHash,
        string conflictReason,
        string sourceName,
        string sourceAdvisoryId) => new()
    {
        CanonicalId = canonicalId,
        MergeHash = mergeHash,
        Decision = MergeDecision.Conflict,
        ConflictReason = conflictReason,
        SourceName = sourceName,
        SourceAdvisoryId = sourceAdvisoryId
    };
}

/// <summary>
/// Decision made when ingesting an advisory.
/// </summary>
public enum MergeDecision
{
    /// <summary>New canonical advisory was created.</summary>
    Created,

    /// <summary>Advisory was merged into an existing canonical.</summary>
    Merged,

    /// <summary>Exact duplicate was detected, no changes made.</summary>
    Duplicate,

    /// <summary>Merge conflict was detected.</summary>
    Conflict
}
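// Example (illustrative, not part of this commit): collapsing a batch of
// results into per-decision counters, mirroring the summary log emitted by
// IngestBatchAsync.
public static class IngestResultExample
{
    public static (int Created, int Merged, int Duplicate, int Conflict) Tally(
        IEnumerable<IngestResult> results)
    {
        int created = 0, merged = 0, duplicate = 0, conflict = 0;
        foreach (var r in results)
        {
            switch (r.Decision)
            {
                case MergeDecision.Created: created++; break;
                case MergeDecision.Merged: merged++; break;
                case MergeDecision.Duplicate: duplicate++; break;
                case MergeDecision.Conflict: conflict++; break;
            }
        }
        return (created, merged, duplicate, conflict);
    }
}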
@@ -0,0 +1,92 @@
// -----------------------------------------------------------------------------
// SourceEdge.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-002
// Description: Domain model for source edge linking canonical to source document
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Core.Canonical;

/// <summary>
/// Link from canonical advisory to source document.
/// </summary>
public sealed record SourceEdge
{
    /// <summary>Unique source edge identifier.</summary>
    public Guid Id { get; init; }

    /// <summary>Reference to the canonical advisory.</summary>
    public Guid CanonicalId { get; init; }

    /// <summary>Source identifier (osv, nvd, ghsa, redhat, debian, etc.).</summary>
    public required string SourceName { get; init; }

    /// <summary>Source's advisory ID (DSA-5678, RHSA-2024:1234, etc.).</summary>
    public required string SourceAdvisoryId { get; init; }

    /// <summary>SHA256 hash of the raw source document.</summary>
    public required string SourceDocHash { get; init; }

    /// <summary>VEX-style status from the source.</summary>
    public VendorStatus? VendorStatus { get; init; }

    /// <summary>
    /// Source priority: vendor=10, distro=20, osv=30, nvd=40, default=100.
    /// Lower value = higher priority.
    /// </summary>
    public int PrecedenceRank { get; init; } = 100;

    /// <summary>DSSE signature envelope.</summary>
    public DsseEnvelope? DsseEnvelope { get; init; }

    /// <summary>When the source document was fetched.</summary>
    public DateTimeOffset FetchedAt { get; init; }

    /// <summary>When the edge record was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }
}

/// <summary>
/// VEX-style vendor status for a vulnerability.
/// </summary>
public enum VendorStatus
{
    /// <summary>The product is affected by the vulnerability.</summary>
    Affected,

    /// <summary>The product is not affected by the vulnerability.</summary>
    NotAffected,

    /// <summary>The vulnerability has been fixed in this version.</summary>
    Fixed,

    /// <summary>The vendor is investigating the vulnerability.</summary>
    UnderInvestigation
}

/// <summary>
/// DSSE (Dead Simple Signing Envelope) for cryptographic signatures.
/// </summary>
public sealed record DsseEnvelope
{
    /// <summary>Payload type URI (e.g., "application/vnd.stellaops.advisory.v1+json").</summary>
    public required string PayloadType { get; init; }

    /// <summary>Base64-encoded payload.</summary>
    public required string Payload { get; init; }

    /// <summary>Signatures over the payload.</summary>
    public IReadOnlyList<DsseSignature> Signatures { get; init; } = [];
}

/// <summary>
/// Single signature in a DSSE envelope.
/// </summary>
public sealed record DsseSignature
{
    /// <summary>Key ID or identifier for the signing key.</summary>
    public required string KeyId { get; init; }

    /// <summary>Base64-encoded signature.</summary>
    public required string Sig { get; init; }
}
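// Example (illustrative, not part of this commit): recovering the raw advisory
// JSON from a stored envelope. DSSE payloads are plain base64, so decoding is
// enough for inspection; verifying the signatures is a separate concern.
public static class DsseEnvelopeExample
{
    public static string DecodePayload(DsseEnvelope envelope)
        => System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(envelope.Payload));
}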
@@ -8,6 +8,7 @@
     <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
   </PropertyGroup>
   <ItemGroup>
     <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
     <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
     <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
@@ -0,0 +1,81 @@
// -----------------------------------------------------------------------------
// IMergeHashCalculator.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-002
// Description: Interface for deterministic semantic merge hash computation
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Merge.Identity;

/// <summary>
/// Computes deterministic semantic merge hash for advisory deduplication.
/// Unlike content hashing, merge hash is based on identity components only:
/// (CVE + affects_key + version_range + weaknesses + patch_lineage).
/// </summary>
/// <remarks>
/// The same CVE affecting the same package should produce the same merge hash
/// regardless of which source (Debian, RHEL, etc.) reported it.
/// </remarks>
public interface IMergeHashCalculator
{
    /// <summary>
    /// Compute merge hash from advisory identity components.
    /// </summary>
    /// <param name="input">The identity components to hash.</param>
    /// <returns>Hex-encoded SHA256 hash prefixed with "sha256:".</returns>
    string ComputeMergeHash(MergeHashInput input);

    /// <summary>
    /// Compute merge hash directly from Advisory domain model.
    /// Extracts identity components from the advisory and computes hash.
    /// </summary>
    /// <param name="advisory">The advisory to compute hash for.</param>
    /// <returns>Hex-encoded SHA256 hash prefixed with "sha256:".</returns>
    string ComputeMergeHash(Advisory advisory);

    /// <summary>
    /// Compute merge hash for a specific affected package within an advisory.
    /// </summary>
    /// <param name="advisory">The advisory containing the CVE and weaknesses.</param>
    /// <param name="affectedPackage">The specific affected package.</param>
    /// <returns>Hex-encoded SHA256 hash prefixed with "sha256:".</returns>
    string ComputeMergeHash(Advisory advisory, AffectedPackage affectedPackage);
}

/// <summary>
/// Input components for merge hash computation.
/// </summary>
public sealed record MergeHashInput
{
    /// <summary>
    /// CVE identifier (e.g., "CVE-2024-1234"). Required.
    /// Will be normalized to uppercase.
    /// </summary>
    public required string Cve { get; init; }

    /// <summary>
    /// Affected package identifier (PURL or CPE). Required.
    /// Will be normalized according to package type rules.
    /// </summary>
    public required string AffectsKey { get; init; }

    /// <summary>
    /// Affected version range expression. Optional.
    /// Will be normalized to canonical interval notation.
    /// </summary>
    public string? VersionRange { get; init; }

    /// <summary>
    /// Associated CWE identifiers. Optional.
    /// Will be normalized to uppercase, sorted, deduplicated.
    /// </summary>
    public IReadOnlyList<string> Weaknesses { get; init; } = [];

    /// <summary>
    /// Upstream patch provenance (commit SHA, patch ID). Optional.
    /// Enables differentiation of distro backports from upstream fixes.
    /// </summary>
    public string? PatchLineage { get; init; }
}
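// Example (illustrative, not part of this commit): two sources reporting the
// same CVE against the same package yield identical inputs after normalization,
// and therefore the same merge hash - which is what drives deduplication. The
// CVE casing difference below relies on the documented uppercase normalization.
public static class MergeHashDeterminismExample
{
    public static bool SameIdentity(IMergeHashCalculator calculator)
    {
        var fromDebian = new MergeHashInput { Cve = "cve-2024-1234", AffectsKey = "pkg:deb/debian/openssl" };
        var fromRhel = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:deb/debian/openssl" };

        return calculator.ComputeMergeHash(fromDebian) == calculator.ComputeMergeHash(fromRhel);
    }
}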
@@ -0,0 +1,288 @@
// -----------------------------------------------------------------------------
// MergeHashCalculator.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Tasks: MHASH-8200-009, MHASH-8200-010, MHASH-8200-011
// Description: Core merge hash calculator implementation
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text;
using StellaOps.Concelier.Merge.Identity.Normalizers;
using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Merge.Identity;

/// <summary>
/// Computes deterministic semantic merge hash for advisory deduplication.
/// </summary>
/// <remarks>
/// The merge hash is computed from identity components only:
/// <list type="bullet">
/// <item>CVE identifier (normalized, uppercase)</item>
/// <item>Affected package identifier (PURL/CPE, normalized)</item>
/// <item>Version range (canonical interval notation)</item>
/// <item>CWE weaknesses (sorted, deduplicated)</item>
/// <item>Patch lineage (optional, for backport differentiation)</item>
/// </list>
/// </remarks>
public sealed class MergeHashCalculator : IMergeHashCalculator
{
    private static readonly UTF8Encoding Utf8NoBom = new(false);

    private readonly ICveNormalizer _cveNormalizer;
    private readonly IPurlNormalizer _purlNormalizer;
    private readonly ICpeNormalizer _cpeNormalizer;
    private readonly IVersionRangeNormalizer _versionRangeNormalizer;
    private readonly ICweNormalizer _cweNormalizer;
    private readonly IPatchLineageNormalizer _patchLineageNormalizer;

    /// <summary>
    /// Creates a new MergeHashCalculator with default normalizers.
    /// </summary>
    public MergeHashCalculator()
        : this(
            CveNormalizer.Instance,
            PurlNormalizer.Instance,
            CpeNormalizer.Instance,
            VersionRangeNormalizer.Instance,
            CweNormalizer.Instance,
            PatchLineageNormalizer.Instance)
    {
    }

    /// <summary>
    /// Creates a new MergeHashCalculator with custom normalizers.
    /// </summary>
    public MergeHashCalculator(
        ICveNormalizer cveNormalizer,
        IPurlNormalizer purlNormalizer,
        ICpeNormalizer cpeNormalizer,
        IVersionRangeNormalizer versionRangeNormalizer,
        ICweNormalizer cweNormalizer,
        IPatchLineageNormalizer patchLineageNormalizer)
    {
        _cveNormalizer = cveNormalizer ?? throw new ArgumentNullException(nameof(cveNormalizer));
        _purlNormalizer = purlNormalizer ?? throw new ArgumentNullException(nameof(purlNormalizer));
        _cpeNormalizer = cpeNormalizer ?? throw new ArgumentNullException(nameof(cpeNormalizer));
        _versionRangeNormalizer = versionRangeNormalizer ?? throw new ArgumentNullException(nameof(versionRangeNormalizer));
        _cweNormalizer = cweNormalizer ?? throw new ArgumentNullException(nameof(cweNormalizer));
        _patchLineageNormalizer = patchLineageNormalizer ?? throw new ArgumentNullException(nameof(patchLineageNormalizer));
    }

    /// <inheritdoc />
    public string ComputeMergeHash(MergeHashInput input)
    {
        ArgumentNullException.ThrowIfNull(input);

        var canonical = BuildCanonicalString(input);
        return ComputeHash(canonical);
    }

    /// <inheritdoc />
    public string ComputeMergeHash(Advisory advisory)
    {
        ArgumentNullException.ThrowIfNull(advisory);

        // Extract CVE from advisory key or aliases
        var cve = ExtractCve(advisory);

        // If no affected packages, compute hash from CVE and weaknesses only
        if (advisory.AffectedPackages.IsDefaultOrEmpty)
        {
            var input = new MergeHashInput
            {
                Cve = cve,
                AffectsKey = string.Empty,
                VersionRange = null,
                Weaknesses = ExtractWeaknesses(advisory),
                PatchLineage = null
            };
            return ComputeMergeHash(input);
        }

        // Compute hash for first affected package (primary identity)
        // For multi-package advisories, each package gets its own hash
        return ComputeMergeHash(advisory, advisory.AffectedPackages[0]);
    }

    /// <inheritdoc />
    public string ComputeMergeHash(Advisory advisory, AffectedPackage affectedPackage)
    {
        ArgumentNullException.ThrowIfNull(advisory);
        ArgumentNullException.ThrowIfNull(affectedPackage);

        var cve = ExtractCve(advisory);
        var affectsKey = BuildAffectsKey(affectedPackage);
        var versionRange = BuildVersionRange(affectedPackage);
        var weaknesses = ExtractWeaknesses(advisory);
        var patchLineage = ExtractPatchLineage(advisory, affectedPackage);

        var input = new MergeHashInput
        {
            Cve = cve,
            AffectsKey = affectsKey,
            VersionRange = versionRange,
            Weaknesses = weaknesses,
            PatchLineage = patchLineage
        };

        return ComputeMergeHash(input);
    }

    private string BuildCanonicalString(MergeHashInput input)
    {
        // Normalize all components
        var cve = _cveNormalizer.Normalize(input.Cve);
        var affectsKey = NormalizeAffectsKey(input.AffectsKey);
        var versionRange = _versionRangeNormalizer.Normalize(input.VersionRange);
        var weaknesses = _cweNormalizer.Normalize(input.Weaknesses);
        var patchLineage = _patchLineageNormalizer.Normalize(input.PatchLineage);

        // Build deterministic canonical string with fixed field ordering
        // Format: CVE|AFFECTS|VERSION|CWE|LINEAGE
        var sb = new StringBuilder();

        sb.Append("CVE:");
        sb.Append(cve);
        sb.Append('|');

        sb.Append("AFFECTS:");
        sb.Append(affectsKey);
        sb.Append('|');

        sb.Append("VERSION:");
        sb.Append(versionRange);
        sb.Append('|');

        sb.Append("CWE:");
        sb.Append(weaknesses);
        sb.Append('|');

        sb.Append("LINEAGE:");
        sb.Append(patchLineage ?? string.Empty);

        return sb.ToString();
    }

    private string NormalizeAffectsKey(string affectsKey)
    {
        if (string.IsNullOrWhiteSpace(affectsKey))
        {
            return string.Empty;
        }

        var trimmed = affectsKey.Trim();

        // Route to the appropriate normalizer
        if (trimmed.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
        {
            return _purlNormalizer.Normalize(trimmed);
        }

        if (trimmed.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
        {
            return _cpeNormalizer.Normalize(trimmed);
        }

        // Default to PURL normalizer for unknown formats
        return _purlNormalizer.Normalize(trimmed);
    }

    private static string ComputeHash(string canonical)
    {
        var bytes = Utf8NoBom.GetBytes(canonical);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    private static string ExtractCve(Advisory advisory)
    {
        // Check if the advisory key is a CVE
        if (advisory.AdvisoryKey.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
        {
            return advisory.AdvisoryKey;
        }

        // Look for a CVE in the aliases
        var cveAlias = advisory.Aliases
            .FirstOrDefault(static a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase));

        return cveAlias ?? advisory.AdvisoryKey;
    }

    private static string BuildAffectsKey(AffectedPackage package)
    {
        // Build PURL-like identifier from package
        return package.Identifier;
    }

    private static string? BuildVersionRange(AffectedPackage package)
    {
        if (package.VersionRanges.IsDefaultOrEmpty)
        {
            return null;
        }

        // Combine all version ranges - use RangeExpression or build from primitives
        var ranges = package.VersionRanges
            .Select(static r => r.RangeExpression ?? BuildRangeFromPrimitives(r))
            .Where(static r => !string.IsNullOrWhiteSpace(r))
            .OrderBy(static r => r, StringComparer.Ordinal)
            .ToList();

        if (ranges.Count == 0)
        {
            return null;
        }

        return string.Join(",", ranges);
    }

    private static string? BuildRangeFromPrimitives(AffectedVersionRange range)
    {
        // Build a range expression from introduced/fixed/lastAffected
        var parts = new List<string>();

        if (!string.IsNullOrWhiteSpace(range.IntroducedVersion))
        {
            parts.Add($">={range.IntroducedVersion}");
        }

        if (!string.IsNullOrWhiteSpace(range.FixedVersion))
        {
            parts.Add($"<{range.FixedVersion}");
        }
        else if (!string.IsNullOrWhiteSpace(range.LastAffectedVersion))
        {
            parts.Add($"<={range.LastAffectedVersion}");
        }

        return parts.Count > 0 ? string.Join(",", parts) : null;
    }

    private static IReadOnlyList<string> ExtractWeaknesses(Advisory advisory)
    {
        if (advisory.Cwes.IsDefaultOrEmpty)
        {
            return [];
        }

        return advisory.Cwes
            .Select(static w => w.Identifier)
            .Where(static w => !string.IsNullOrWhiteSpace(w))
            .ToList();
    }

    private static string? ExtractPatchLineage(Advisory advisory, AffectedPackage package)
    {
        // Look for patch lineage in provenance or references.
        // This is a simplified implementation - a full implementation would
        // extract from backport proof or upstream references.
        var patchRef = advisory.References
            .Where(static r => r.Kind is "patch" or "fix" or "commit")
            .Select(static r => r.Url)
            .FirstOrDefault();

        return patchRef;
    }
}
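// Example (illustrative, not part of this commit): the canonical pre-image for
// a typical input, per the CVE|AFFECTS|VERSION|CWE|LINEAGE layout above:
//
//   CVE:CVE-2024-1234|AFFECTS:pkg:deb/debian/openssl|VERSION:>=1.0.0,<1.0.5|CWE:CWE-416|LINEAGE:
//
// The exact AFFECTS/VERSION/CWE tokens depend on the normalizer implementations,
// so treat the spelling here as indicative rather than byte-exact.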
@@ -0,0 +1,159 @@
// -----------------------------------------------------------------------------
// MergeHashShadowWriteService.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-020
// Description: Shadow-write merge hashes for existing advisories during migration
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Advisories;

namespace StellaOps.Concelier.Merge.Identity;

/// <summary>
/// Service to compute and persist merge hashes for existing advisories
/// without changing their identity. Used during migration to backfill
/// merge_hash for pre-existing data.
/// </summary>
public sealed class MergeHashShadowWriteService
{
    private readonly IAdvisoryStore _advisoryStore;
    private readonly IMergeHashCalculator _mergeHashCalculator;
    private readonly ILogger<MergeHashShadowWriteService> _logger;

    public MergeHashShadowWriteService(
        IAdvisoryStore advisoryStore,
        IMergeHashCalculator mergeHashCalculator,
        ILogger<MergeHashShadowWriteService> logger)
    {
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _mergeHashCalculator = mergeHashCalculator ?? throw new ArgumentNullException(nameof(mergeHashCalculator));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Backfills merge hashes for all advisories that don't have one.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Summary of the backfill operation.</returns>
    public async Task<ShadowWriteResult> BackfillAllAsync(CancellationToken cancellationToken)
    {
        var processed = 0;
        var updated = 0;
        var skipped = 0;
        var failed = 0;

        await foreach (var advisory in _advisoryStore.StreamAsync(cancellationToken).ConfigureAwait(false))
        {
            cancellationToken.ThrowIfCancellationRequested();
            processed++;

            // Skip if already has merge hash
            if (!string.IsNullOrEmpty(advisory.MergeHash))
            {
                skipped++;
                continue;
            }

            try
            {
                var mergeHash = _mergeHashCalculator.ComputeMergeHash(advisory);
                var enriched = EnrichWithMergeHash(advisory, mergeHash);
                await _advisoryStore.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false);
                updated++;

                if (updated % 100 == 0)
                {
                    _logger.LogInformation(
                        "Merge hash backfill progress: processed={Processed}, updated={Updated}, skipped={Skipped}, failed={Failed}",
                        processed, updated, skipped, failed);
                }
            }
            catch (Exception ex)
            {
                failed++;
                _logger.LogWarning(ex, "Failed to compute merge hash for {AdvisoryKey}", advisory.AdvisoryKey);
            }
        }

        _logger.LogInformation(
            "Merge hash backfill complete: processed={Processed}, updated={Updated}, skipped={Skipped}, failed={Failed}",
            processed, updated, skipped, failed);

        return new ShadowWriteResult(processed, updated, skipped, failed);
    }

    /// <summary>
    /// Computes and persists merge hash for a single advisory.
    /// </summary>
    /// <param name="advisoryKey">The advisory key to process.</param>
    /// <param name="force">If true, recomputes even if hash exists.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if advisory was updated, false otherwise.</returns>
    public async Task<bool> BackfillOneAsync(string advisoryKey, bool force, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);
|
||||
|
||||
var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
|
||||
if (advisory is null)
|
||||
{
|
||||
_logger.LogWarning("Advisory {AdvisoryKey} not found for merge hash backfill", advisoryKey);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Skip if already has merge hash and not forcing
|
||||
if (!force && !string.IsNullOrEmpty(advisory.MergeHash))
|
||||
{
|
||||
_logger.LogDebug("Skipping {AdvisoryKey}: already has merge hash", advisoryKey);
|
||||
return false;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var mergeHash = _mergeHashCalculator.ComputeMergeHash(advisory);
|
||||
var enriched = EnrichWithMergeHash(advisory, mergeHash);
|
||||
await _advisoryStore.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation("Computed merge hash for {AdvisoryKey}: {MergeHash}", advisoryKey, mergeHash);
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to compute merge hash for {AdvisoryKey}", advisoryKey);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
private static Advisory EnrichWithMergeHash(Advisory advisory, string mergeHash)
|
||||
{
|
||||
return new Advisory(
|
||||
advisory.AdvisoryKey,
|
||||
advisory.Title,
|
||||
advisory.Summary,
|
||||
advisory.Language,
|
||||
advisory.Published,
|
||||
advisory.Modified,
|
||||
advisory.Severity,
|
||||
advisory.ExploitKnown,
|
||||
advisory.Aliases,
|
||||
advisory.Credits,
|
||||
advisory.References,
|
||||
advisory.AffectedPackages,
|
||||
advisory.CvssMetrics,
|
||||
advisory.Provenance,
|
||||
advisory.Description,
|
||||
advisory.Cwes,
|
||||
advisory.CanonicalMetricId,
|
||||
mergeHash);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a shadow-write backfill operation.
|
||||
/// </summary>
|
||||
/// <param name="Processed">Total advisories examined.</param>
|
||||
/// <param name="Updated">Advisories updated with new merge hash.</param>
|
||||
/// <param name="Skipped">Advisories skipped (already had merge hash).</param>
|
||||
/// <param name="Failed">Advisories that failed hash computation.</param>
|
||||
public sealed record ShadowWriteResult(int Processed, int Updated, int Skipped, int Failed);
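
A minimal usage sketch; `advisoryStore` and `mergeHashCalculator` are hypothetical names, assumed to be resolved from the host's DI container:

```
using Microsoft.Extensions.Logging.Abstractions;

var service = new MergeHashShadowWriteService(
    advisoryStore,        // IAdvisoryStore, assumed from DI
    mergeHashCalculator,  // IMergeHashCalculator, assumed from DI
    NullLogger<MergeHashShadowWriteService>.Instance);

var result = await service.BackfillAllAsync(CancellationToken.None);
Console.WriteLine($"processed={result.Processed} updated={result.Updated} skipped={result.Skipped} failed={result.Failed}");
```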
@@ -0,0 +1,120 @@
// -----------------------------------------------------------------------------
// CpeNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-004
// Description: CPE normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text.RegularExpressions;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes CPE identifiers to canonical CPE 2.3 format.
/// </summary>
public sealed partial class CpeNormalizer : ICpeNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static CpeNormalizer Instance { get; } = new();

    /// <summary>
    /// Pattern for CPE 2.3 formatted string binding.
    /// </summary>
    [GeneratedRegex(
        @"^cpe:2\.3:([aho]):([^:]+):([^:]+):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*)$",
        RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex Cpe23Pattern();

    /// <summary>
    /// Pattern for CPE 2.2 URI binding.
    /// </summary>
    [GeneratedRegex(
        @"^cpe:/([aho]):([^:]+):([^:]+)(?::([^:]+))?(?::([^:]+))?(?::([^:]+))?(?::([^:]+))?$",
        RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex Cpe22Pattern();

    /// <inheritdoc />
    public string Normalize(string cpe)
    {
        if (string.IsNullOrWhiteSpace(cpe))
        {
            return string.Empty;
        }

        var trimmed = cpe.Trim();

        // Try CPE 2.3 format first
        var match23 = Cpe23Pattern().Match(trimmed);
        if (match23.Success)
        {
            return NormalizeCpe23(match23);
        }

        // Try CPE 2.2 format
        var match22 = Cpe22Pattern().Match(trimmed);
        if (match22.Success)
        {
            return ConvertCpe22ToCpe23(match22);
        }

        // Return as lowercase if unrecognized
        return trimmed.ToLowerInvariant();
    }

    private static string NormalizeCpe23(Match match)
    {
        var part = match.Groups[1].Value.ToLowerInvariant();
        var vendor = NormalizeComponent(match.Groups[2].Value);
        var product = NormalizeComponent(match.Groups[3].Value);
        var version = NormalizeComponent(match.Groups[4].Value);
        var update = NormalizeComponent(match.Groups[5].Value);
        var edition = NormalizeComponent(match.Groups[6].Value);
        var language = NormalizeComponent(match.Groups[7].Value);
        var swEdition = NormalizeComponent(match.Groups[8].Value);
        var targetSw = NormalizeComponent(match.Groups[9].Value);
        var targetHw = NormalizeComponent(match.Groups[10].Value);
        var other = NormalizeComponent(match.Groups[11].Value);

        return $"cpe:2.3:{part}:{vendor}:{product}:{version}:{update}:{edition}:{language}:{swEdition}:{targetSw}:{targetHw}:{other}";
    }

    private static string ConvertCpe22ToCpe23(Match match)
    {
        var part = match.Groups[1].Value.ToLowerInvariant();
        var vendor = NormalizeComponent(match.Groups[2].Value);
        var product = NormalizeComponent(match.Groups[3].Value);
        var version = match.Groups[4].Success ? NormalizeComponent(match.Groups[4].Value) : "*";
        var update = match.Groups[5].Success ? NormalizeComponent(match.Groups[5].Value) : "*";
        var edition = match.Groups[6].Success ? NormalizeComponent(match.Groups[6].Value) : "*";
        var language = match.Groups[7].Success ? NormalizeComponent(match.Groups[7].Value) : "*";

        return $"cpe:2.3:{part}:{vendor}:{product}:{version}:{update}:{edition}:{language}:*:*:*:*";
    }

    private static string NormalizeComponent(string component)
    {
        if (string.IsNullOrWhiteSpace(component))
        {
            return "*";
        }

        var trimmed = component.Trim();

        // Wildcard / not-applicable markers
        if (trimmed is "*" or "-" or "ANY" or "NA")
        {
            return trimmed switch
            {
                "ANY" => "*",
                "NA" => "-",
                _ => trimmed
            };
        }

        // Lowercase; any escaping in the component is preserved as-is
        return trimmed.ToLowerInvariant();
    }
}
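
Expected behaviour, traced directly from the two patterns above (a sketch):

```
var normalizer = CpeNormalizer.Instance;

// CPE 2.2 URI binding is widened to the 2.3 formatted string:
Console.WriteLine(normalizer.Normalize("cpe:/a:Microsoft:Internet_Explorer:8.0"));
// -> cpe:2.3:a:microsoft:internet_explorer:8.0:*:*:*:*:*:*:*

// CPE 2.3 input is lowercased component by component:
Console.WriteLine(normalizer.Normalize("cpe:2.3:a:Vendor:Product:1.0:*:*:*:*:*:*:*"));
// -> cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*
```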
@@ -0,0 +1,71 @@
// -----------------------------------------------------------------------------
// CveNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-003 (part of normalization helpers)
// Description: CVE identifier normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text.RegularExpressions;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes CVE identifiers to canonical uppercase format.
/// </summary>
public sealed partial class CveNormalizer : ICveNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static CveNormalizer Instance { get; } = new();

    /// <summary>
    /// Pattern matching a CVE identifier: CVE-YYYY-NNNN (4+ digits after the year).
    /// </summary>
    [GeneratedRegex(@"^CVE-(\d{4})-(\d{4,})$", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CvePattern();

    /// <inheritdoc />
    public string Normalize(string? cve)
    {
        if (string.IsNullOrWhiteSpace(cve))
        {
            return string.Empty;
        }

        var trimmed = cve.Trim();

        // Handle common prefixes
        if (trimmed.StartsWith("cve-", StringComparison.OrdinalIgnoreCase))
        {
            trimmed = "CVE-" + trimmed[4..];
        }
        else if (!trimmed.StartsWith("CVE-", StringComparison.Ordinal))
        {
            // Try to extract a CVE from the string
            var match = CvePattern().Match(trimmed);
            if (match.Success)
            {
                trimmed = match.Value;
            }
            else
            {
                // Assume it's just the number part: 2024-1234 -> CVE-2024-1234
                if (Regex.IsMatch(trimmed, @"^\d{4}-\d{4,}$"))
                {
                    trimmed = "CVE-" + trimmed;
                }
            }
        }

        // Validate and uppercase
        var normalized = trimmed.ToUpperInvariant();
        if (!CvePattern().IsMatch(normalized))
        {
            // Return as-is if not a valid CVE (will still be hashed consistently)
            return normalized;
        }

        return normalized;
    }
}
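
Traced from the code above (a sketch):

```
var normalizer = CveNormalizer.Instance;

Console.WriteLine(normalizer.Normalize("cve-2024-1234")); // -> CVE-2024-1234
Console.WriteLine(normalizer.Normalize("2024-1234"));     // -> CVE-2024-1234 (bare year-number form)
Console.WriteLine(normalizer.Normalize("not-a-cve"));     // -> NOT-A-CVE (passed through, uppercased)
```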
@@ -0,0 +1,82 @@
// -----------------------------------------------------------------------------
// CweNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-006
// Description: CWE identifier list normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text.RegularExpressions;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes CWE identifier lists for deterministic hashing.
/// </summary>
public sealed partial class CweNormalizer : ICweNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static CweNormalizer Instance { get; } = new();

    /// <summary>
    /// Pattern matching CWE identifier: CWE-NNN or just NNN.
    /// </summary>
    [GeneratedRegex(@"(?:CWE-)?(\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CwePattern();

    /// <inheritdoc />
    public string Normalize(IEnumerable<string>? cwes)
    {
        if (cwes is null)
        {
            return string.Empty;
        }

        var normalized = cwes
            .Where(static cwe => !string.IsNullOrWhiteSpace(cwe))
            .Select(NormalizeSingle)
            .Where(static cwe => cwe is not null)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(ExtractCweNumber)
            .ThenBy(static cwe => cwe, StringComparer.OrdinalIgnoreCase)
            .ToList();

        if (normalized.Count == 0)
        {
            return string.Empty;
        }

        return string.Join(",", normalized);
    }

    private static string? NormalizeSingle(string cwe)
    {
        var trimmed = cwe.Trim();
        var match = CwePattern().Match(trimmed);

        if (!match.Success)
        {
            return null;
        }

        var number = match.Groups[1].Value;
        return $"CWE-{number}";
    }

    private static int ExtractCweNumber(string? cwe)
    {
        if (string.IsNullOrWhiteSpace(cwe))
        {
            return int.MaxValue;
        }

        var match = CwePattern().Match(cwe);
        if (match.Success && int.TryParse(match.Groups[1].Value, out var number))
        {
            return number;
        }

        return int.MaxValue;
    }
}
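
Traced from the code above (a sketch):

```
var normalizer = CweNormalizer.Instance;

// Mixed prefixes and duplicates collapse to a numerically sorted, canonical list:
Console.WriteLine(normalizer.Normalize(new[] { "cwe-120", "79", "CWE-79" }));
// -> CWE-79,CWE-120
```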
@@ -0,0 +1,95 @@
// -----------------------------------------------------------------------------
// INormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Tasks: MHASH-8200-003 to MHASH-8200-007
// Description: Normalizer interfaces for merge hash components
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes PURL identifiers to canonical form for deterministic hashing.
/// </summary>
public interface IPurlNormalizer
{
    /// <summary>
    /// Normalize PURL to canonical form.
    /// - Lowercase package type
    /// - URL-encode special characters in namespace
    /// - Strip non-essential qualifiers (arch, type, checksum)
    /// - Sort remaining qualifiers alphabetically
    /// </summary>
    string Normalize(string purl);
}

/// <summary>
/// Normalizes CPE identifiers to canonical CPE 2.3 format.
/// </summary>
public interface ICpeNormalizer
{
    /// <summary>
    /// Normalize CPE to canonical CPE 2.3 format.
    /// - Convert CPE 2.2 URI format to CPE 2.3 formatted string
    /// - Lowercase vendor and product
    /// - Normalize wildcards
    /// </summary>
    string Normalize(string cpe);
}

/// <summary>
/// Normalizes version range expressions to canonical interval notation.
/// </summary>
public interface IVersionRangeNormalizer
{
    /// <summary>
    /// Normalize version range to canonical expression.
    /// - Convert various formats to canonical interval notation
    /// - Trim whitespace
    /// - Normalize operators (e.g., "[1.0, 2.0)" → ">=1.0,<2.0")
    /// </summary>
    string Normalize(string? range);
}

/// <summary>
/// Normalizes CWE identifier lists for deterministic hashing.
/// </summary>
public interface ICweNormalizer
{
    /// <summary>
    /// Normalize CWE list to a sorted, deduplicated, uppercase set.
    /// - Uppercase all identifiers
    /// - Ensure "CWE-" prefix
    /// - Sort numerically by CWE number
    /// - Deduplicate
    /// - Return comma-joined string
    /// </summary>
    string Normalize(IEnumerable<string>? cwes);
}

/// <summary>
/// Normalizes patch lineage references for deterministic hashing.
/// </summary>
public interface IPatchLineageNormalizer
{
    /// <summary>
    /// Normalize patch lineage to canonical commit reference.
    /// - Extract commit SHAs from various formats
    /// - Normalize to lowercase hex
    /// - Handle patch IDs, bug tracker references
    /// </summary>
    string? Normalize(string? lineage);
}

/// <summary>
/// Normalizes CVE identifiers for deterministic hashing.
/// </summary>
public interface ICveNormalizer
{
    /// <summary>
    /// Normalize CVE identifier to canonical uppercase format.
    /// - Ensure "CVE-" prefix
    /// - Uppercase
    /// - Validate format (CVE-YYYY-NNNNN+)
    /// </summary>
    string Normalize(string? cve);
}
@@ -0,0 +1,119 @@
// -----------------------------------------------------------------------------
// PatchLineageNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-007
// Description: Patch lineage normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text.RegularExpressions;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes patch lineage references for deterministic hashing.
/// Extracts upstream commit references from various formats.
/// </summary>
public sealed partial class PatchLineageNormalizer : IPatchLineageNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static PatchLineageNormalizer Instance { get; } = new();

    /// <summary>
    /// Pattern for a full Git commit SHA (40 hex chars).
    /// </summary>
    [GeneratedRegex(@"\b([0-9a-f]{40})\b", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex FullShaPattern();

    /// <summary>
    /// Pattern for an abbreviated Git commit SHA (7-12 hex chars).
    /// </summary>
    [GeneratedRegex(@"\b([0-9a-f]{7,12})\b", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex AbbrevShaPattern();

    /// <summary>
    /// Pattern for GitHub/GitLab commit URLs.
    /// </summary>
    [GeneratedRegex(
        @"(?:github\.com|gitlab\.com)/[^/]+/[^/]+/commit/([0-9a-f]{7,40})",
        RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CommitUrlPattern();

    /// <summary>
    /// Pattern for patch IDs in the format "patch-NNNNN" or "PATCH-NNNNN".
    /// </summary>
    [GeneratedRegex(@"\b(PATCH-\d+)\b", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex PatchIdPattern();

    /// <inheritdoc />
    public string? Normalize(string? lineage)
    {
        if (string.IsNullOrWhiteSpace(lineage))
        {
            return null;
        }

        var trimmed = lineage.Trim();

        // Try to extract a commit SHA from a URL first
        var urlMatch = CommitUrlPattern().Match(trimmed);
        if (urlMatch.Success)
        {
            return NormalizeSha(urlMatch.Groups[1].Value);
        }

        // Try a full SHA
        var fullMatch = FullShaPattern().Match(trimmed);
        if (fullMatch.Success)
        {
            return NormalizeSha(fullMatch.Groups[1].Value);
        }

        // Try an abbreviated SHA (only if it looks like a commit reference)
        if (LooksLikeCommitReference(trimmed))
        {
            var abbrevMatch = AbbrevShaPattern().Match(trimmed);
            if (abbrevMatch.Success)
            {
                return NormalizeSha(abbrevMatch.Groups[1].Value);
            }
        }

        // Try a patch ID
        var patchMatch = PatchIdPattern().Match(trimmed);
        if (patchMatch.Success)
        {
            return patchMatch.Groups[1].Value.ToUpperInvariant();
        }

        // Return null if no recognizable pattern
        return null;
    }

    private static bool LooksLikeCommitReference(string value)
    {
        // Heuristic: if it contains "commit", "sha", "fix", "patch", or "backport",
        // it is likely a commit reference.
        var lower = value.ToLowerInvariant();
        return lower.Contains("commit") ||
               lower.Contains("sha") ||
               lower.Contains("fix") ||
               lower.Contains("patch") ||
               lower.Contains("backport");
    }

    private static string NormalizeSha(string sha)
    {
        // Lowercase and ensure we have the full SHA or a consistent abbreviation
        var normalized = sha.ToLowerInvariant();

        // If it's a full SHA, return it
        if (normalized.Length == 40)
        {
            return normalized;
        }

        // For abbreviated SHAs, return as-is (they'll still hash consistently)
        return normalized;
    }
}
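
Traced from the code above (a sketch; the repository URL is hypothetical):

```
var normalizer = PatchLineageNormalizer.Instance;

// Commit URL -> lowercase SHA:
Console.WriteLine(normalizer.Normalize(
    "https://github.com/openssl/openssl/commit/0123456789ABCDEF0123456789ABCDEF01234567"));
// -> 0123456789abcdef0123456789abcdef01234567

// Free text without a recognizable pattern yields null:
Console.WriteLine(normalizer.Normalize("see vendor bulletin") ?? "(null)");
```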
@@ -0,0 +1,178 @@
// -----------------------------------------------------------------------------
// PurlNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-003
// Description: PURL normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text;
using System.Text.RegularExpressions;
using System.Web;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes PURL identifiers to canonical form for deterministic hashing.
/// </summary>
public sealed partial class PurlNormalizer : IPurlNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static PurlNormalizer Instance { get; } = new();

    /// <summary>
    /// Qualifiers to strip from the PURL for identity hashing (architecture-specific, non-identity).
    /// </summary>
    private static readonly HashSet<string> StrippedQualifiers = new(StringComparer.OrdinalIgnoreCase)
    {
        "arch",
        "architecture",
        "os",
        "platform",
        "type",
        "classifier",
        "checksum",
        "download_url",
        "vcs_url",
        "repository_url"
    };

    /// <summary>
    /// Pattern for parsing a PURL: pkg:type/namespace/name@version?qualifiers#subpath
    /// </summary>
    [GeneratedRegex(
        @"^pkg:([a-zA-Z][a-zA-Z0-9+.-]*)(?:/([^/@#?]+))?/([^/@#?]+)(?:@([^?#]+))?(?:\?([^#]+))?(?:#(.+))?$",
        RegexOptions.Compiled)]
    private static partial Regex PurlPattern();

    /// <inheritdoc />
    public string Normalize(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return string.Empty;
        }

        var trimmed = purl.Trim();

        // Handle non-PURL identifiers (CPE, plain package names)
        if (!trimmed.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
        {
            // If it looks like a CPE, return as-is for the CPE normalizer
            if (trimmed.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
            {
                return trimmed;
            }

            // Return lowercase for plain identifiers
            return trimmed.ToLowerInvariant();
        }

        var match = PurlPattern().Match(trimmed);
        if (!match.Success)
        {
            // Invalid PURL format, return lowercase
            return trimmed.ToLowerInvariant();
        }

        var type = match.Groups[1].Value.ToLowerInvariant();
        var ns = match.Groups[2].Success ? NormalizeNamespace(match.Groups[2].Value, type) : null;
        var name = NormalizeName(match.Groups[3].Value, type);
        var version = match.Groups[4].Success ? match.Groups[4].Value : null;
        var qualifiers = match.Groups[5].Success ? NormalizeQualifiers(match.Groups[5].Value) : null;
        // Subpath is stripped for identity purposes

        return BuildPurl(type, ns, name, version, qualifiers);
    }

    private static string NormalizeNamespace(string ns, string type)
    {
        // URL-decode then re-encode consistently
        var decoded = HttpUtility.UrlDecode(ns);

        // For npm, handle scoped packages (@org/pkg): lowercase, then re-encode
        // so the leading "@" round-trips as "%40" in the canonical form.
        if (type == "npm" && decoded.StartsWith("@"))
        {
            decoded = decoded.ToLowerInvariant();
            return HttpUtility.UrlEncode(decoded) ?? decoded;
        }

        // Most ecosystems: lowercase namespace
        return decoded.ToLowerInvariant();
    }

    private static string NormalizeName(string name, string type)
    {
        var decoded = HttpUtility.UrlDecode(name);

        // Most ecosystems use lowercase names
        return type switch
        {
            "golang" => decoded, // Go uses mixed case
            "nuget" => decoded.ToLowerInvariant(), // NuGet is case-insensitive
            _ => decoded.ToLowerInvariant()
        };
    }

    private static string? NormalizeQualifiers(string qualifiers)
    {
        if (string.IsNullOrWhiteSpace(qualifiers))
        {
            return null;
        }

        var pairs = qualifiers
            .Split('&', StringSplitOptions.RemoveEmptyEntries)
            .Select(static pair =>
            {
                var eqIndex = pair.IndexOf('=');
                if (eqIndex < 0)
                {
                    return (Key: pair.ToLowerInvariant(), Value: (string?)null);
                }

                return (Key: pair[..eqIndex].ToLowerInvariant(), Value: pair[(eqIndex + 1)..]);
            })
            .Where(pair => !StrippedQualifiers.Contains(pair.Key))
            .OrderBy(static pair => pair.Key, StringComparer.Ordinal)
            .ToList();

        if (pairs.Count == 0)
        {
            return null;
        }

        return string.Join("&", pairs.Select(static p =>
            p.Value is null ? p.Key : $"{p.Key}={p.Value}"));
    }

    private static string BuildPurl(string type, string? ns, string name, string? version, string? qualifiers)
    {
        var sb = new StringBuilder("pkg:");
        sb.Append(type);
        sb.Append('/');

        if (!string.IsNullOrEmpty(ns))
        {
            sb.Append(ns);
            sb.Append('/');
        }

        sb.Append(name);

        if (!string.IsNullOrEmpty(version))
        {
            sb.Append('@');
            sb.Append(version);
        }

        if (!string.IsNullOrEmpty(qualifiers))
        {
            sb.Append('?');
            sb.Append(qualifiers);
        }

        return sb.ToString();
    }
}
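
Traced from the code above (a sketch; the package is hypothetical):

```
var normalizer = PurlNormalizer.Instance;

// Architecture/OS qualifiers are stripped, remaining qualifiers sorted, type and name lowercased:
Console.WriteLine(normalizer.Normalize("pkg:NPM/%40Scope/Left-Pad@1.3.0?os=linux&repository_url=x&distro=jessie"));
// -> pkg:npm/%40scope/left-pad@1.3.0?distro=jessie
```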
@@ -0,0 +1,165 @@
// -----------------------------------------------------------------------------
// VersionRangeNormalizer.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-005
// Description: Version range normalization for merge hash
// -----------------------------------------------------------------------------

using System.Text.RegularExpressions;

namespace StellaOps.Concelier.Merge.Identity.Normalizers;

/// <summary>
/// Normalizes version range expressions to canonical interval notation.
/// </summary>
public sealed partial class VersionRangeNormalizer : IVersionRangeNormalizer
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static VersionRangeNormalizer Instance { get; } = new();

    /// <summary>
    /// Pattern for mathematical interval notation: [1.0, 2.0) or (1.0, 2.0]
    /// </summary>
    [GeneratedRegex(
        @"^([\[\(])\s*([^,\s]*)\s*,\s*([^)\]\s]*)\s*([\]\)])$",
        RegexOptions.Compiled)]
    private static partial Regex IntervalPattern();

    /// <summary>
    /// Pattern for comparison operators: >= 1.0, < 2.0
    /// </summary>
    [GeneratedRegex(
        @"^(>=?|<=?|=|!=|~=|~>|\^)\s*(.+)$",
        RegexOptions.Compiled)]
    private static partial Regex ComparisonPattern();

    /// <inheritdoc />
    public string Normalize(string? range)
    {
        if (string.IsNullOrWhiteSpace(range))
        {
            return string.Empty;
        }

        var trimmed = range.Trim();

        // Handle "all versions" markers
        if (trimmed is "*" or "all" or "any")
        {
            return "*";
        }

        // Try interval notation: [1.0, 2.0)
        var intervalMatch = IntervalPattern().Match(trimmed);
        if (intervalMatch.Success)
        {
            return NormalizeInterval(intervalMatch);
        }

        // Try comparison operators: >= 1.0
        var compMatch = ComparisonPattern().Match(trimmed);
        if (compMatch.Success)
        {
            return NormalizeComparison(compMatch);
        }

        // Handle comma-separated constraints: >=1.0, <2.0
        if (trimmed.Contains(','))
        {
            return NormalizeMultiConstraint(trimmed);
        }

        // Handle "fixed" version notation
        if (trimmed.StartsWith("fixed:", StringComparison.OrdinalIgnoreCase))
        {
            var fixedVersion = trimmed[6..].Trim();
            return $">={fixedVersion}";
        }

        // Handle a plain version (treat as exact match)
        if (Regex.IsMatch(trimmed, @"^[\d.]+"))
        {
            return $"={trimmed}";
        }

        // Return trimmed if unrecognized
        return trimmed;
    }

    private static string NormalizeInterval(Match match)
    {
        var leftBracket = match.Groups[1].Value;
        var lower = match.Groups[2].Value.Trim();
        var upper = match.Groups[3].Value.Trim();
        var rightBracket = match.Groups[4].Value;

        var parts = new List<string>();

        if (!string.IsNullOrEmpty(lower))
        {
            var op = leftBracket == "[" ? ">=" : ">";
            parts.Add($"{op}{lower}");
        }

        if (!string.IsNullOrEmpty(upper))
        {
            var op = rightBracket == "]" ? "<=" : "<";
            parts.Add($"{op}{upper}");
        }

        return string.Join(",", parts);
    }

    private static string NormalizeComparison(Match match)
    {
        var op = NormalizeOperator(match.Groups[1].Value);
        var version = match.Groups[2].Value.Trim();
        return $"{op}{version}";
    }

    private static string NormalizeMultiConstraint(string range)
    {
        var constraints = range
            .Split(',', StringSplitOptions.RemoveEmptyEntries)
            .Select(static c => c.Trim())
            .Where(static c => !string.IsNullOrEmpty(c))
            .Select(NormalizeSingleConstraint)
            .OrderBy(static c => c, StringComparer.Ordinal)
            .Distinct()
            .ToList();

        return string.Join(",", constraints);
    }

    private static string NormalizeSingleConstraint(string constraint)
    {
        var match = ComparisonPattern().Match(constraint);
        if (match.Success)
        {
            var op = NormalizeOperator(match.Groups[1].Value);
            var version = match.Groups[2].Value.Trim();
            return $"{op}{version}";
        }

        return constraint;
    }

    private static string NormalizeOperator(string op)
    {
        return op switch
        {
            "~=" or "~>" => "~=",
            "^" => "^",
            ">=" => ">=",
            ">" => ">",
            "<=" => "<=",
            "<" => "<",
            "=" => "=",
            "!=" => "!=",
            _ => op
        };
    }
}
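
Traced from the code above (a sketch):

```
var normalizer = VersionRangeNormalizer.Instance;

Console.WriteLine(normalizer.Normalize("[1.0, 2.0)")); // -> >=1.0,<2.0
Console.WriteLine(normalizer.Normalize(">= 1.0"));     // -> >=1.0
Console.WriteLine(normalizer.Normalize("1.2.3"));      // -> =1.2.3 (plain version treated as exact match)
```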
@@ -0,0 +1,68 @@
// -----------------------------------------------------------------------------
// MergeHashBackfillJob.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-020
// Description: Job to backfill merge hashes for existing advisories
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Merge.Identity;

namespace StellaOps.Concelier.Merge.Jobs;

/// <summary>
/// Job to backfill merge hashes for existing advisories during migration.
/// Can target all advisories or a specific advisory key.
/// </summary>
public sealed class MergeHashBackfillJob : IJob
{
    private readonly MergeHashShadowWriteService _shadowWriteService;
    private readonly ILogger<MergeHashBackfillJob> _logger;

    public MergeHashBackfillJob(
        MergeHashShadowWriteService shadowWriteService,
        ILogger<MergeHashBackfillJob> logger)
    {
        _shadowWriteService = shadowWriteService ?? throw new ArgumentNullException(nameof(shadowWriteService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Executes the backfill job.
    /// </summary>
    /// <remarks>
    /// Parameters:
    /// - "seed" (optional): Specific advisory key to backfill. If empty, backfills all.
    /// - "force" (optional): If "true", recomputes the hash even for advisories that have one.
    /// </remarks>
    public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        var hasSeed = context.Parameters.TryGetValue("seed", out var seedValue);
        var seed = seedValue as string;
        var force = context.Parameters.TryGetValue("force", out var forceValue)
            && forceValue is string forceStr
            && string.Equals(forceStr, "true", StringComparison.OrdinalIgnoreCase);

        if (hasSeed && !string.IsNullOrWhiteSpace(seed))
        {
            _logger.LogInformation("Starting merge hash backfill for single advisory: {AdvisoryKey}, force={Force}", seed, force);
            var updated = await _shadowWriteService.BackfillOneAsync(seed, force, cancellationToken).ConfigureAwait(false);
            _logger.LogInformation(
                "Merge hash backfill for {AdvisoryKey} complete: updated={Updated}",
                seed,
                updated);
        }
        else
        {
            _logger.LogInformation("Starting merge hash backfill for all advisories");
            var result = await _shadowWriteService.BackfillAllAsync(cancellationToken).ConfigureAwait(false);
            _logger.LogInformation(
                "Merge hash backfill complete: processed={Processed}, updated={Updated}, skipped={Skipped}, failed={Failed}",
                result.Processed,
                result.Updated,
                result.Skipped,
                result.Failed);
        }
    }
}
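
A sketch of the parameter bag such a trigger would pass. The scheduling API itself is not shown in this commit, so only the keys come from the remarks above; the values are hypothetical:

```
// Hypothetical trigger parameters for the job kind registered just below.
var parameters = new Dictionary<string, object?>
{
    ["seed"] = "CVE-2024-1234", // omit (or leave empty) to backfill every advisory
    ["force"] = "true"          // recompute even when a merge hash already exists
};
```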
@@ -3,4 +3,5 @@ namespace StellaOps.Concelier.Merge.Jobs;
internal static class MergeJobKinds
{
    public const string Reconcile = "merge:reconcile";
+   public const string HashBackfill = "merge:hash-backfill";
}

@@ -8,6 +8,7 @@ using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
+using StellaOps.Concelier.Merge.Identity;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage.Aliases;
@@ -41,6 +42,7 @@ public sealed class AdvisoryMergeService
    private readonly IAdvisoryEventLog _eventLog;
    private readonly TimeProvider _timeProvider;
    private readonly CanonicalMerger _canonicalMerger;
+   private readonly IMergeHashCalculator? _mergeHashCalculator;
    private readonly ILogger<AdvisoryMergeService> _logger;

    public AdvisoryMergeService(
@@ -51,7 +53,8 @@ public sealed class AdvisoryMergeService
        CanonicalMerger canonicalMerger,
        IAdvisoryEventLog eventLog,
        TimeProvider timeProvider,
-       ILogger<AdvisoryMergeService> logger)
+       ILogger<AdvisoryMergeService> logger,
+       IMergeHashCalculator? mergeHashCalculator = null)
    {
        _aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver));
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
@@ -61,6 +64,7 @@ public sealed class AdvisoryMergeService
        _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+       _mergeHashCalculator = mergeHashCalculator; // Optional during migration
    }

    public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken)
@@ -102,7 +106,7 @@ public sealed class AdvisoryMergeService
            throw;
        }

-       var merged = precedenceResult.Advisory;
+       var merged = EnrichWithMergeHash(precedenceResult.Advisory);
        var conflictDetails = precedenceResult.Conflicts;

        if (component.Collisions.Count > 0)
@@ -309,7 +313,48 @@ public sealed class AdvisoryMergeService
            source.Provenance,
            source.Description,
            source.Cwes,
-           source.CanonicalMetricId);
+           source.CanonicalMetricId,
+           source.MergeHash);
+
+   /// <summary>
+   /// Enriches an advisory with its computed merge hash if a calculator is available.
+   /// </summary>
+   private Advisory EnrichWithMergeHash(Advisory advisory)
+   {
+       if (_mergeHashCalculator is null)
+       {
+           return advisory;
+       }
+
+       try
+       {
+           var mergeHash = _mergeHashCalculator.ComputeMergeHash(advisory);
+           return new Advisory(
+               advisory.AdvisoryKey,
+               advisory.Title,
+               advisory.Summary,
+               advisory.Language,
+               advisory.Published,
+               advisory.Modified,
+               advisory.Severity,
+               advisory.ExploitKnown,
+               advisory.Aliases,
+               advisory.Credits,
+               advisory.References,
+               advisory.AffectedPackages,
+               advisory.CvssMetrics,
+               advisory.Provenance,
+               advisory.Description,
+               advisory.Cwes,
+               advisory.CanonicalMetricId,
+               mergeHash);
+       }
+       catch (Exception ex)
+       {
+           _logger.LogWarning(ex, "Failed to compute merge hash for {AdvisoryKey}, continuing without hash", advisory.AdvisoryKey);
+           return advisory;
+       }
+   }

    private CanonicalMergeResult? ApplyCanonicalMergeIfNeeded(string canonicalKey, List<Advisory> inputs)
    {

@@ -0,0 +1,172 @@
// -----------------------------------------------------------------------------
// MergeHashBackfillService.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-020
// Description: Shadow-write mode for computing merge_hash on existing advisories
// -----------------------------------------------------------------------------

using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Merge.Identity;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Advisories;

namespace StellaOps.Concelier.Merge.Services;

/// <summary>
/// Service for backfilling merge hashes on existing advisories without changing their identity.
/// Runs in shadow-write mode: computes merge_hash and updates only that field.
/// </summary>
public sealed class MergeHashBackfillService
{
    private readonly IAdvisoryStore _advisoryStore;
    private readonly IMergeHashCalculator _mergeHashCalculator;
    private readonly ILogger<MergeHashBackfillService> _logger;

    public MergeHashBackfillService(
        IAdvisoryStore advisoryStore,
        IMergeHashCalculator mergeHashCalculator,
        ILogger<MergeHashBackfillService> logger)
    {
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _mergeHashCalculator = mergeHashCalculator ?? throw new ArgumentNullException(nameof(mergeHashCalculator));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Backfills merge hashes for all advisories that don't have one.
    /// </summary>
    /// <param name="batchSize">Number of advisories to process before yielding progress.</param>
    /// <param name="dryRun">If true, computes hashes but doesn't persist them.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Backfill result with statistics.</returns>
    public async Task<MergeHashBackfillResult> BackfillAsync(
        int batchSize = 100,
        bool dryRun = false,
        CancellationToken cancellationToken = default)
    {
        var stopwatch = Stopwatch.StartNew();
        var processed = 0;
        var updated = 0;
        var skipped = 0;
        var errors = 0;

        _logger.LogInformation(
            "Starting merge hash backfill (dryRun={DryRun}, batchSize={BatchSize})",
            dryRun, batchSize);

        await foreach (var advisory in _advisoryStore.StreamAsync(cancellationToken).ConfigureAwait(false))
        {
            cancellationToken.ThrowIfCancellationRequested();
            processed++;

            // Skip if already has merge hash
            if (!string.IsNullOrEmpty(advisory.MergeHash))
            {
                skipped++;
                continue;
            }

            try
            {
                var mergeHash = _mergeHashCalculator.ComputeMergeHash(advisory);

                if (!dryRun)
                {
                    var enrichedAdvisory = CreateAdvisoryWithMergeHash(advisory, mergeHash);
                    await _advisoryStore.UpsertAsync(enrichedAdvisory, cancellationToken).ConfigureAwait(false);
                }

                updated++;

                if (updated % batchSize == 0)
                {
                    _logger.LogInformation(
                        "Backfill progress: {Updated} updated, {Skipped} skipped, {Errors} errors (of {Processed} processed)",
                        updated, skipped, errors, processed);
                }
            }
            catch (Exception ex)
            {
                errors++;
                _logger.LogWarning(
                    ex,
                    "Failed to compute/update merge hash for {AdvisoryKey}",
                    advisory.AdvisoryKey);
            }
        }

        stopwatch.Stop();

        var result = new MergeHashBackfillResult(
            TotalProcessed: processed,
            Updated: updated,
            Skipped: skipped,
            Errors: errors,
            DryRun: dryRun,
            Duration: stopwatch.Elapsed);

        _logger.LogInformation(
            "Merge hash backfill completed: {Updated} updated, {Skipped} skipped, {Errors} errors (of {Processed} processed) in {Duration}",
            result.Updated, result.Skipped, result.Errors, result.TotalProcessed, result.Duration);

        return result;
    }

    /// <summary>
    /// Computes merge hash for a single advisory without persisting.
    /// Useful for testing or preview mode.
    /// </summary>
    public string ComputeMergeHash(Advisory advisory)
    {
        ArgumentNullException.ThrowIfNull(advisory);
        return _mergeHashCalculator.ComputeMergeHash(advisory);
    }

    private static Advisory CreateAdvisoryWithMergeHash(Advisory source, string mergeHash)
        => new(
            source.AdvisoryKey,
            source.Title,
            source.Summary,
            source.Language,
            source.Published,
            source.Modified,
            source.Severity,
            source.ExploitKnown,
            source.Aliases,
            source.Credits,
            source.References,
            source.AffectedPackages,
            source.CvssMetrics,
            source.Provenance,
            source.Description,
            source.Cwes,
            source.CanonicalMetricId,
            mergeHash);
}

/// <summary>
/// Result of a merge hash backfill operation.
/// </summary>
public sealed record MergeHashBackfillResult(
    int TotalProcessed,
    int Updated,
    int Skipped,
    int Errors,
    bool DryRun,
    TimeSpan Duration)
{
    /// <summary>
    /// Percentage of advisories processed without error (updated or skipped).
    /// </summary>
    public double SuccessRate => TotalProcessed > 0
        ? (double)(Updated + Skipped) / TotalProcessed * 100
        : 100;

    /// <summary>
    /// Average time per advisory in milliseconds.
    /// </summary>
    public double AvgTimePerAdvisoryMs => TotalProcessed > 0
        ? Duration.TotalMilliseconds / TotalProcessed
        : 0;
}
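
A minimal usage sketch showing the dry-run mode; `advisoryStore` and `mergeHashCalculator` are hypothetical names, assumed to come from DI:

```
using Microsoft.Extensions.Logging.Abstractions;

var backfill = new MergeHashBackfillService(
    advisoryStore,
    mergeHashCalculator,
    NullLogger<MergeHashBackfillService>.Instance);

// Dry run first: compute hashes, persist nothing, inspect the stats.
var preview = await backfill.BackfillAsync(batchSize: 500, dryRun: true);
Console.WriteLine($"{preview.Updated} would be written; success rate {preview.SuccessRate:F1}%");
```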
@@ -26,7 +26,8 @@ public sealed record Advisory
            provenance: Array.Empty<AdvisoryProvenance>(),
            description: null,
            cwes: Array.Empty<AdvisoryWeakness>(),
-           canonicalMetricId: null);
+           canonicalMetricId: null,
+           mergeHash: null);

    public Advisory(
        string advisoryKey,
@@ -44,7 +45,8 @@ public sealed record Advisory
        IEnumerable<AdvisoryProvenance>? provenance,
        string? description = null,
        IEnumerable<AdvisoryWeakness>? cwes = null,
-       string? canonicalMetricId = null)
+       string? canonicalMetricId = null,
+       string? mergeHash = null)
        : this(
            advisoryKey,
            title,
@@ -62,7 +64,8 @@ public sealed record Advisory
            provenance,
            description,
            cwes,
-           canonicalMetricId)
+           canonicalMetricId,
+           mergeHash)
    {
    }

@@ -83,7 +86,8 @@ public sealed record Advisory
        IEnumerable<AdvisoryProvenance>? provenance,
        string? description = null,
        IEnumerable<AdvisoryWeakness>? cwes = null,
-       string? canonicalMetricId = null)
+       string? canonicalMetricId = null,
+       string? mergeHash = null)
    {
        AdvisoryKey = Validation.EnsureNotNullOrWhiteSpace(advisoryKey, nameof(advisoryKey));
        Title = Validation.EnsureNotNullOrWhiteSpace(title, nameof(title));
@@ -145,6 +149,8 @@ public sealed record Advisory
            .ThenBy(static p => p.Kind, StringComparer.Ordinal)
            .ThenBy(static p => p.RecordedAt)
            .ToImmutableArray();
+
+       MergeHash = Validation.TrimToNull(mergeHash);
    }

    [JsonConstructor]
@@ -165,7 +171,8 @@ public sealed record Advisory
        ImmutableArray<AdvisoryProvenance> provenance,
        string? description,
        ImmutableArray<AdvisoryWeakness> cwes,
-       string? canonicalMetricId)
+       string? canonicalMetricId,
+       string? mergeHash = null)
        : this(
            advisoryKey,
            title,
@@ -183,7 +190,8 @@ public sealed record Advisory
            provenance.IsDefault ? null : provenance.AsEnumerable(),
            description,
            cwes.IsDefault ? null : cwes.AsEnumerable(),
-           canonicalMetricId)
+           canonicalMetricId,
+           mergeHash)
    {
    }

@@ -220,4 +228,10 @@ public sealed record Advisory
    public string? CanonicalMetricId { get; }

    public ImmutableArray<AdvisoryProvenance> Provenance { get; }
+
+   /// <summary>
+   /// Semantic merge hash for provenance-scoped deduplication.
+   /// Nullable during migration; computed from (CVE + PURL + version-range + CWE + patch-lineage).
+   /// </summary>
+   public string? MergeHash { get; }
}
|
||||
|
||||
@@ -8,21 +8,22 @@
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|-------|------|----------|-------|
|
||||
| `advisoryKey` | string | yes | Globally unique identifier selected by the merge layer (often a CVE/GHSA/vendor key). Stored lowercased unless vendor casing is significant. |
|
||||
| `title` | string | yes | Human readable title. Must be non-empty and trimmed. |
|
||||
| `summary` | string? | optional | Short description; trimmed to `null` when empty. |
|
||||
| `language` | string? | optional | ISO language code (lowercase). |
|
||||
| `published` | DateTimeOffset? | optional | UTC timestamp when vendor originally published. |
|
||||
| `modified` | DateTimeOffset? | optional | UTC timestamp when vendor last updated. |
|
||||
| `severity` | string? | optional | Normalized severity label (`critical`, `high`, etc.). |
|
||||
| `exploitKnown` | bool | yes | Whether KEV/other sources confirm active exploitation. |
|
||||
| `aliases` | string[] | yes | Sorted, de-duplicated list of normalized aliases (see [Alias Schemes](#alias-schemes)). |
|
||||
| `credits` | AdvisoryCredit[] | yes | Deterministically ordered acknowledgements (role + contact metadata). |
|
||||
| `references` | AdvisoryReference[] | yes | Deterministically ordered reference set. |
|
||||
| `affectedPackages` | AffectedPackage[] | yes | Deterministically ordered affected packages. |
|
||||
| `cvssMetrics` | CvssMetric[] | yes | Deterministically ordered CVSS metrics (v3, v4 first). |
|
||||
| `provenance` | AdvisoryProvenance[] | yes | Normalized provenance entries sorted by source then kind then recorded timestamp. |
|
||||
|
||||
| `advisoryKey` | string | yes | Globally unique identifier selected by the merge layer (often a CVE/GHSA/vendor key). Stored lowercased unless vendor casing is significant. |
|
||||
| `title` | string | yes | Human readable title. Must be non-empty and trimmed. |
|
||||
| `summary` | string? | optional | Short description; trimmed to `null` when empty. |
|
||||
| `language` | string? | optional | ISO language code (lowercase). |
|
||||
| `published` | DateTimeOffset? | optional | UTC timestamp when vendor originally published. |
|
||||
| `modified` | DateTimeOffset? | optional | UTC timestamp when vendor last updated. |
|
||||
| `severity` | string? | optional | Normalized severity label (`critical`, `high`, etc.). |
|
||||
| `exploitKnown` | bool | yes | Whether KEV/other sources confirm active exploitation. |
|
||||
| `aliases` | string[] | yes | Sorted, de-duplicated list of normalized aliases (see [Alias Schemes](#alias-schemes)). |
|
||||
| `credits` | AdvisoryCredit[] | yes | Deterministically ordered acknowledgements (role + contact metadata). |
|
||||
| `references` | AdvisoryReference[] | yes | Deterministically ordered reference set. |
|
||||
| `affectedPackages` | AffectedPackage[] | yes | Deterministically ordered affected packages. |
|
||||
| `cvssMetrics` | CvssMetric[] | yes | Deterministically ordered CVSS metrics (v3, v4 first). |
|
||||
| `provenance` | AdvisoryProvenance[] | yes | Normalized provenance entries sorted by source then kind then recorded timestamp. |
|
||||
| `mergeHash` | string? | optional | Semantic identity hash for deduplication (see [Merge Hash](#merge-hash)). |
|
||||
|
||||
### Invariants
|
||||
- Collections are immutable (`ImmutableArray<T>`) and always sorted deterministically.
|
||||
- `AdvisoryKey` and `Title` are mandatory and trimmed.
|
||||
@@ -36,27 +37,27 @@
|
||||
| `url` | string | yes | Absolute HTTP/HTTPS URL. |
|
||||
| `kind` | string? | optional | Categorized reference role (e.g. `advisory`, `patch`, `changelog`). |
|
||||
| `sourceTag` | string? | optional | Free-form tag identifying originating source. |
|
||||
| `summary` | string? | optional | Short description. |
|
||||
| `provenance` | AdvisoryProvenance | yes | Provenance entry describing how the reference was mapped. |
|
||||
|
||||
Deterministic ordering: by `url`, then `kind`, then `sourceTag`, then `provenance.RecordedAt`.
|
||||
|
||||
## AdvisoryCredit
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|-------|------|----------|-------|
|
||||
| `displayName` | string | yes | Human-readable acknowledgement (reporter, maintainer, analyst, etc.). |
|
||||
| `role` | string? | optional | Normalized role token (lowercase with `_` separators). |
|
||||
| `contacts` | string[] | yes | Sorted set of vendor-supplied handles or URLs; may be empty. |
|
||||
| `provenance` | AdvisoryProvenance | yes | Provenance entry describing how the credit was captured. |
|
||||
|
||||
Deterministic ordering: by `role` (nulls first) then `displayName`.
|
||||
| `summary` | string? | optional | Short description. |
|
||||
| `provenance` | AdvisoryProvenance | yes | Provenance entry describing how the reference was mapped. |
|
||||
|
||||
Deterministic ordering: by `url`, then `kind`, then `sourceTag`, then `provenance.RecordedAt`.
|
||||
|
||||
## AdvisoryCredit
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|-------|------|----------|-------|
|
||||
| `displayName` | string | yes | Human-readable acknowledgement (reporter, maintainer, analyst, etc.). |
|
||||
| `role` | string? | optional | Normalized role token (lowercase with `_` separators). |
|
||||
| `contacts` | string[] | yes | Sorted set of vendor-supplied handles or URLs; may be empty. |
|
||||
| `provenance` | AdvisoryProvenance | yes | Provenance entry describing how the credit was captured. |
|
||||
|
||||
Deterministic ordering: by `role` (nulls first) then `displayName`.
|
||||
|
||||
## AffectedPackage

| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `type` | string | yes | Semantic type (`semver`, `rpm`, `deb`, `apk`, `purl`, `cpe`, etc.). Lowercase. |
| `identifier` | string | yes | Canonical identifier (package name, PURL, CPE, NEVRA, etc.). |
| `platform` | string? | optional | Explicit platform / distro (e.g. `ubuntu`, `rhel-8`). |
| `versionRanges` | AffectedVersionRange[] | yes | Deduplicated + sorted by introduced/fixed/last/expr/kind. |

@@ -69,7 +70,7 @@ Deterministic ordering: packages sorted by `type`, then `identifier`, then `plat

| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `rangeKind` | string | yes | Classification of range semantics (`semver`, `evr`, `nevra`, `apk`, `version`, `purl`). Lowercase. |
| `introducedVersion` | string? | optional | Inclusive lower bound when impact begins. |
| `fixedVersion` | string? | optional | Exclusive bounding version containing the fix. |
| `lastAffectedVersion` | string? | optional | Inclusive upper bound when no fix exists. |

@@ -95,18 +96,18 @@ Sorted by version then vector for determinism.

| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `source` | string | yes | Logical source identifier (`nvd`, `redhat`, `osv`, etc.). |
| `kind` | string | yes | Operation performed (`fetch`, `parse`, `map`, `merge`, `enrich`). |
| `value` | string? | optional | Free-form pipeline detail (parser identifier, rule set, resume cursor). |
| `recordedAt` | DateTimeOffset | yes | UTC timestamp when provenance was captured. |
| `fieldMask` | string[] | optional | Canonical field coverage expressed as lowercase masks (e.g. `affectedpackages[]`, `affectedpackages[].versionranges[]`). |

### Provenance Mask Expectations

Each canonical field is expected to carry at least one provenance entry derived from the
responsible pipeline stage. Populate `fieldMask` with the lowercase canonical mask(s) describing the
covered field(s); downstream metrics and resume helpers rely on this signal to reason about
coverage. When aggregating provenance from subcomponents (e.g., affected package ranges), merge code
should ensure:

- Advisory level provenance documents the source document and merge actions.
- References, packages, ranges, and metrics each include their own provenance entry reflecting

@@ -142,3 +143,112 @@ Supported alias scheme prefixes:

The registry exposed via `AliasSchemes` and `AliasSchemeRegistry` can be used to validate aliases and
drive downstream conditionals without re-implementing pattern rules.

## Merge Hash

The merge hash is a deterministic semantic identity hash that enables provenance-scoped deduplication.
Unlike content hashing (which changes when any field changes), the merge hash is computed from identity
components only, allowing the same CVE from different sources (Debian, RHEL, NVD, etc.) to produce
identical hashes when semantically equivalent.

### Purpose

- **Deduplication**: Identify equivalent advisories across multiple sources
- **Stable Identity**: Hash remains constant despite variations in non-identity fields (title, description, CVSS scores)
- **Source Independence**: Same CVE affecting the same package produces the same hash regardless of source

### Hash Format

The merge hash is a hex-encoded SHA256 hash prefixed with `sha256:`:

```
sha256:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2
```

Total length: 71 characters (`sha256:` prefix + 64 hex characters).

### Identity Components

The merge hash is computed from the following canonical string format:

```
CVE:{cve}|AFFECTS:{affects_key}|VERSION:{version_range}|CWE:{cwes}|LINEAGE:{patch_lineage}
```

| Component | Source | Notes |
|-----------|--------|-------|
| `cve` | Advisory key or CVE alias | Normalized to uppercase (e.g., `CVE-2024-1234`) |
| `affects_key` | First affected package identifier | PURL or CPE, normalized to canonical form |
| `version_range` | First affected package version ranges | Canonical interval notation, sorted |
| `cwes` | Advisory weaknesses | Uppercase, sorted numerically, comma-joined |
| `patch_lineage` | Patch references | Extracted commit SHA or PATCH-ID (optional) |

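For reference, a minimal sketch of assembling and hashing this canonical string, assuming inputs that are already normalized per the rules below (the helper name is illustrative, not the actual `MergeHashCalculator` internals):

```csharp
using System.Security.Cryptography;
using System.Text;

static class MergeHashSketch
{
    // Builds the canonical identity string and hashes it. All components are
    // assumed to be pre-normalized; a missing patch lineage becomes "".
    public static string Compute(
        string cve, string affectsKey, string versionRange,
        string cwes, string? patchLineage)
    {
        var canonical =
            $"CVE:{cve}|AFFECTS:{affectsKey}|VERSION:{versionRange}" +
            $"|CWE:{cwes}|LINEAGE:{patchLineage ?? string.Empty}";
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
    }
}
```
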
### Normalization Rules

#### CVE Normalization

- Uppercase: `cve-2024-1234` → `CVE-2024-1234`
- Numeric-only input prefixed: `2024-1234` → `CVE-2024-1234`
- Non-CVE advisories use advisory key as-is

#### PURL Normalization

- Type lowercase: `pkg:NPM/lodash` → `pkg:npm/lodash`
- Namespace/name lowercase: `pkg:npm/LODASH` → `pkg:npm/lodash`
- Strip non-identity qualifiers: `?arch=amd64`, `?checksum=...`, `?platform=linux`
- Preserve version: `@4.17.0` retained

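A minimal sketch of this normalization, assuming simple PURLs without scoped namespaces (hypothetical helper, not the shipped normalizer):

```csharp
// Lowercases the type/namespace/name portion, drops qualifiers and subpath,
// and keeps the version verbatim. Assumes no '@' appears before the version.
static string NormalizePurl(string purl)
{
    var core = purl.Split('?', '#')[0];       // strip qualifiers and subpath
    var at = core.LastIndexOf('@');
    var name = at > 0 ? core[..at] : core;    // "pkg:NPM/lodash"
    var version = at > 0 ? core[at..] : "";   // "@4.17.0" or ""
    return name.ToLowerInvariant() + version;
}
// NormalizePurl("pkg:NPM/lodash@4.17.0?arch=amd64") => "pkg:npm/lodash@4.17.0"
```
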
#### CPE Normalization

- Convert CPE 2.2 to 2.3: `cpe:/a:vendor:product:1.0` → `cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*`
- Lowercase all components
- Normalize wildcards: `ANY` → `*`, `NA` → `-`

#### Version Range Normalization

- Interval to comparison: `[1.0.0, 2.0.0)` → `>=1.0.0,<2.0.0`
- Trim whitespace: `< 1.5.0` → `<1.5.0`
- Fixed notation: `fixed: 1.5.1` → `>=1.5.1`
- Multiple constraints sorted and comma-joined

#### CWE Normalization

- Uppercase: `cwe-79` → `CWE-79`
- Sort numerically: `CWE-89,CWE-79` → `CWE-79,CWE-89`
- Deduplicate
- Comma-joined output

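A minimal sketch of the CWE pipeline, assuming well-formed `CWE-<n>` tokens (hypothetical helper):

```csharp
using System.Collections.Generic;
using System.Linq;

// Uppercase, dedupe, sort numerically, comma-join:
// ["cwe-89", "CWE-79", "CWE-89"] => "CWE-79,CWE-89"
static string NormalizeCwes(IEnumerable<string> cwes) =>
    string.Join(",",
        cwes.Select(c => c.Trim().ToUpperInvariant())
            .Distinct()
            .OrderBy(c => int.Parse(c.Replace("CWE-", ""))));
```
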
#### Patch Lineage Normalization

- Extract 40-character SHA from GitHub/GitLab URLs
- Extract SHA from `commit {sha}` or `backport of {sha}` patterns
- Normalize PATCH-ID to uppercase: `patch-12345` → `PATCH-12345`
- Returns `null` for unrecognized formats (produces empty string in canonical form)

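A minimal sketch of the extraction, assuming only the patterns listed above matter (hypothetical helper; the real extractor likely handles more reference shapes):

```csharp
using System.Text.RegularExpressions;

// Pulls a 40-hex-char commit SHA out of a patch URL or free-text reference,
// or normalizes a PATCH-ID; returns null when nothing recognizable is found.
static string? ExtractPatchLineage(string reference)
{
    var sha = Regex.Match(reference, @"\b[0-9a-fA-F]{40}\b");
    if (sha.Success)
    {
        return sha.Value.ToLowerInvariant();
    }

    var patchId = Regex.Match(reference, @"\bpatch-\d+\b", RegexOptions.IgnoreCase);
    return patchId.Success ? patchId.Value.ToUpperInvariant() : null;
}
```
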
### Multi-Package Advisories

When an advisory affects multiple packages, the merge hash is computed from the first affected package.
Use `ComputeMergeHash(advisory, affectedPackage)` to compute per-package hashes for deduplication
at the package level.

### Implementation

The merge hash is computed by `MergeHashCalculator` in `StellaOps.Concelier.Merge.Identity`:

```csharp
var calculator = new MergeHashCalculator();
var hash = calculator.ComputeMergeHash(advisory);
// or for a specific package:
var packageHash = calculator.ComputeMergeHash(advisory, affectedPackage);
```

### Migration

During migration, the `mergeHash` field is nullable. Use `MergeHashShadowWriteService` to backfill
hashes for existing advisories:

```csharp
var shadowWriter = new MergeHashShadowWriteService(advisoryStore, calculator, logger);
var result = await shadowWriter.BackfillAllAsync(cancellationToken);
// result.Updated: count of advisories updated with merge hashes
```

@@ -0,0 +1,63 @@
-- Concelier Migration 008: Sync Ledger for Federation
-- Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
-- Task: SYNC-8200-002
-- Creates sync_ledger and site_policy tables for federation cursor tracking

-- Helper function for updated_at triggers
CREATE OR REPLACE FUNCTION vuln.update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Track federation sync state per remote site
CREATE TABLE IF NOT EXISTS vuln.sync_ledger (
    id          UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    site_id     TEXT NOT NULL,                       -- Remote site identifier (e.g., "site-us-west", "airgap-dc2")
    cursor      TEXT NOT NULL,                       -- Opaque cursor (usually ISO8601 timestamp#sequence)
    bundle_hash TEXT NOT NULL,                       -- SHA256 of imported bundle
    items_count INT NOT NULL DEFAULT 0,              -- Number of items in bundle
    signed_at   TIMESTAMPTZ NOT NULL,                -- When bundle was signed by remote
    imported_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    CONSTRAINT uq_sync_ledger_site_cursor UNIQUE (site_id, cursor),
    CONSTRAINT uq_sync_ledger_bundle UNIQUE (bundle_hash)
);

CREATE INDEX IF NOT EXISTS idx_sync_ledger_site ON vuln.sync_ledger(site_id);
CREATE INDEX IF NOT EXISTS idx_sync_ledger_site_time ON vuln.sync_ledger(site_id, signed_at DESC);

COMMENT ON TABLE vuln.sync_ledger IS 'Federation sync cursor tracking per remote site';
COMMENT ON COLUMN vuln.sync_ledger.cursor IS 'Position marker for incremental sync (monotonically increasing)';
COMMENT ON COLUMN vuln.sync_ledger.site_id IS 'Remote site identifier for federation sync';
COMMENT ON COLUMN vuln.sync_ledger.bundle_hash IS 'SHA256 hash of imported bundle for deduplication';

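-- Example (illustrative values only, not part of the migration): recording a
-- successfully imported bundle. The two UNIQUE constraints make re-imports of
-- the same cursor or bundle a no-op when paired with ON CONFLICT DO NOTHING.
--
-- INSERT INTO vuln.sync_ledger (site_id, cursor, bundle_hash, items_count, signed_at)
-- VALUES ('site-us-west', '2025-01-01T00:00:00Z#42', 'sha256:deadbeef...', 1200, NOW())
-- ON CONFLICT DO NOTHING;
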
-- Site federation policies
CREATE TABLE IF NOT EXISTS vuln.site_policy (
    id                   UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    site_id              TEXT NOT NULL UNIQUE,
    display_name         TEXT,
    allowed_sources      TEXT[] NOT NULL DEFAULT '{}',  -- Empty = allow all
    denied_sources       TEXT[] NOT NULL DEFAULT '{}',
    max_bundle_size_mb   INT NOT NULL DEFAULT 100,
    max_items_per_bundle INT NOT NULL DEFAULT 10000,
    require_signature    BOOLEAN NOT NULL DEFAULT TRUE,
    allowed_signers      TEXT[] NOT NULL DEFAULT '{}',  -- Key IDs or issuers
    enabled              BOOLEAN NOT NULL DEFAULT TRUE,
    created_at           TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at           TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_site_policy_enabled ON vuln.site_policy(enabled) WHERE enabled = TRUE;

COMMENT ON TABLE vuln.site_policy IS 'Per-site federation governance policies';
COMMENT ON COLUMN vuln.site_policy.allowed_sources IS 'Source keys to allow; empty array allows all sources';
COMMENT ON COLUMN vuln.site_policy.denied_sources IS 'Source keys to deny; takes precedence over allowed';
COMMENT ON COLUMN vuln.site_policy.allowed_signers IS 'Signing key IDs or issuer patterns allowed for bundle verification';

-- Trigger for automatic updated_at
CREATE TRIGGER trg_site_policy_updated
    BEFORE UPDATE ON vuln.site_policy
    FOR EACH ROW EXECUTE FUNCTION vuln.update_timestamp();

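-- Example (illustrative values only, not part of the migration): a restrictive
-- policy for an air-gapped site that only accepts signed nvd/osv bundles from
-- two known signing keys.
--
-- INSERT INTO vuln.site_policy (site_id, display_name, allowed_sources, allowed_signers)
-- VALUES ('airgap-dc2', 'Air-gap DC2', ARRAY['nvd', 'osv'], ARRAY['key-1', 'key-2'])
-- ON CONFLICT (site_id) DO NOTHING;
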
@@ -0,0 +1,61 @@

-- Concelier Migration 009: Advisory Canonical Table
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-003
-- Creates deduplicated canonical advisories with merge_hash

-- Deduplicated canonical advisory records
CREATE TABLE IF NOT EXISTS vuln.advisory_canonical (
    -- Identity
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Merge key components (used to compute merge_hash)
    cve           TEXT NOT NULL,
    affects_key   TEXT NOT NULL,                 -- normalized purl or cpe
    version_range JSONB,                         -- structured: { introduced, fixed, last_affected }
    weakness      TEXT[] NOT NULL DEFAULT '{}',  -- sorted CWE array

    -- Computed identity
    merge_hash TEXT NOT NULL,                    -- SHA256 of normalized (cve|affects|range|weakness|lineage)

    -- Metadata
    status        TEXT NOT NULL DEFAULT 'active' CHECK (status IN ('active', 'stub', 'withdrawn')),
    severity      TEXT CHECK (severity IN ('critical', 'high', 'medium', 'low', 'none', 'unknown')),
    epss_score    NUMERIC(5,4),                  -- EPSS probability (0.0000-1.0000)
    exploit_known BOOLEAN NOT NULL DEFAULT FALSE,

    -- Content (for stub degradation)
    title   TEXT,
    summary TEXT,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Constraints
    CONSTRAINT uq_advisory_canonical_merge_hash UNIQUE (merge_hash)
);

-- Primary lookup indexes
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_cve ON vuln.advisory_canonical(cve);
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_affects ON vuln.advisory_canonical(affects_key);
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_merge_hash ON vuln.advisory_canonical(merge_hash);

-- Filtered indexes for common queries
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_status ON vuln.advisory_canonical(status) WHERE status = 'active';
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_severity ON vuln.advisory_canonical(severity) WHERE severity IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_exploit ON vuln.advisory_canonical(exploit_known) WHERE exploit_known = TRUE;

-- Time-based index for incremental queries
CREATE INDEX IF NOT EXISTS idx_advisory_canonical_updated ON vuln.advisory_canonical(updated_at DESC);

-- Trigger for automatic updated_at
CREATE TRIGGER trg_advisory_canonical_updated
    BEFORE UPDATE ON vuln.advisory_canonical
    FOR EACH ROW EXECUTE FUNCTION vuln.update_timestamp();

-- Comments
COMMENT ON TABLE vuln.advisory_canonical IS 'Deduplicated canonical advisories with semantic merge_hash';
COMMENT ON COLUMN vuln.advisory_canonical.merge_hash IS 'Deterministic hash of (cve, affects_key, version_range, weakness, patch_lineage)';
COMMENT ON COLUMN vuln.advisory_canonical.affects_key IS 'Normalized PURL or CPE identifying the affected package';
COMMENT ON COLUMN vuln.advisory_canonical.status IS 'active=full record, stub=minimal for low interest, withdrawn=no longer valid';
COMMENT ON COLUMN vuln.advisory_canonical.epss_score IS 'EPSS exploit prediction probability (0.0000-1.0000)';

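-- Example (illustrative hash value, not part of the migration): the dedup
-- lookup most callers perform against this table.
--
-- SELECT id, cve, affects_key, status
-- FROM vuln.advisory_canonical
-- WHERE merge_hash = 'sha256:a1b2...';
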
@@ -0,0 +1,64 @@

-- Concelier Migration 010: Advisory Source Edge Table
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-004
-- Creates source edge linking canonical advisories to source documents

-- Source edge linking canonical advisory to source documents
CREATE TABLE IF NOT EXISTS vuln.advisory_source_edge (
    -- Identity
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Relationships
    canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id) ON DELETE CASCADE,
    source_id    UUID NOT NULL REFERENCES vuln.sources(id) ON DELETE RESTRICT,

    -- Source document
    source_advisory_id TEXT NOT NULL,  -- vendor's advisory ID (DSA-5678, RHSA-2024:1234)
    source_doc_hash    TEXT NOT NULL,  -- SHA256 of raw source document

    -- VEX-style status
    vendor_status TEXT CHECK (vendor_status IN (
        'affected', 'not_affected', 'fixed', 'under_investigation'
    )),

    -- Precedence (lower = higher priority)
    precedence_rank INT NOT NULL DEFAULT 100,

    -- DSSE signature envelope
    dsse_envelope JSONB,  -- { payloadType, payload, signatures[] }

    -- Content snapshot
    raw_payload JSONB,    -- original advisory document

    -- Audit
    fetched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Constraints
    CONSTRAINT uq_advisory_source_edge_unique
        UNIQUE (canonical_id, source_id, source_doc_hash)
);

-- Primary lookup indexes
CREATE INDEX IF NOT EXISTS idx_source_edge_canonical ON vuln.advisory_source_edge(canonical_id);
CREATE INDEX IF NOT EXISTS idx_source_edge_source ON vuln.advisory_source_edge(source_id);
CREATE INDEX IF NOT EXISTS idx_source_edge_advisory_id ON vuln.advisory_source_edge(source_advisory_id);

-- Join optimization index
CREATE INDEX IF NOT EXISTS idx_source_edge_canonical_source ON vuln.advisory_source_edge(canonical_id, source_id);

-- Time-based index for incremental queries
CREATE INDEX IF NOT EXISTS idx_source_edge_fetched ON vuln.advisory_source_edge(fetched_at DESC);

-- GIN index for JSONB queries on dsse_envelope
CREATE INDEX IF NOT EXISTS idx_source_edge_dsse_gin ON vuln.advisory_source_edge
    USING GIN (dsse_envelope jsonb_path_ops);

-- Comments
COMMENT ON TABLE vuln.advisory_source_edge IS 'Links canonical advisories to source documents with signatures';
COMMENT ON COLUMN vuln.advisory_source_edge.canonical_id IS 'Reference to deduplicated canonical advisory';
COMMENT ON COLUMN vuln.advisory_source_edge.source_id IS 'Reference to feed source';
COMMENT ON COLUMN vuln.advisory_source_edge.source_advisory_id IS 'Vendor advisory ID (e.g., DSA-5678, RHSA-2024:1234)';
COMMENT ON COLUMN vuln.advisory_source_edge.precedence_rank IS 'Source priority: vendor=10, distro=20, osv=30, nvd=40';
COMMENT ON COLUMN vuln.advisory_source_edge.dsse_envelope IS 'DSSE envelope with signature over raw_payload';
COMMENT ON COLUMN vuln.advisory_source_edge.vendor_status IS 'VEX-style status from source';

@@ -0,0 +1,116 @@

-- Concelier Migration 011: Canonical Helper Functions
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-005
-- Creates helper functions for canonical advisory operations

-- Function to get canonical by merge_hash (most common lookup)
CREATE OR REPLACE FUNCTION vuln.get_canonical_by_hash(p_merge_hash TEXT)
RETURNS vuln.advisory_canonical
LANGUAGE sql STABLE
AS $$
    SELECT * FROM vuln.advisory_canonical
    WHERE merge_hash = p_merge_hash;
$$;

-- Function to get all source edges for a canonical
CREATE OR REPLACE FUNCTION vuln.get_source_edges(p_canonical_id UUID)
RETURNS SETOF vuln.advisory_source_edge
LANGUAGE sql STABLE
AS $$
    SELECT * FROM vuln.advisory_source_edge
    WHERE canonical_id = p_canonical_id
    ORDER BY precedence_rank ASC, fetched_at DESC;
$$;

-- Function to upsert canonical with merge_hash dedup
CREATE OR REPLACE FUNCTION vuln.upsert_canonical(
    p_cve TEXT,
    p_affects_key TEXT,
    p_version_range JSONB,
    p_weakness TEXT[],
    p_merge_hash TEXT,
    p_severity TEXT DEFAULT NULL,
    p_epss_score NUMERIC DEFAULT NULL,
    p_exploit_known BOOLEAN DEFAULT FALSE,
    p_title TEXT DEFAULT NULL,
    p_summary TEXT DEFAULT NULL
)
RETURNS UUID
LANGUAGE plpgsql
AS $$
DECLARE
    v_id UUID;
BEGIN
    INSERT INTO vuln.advisory_canonical (
        cve, affects_key, version_range, weakness, merge_hash,
        severity, epss_score, exploit_known, title, summary
    )
    VALUES (
        p_cve, p_affects_key, p_version_range, p_weakness, p_merge_hash,
        p_severity, p_epss_score, p_exploit_known, p_title, p_summary
    )
    ON CONFLICT (merge_hash) DO UPDATE SET
        severity      = COALESCE(EXCLUDED.severity, vuln.advisory_canonical.severity),
        epss_score    = COALESCE(EXCLUDED.epss_score, vuln.advisory_canonical.epss_score),
        exploit_known = EXCLUDED.exploit_known OR vuln.advisory_canonical.exploit_known,
        title         = COALESCE(EXCLUDED.title, vuln.advisory_canonical.title),
        summary       = COALESCE(EXCLUDED.summary, vuln.advisory_canonical.summary),
        updated_at    = NOW()
    RETURNING id INTO v_id;

    RETURN v_id;
END;
$$;

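-- Example (illustrative values only, not part of the migration): a dedup-aware
-- upsert; calling this twice with the same p_merge_hash returns the same id.
--
-- SELECT vuln.upsert_canonical(
--     'CVE-2024-1234', 'pkg:npm/lodash@4.17.0',
--     '{"introduced": "1.0.0", "fixed": "2.0.0"}'::jsonb,
--     ARRAY['CWE-79'], 'sha256:a1b2...', 'high');
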
-- Function to add source edge with dedup
CREATE OR REPLACE FUNCTION vuln.add_source_edge(
    p_canonical_id UUID,
    p_source_id UUID,
    p_source_advisory_id TEXT,
    p_source_doc_hash TEXT,
    p_vendor_status TEXT DEFAULT NULL,
    p_precedence_rank INT DEFAULT 100,
    p_dsse_envelope JSONB DEFAULT NULL,
    p_raw_payload JSONB DEFAULT NULL,
    p_fetched_at TIMESTAMPTZ DEFAULT NOW()
)
RETURNS UUID
LANGUAGE plpgsql
AS $$
DECLARE
    v_id UUID;
BEGIN
    INSERT INTO vuln.advisory_source_edge (
        canonical_id, source_id, source_advisory_id, source_doc_hash,
        vendor_status, precedence_rank, dsse_envelope, raw_payload, fetched_at
    )
    VALUES (
        p_canonical_id, p_source_id, p_source_advisory_id, p_source_doc_hash,
        p_vendor_status, p_precedence_rank, p_dsse_envelope, p_raw_payload, p_fetched_at
    )
    ON CONFLICT (canonical_id, source_id, source_doc_hash) DO UPDATE SET
        vendor_status   = COALESCE(EXCLUDED.vendor_status, vuln.advisory_source_edge.vendor_status),
        precedence_rank = LEAST(EXCLUDED.precedence_rank, vuln.advisory_source_edge.precedence_rank),
        dsse_envelope   = COALESCE(EXCLUDED.dsse_envelope, vuln.advisory_source_edge.dsse_envelope),
        raw_payload     = COALESCE(EXCLUDED.raw_payload, vuln.advisory_source_edge.raw_payload)
    RETURNING id INTO v_id;

    RETURN v_id;
END;
$$;

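-- Example (illustrative ids only, not part of the migration): linking a
-- canonical to a vendor document. The LEAST() in the conflict branch means
-- precedence can only improve on re-ingest, never regress.
--
-- SELECT vuln.add_source_edge(
--     '00000000-0000-0000-0000-000000000001'::uuid,
--     '00000000-0000-0000-0000-000000000002'::uuid,
--     'DSA-5678', 'sha256:f00d...', 'affected', 20);
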
-- Function to count active canonicals by CVE prefix
CREATE OR REPLACE FUNCTION vuln.count_canonicals_by_cve_year(p_year INT)
RETURNS BIGINT
LANGUAGE sql STABLE
AS $$
    SELECT COUNT(*) FROM vuln.advisory_canonical
    WHERE cve LIKE 'CVE-' || p_year::TEXT || '-%'
      AND status = 'active';
$$;

-- Comments
COMMENT ON FUNCTION vuln.get_canonical_by_hash(TEXT) IS 'Lookup canonical advisory by merge_hash';
COMMENT ON FUNCTION vuln.get_source_edges(UUID) IS 'Get all source edges for a canonical, ordered by precedence';
COMMENT ON FUNCTION vuln.upsert_canonical IS 'Insert or update canonical advisory with merge_hash deduplication';
COMMENT ON FUNCTION vuln.add_source_edge IS 'Add source edge with deduplication by (canonical, source, doc_hash)';

@@ -0,0 +1,144 @@

-- Concelier Migration 012: Populate advisory_canonical table
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-012
-- Populates advisory_canonical from existing advisories with placeholder merge_hash
-- NOTE: merge_hash will be backfilled by application-side MergeHashBackfillService

-- Populate advisory_canonical from existing advisories
-- Each advisory + affected package combination becomes a canonical record
INSERT INTO vuln.advisory_canonical (
    id,
    cve,
    affects_key,
    version_range,
    weakness,
    merge_hash,
    status,
    severity,
    epss_score,
    exploit_known,
    title,
    summary,
    created_at,
    updated_at
)
SELECT
    gen_random_uuid() AS id,
    COALESCE(
        -- Try to get CVE from aliases
        (SELECT alias_value FROM vuln.advisory_aliases
         WHERE advisory_id = a.id AND alias_type = 'CVE'
         ORDER BY is_primary DESC LIMIT 1),
        -- Fall back to primary_vuln_id
        a.primary_vuln_id
    ) AS cve,
    COALESCE(
        -- Prefer PURL if available
        aa.purl,
        -- Otherwise construct from ecosystem/package
        CASE
            WHEN aa.ecosystem IS NOT NULL AND aa.package_name IS NOT NULL
            THEN 'pkg:' || lower(aa.ecosystem) || '/' || aa.package_name
            ELSE 'unknown:' || a.id::text
        END
    ) AS affects_key,
    aa.version_range AS version_range,
    -- Aggregate CWE IDs into sorted array
    COALESCE(
        (SELECT array_agg(DISTINCT upper(w.cwe_id) ORDER BY upper(w.cwe_id))
         FROM vuln.advisory_weaknesses w
         WHERE w.advisory_id = a.id),
        '{}'::text[]
    ) AS weakness,
    -- Placeholder merge_hash - will be backfilled by application
    'PLACEHOLDER_' || a.id::text || '_' || COALESCE(aa.id::text, 'noaffects') AS merge_hash,
    CASE
        WHEN a.withdrawn_at IS NOT NULL THEN 'withdrawn'
        ELSE 'active'
    END AS status,
    a.severity,
    -- EPSS score if available from KEV
    (SELECT CASE WHEN kf.known_ransomware_use THEN 0.95 ELSE NULL END
     FROM vuln.kev_flags kf
     WHERE kf.advisory_id = a.id
     LIMIT 1) AS epss_score,
    -- exploit_known from KEV flags
    EXISTS(SELECT 1 FROM vuln.kev_flags kf WHERE kf.advisory_id = a.id) AS exploit_known,
    a.title,
    a.summary,
    a.created_at,
    NOW() AS updated_at
FROM vuln.advisories a
LEFT JOIN vuln.advisory_affected aa ON aa.advisory_id = a.id
WHERE NOT EXISTS (
    -- Skip if already migrated (idempotent)
    SELECT 1 FROM vuln.advisory_canonical c
    WHERE c.merge_hash LIKE 'PLACEHOLDER_' || a.id::text || '%'
)
ON CONFLICT (merge_hash) DO NOTHING;

-- Handle advisories without affected packages
INSERT INTO vuln.advisory_canonical (
    id,
    cve,
    affects_key,
    version_range,
    weakness,
    merge_hash,
    status,
    severity,
    exploit_known,
    title,
    summary,
    created_at,
    updated_at
)
SELECT
    gen_random_uuid() AS id,
    COALESCE(
        (SELECT alias_value FROM vuln.advisory_aliases
         WHERE advisory_id = a.id AND alias_type = 'CVE'
         ORDER BY is_primary DESC LIMIT 1),
        a.primary_vuln_id
    ) AS cve,
    'unknown:' || a.primary_vuln_id AS affects_key,
    NULL AS version_range,
    COALESCE(
        (SELECT array_agg(DISTINCT upper(w.cwe_id) ORDER BY upper(w.cwe_id))
         FROM vuln.advisory_weaknesses w
         WHERE w.advisory_id = a.id),
        '{}'::text[]
    ) AS weakness,
    'PLACEHOLDER_' || a.id::text || '_noaffects' AS merge_hash,
    CASE
        WHEN a.withdrawn_at IS NOT NULL THEN 'withdrawn'
        ELSE 'active'
    END AS status,
    a.severity,
    EXISTS(SELECT 1 FROM vuln.kev_flags kf WHERE kf.advisory_id = a.id) AS exploit_known,
    a.title,
    a.summary,
    a.created_at,
    NOW() AS updated_at
FROM vuln.advisories a
WHERE NOT EXISTS (
    SELECT 1 FROM vuln.advisory_affected aa WHERE aa.advisory_id = a.id
)
AND NOT EXISTS (
    SELECT 1 FROM vuln.advisory_canonical c
    WHERE c.merge_hash LIKE 'PLACEHOLDER_' || a.id::text || '%'
)
ON CONFLICT (merge_hash) DO NOTHING;

-- Log migration progress
DO $$
DECLARE
    canonical_count BIGINT;
    placeholder_count BIGINT;
BEGIN
    SELECT COUNT(*) INTO canonical_count FROM vuln.advisory_canonical;
    SELECT COUNT(*) INTO placeholder_count FROM vuln.advisory_canonical WHERE merge_hash LIKE 'PLACEHOLDER_%';

    RAISE NOTICE 'Migration 012 complete: % canonical records, % with placeholder hash (need backfill)',
        canonical_count, placeholder_count;
END $$;

@@ -0,0 +1,129 @@

-- Concelier Migration 013: Populate advisory_source_edge table
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-013
-- Creates source edges from existing advisory snapshots and provenance data

-- Create source edges from advisory snapshots
INSERT INTO vuln.advisory_source_edge (
    id,
    canonical_id,
    source_id,
    source_advisory_id,
    source_doc_hash,
    vendor_status,
    precedence_rank,
    dsse_envelope,
    raw_payload,
    fetched_at,
    created_at
)
SELECT
    gen_random_uuid() AS id,
    c.id AS canonical_id,
    a.source_id AS source_id,
    a.advisory_key AS source_advisory_id,
    snap.content_hash AS source_doc_hash,
    CASE
        WHEN a.withdrawn_at IS NOT NULL THEN 'not_affected'
        ELSE 'affected'
    END AS vendor_status,
    COALESCE(s.priority, 100) AS precedence_rank,
    NULL AS dsse_envelope,  -- DSSE signatures added later
    a.raw_payload AS raw_payload,
    snap.created_at AS fetched_at,
    NOW() AS created_at
FROM vuln.advisory_canonical c
JOIN vuln.advisories a ON (
    -- Match by CVE
    c.cve = a.primary_vuln_id
    OR EXISTS (
        SELECT 1 FROM vuln.advisory_aliases al
        WHERE al.advisory_id = a.id AND al.alias_value = c.cve
    )
)
JOIN vuln.advisory_snapshots snap ON snap.advisory_key = a.advisory_key
JOIN vuln.feed_snapshots fs ON fs.id = snap.feed_snapshot_id
LEFT JOIN vuln.sources s ON s.id = a.source_id
WHERE a.source_id IS NOT NULL
AND NOT EXISTS (
    -- Skip if already migrated (idempotent)
    SELECT 1 FROM vuln.advisory_source_edge e
    WHERE e.canonical_id = c.id
      AND e.source_id = a.source_id
      AND e.source_doc_hash = snap.content_hash
)
ON CONFLICT (canonical_id, source_id, source_doc_hash) DO NOTHING;

-- Create source edges directly from advisories (for those without snapshots)
INSERT INTO vuln.advisory_source_edge (
    id,
    canonical_id,
    source_id,
    source_advisory_id,
    source_doc_hash,
    vendor_status,
    precedence_rank,
    dsse_envelope,
    raw_payload,
    fetched_at,
    created_at
)
SELECT
    gen_random_uuid() AS id,
    c.id AS canonical_id,
    a.source_id AS source_id,
    a.advisory_key AS source_advisory_id,
    -- Generate hash from raw_payload if available, otherwise use advisory_key.
    -- convert_to() is required here: text cannot be cast directly to bytea.
    COALESCE(
        encode(sha256(convert_to(a.raw_payload::text, 'UTF8')), 'hex'),
        encode(sha256(convert_to(a.advisory_key, 'UTF8')), 'hex')
    ) AS source_doc_hash,
    CASE
        WHEN a.withdrawn_at IS NOT NULL THEN 'not_affected'
        ELSE 'affected'
    END AS vendor_status,
    COALESCE(s.priority, 100) AS precedence_rank,
    NULL AS dsse_envelope,
    a.raw_payload AS raw_payload,
    a.created_at AS fetched_at,
    NOW() AS created_at
FROM vuln.advisory_canonical c
JOIN vuln.advisories a ON (
    c.cve = a.primary_vuln_id
    OR EXISTS (
        SELECT 1 FROM vuln.advisory_aliases al
        WHERE al.advisory_id = a.id AND al.alias_value = c.cve
    )
)
LEFT JOIN vuln.sources s ON s.id = a.source_id
WHERE a.source_id IS NOT NULL
AND NOT EXISTS (
    -- Only for advisories without snapshots
    SELECT 1 FROM vuln.advisory_snapshots snap
    WHERE snap.advisory_key = a.advisory_key
)
AND NOT EXISTS (
    SELECT 1 FROM vuln.advisory_source_edge e
    WHERE e.canonical_id = c.id AND e.source_id = a.source_id
)
ON CONFLICT (canonical_id, source_id, source_doc_hash) DO NOTHING;

-- Log migration progress
DO $$
DECLARE
    edge_count BIGINT;
    canonical_with_edges BIGINT;
    avg_edges NUMERIC;
BEGIN
    SELECT COUNT(*) INTO edge_count FROM vuln.advisory_source_edge;
    SELECT COUNT(DISTINCT canonical_id) INTO canonical_with_edges FROM vuln.advisory_source_edge;

    IF canonical_with_edges > 0 THEN
        avg_edges := round(edge_count::numeric / canonical_with_edges, 2);
    ELSE
        avg_edges := 0;
    END IF;

    -- RAISE only understands bare % placeholders, so the average is pre-rounded
    RAISE NOTICE 'Migration 013 complete: % source edges, % canonicals with edges, avg % edges/canonical',
        edge_count, canonical_with_edges, avg_edges;
END $$;

@@ -0,0 +1,165 @@

-- Concelier Migration 014: Verification queries for canonical migration
-- Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
-- Task: SCHEMA-8200-014
-- Verification queries to compare record counts and data integrity

-- Verification Report
DO $$
DECLARE
    -- Source counts
    advisory_count BIGINT;
    affected_count BIGINT;
    alias_count BIGINT;
    weakness_count BIGINT;
    kev_count BIGINT;
    snapshot_count BIGINT;
    source_count BIGINT;

    -- Target counts
    canonical_count BIGINT;
    canonical_active BIGINT;
    canonical_withdrawn BIGINT;
    canonical_placeholder BIGINT;
    edge_count BIGINT;
    edge_unique_sources BIGINT;
    edge_with_payload BIGINT;

    -- Integrity checks
    orphan_edges BIGINT;
    missing_sources BIGINT;
    duplicate_hashes BIGINT;
    avg_edges_per_canonical NUMERIC;

BEGIN
    -- Source table counts
    SELECT COUNT(*) INTO advisory_count FROM vuln.advisories;
    SELECT COUNT(*) INTO affected_count FROM vuln.advisory_affected;
    SELECT COUNT(*) INTO alias_count FROM vuln.advisory_aliases;
    SELECT COUNT(*) INTO weakness_count FROM vuln.advisory_weaknesses;
    SELECT COUNT(*) INTO kev_count FROM vuln.kev_flags;
    SELECT COUNT(*) INTO snapshot_count FROM vuln.advisory_snapshots;
    SELECT COUNT(*) INTO source_count FROM vuln.sources WHERE enabled = true;

    -- Target table counts
    SELECT COUNT(*) INTO canonical_count FROM vuln.advisory_canonical;
    SELECT COUNT(*) INTO canonical_active FROM vuln.advisory_canonical WHERE status = 'active';
    SELECT COUNT(*) INTO canonical_withdrawn FROM vuln.advisory_canonical WHERE status = 'withdrawn';
    SELECT COUNT(*) INTO canonical_placeholder FROM vuln.advisory_canonical WHERE merge_hash LIKE 'PLACEHOLDER_%';
    SELECT COUNT(*) INTO edge_count FROM vuln.advisory_source_edge;
    SELECT COUNT(DISTINCT source_id) INTO edge_unique_sources FROM vuln.advisory_source_edge;
    SELECT COUNT(*) INTO edge_with_payload FROM vuln.advisory_source_edge WHERE raw_payload IS NOT NULL;

    -- Integrity checks
    SELECT COUNT(*) INTO orphan_edges
    FROM vuln.advisory_source_edge e
    WHERE NOT EXISTS (SELECT 1 FROM vuln.advisory_canonical c WHERE c.id = e.canonical_id);

    SELECT COUNT(*) INTO missing_sources
    FROM vuln.advisory_source_edge e
    WHERE NOT EXISTS (SELECT 1 FROM vuln.sources s WHERE s.id = e.source_id);

    SELECT COUNT(*) INTO duplicate_hashes
    FROM (
        SELECT merge_hash, COUNT(*) as cnt
        FROM vuln.advisory_canonical
        GROUP BY merge_hash
        HAVING COUNT(*) > 1
    ) dups;

    IF canonical_count > 0 THEN
        avg_edges_per_canonical := round(edge_count::numeric / canonical_count, 2);
    ELSE
        avg_edges_per_canonical := 0;
    END IF;

    -- Report
    RAISE NOTICE '============================================';
    RAISE NOTICE 'CANONICAL MIGRATION VERIFICATION REPORT';
    RAISE NOTICE '============================================';
    RAISE NOTICE '';
    RAISE NOTICE 'SOURCE TABLE COUNTS:';
    RAISE NOTICE '  Advisories:          %', advisory_count;
    RAISE NOTICE '  Affected packages:   %', affected_count;
    RAISE NOTICE '  Aliases:             %', alias_count;
    RAISE NOTICE '  Weaknesses (CWE):    %', weakness_count;
    RAISE NOTICE '  KEV flags:           %', kev_count;
    RAISE NOTICE '  Snapshots:           %', snapshot_count;
    RAISE NOTICE '  Enabled sources:     %', source_count;
    RAISE NOTICE '';
    RAISE NOTICE 'TARGET TABLE COUNTS:';
    RAISE NOTICE '  Canonicals:          % (active: %, withdrawn: %)', canonical_count, canonical_active, canonical_withdrawn;
    RAISE NOTICE '  Placeholder hashes:  % (need backfill)', canonical_placeholder;
    RAISE NOTICE '  Source edges:        %', edge_count;
    RAISE NOTICE '  Unique sources:      %', edge_unique_sources;
    RAISE NOTICE '  Edges with payload:  %', edge_with_payload;
    RAISE NOTICE '';
    RAISE NOTICE 'METRICS:';
    RAISE NOTICE '  Avg edges/canonical: %', avg_edges_per_canonical;
    RAISE NOTICE '';
    RAISE NOTICE 'INTEGRITY CHECKS:';
    RAISE NOTICE '  Orphan edges:        % %', orphan_edges, CASE WHEN orphan_edges = 0 THEN '(OK)' ELSE '(FAIL)' END;
    RAISE NOTICE '  Missing sources:     % %', missing_sources, CASE WHEN missing_sources = 0 THEN '(OK)' ELSE '(FAIL)' END;
    RAISE NOTICE '  Duplicate hashes:    % %', duplicate_hashes, CASE WHEN duplicate_hashes = 0 THEN '(OK)' ELSE '(FAIL)' END;
    RAISE NOTICE '';

    -- Report verification outcome (NOTICE only; failures do not abort the migration)
    IF orphan_edges > 0 OR missing_sources > 0 OR duplicate_hashes > 0 THEN
        RAISE NOTICE 'VERIFICATION FAILED - Please investigate integrity issues';
    ELSE
        RAISE NOTICE 'VERIFICATION PASSED - Migration completed successfully';
    END IF;

    RAISE NOTICE '============================================';
END $$;

-- Additional verification queries (run individually for debugging)

-- Find CVEs that weren't migrated
-- SELECT a.primary_vuln_id, a.advisory_key, a.created_at
-- FROM vuln.advisories a
-- WHERE NOT EXISTS (
--     SELECT 1 FROM vuln.advisory_canonical c WHERE c.cve = a.primary_vuln_id
-- )
-- LIMIT 20;

-- Find canonicals without source edges
-- SELECT c.cve, c.affects_key, c.created_at
-- FROM vuln.advisory_canonical c
-- WHERE NOT EXISTS (
--     SELECT 1 FROM vuln.advisory_source_edge e WHERE e.canonical_id = c.id
-- )
-- LIMIT 20;

-- Distribution of edges per canonical
-- SELECT
--     CASE
--         WHEN edge_count = 0 THEN '0'
--         WHEN edge_count = 1 THEN '1'
--         WHEN edge_count BETWEEN 2 AND 5 THEN '2-5'
--         WHEN edge_count BETWEEN 6 AND 10 THEN '6-10'
--         ELSE '10+'
--     END AS edge_range,
--     COUNT(*) AS canonical_count
-- FROM (
--     SELECT c.id, COALESCE(e.edge_count, 0) AS edge_count
--     FROM vuln.advisory_canonical c
--     LEFT JOIN (
--         SELECT canonical_id, COUNT(*) AS edge_count
--         FROM vuln.advisory_source_edge
--         GROUP BY canonical_id
--     ) e ON e.canonical_id = c.id
-- ) sub
-- GROUP BY edge_range
-- ORDER BY edge_range;

-- Top CVEs by source coverage
-- SELECT
--     c.cve,
--     c.severity,
--     c.exploit_known,
--     COUNT(e.id) AS source_count
-- FROM vuln.advisory_canonical c
-- LEFT JOIN vuln.advisory_source_edge e ON e.canonical_id = c.id
-- GROUP BY c.id, c.cve, c.severity, c.exploit_known
-- ORDER BY source_count DESC
-- LIMIT 20;

@@ -0,0 +1,85 @@

// -----------------------------------------------------------------------------
// AdvisoryCanonicalEntity.cs
// Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
// Task: SCHEMA-8200-007
// Description: Entity for deduplicated canonical advisory records
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Storage.Postgres.Models;

/// <summary>
/// Represents a deduplicated canonical advisory in the vuln schema.
/// Canonical advisories are identified by their semantic merge_hash.
/// </summary>
public sealed class AdvisoryCanonicalEntity
{
    /// <summary>
    /// Unique canonical advisory identifier.
    /// </summary>
    public required Guid Id { get; init; }

    /// <summary>
    /// CVE identifier (e.g., "CVE-2024-1234").
    /// </summary>
    public required string Cve { get; init; }

    /// <summary>
    /// Normalized PURL or CPE identifying the affected package.
    /// </summary>
    public required string AffectsKey { get; init; }

    /// <summary>
    /// Structured version range as JSON (introduced, fixed, last_affected).
    /// </summary>
    public string? VersionRange { get; init; }

    /// <summary>
    /// Sorted CWE array (e.g., ["CWE-79", "CWE-89"]).
    /// </summary>
    public string[] Weakness { get; init; } = [];

    /// <summary>
    /// Deterministic SHA256 hash of (cve, affects_key, version_range, weakness, patch_lineage).
    /// </summary>
    public required string MergeHash { get; init; }

    /// <summary>
    /// Status: active, stub, or withdrawn.
    /// </summary>
    public string Status { get; init; } = "active";

    /// <summary>
    /// Normalized severity: critical, high, medium, low, none, unknown.
    /// </summary>
    public string? Severity { get; init; }

    /// <summary>
    /// EPSS exploit prediction probability (0.0000-1.0000).
    /// </summary>
    public decimal? EpssScore { get; init; }

    /// <summary>
    /// Whether an exploit is known to exist.
    /// </summary>
    public bool ExploitKnown { get; init; }

    /// <summary>
    /// Advisory title (for stub degradation).
    /// </summary>
    public string? Title { get; init; }

    /// <summary>
    /// Advisory summary (for stub degradation).
    /// </summary>
    public string? Summary { get; init; }

    /// <summary>
    /// When the canonical record was created.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// When the canonical record was last updated.
    /// </summary>
    public DateTimeOffset UpdatedAt { get; init; }
}

@@ -0,0 +1,71 @@

// -----------------------------------------------------------------------------
// AdvisorySourceEdgeEntity.cs
// Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
// Task: SCHEMA-8200-008
// Description: Entity linking canonical advisory to source documents with DSSE
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Storage.Postgres.Models;

/// <summary>
/// Represents a link between a canonical advisory and its source document.
/// Stores DSSE signature envelopes and raw payload for provenance.
/// </summary>
public sealed class AdvisorySourceEdgeEntity
{
    /// <summary>
    /// Unique source edge identifier.
    /// </summary>
    public required Guid Id { get; init; }

    /// <summary>
    /// Reference to the deduplicated canonical advisory.
    /// </summary>
    public required Guid CanonicalId { get; init; }

    /// <summary>
    /// Reference to the feed source.
    /// </summary>
    public required Guid SourceId { get; init; }

    /// <summary>
    /// Vendor's advisory ID (e.g., "DSA-5678", "RHSA-2024:1234").
    /// </summary>
    public required string SourceAdvisoryId { get; init; }

    /// <summary>
    /// SHA256 hash of the raw source document.
    /// </summary>
    public required string SourceDocHash { get; init; }

    /// <summary>
    /// VEX-style status: affected, not_affected, fixed, under_investigation.
    /// </summary>
    public string? VendorStatus { get; init; }

    /// <summary>
    /// Source priority: vendor=10, distro=20, osv=30, nvd=40, default=100.
    /// Lower value = higher priority.
    /// </summary>
    public int PrecedenceRank { get; init; } = 100;

    /// <summary>
    /// DSSE signature envelope as JSON ({ payloadType, payload, signatures[] }).
    /// </summary>
    public string? DsseEnvelope { get; init; }

    /// <summary>
    /// Original advisory document as JSON.
    /// </summary>
    public string? RawPayload { get; init; }

    /// <summary>
    /// When the source document was fetched.
    /// </summary>
    public DateTimeOffset FetchedAt { get; init; }

    /// <summary>
    /// When the edge record was created.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }
}

@@ -0,0 +1,74 @@

// -----------------------------------------------------------------------------
// SitePolicyEntity.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Task: SYNC-8200-005
// Description: Entity for per-site federation governance policies
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Storage.Postgres.Models;

/// <summary>
/// Represents a site federation policy for governance control.
/// </summary>
public sealed class SitePolicyEntity
{
    /// <summary>
    /// Unique policy identifier.
    /// </summary>
    public required Guid Id { get; init; }

    /// <summary>
    /// Remote site identifier this policy applies to.
    /// </summary>
    public required string SiteId { get; init; }

    /// <summary>
    /// Human-readable display name for the site.
    /// </summary>
    public string? DisplayName { get; init; }

    /// <summary>
    /// Source keys to allow (empty allows all sources).
    /// </summary>
    public string[] AllowedSources { get; init; } = [];

    /// <summary>
    /// Source keys to deny (takes precedence over allowed).
    /// </summary>
    public string[] DeniedSources { get; init; } = [];

    /// <summary>
    /// Maximum bundle size in megabytes.
    /// </summary>
    public int MaxBundleSizeMb { get; init; } = 100;

    /// <summary>
    /// Maximum items per bundle.
    /// </summary>
    public int MaxItemsPerBundle { get; init; } = 10000;

    /// <summary>
    /// Whether bundles must be cryptographically signed.
    /// </summary>
    public bool RequireSignature { get; init; } = true;

    /// <summary>
    /// Signing key IDs or issuer patterns allowed for bundle verification.
    /// </summary>
    public string[] AllowedSigners { get; init; } = [];

    /// <summary>
    /// Whether this site policy is enabled.
    /// </summary>
    public bool Enabled { get; init; } = true;

    /// <summary>
    /// When the policy was created.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// When the policy was last updated.
    /// </summary>
    public DateTimeOffset UpdatedAt { get; init; }
}

@@ -0,0 +1,49 @@

// -----------------------------------------------------------------------------
// SyncLedgerEntity.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Task: SYNC-8200-004
// Description: Entity for tracking federation sync state per remote site
// -----------------------------------------------------------------------------

namespace StellaOps.Concelier.Storage.Postgres.Models;

/// <summary>
/// Represents a sync ledger entry for federation cursor tracking.
/// </summary>
public sealed class SyncLedgerEntity
{
    /// <summary>
    /// Unique ledger entry identifier.
    /// </summary>
    public required Guid Id { get; init; }

    /// <summary>
    /// Remote site identifier (e.g., "site-us-west", "airgap-dc2").
    /// </summary>
    public required string SiteId { get; init; }

    /// <summary>
    /// Opaque cursor position (usually ISO8601 timestamp#sequence).
    /// </summary>
    public required string Cursor { get; init; }

    /// <summary>
    /// SHA256 hash of the imported bundle for deduplication.
    /// </summary>
    public required string BundleHash { get; init; }

    /// <summary>
    /// Number of items in the imported bundle.
    /// </summary>
    public int ItemsCount { get; init; }

    /// <summary>
    /// When the bundle was signed by the remote site.
    /// </summary>
    public DateTimeOffset SignedAt { get; init; }

    /// <summary>
    /// When the bundle was imported to this site.
    /// </summary>
    public DateTimeOffset ImportedAt { get; init; }
}

@@ -0,0 +1,429 @@

// -----------------------------------------------------------------------------
// AdvisoryCanonicalRepository.cs
// Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
// Task: SCHEMA-8200-010
// Description: PostgreSQL repository for canonical advisory and source edge operations
// -----------------------------------------------------------------------------

using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;

namespace StellaOps.Concelier.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL repository for canonical advisory and source edge operations.
/// </summary>
public sealed class AdvisoryCanonicalRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryCanonicalRepository
{
    private const string SystemTenantId = "_system";

    public AdvisoryCanonicalRepository(ConcelierDataSource dataSource, ILogger<AdvisoryCanonicalRepository> logger)
        : base(dataSource, logger)
    {
    }

    #region Canonical Advisory Operations

    public Task<AdvisoryCanonicalEntity?> GetByIdAsync(Guid id, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE id = @id
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "id", id),
            MapCanonical,
            ct);
    }

    public Task<AdvisoryCanonicalEntity?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE merge_hash = @merge_hash
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "merge_hash", mergeHash),
            MapCanonical,
            ct);
    }

    public Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetByCveAsync(string cve, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE cve = @cve
            ORDER BY updated_at DESC
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "cve", cve),
            MapCanonical,
            ct);
    }

    public Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetByAffectsKeyAsync(string affectsKey, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE affects_key = @affects_key
            ORDER BY updated_at DESC
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "affects_key", affectsKey),
            MapCanonical,
            ct);
    }

    public Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetUpdatedSinceAsync(
        DateTimeOffset since,
        int limit = 1000,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE updated_at > @since
            ORDER BY updated_at ASC
            LIMIT @limit
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "since", since);
                AddParameter(cmd, "limit", limit);
            },
            MapCanonical,
            ct);
    }

    public async Task<Guid> UpsertAsync(AdvisoryCanonicalEntity entity, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO vuln.advisory_canonical
                (id, cve, affects_key, version_range, weakness, merge_hash,
                 status, severity, epss_score, exploit_known, title, summary)
            VALUES
                (@id, @cve, @affects_key, @version_range::jsonb, @weakness, @merge_hash,
                 @status, @severity, @epss_score, @exploit_known, @title, @summary)
            ON CONFLICT (merge_hash) DO UPDATE SET
                severity = COALESCE(EXCLUDED.severity, vuln.advisory_canonical.severity),
                epss_score = COALESCE(EXCLUDED.epss_score, vuln.advisory_canonical.epss_score),
                exploit_known = EXCLUDED.exploit_known OR vuln.advisory_canonical.exploit_known,
                title = COALESCE(EXCLUDED.title, vuln.advisory_canonical.title),
                summary = COALESCE(EXCLUDED.summary, vuln.advisory_canonical.summary),
                updated_at = NOW()
            RETURNING id
            """;

        var id = entity.Id == Guid.Empty ? Guid.NewGuid() : entity.Id;

        return await ExecuteScalarAsync<Guid>(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "id", id);
                AddParameter(cmd, "cve", entity.Cve);
                AddParameter(cmd, "affects_key", entity.AffectsKey);
                AddJsonbParameter(cmd, "version_range", entity.VersionRange);
                AddTextArrayParameter(cmd, "weakness", entity.Weakness);
                AddParameter(cmd, "merge_hash", entity.MergeHash);
                AddParameter(cmd, "status", entity.Status);
                AddParameter(cmd, "severity", entity.Severity);
                AddParameter(cmd, "epss_score", entity.EpssScore);
                AddParameter(cmd, "exploit_known", entity.ExploitKnown);
                AddParameter(cmd, "title", entity.Title);
                AddParameter(cmd, "summary", entity.Summary);
            },
            ct).ConfigureAwait(false);
    }

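    // Example (hypothetical caller, not part of this file): the dedup-aware write
    // path. Upserting the same merge hash twice yields the same canonical id, so a
    // caller can attach per-source provenance afterwards:
    //
    //     var canonicalId = await repository.UpsertAsync(canonicalEntity, ct);
    //     await repository.AddSourceEdgeAsync(sourceEdgeEntity, ct);
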
    public async Task UpdateStatusAsync(Guid id, string status, CancellationToken ct = default)
    {
        const string sql = """
            UPDATE vuln.advisory_canonical
            SET status = @status, updated_at = NOW()
            WHERE id = @id
            """;

        await ExecuteAsync(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "id", id);
                AddParameter(cmd, "status", status);
            },
            ct).ConfigureAwait(false);
    }

    public async Task DeleteAsync(Guid id, CancellationToken ct = default)
    {
        const string sql = "DELETE FROM vuln.advisory_canonical WHERE id = @id";

        await ExecuteAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "id", id),
            ct).ConfigureAwait(false);
    }

    public async Task<long> CountAsync(CancellationToken ct = default)
    {
        // Counts active canonicals only; stub and withdrawn records are excluded.
        const string sql = "SELECT COUNT(*) FROM vuln.advisory_canonical WHERE status = 'active'";

        return await ExecuteScalarAsync<long>(
            SystemTenantId,
            sql,
            null,
            ct).ConfigureAwait(false);
    }

    public async IAsyncEnumerable<AdvisoryCanonicalEntity> StreamActiveAsync(
        [EnumeratorCancellation] CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, cve, affects_key, version_range::text, weakness, merge_hash,
                   status, severity, epss_score, exploit_known, title, summary,
                   created_at, updated_at
            FROM vuln.advisory_canonical
            WHERE status = 'active'
            ORDER BY id
            """;

        await using var connection = await DataSource.OpenSystemConnectionAsync(ct).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false);
        while (await reader.ReadAsync(ct).ConfigureAwait(false))
        {
            yield return MapCanonical(reader);
        }
    }

    #endregion

    #region Source Edge Operations

    public Task<IReadOnlyList<AdvisorySourceEdgeEntity>> GetSourceEdgesAsync(Guid canonicalId, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, canonical_id, source_id, source_advisory_id, source_doc_hash,
                   vendor_status, precedence_rank, dsse_envelope::text, raw_payload::text,
                   fetched_at, created_at
            FROM vuln.advisory_source_edge
            WHERE canonical_id = @canonical_id
            ORDER BY precedence_rank ASC, fetched_at DESC
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "canonical_id", canonicalId),
            MapSourceEdge,
            ct);
    }

    public Task<AdvisorySourceEdgeEntity?> GetSourceEdgeByIdAsync(Guid id, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, canonical_id, source_id, source_advisory_id, source_doc_hash,
                   vendor_status, precedence_rank, dsse_envelope::text, raw_payload::text,
                   fetched_at, created_at
            FROM vuln.advisory_source_edge
            WHERE id = @id
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "id", id),
            MapSourceEdge,
            ct);
    }

    public async Task<Guid> AddSourceEdgeAsync(AdvisorySourceEdgeEntity edge, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO vuln.advisory_source_edge
                (id, canonical_id, source_id, source_advisory_id, source_doc_hash,
                 vendor_status, precedence_rank, dsse_envelope, raw_payload, fetched_at)
            VALUES
                (@id, @canonical_id, @source_id, @source_advisory_id, @source_doc_hash,
                 @vendor_status, @precedence_rank, @dsse_envelope::jsonb, @raw_payload::jsonb, @fetched_at)
            ON CONFLICT (canonical_id, source_id, source_doc_hash) DO UPDATE SET
                vendor_status = COALESCE(EXCLUDED.vendor_status, vuln.advisory_source_edge.vendor_status),
                precedence_rank = LEAST(EXCLUDED.precedence_rank, vuln.advisory_source_edge.precedence_rank),
                dsse_envelope = COALESCE(EXCLUDED.dsse_envelope, vuln.advisory_source_edge.dsse_envelope),
                raw_payload = COALESCE(EXCLUDED.raw_payload, vuln.advisory_source_edge.raw_payload)
            RETURNING id
            """;

        var id = edge.Id == Guid.Empty ? Guid.NewGuid() : edge.Id;

        return await ExecuteScalarAsync<Guid>(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "id", id);
                AddParameter(cmd, "canonical_id", edge.CanonicalId);
                AddParameter(cmd, "source_id", edge.SourceId);
                AddParameter(cmd, "source_advisory_id", edge.SourceAdvisoryId);
                AddParameter(cmd, "source_doc_hash", edge.SourceDocHash);
                AddParameter(cmd, "vendor_status", edge.VendorStatus);
                AddParameter(cmd, "precedence_rank", edge.PrecedenceRank);
                AddJsonbParameter(cmd, "dsse_envelope", edge.DsseEnvelope);
                AddJsonbParameter(cmd, "raw_payload", edge.RawPayload);
                AddParameter(cmd, "fetched_at", edge.FetchedAt == default ? DateTimeOffset.UtcNow : edge.FetchedAt);
            },
            ct).ConfigureAwait(false);
    }

    public Task<IReadOnlyList<AdvisorySourceEdgeEntity>> GetSourceEdgesByAdvisoryIdAsync(
        string sourceAdvisoryId,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, canonical_id, source_id, source_advisory_id, source_doc_hash,
                   vendor_status, precedence_rank, dsse_envelope::text, raw_payload::text,
                   fetched_at, created_at
            FROM vuln.advisory_source_edge
            WHERE source_advisory_id = @source_advisory_id
            ORDER BY fetched_at DESC
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "source_advisory_id", sourceAdvisoryId),
            MapSourceEdge,
            ct);
    }

    public async Task<long> CountSourceEdgesAsync(CancellationToken ct = default)
    {
        const string sql = "SELECT COUNT(*) FROM vuln.advisory_source_edge";

        return await ExecuteScalarAsync<long>(
            SystemTenantId,
            sql,
            null,
            ct).ConfigureAwait(false);
    }

    #endregion

    #region Statistics

    public async Task<CanonicalStatistics> GetStatisticsAsync(CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM vuln.advisory_canonical) AS total_canonicals,
|
||||
(SELECT COUNT(*) FROM vuln.advisory_canonical WHERE status = 'active') AS active_canonicals,
|
||||
(SELECT COUNT(*) FROM vuln.advisory_source_edge) AS total_edges,
|
||||
(SELECT MAX(updated_at) FROM vuln.advisory_canonical) AS last_updated
|
||||
""";
|
||||
|
||||
var stats = await QuerySingleOrDefaultAsync(
|
||||
SystemTenantId,
|
||||
sql,
|
||||
_ => { },
|
||||
reader => new
|
||||
{
|
||||
TotalCanonicals = reader.GetInt64(0),
|
||||
ActiveCanonicals = reader.GetInt64(1),
|
||||
TotalEdges = reader.GetInt64(2),
|
||||
LastUpdated = GetNullableDateTimeOffset(reader, 3)
|
||||
},
|
||||
ct).ConfigureAwait(false);
|
||||
|
||||
if (stats is null)
|
||||
{
|
||||
return new CanonicalStatistics();
|
||||
}
|
||||
|
||||
return new CanonicalStatistics
|
||||
{
|
||||
TotalCanonicals = stats.TotalCanonicals,
|
||||
ActiveCanonicals = stats.ActiveCanonicals,
|
||||
TotalSourceEdges = stats.TotalEdges,
|
||||
AvgSourceEdgesPerCanonical = stats.TotalCanonicals > 0
|
||||
? (double)stats.TotalEdges / stats.TotalCanonicals
|
||||
: 0,
|
||||
LastUpdatedAt = stats.LastUpdated
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mappers
|
||||
|
||||
private static AdvisoryCanonicalEntity MapCanonical(NpgsqlDataReader reader) => new()
|
||||
{
|
||||
Id = reader.GetGuid(0),
|
||||
Cve = reader.GetString(1),
|
||||
AffectsKey = reader.GetString(2),
|
||||
VersionRange = GetNullableString(reader, 3),
|
||||
Weakness = reader.IsDBNull(4) ? [] : reader.GetFieldValue<string[]>(4),
|
||||
MergeHash = reader.GetString(5),
|
||||
Status = reader.GetString(6),
|
||||
Severity = GetNullableString(reader, 7),
|
||||
EpssScore = reader.IsDBNull(8) ? null : reader.GetDecimal(8),
|
||||
ExploitKnown = reader.GetBoolean(9),
|
||||
Title = GetNullableString(reader, 10),
|
||||
Summary = GetNullableString(reader, 11),
|
||||
CreatedAt = reader.GetFieldValue<DateTimeOffset>(12),
|
||||
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(13)
|
||||
};
|
||||
|
||||
private static AdvisorySourceEdgeEntity MapSourceEdge(NpgsqlDataReader reader) => new()
|
||||
{
|
||||
Id = reader.GetGuid(0),
|
||||
CanonicalId = reader.GetGuid(1),
|
||||
SourceId = reader.GetGuid(2),
|
||||
SourceAdvisoryId = reader.GetString(3),
|
||||
SourceDocHash = reader.GetString(4),
|
||||
VendorStatus = GetNullableString(reader, 5),
|
||||
PrecedenceRank = reader.GetInt32(6),
|
||||
DsseEnvelope = GetNullableString(reader, 7),
|
||||
RawPayload = GetNullableString(reader, 8),
|
||||
FetchedAt = reader.GetFieldValue<DateTimeOffset>(9),
|
||||
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10)
|
||||
};
|
||||
|
||||
#endregion
|
||||
}
|
||||
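
// Usage sketch (illustrative only, not one of the sprint tasks): consuming
// StreamActiveAsync with `await foreach` so a large advisory set is processed
// row by row instead of being materialized into a list. The helper class,
// method name, and counting logic below are assumptions made for this example.
internal static class AdvisoryCanonicalStreamingExample
{
    public static async Task<long> CountStreamedAsync(
        AdvisoryCanonicalRepository repository,
        CancellationToken ct)
    {
        long seen = 0;

        // Each entity is mapped and yielded as the underlying reader advances,
        // so memory use stays flat regardless of result-set size.
        await foreach (var _ in repository.StreamActiveAsync(ct))
        {
            seen++;
        }

        return seen;
    }
}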
@@ -0,0 +1,144 @@
// -----------------------------------------------------------------------------
// IAdvisoryCanonicalRepository.cs
// Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
// Task: SCHEMA-8200-009
// Description: Repository interface for canonical advisory operations
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Storage.Postgres.Models;

namespace StellaOps.Concelier.Storage.Postgres.Repositories;

/// <summary>
/// Repository interface for canonical advisory and source edge operations.
/// </summary>
public interface IAdvisoryCanonicalRepository
{
    #region Canonical Advisory Operations

    /// <summary>
    /// Gets a canonical advisory by ID.
    /// </summary>
    Task<AdvisoryCanonicalEntity?> GetByIdAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Gets a canonical advisory by merge hash.
    /// </summary>
    Task<AdvisoryCanonicalEntity?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default);

    /// <summary>
    /// Gets all canonical advisories for a CVE.
    /// </summary>
    Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetByCveAsync(string cve, CancellationToken ct = default);

    /// <summary>
    /// Gets all canonical advisories for an affects key (PURL or CPE).
    /// </summary>
    Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetByAffectsKeyAsync(string affectsKey, CancellationToken ct = default);

    /// <summary>
    /// Gets canonical advisories updated since a given time.
    /// </summary>
    Task<IReadOnlyList<AdvisoryCanonicalEntity>> GetUpdatedSinceAsync(
        DateTimeOffset since,
        int limit = 1000,
        CancellationToken ct = default);

    /// <summary>
    /// Upserts a canonical advisory (insert or update by merge_hash).
    /// </summary>
    Task<Guid> UpsertAsync(AdvisoryCanonicalEntity entity, CancellationToken ct = default);

    /// <summary>
    /// Updates the status of a canonical advisory.
    /// </summary>
    Task UpdateStatusAsync(Guid id, string status, CancellationToken ct = default);

    /// <summary>
    /// Deletes a canonical advisory and all its source edges (cascade).
    /// </summary>
    Task DeleteAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Counts total active canonical advisories.
    /// </summary>
    Task<long> CountAsync(CancellationToken ct = default);

    /// <summary>
    /// Streams all active canonical advisories for batch processing.
    /// </summary>
    IAsyncEnumerable<AdvisoryCanonicalEntity> StreamActiveAsync(CancellationToken ct = default);

    #endregion

    #region Source Edge Operations

    /// <summary>
    /// Gets all source edges for a canonical advisory.
    /// </summary>
    Task<IReadOnlyList<AdvisorySourceEdgeEntity>> GetSourceEdgesAsync(Guid canonicalId, CancellationToken ct = default);

    /// <summary>
    /// Gets a source edge by ID.
    /// </summary>
    Task<AdvisorySourceEdgeEntity?> GetSourceEdgeByIdAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Adds a source edge to a canonical advisory.
    /// </summary>
    Task<Guid> AddSourceEdgeAsync(AdvisorySourceEdgeEntity edge, CancellationToken ct = default);

    /// <summary>
    /// Gets source edges by source advisory ID (vendor ID).
    /// </summary>
    Task<IReadOnlyList<AdvisorySourceEdgeEntity>> GetSourceEdgesByAdvisoryIdAsync(
        string sourceAdvisoryId,
        CancellationToken ct = default);

    /// <summary>
    /// Counts total source edges.
    /// </summary>
    Task<long> CountSourceEdgesAsync(CancellationToken ct = default);

    #endregion

    #region Statistics

    /// <summary>
    /// Gets statistics about canonical advisories.
    /// </summary>
    Task<CanonicalStatistics> GetStatisticsAsync(CancellationToken ct = default);

    #endregion
}

/// <summary>
/// Statistics about canonical advisory records.
/// </summary>
public sealed record CanonicalStatistics
{
    /// <summary>
    /// Total canonical advisory count.
    /// </summary>
    public long TotalCanonicals { get; init; }

    /// <summary>
    /// Active canonical advisory count.
    /// </summary>
    public long ActiveCanonicals { get; init; }

    /// <summary>
    /// Total source edge count.
    /// </summary>
    public long TotalSourceEdges { get; init; }

    /// <summary>
    /// Average source edges per canonical.
    /// </summary>
    public double AvgSourceEdgesPerCanonical { get; init; }

    /// <summary>
    /// Most recent canonical update time.
    /// </summary>
    public DateTimeOffset? LastUpdatedAt { get; init; }
}
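
// Usage sketch (illustrative only, not part of task SCHEMA-8200-009): how a
// caller might combine the operations above during ingest — upsert the
// canonical row by merge hash, then attach the provenance edge. The helper
// class and parameter names local to this example are assumptions.
internal static class AdvisoryCanonicalRepositoryUsageExample
{
    public static async Task<Guid> IngestSketchAsync(
        IAdvisoryCanonicalRepository repository,
        AdvisoryCanonicalEntity canonical,
        AdvisorySourceEdgeEntity edge,
        CancellationToken ct)
    {
        // UpsertAsync is keyed by merge_hash; it returns the canonical row id
        // whether the row was inserted or updated.
        var canonicalId = await repository.UpsertAsync(canonical, ct).ConfigureAwait(false);

        // The edge is expected to carry the canonical id it points at; the
        // store deduplicates on (canonical_id, source_id, source_doc_hash).
        await repository.AddSourceEdgeAsync(edge, ct).ConfigureAwait(false);

        return canonicalId;
    }
}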
@@ -0,0 +1,130 @@
// -----------------------------------------------------------------------------
// ISyncLedgerRepository.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Task: SYNC-8200-006
// Description: Repository interface for federation sync ledger operations
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Storage.Postgres.Models;

namespace StellaOps.Concelier.Storage.Postgres.Repositories;

/// <summary>
/// Repository for federation sync ledger and site policy operations.
/// </summary>
public interface ISyncLedgerRepository
{
    #region Ledger Operations

    /// <summary>
    /// Gets the latest sync ledger entry for a site.
    /// </summary>
    Task<SyncLedgerEntity?> GetLatestAsync(string siteId, CancellationToken ct = default);

    /// <summary>
    /// Gets sync history for a site.
    /// </summary>
    Task<IReadOnlyList<SyncLedgerEntity>> GetHistoryAsync(string siteId, int limit = 10, CancellationToken ct = default);

    /// <summary>
    /// Gets a ledger entry by bundle hash (for deduplication).
    /// </summary>
    Task<SyncLedgerEntity?> GetByBundleHashAsync(string bundleHash, CancellationToken ct = default);

    /// <summary>
    /// Inserts a new ledger entry.
    /// </summary>
    Task<Guid> InsertAsync(SyncLedgerEntity entry, CancellationToken ct = default);

    #endregion

    #region Cursor Operations

    /// <summary>
    /// Gets the current cursor position for a site.
    /// </summary>
    Task<string?> GetCursorAsync(string siteId, CancellationToken ct = default);

    /// <summary>
    /// Advances the cursor to a new position (inserts a new ledger entry).
    /// </summary>
    Task AdvanceCursorAsync(
        string siteId,
        string newCursor,
        string bundleHash,
        int itemsCount,
        DateTimeOffset signedAt,
        CancellationToken ct = default);

    /// <summary>
    /// Checks if importing a bundle would conflict with the existing cursor.
    /// Returns true if the cursor is older than, or the same as, the current position.
    /// </summary>
    Task<bool> IsCursorConflictAsync(string siteId, string cursor, CancellationToken ct = default);

    #endregion

    #region Site Policy Operations

    /// <summary>
    /// Gets the policy for a specific site.
    /// </summary>
    Task<SitePolicyEntity?> GetPolicyAsync(string siteId, CancellationToken ct = default);

    /// <summary>
    /// Creates or updates a site policy.
    /// </summary>
    Task UpsertPolicyAsync(SitePolicyEntity policy, CancellationToken ct = default);

    /// <summary>
    /// Gets all site policies.
    /// </summary>
    Task<IReadOnlyList<SitePolicyEntity>> GetAllPoliciesAsync(bool enabledOnly = true, CancellationToken ct = default);

    /// <summary>
    /// Deletes a site policy.
    /// </summary>
    Task<bool> DeletePolicyAsync(string siteId, CancellationToken ct = default);

    #endregion

    #region Statistics

    /// <summary>
    /// Gets sync statistics across all sites.
    /// </summary>
    Task<SyncStatistics> GetStatisticsAsync(CancellationToken ct = default);

    #endregion
}

/// <summary>
/// Aggregated sync statistics across all sites.
/// </summary>
public sealed record SyncStatistics
{
    /// <summary>
    /// Total number of registered sites.
    /// </summary>
    public int TotalSites { get; init; }

    /// <summary>
    /// Number of enabled sites.
    /// </summary>
    public int EnabledSites { get; init; }

    /// <summary>
    /// Total bundles imported across all sites.
    /// </summary>
    public long TotalBundlesImported { get; init; }

    /// <summary>
    /// Total items imported across all sites.
    /// </summary>
    public long TotalItemsImported { get; init; }

    /// <summary>
    /// Timestamp of the most recent import.
    /// </summary>
    public DateTimeOffset? LastImportAt { get; init; }
}
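
// Usage sketch (illustrative only, not part of task SYNC-8200-006): the
// intended import sequence against this interface — dedupe by bundle hash,
// reject stale cursors, then advance the ledger. The helper class and the
// true/false return convention here are assumptions made for this example.
internal static class SyncLedgerImportExample
{
    public static async Task<bool> TryRecordImportAsync(
        ISyncLedgerRepository ledger,
        string siteId,
        string cursor,
        string bundleHash,
        int itemsCount,
        DateTimeOffset signedAt,
        CancellationToken ct)
    {
        // Skip bundles that have already been imported anywhere.
        if (await ledger.GetByBundleHashAsync(bundleHash, ct).ConfigureAwait(false) is not null)
        {
            return false;
        }

        // Reject bundles whose cursor does not move the site forward.
        if (await ledger.IsCursorConflictAsync(siteId, cursor, ct).ConfigureAwait(false))
        {
            return false;
        }

        await ledger.AdvanceCursorAsync(siteId, cursor, bundleHash, itemsCount, signedAt, ct).ConfigureAwait(false);
        return true;
    }
}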
@@ -0,0 +1,376 @@
// -----------------------------------------------------------------------------
// SyncLedgerRepository.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Task: SYNC-8200-007
// Description: PostgreSQL repository for federation sync ledger operations
// -----------------------------------------------------------------------------

using System.Globalization;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;

namespace StellaOps.Concelier.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL repository for federation sync ledger and site policy operations.
/// </summary>
public sealed class SyncLedgerRepository : RepositoryBase<ConcelierDataSource>, ISyncLedgerRepository
{
    private const string SystemTenantId = "_system";

    public SyncLedgerRepository(ConcelierDataSource dataSource, ILogger<SyncLedgerRepository> logger)
        : base(dataSource, logger)
    {
    }

    #region Ledger Operations

    public Task<SyncLedgerEntity?> GetLatestAsync(string siteId, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, site_id, cursor, bundle_hash, items_count, signed_at, imported_at
            FROM vuln.sync_ledger
            WHERE site_id = @site_id
            ORDER BY signed_at DESC
            LIMIT 1
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "site_id", siteId),
            MapLedgerEntry,
            ct);
    }

    public Task<IReadOnlyList<SyncLedgerEntity>> GetHistoryAsync(string siteId, int limit = 10, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, site_id, cursor, bundle_hash, items_count, signed_at, imported_at
            FROM vuln.sync_ledger
            WHERE site_id = @site_id
            ORDER BY signed_at DESC
            LIMIT @limit
            """;

        return QueryAsync(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "site_id", siteId);
                AddParameter(cmd, "limit", limit);
            },
            MapLedgerEntry,
            ct);
    }

    public Task<SyncLedgerEntity?> GetByBundleHashAsync(string bundleHash, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, site_id, cursor, bundle_hash, items_count, signed_at, imported_at
            FROM vuln.sync_ledger
            WHERE bundle_hash = @bundle_hash
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "bundle_hash", bundleHash),
            MapLedgerEntry,
            ct);
    }

    public async Task<Guid> InsertAsync(SyncLedgerEntity entry, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO vuln.sync_ledger
                (id, site_id, cursor, bundle_hash, items_count, signed_at, imported_at)
            VALUES
                (@id, @site_id, @cursor, @bundle_hash, @items_count, @signed_at, @imported_at)
            RETURNING id
            """;

        var id = entry.Id == Guid.Empty ? Guid.NewGuid() : entry.Id;

        await ExecuteAsync(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "id", id);
                AddParameter(cmd, "site_id", entry.SiteId);
                AddParameter(cmd, "cursor", entry.Cursor);
                AddParameter(cmd, "bundle_hash", entry.BundleHash);
                AddParameter(cmd, "items_count", entry.ItemsCount);
                AddParameter(cmd, "signed_at", entry.SignedAt);
                AddParameter(cmd, "imported_at", entry.ImportedAt == default ? DateTimeOffset.UtcNow : entry.ImportedAt);
            },
            ct).ConfigureAwait(false);

        return id;
    }

    #endregion

    #region Cursor Operations

    public async Task<string?> GetCursorAsync(string siteId, CancellationToken ct = default)
    {
        const string sql = """
            SELECT cursor
            FROM vuln.sync_ledger
            WHERE site_id = @site_id
            ORDER BY signed_at DESC
            LIMIT 1
            """;

        return await ExecuteScalarAsync<string>(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "site_id", siteId),
            ct).ConfigureAwait(false);
    }

    public async Task AdvanceCursorAsync(
        string siteId,
        string newCursor,
        string bundleHash,
        int itemsCount,
        DateTimeOffset signedAt,
        CancellationToken ct = default)
    {
        var entry = new SyncLedgerEntity
        {
            Id = Guid.NewGuid(),
            SiteId = siteId,
            Cursor = newCursor,
            BundleHash = bundleHash,
            ItemsCount = itemsCount,
            SignedAt = signedAt,
            ImportedAt = DateTimeOffset.UtcNow
        };

        await InsertAsync(entry, ct).ConfigureAwait(false);
    }

    public async Task<bool> IsCursorConflictAsync(string siteId, string cursor, CancellationToken ct = default)
    {
        var currentCursor = await GetCursorAsync(siteId, ct).ConfigureAwait(false);

        if (currentCursor is null)
        {
            // No existing cursor, no conflict
            return false;
        }

        // A conflict exists unless the incoming cursor is strictly newer than the current one.
        return !CursorFormat.IsAfter(cursor, currentCursor);
    }

    #endregion

    #region Site Policy Operations

    public Task<SitePolicyEntity?> GetPolicyAsync(string siteId, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, site_id, display_name, allowed_sources, denied_sources,
                   max_bundle_size_mb, max_items_per_bundle, require_signature,
                   allowed_signers, enabled, created_at, updated_at
            FROM vuln.site_policy
            WHERE site_id = @site_id
            """;

        return QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "site_id", siteId),
            MapPolicy,
            ct);
    }

    public async Task UpsertPolicyAsync(SitePolicyEntity policy, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO vuln.site_policy
                (id, site_id, display_name, allowed_sources, denied_sources,
                 max_bundle_size_mb, max_items_per_bundle, require_signature,
                 allowed_signers, enabled)
            VALUES
                (@id, @site_id, @display_name, @allowed_sources, @denied_sources,
                 @max_bundle_size_mb, @max_items_per_bundle, @require_signature,
                 @allowed_signers, @enabled)
            ON CONFLICT (site_id) DO UPDATE SET
                display_name = EXCLUDED.display_name,
                allowed_sources = EXCLUDED.allowed_sources,
                denied_sources = EXCLUDED.denied_sources,
                max_bundle_size_mb = EXCLUDED.max_bundle_size_mb,
                max_items_per_bundle = EXCLUDED.max_items_per_bundle,
                require_signature = EXCLUDED.require_signature,
                allowed_signers = EXCLUDED.allowed_signers,
                enabled = EXCLUDED.enabled,
                updated_at = NOW()
            """;

        await ExecuteAsync(
            SystemTenantId,
            sql,
            cmd =>
            {
                AddParameter(cmd, "id", policy.Id == Guid.Empty ? Guid.NewGuid() : policy.Id);
                AddParameter(cmd, "site_id", policy.SiteId);
                AddParameter(cmd, "display_name", policy.DisplayName);
                AddTextArrayParameter(cmd, "allowed_sources", policy.AllowedSources);
                AddTextArrayParameter(cmd, "denied_sources", policy.DeniedSources);
                AddParameter(cmd, "max_bundle_size_mb", policy.MaxBundleSizeMb);
                AddParameter(cmd, "max_items_per_bundle", policy.MaxItemsPerBundle);
                AddParameter(cmd, "require_signature", policy.RequireSignature);
                AddTextArrayParameter(cmd, "allowed_signers", policy.AllowedSigners);
                AddParameter(cmd, "enabled", policy.Enabled);
            },
            ct).ConfigureAwait(false);
    }

    public Task<IReadOnlyList<SitePolicyEntity>> GetAllPoliciesAsync(bool enabledOnly = true, CancellationToken ct = default)
    {
        var sql = """
            SELECT id, site_id, display_name, allowed_sources, denied_sources,
                   max_bundle_size_mb, max_items_per_bundle, require_signature,
                   allowed_signers, enabled, created_at, updated_at
            FROM vuln.site_policy
            """;

        if (enabledOnly)
        {
            sql += " WHERE enabled = TRUE";
        }

        sql += " ORDER BY site_id";

        return QueryAsync(
            SystemTenantId,
            sql,
            _ => { },
            MapPolicy,
            ct);
    }

    public async Task<bool> DeletePolicyAsync(string siteId, CancellationToken ct = default)
    {
        const string sql = """
            DELETE FROM vuln.site_policy
            WHERE site_id = @site_id
            """;

        var rows = await ExecuteAsync(
            SystemTenantId,
            sql,
            cmd => AddParameter(cmd, "site_id", siteId),
            ct).ConfigureAwait(false);

        return rows > 0;
    }

    #endregion

    #region Statistics

    public async Task<SyncStatistics> GetStatisticsAsync(CancellationToken ct = default)
    {
        const string sql = """
            SELECT
                (SELECT COUNT(DISTINCT site_id) FROM vuln.site_policy) AS total_sites,
                (SELECT COUNT(DISTINCT site_id) FROM vuln.site_policy WHERE enabled = TRUE) AS enabled_sites,
                (SELECT COUNT(*) FROM vuln.sync_ledger) AS total_bundles,
                (SELECT COALESCE(SUM(items_count), 0) FROM vuln.sync_ledger) AS total_items,
                (SELECT MAX(imported_at) FROM vuln.sync_ledger) AS last_import
            """;

        return await QuerySingleOrDefaultAsync(
            SystemTenantId,
            sql,
            _ => { },
            reader => new SyncStatistics
            {
                TotalSites = reader.GetInt32(0),
                EnabledSites = reader.GetInt32(1),
                TotalBundlesImported = reader.GetInt64(2),
                TotalItemsImported = reader.GetInt64(3),
                LastImportAt = GetNullableDateTimeOffset(reader, 4)
            },
            ct).ConfigureAwait(false) ?? new SyncStatistics();
    }

    #endregion

    #region Mappers

    private static SyncLedgerEntity MapLedgerEntry(NpgsqlDataReader reader) => new()
    {
        Id = reader.GetGuid(0),
        SiteId = reader.GetString(1),
        Cursor = reader.GetString(2),
        BundleHash = reader.GetString(3),
        ItemsCount = reader.GetInt32(4),
        SignedAt = reader.GetFieldValue<DateTimeOffset>(5),
        ImportedAt = reader.GetFieldValue<DateTimeOffset>(6)
    };

    private static SitePolicyEntity MapPolicy(NpgsqlDataReader reader) => new()
    {
        Id = reader.GetGuid(0),
        SiteId = reader.GetString(1),
        DisplayName = GetNullableString(reader, 2),
        AllowedSources = reader.GetFieldValue<string[]>(3),
        DeniedSources = reader.GetFieldValue<string[]>(4),
        MaxBundleSizeMb = reader.GetInt32(5),
        MaxItemsPerBundle = reader.GetInt32(6),
        RequireSignature = reader.GetBoolean(7),
        AllowedSigners = reader.GetFieldValue<string[]>(8),
        Enabled = reader.GetBoolean(9),
        CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
        UpdatedAt = reader.GetFieldValue<DateTimeOffset>(11)
    };

    #endregion
}

/// <summary>
/// Cursor format utilities for federation sync.
/// </summary>
public static class CursorFormat
{
    /// <summary>
    /// Creates a cursor from timestamp and sequence.
    /// Format: "2025-01-15T10:30:00.0000000+00:00#0042" (round-trip "O" timestamp, '#', zero-padded sequence).
    /// </summary>
    public static string Create(DateTimeOffset timestamp, int sequence = 0)
    {
        return $"{timestamp:O}#{sequence:D4}";
    }

    /// <summary>
    /// Parses a cursor into timestamp and sequence.
    /// </summary>
    public static (DateTimeOffset Timestamp, int Sequence) Parse(string cursor)
    {
        var parts = cursor.Split('#');
        // Cursors are written with the invariant round-trip ("O") format, so
        // parse them culture-invariantly rather than with the current culture.
        var timestamp = DateTimeOffset.Parse(parts[0], CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
        var sequence = parts.Length > 1 ? int.Parse(parts[1], CultureInfo.InvariantCulture) : 0;
        return (timestamp, sequence);
    }

    /// <summary>
    /// Compares two cursors. Returns true if cursor1 is after cursor2.
    /// </summary>
    public static bool IsAfter(string cursor1, string cursor2)
    {
        var (ts1, seq1) = Parse(cursor1);
        var (ts2, seq2) = Parse(cursor2);

        if (ts1 != ts2) return ts1 > ts2;
        return seq1 > seq2;
    }
}
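
// Usage sketch (illustrative only, not part of task SYNC-8200-007): the cursor
// ordering semantics of CursorFormat on concrete values. The class name and
// the sample timestamps are assumptions made for this example.
internal static class CursorFormatExample
{
    public static void Demonstrate()
    {
        var t = new DateTimeOffset(2025, 1, 15, 10, 30, 0, TimeSpan.Zero);

        var first = CursorFormat.Create(t, sequence: 1);   // "2025-01-15T10:30:00.0000000+00:00#0001"
        var second = CursorFormat.Create(t, sequence: 2);  // same timestamp, higher sequence
        var later = CursorFormat.Create(t.AddMinutes(5));  // later timestamp, sequence 0000

        // Sequence breaks ties within the same timestamp; otherwise the timestamp wins.
        System.Diagnostics.Debug.Assert(CursorFormat.IsAfter(second, first));
        System.Diagnostics.Debug.Assert(CursorFormat.IsAfter(later, second));

        // Equal cursors are not "after" each other, which is exactly why
        // IsCursorConflictAsync treats a replayed cursor as a conflict.
        System.Diagnostics.Debug.Assert(!CursorFormat.IsAfter(first, first));
    }
}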
@@ -0,0 +1,407 @@
// -----------------------------------------------------------------------------
// SitePolicyEnforcementService.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Task: SYNC-8200-014
// Description: Enforces site federation policies including source allow/deny lists
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;

namespace StellaOps.Concelier.Storage.Postgres.Sync;

/// <summary>
/// Enforces site federation policies for bundle imports.
/// </summary>
public sealed class SitePolicyEnforcementService
{
    private readonly ISyncLedgerRepository _repository;
    private readonly ILogger<SitePolicyEnforcementService> _logger;

    public SitePolicyEnforcementService(
        ISyncLedgerRepository repository,
        ILogger<SitePolicyEnforcementService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Validates whether a source is allowed for a given site.
    /// </summary>
    /// <param name="siteId">The site identifier.</param>
    /// <param name="sourceKey">The source key to validate.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Validation result indicating if the source is allowed.</returns>
    public async Task<SourceValidationResult> ValidateSourceAsync(
        string siteId,
        string sourceKey,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(siteId);
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceKey);

        var policy = await _repository.GetPolicyAsync(siteId, ct).ConfigureAwait(false);

        if (policy is null)
        {
            _logger.LogDebug("No policy found for site {SiteId}, allowing source {SourceKey} by default", siteId, sourceKey);
            return SourceValidationResult.Allowed("No policy configured");
        }

        if (!policy.Enabled)
        {
            _logger.LogWarning("Site {SiteId} policy is disabled, rejecting source {SourceKey}", siteId, sourceKey);
            return SourceValidationResult.Denied("Site policy is disabled");
        }

        return ValidateSourceAgainstPolicy(policy, sourceKey);
    }

    /// <summary>
    /// Validates a source against a specific policy without fetching from the repository.
    /// </summary>
    public SourceValidationResult ValidateSourceAgainstPolicy(SitePolicyEntity policy, string sourceKey)
    {
        ArgumentNullException.ThrowIfNull(policy);
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceKey);

        // Denied list takes precedence
        if (IsSourceInList(policy.DeniedSources, sourceKey))
        {
            _logger.LogInformation(
                "Source {SourceKey} is explicitly denied for site {SiteId}",
                sourceKey, policy.SiteId);
            return SourceValidationResult.Denied($"Source '{sourceKey}' is in deny list");
        }

        // If the allow list is empty, all non-denied sources are allowed
        if (policy.AllowedSources.Length == 0)
        {
            _logger.LogDebug(
                "Source {SourceKey} allowed for site {SiteId} (no allow list restrictions)",
                sourceKey, policy.SiteId);
            return SourceValidationResult.Allowed("No allow list restrictions");
        }

        // Check if the source is in the allow list
        if (IsSourceInList(policy.AllowedSources, sourceKey))
        {
            _logger.LogDebug(
                "Source {SourceKey} is explicitly allowed for site {SiteId}",
                sourceKey, policy.SiteId);
            return SourceValidationResult.Allowed("Source is in allow list");
        }

        // Source not in the allow list
        _logger.LogInformation(
            "Source {SourceKey} not in allow list for site {SiteId}",
            sourceKey, policy.SiteId);
        return SourceValidationResult.Denied($"Source '{sourceKey}' is not in allow list");
    }

    /// <summary>
    /// Validates multiple sources and returns results for each.
    /// </summary>
    public async Task<IReadOnlyDictionary<string, SourceValidationResult>> ValidateSourcesAsync(
        string siteId,
        IEnumerable<string> sourceKeys,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(siteId);
        ArgumentNullException.ThrowIfNull(sourceKeys);

        var policy = await _repository.GetPolicyAsync(siteId, ct).ConfigureAwait(false);
        var results = new Dictionary<string, SourceValidationResult>();

        foreach (var sourceKey in sourceKeys)
        {
            if (string.IsNullOrWhiteSpace(sourceKey))
            {
                continue;
            }

            if (policy is null)
            {
                results[sourceKey] = SourceValidationResult.Allowed("No policy configured");
            }
            else if (!policy.Enabled)
            {
                results[sourceKey] = SourceValidationResult.Denied("Site policy is disabled");
            }
            else
            {
                results[sourceKey] = ValidateSourceAgainstPolicy(policy, sourceKey);
            }
        }

        return results;
    }

    /// <summary>
    /// Filters a collection of source keys to only those allowed by the site policy.
    /// </summary>
    public async Task<IReadOnlyList<string>> FilterAllowedSourcesAsync(
        string siteId,
        IEnumerable<string> sourceKeys,
        CancellationToken ct = default)
    {
        var results = await ValidateSourcesAsync(siteId, sourceKeys, ct).ConfigureAwait(false);
        return results
            .Where(kvp => kvp.Value.IsAllowed)
            .Select(kvp => kvp.Key)
            .ToList();
    }

    private static bool IsSourceInList(string[] sourceList, string sourceKey)
    {
        if (sourceList.Length == 0)
        {
            return false;
        }

        foreach (var source in sourceList)
        {
            // Exact match (case-insensitive)
            if (string.Equals(source, sourceKey, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }

            // Wildcard pattern match (e.g., "nvd-*" matches "nvd-cve", "nvd-cpe")
            if (source.EndsWith('*') && sourceKey.StartsWith(
                source[..^1], StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }

        return false;
    }

    #region Size Budget Tracking (SYNC-8200-015)

    /// <summary>
    /// Validates bundle size against site policy limits.
    /// </summary>
    /// <param name="siteId">The site identifier.</param>
    /// <param name="bundleSizeMb">Bundle size in megabytes.</param>
    /// <param name="itemsCount">Number of items in the bundle.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Validation result indicating if the bundle is within limits.</returns>
    public async Task<BundleSizeValidationResult> ValidateBundleSizeAsync(
        string siteId,
        decimal bundleSizeMb,
        int itemsCount,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(siteId);

        var policy = await _repository.GetPolicyAsync(siteId, ct).ConfigureAwait(false);

        if (policy is null)
        {
            _logger.LogDebug(
                "No policy found for site {SiteId}, allowing bundle (size={SizeMb}MB, items={Items})",
                siteId, bundleSizeMb, itemsCount);
            return BundleSizeValidationResult.Allowed("No policy configured", bundleSizeMb, itemsCount);
        }

        if (!policy.Enabled)
        {
            _logger.LogWarning("Site {SiteId} policy is disabled, rejecting bundle", siteId);
            return BundleSizeValidationResult.Denied(
                "Site policy is disabled",
                bundleSizeMb,
                itemsCount,
                policy.MaxBundleSizeMb,
                policy.MaxItemsPerBundle);
        }

        return ValidateBundleSizeAgainstPolicy(policy, bundleSizeMb, itemsCount);
    }

    /// <summary>
    /// Validates bundle size against a specific policy without fetching from the repository.
    /// </summary>
    public BundleSizeValidationResult ValidateBundleSizeAgainstPolicy(
        SitePolicyEntity policy,
        decimal bundleSizeMb,
        int itemsCount)
    {
        ArgumentNullException.ThrowIfNull(policy);

        var violations = new List<string>();

        // Check size limit
        if (bundleSizeMb > policy.MaxBundleSizeMb)
        {
            violations.Add($"Bundle size ({bundleSizeMb:F2}MB) exceeds limit ({policy.MaxBundleSizeMb}MB)");
        }

        // Check items limit
        if (itemsCount > policy.MaxItemsPerBundle)
        {
            violations.Add($"Item count ({itemsCount}) exceeds limit ({policy.MaxItemsPerBundle})");
        }

        if (violations.Count > 0)
        {
            var reason = string.Join("; ", violations);
            _logger.LogWarning(
                "Bundle rejected for site {SiteId}: {Reason}",
                policy.SiteId, reason);
            return BundleSizeValidationResult.Denied(
                reason,
                bundleSizeMb,
                itemsCount,
                policy.MaxBundleSizeMb,
                policy.MaxItemsPerBundle);
        }

        _logger.LogDebug(
            "Bundle accepted for site {SiteId}: size={SizeMb}MB (limit={MaxSize}MB), items={Items} (limit={MaxItems})",
            policy.SiteId, bundleSizeMb, policy.MaxBundleSizeMb, itemsCount, policy.MaxItemsPerBundle);

        return BundleSizeValidationResult.Allowed(
            "Within size limits",
            bundleSizeMb,
            itemsCount,
            policy.MaxBundleSizeMb,
            policy.MaxItemsPerBundle);
    }

    /// <summary>
    /// Gets the remaining budget for a site based on recent imports.
    /// </summary>
    /// <param name="siteId">The site identifier.</param>
    /// <param name="windowHours">Time window in hours to consider for recent imports.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Remaining budget information.</returns>
    public async Task<SiteBudgetInfo> GetRemainingBudgetAsync(
        string siteId,
        int windowHours = 24,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(siteId);

        var policy = await _repository.GetPolicyAsync(siteId, ct).ConfigureAwait(false);
        var history = await _repository.GetHistoryAsync(siteId, limit: 100, ct).ConfigureAwait(false);

        // Apply the time window up front so "recent" means the same thing
        // whether or not the site has a policy configured.
        var windowStart = DateTimeOffset.UtcNow.AddHours(-windowHours);
        var recentHistory = history.Where(h => h.ImportedAt >= windowStart).ToList();

        if (policy is null)
        {
            return new SiteBudgetInfo(
                SiteId: siteId,
                HasPolicy: false,
                MaxBundleSizeMb: int.MaxValue,
                MaxItemsPerBundle: int.MaxValue,
                RecentImportsCount: recentHistory.Count,
                RecentItemsImported: recentHistory.Sum(h => h.ItemsCount),
                WindowHours: windowHours);
        }

        return new SiteBudgetInfo(
            SiteId: siteId,
            HasPolicy: true,
            MaxBundleSizeMb: policy.MaxBundleSizeMb,
            MaxItemsPerBundle: policy.MaxItemsPerBundle,
            RecentImportsCount: recentHistory.Count,
            RecentItemsImported: recentHistory.Sum(h => h.ItemsCount),
            WindowHours: windowHours);
    }

    #endregion
}

/// <summary>
/// Result of source validation against site policy.
/// </summary>
public sealed record SourceValidationResult
{
    private SourceValidationResult(bool isAllowed, string reason)
    {
        IsAllowed = isAllowed;
        Reason = reason;
    }

    /// <summary>
    /// Whether the source is allowed.
    /// </summary>
    public bool IsAllowed { get; }

    /// <summary>
    /// Reason for the decision.
    /// </summary>
    public string Reason { get; }

    /// <summary>
    /// Creates an allowed result.
    /// </summary>
    public static SourceValidationResult Allowed(string reason) => new(true, reason);

    /// <summary>
    /// Creates a denied result.
    /// </summary>
    public static SourceValidationResult Denied(string reason) => new(false, reason);
}

/// <summary>
/// Result of bundle size validation against site policy.
/// </summary>
public sealed record BundleSizeValidationResult
{
    private BundleSizeValidationResult(
        bool isAllowed,
        string reason,
        decimal actualSizeMb,
        int actualItemCount,
        int? maxSizeMb,
        int? maxItems)
    {
        IsAllowed = isAllowed;
        Reason = reason;
        ActualSizeMb = actualSizeMb;
        ActualItemCount = actualItemCount;
        MaxSizeMb = maxSizeMb;
        MaxItems = maxItems;
    }

    public bool IsAllowed { get; }
    public string Reason { get; }
    public decimal ActualSizeMb { get; }
    public int ActualItemCount { get; }
    public int? MaxSizeMb { get; }
    public int? MaxItems { get; }

    public static BundleSizeValidationResult Allowed(
        string reason,
        decimal actualSizeMb,
        int actualItemCount,
        int? maxSizeMb = null,
        int? maxItems = null)
        => new(true, reason, actualSizeMb, actualItemCount, maxSizeMb, maxItems);

    public static BundleSizeValidationResult Denied(
        string reason,
        decimal actualSizeMb,
        int actualItemCount,
        int? maxSizeMb = null,
        int? maxItems = null)
        => new(false, reason, actualSizeMb, actualItemCount, maxSizeMb, maxItems);
}

/// <summary>
/// Information about a site's remaining import budget.
/// </summary>
public sealed record SiteBudgetInfo(
    string SiteId,
    bool HasPolicy,
    int MaxBundleSizeMb,
    int MaxItemsPerBundle,
    int RecentImportsCount,
    int RecentItemsImported,
    int WindowHours);
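
// Usage sketch (illustrative only, not part of task SYNC-8200-014): allow/deny
// evaluation including the "prefix*" wildcard form. The policy values are
// invented for this example, and it assumes SitePolicyEntity's properties are
// plain init-style setters with no required members.
internal static class SitePolicyEnforcementExample
{
    public static void Demonstrate(SitePolicyEnforcementService enforcement)
    {
        var policy = new SitePolicyEntity
        {
            SiteId = "site-east",
            AllowedSources = ["nvd-*", "ghsa"],
            DeniedSources = ["nvd-deprecated"],
            Enabled = true
        };

        // "nvd-cve" matches the "nvd-*" wildcard in the allow list.
        var allowed = enforcement.ValidateSourceAgainstPolicy(policy, "nvd-cve");

        // The deny list is checked first, so "nvd-deprecated" is rejected even
        // though it would also match the "nvd-*" allow entry.
        var denied = enforcement.ValidateSourceAgainstPolicy(policy, "nvd-deprecated");

        System.Diagnostics.Debug.Assert(allowed.IsAllowed && !denied.IsAllowed);
    }
}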
@@ -0,0 +1,435 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CachingCanonicalAdvisoryServiceTests.cs
|
||||
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
|
||||
// Task: CANSVC-8200-015
|
||||
// Description: Unit tests for caching canonical advisory service decorator
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Concelier.Core.Canonical;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Tests.Canonical;
|
||||
|
||||
public sealed class CachingCanonicalAdvisoryServiceTests : IDisposable
|
||||
{
|
||||
private readonly Mock<ICanonicalAdvisoryService> _innerMock;
|
||||
private readonly IMemoryCache _cache;
|
||||
private readonly ILogger<CachingCanonicalAdvisoryService> _logger;
|
||||
private readonly CanonicalCacheOptions _options;
|
||||
|
||||
private static readonly Guid TestCanonicalId = Guid.Parse("11111111-1111-1111-1111-111111111111");
|
||||
private const string TestMergeHash = "sha256:abc123def456";
|
||||
private const string TestCve = "CVE-2025-0001";
|
||||
|
||||
public CachingCanonicalAdvisoryServiceTests()
|
||||
{
|
||||
_innerMock = new Mock<ICanonicalAdvisoryService>();
|
||||
_cache = new MemoryCache(new MemoryCacheOptions());
|
||||
_logger = NullLogger<CachingCanonicalAdvisoryService>.Instance;
|
||||
_options = new CanonicalCacheOptions
|
||||
{
|
||||
Enabled = true,
|
||||
DefaultTtl = TimeSpan.FromMinutes(5),
|
||||
CveTtl = TimeSpan.FromMinutes(2),
|
||||
ArtifactTtl = TimeSpan.FromMinutes(2)
|
||||
};
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
_cache.Dispose();
|
||||
}
|
||||
|
||||
#region GetByIdAsync - Caching
|
||||
|
||||
[Fact]
|
||||
public async Task GetByIdAsync_ReturnsCachedResult_OnSecondCall()
|
||||
{
|
||||
// Arrange
|
||||
var canonical = CreateCanonicalAdvisory(TestCanonicalId);
|
||||
_innerMock
|
||||
.Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonical);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act - first call hits inner service
|
||||
var result1 = await service.GetByIdAsync(TestCanonicalId);
|
||||
// Second call should hit cache
|
||||
var result2 = await service.GetByIdAsync(TestCanonicalId);
|
||||
|
||||
// Assert
|
||||
result1.Should().Be(canonical);
|
||||
result2.Should().Be(canonical);
|
||||
|
||||
// Inner service called only once
|
||||
_innerMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByIdAsync_ReturnsNull_WhenNotFound()
|
||||
{
|
||||
// Arrange
|
||||
_innerMock
|
||||
.Setup(x => x.GetByIdAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((CanonicalAdvisory?)null);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
var result = await service.GetByIdAsync(Guid.NewGuid());
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByIdAsync_CachesNullResult_DoesNotCallInnerTwice()
|
||||
{
|
||||
// Arrange
|
||||
var id = Guid.NewGuid();
|
||||
_innerMock
|
||||
.Setup(x => x.GetByIdAsync(id, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((CanonicalAdvisory?)null);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
await service.GetByIdAsync(id);
|
||||
var result = await service.GetByIdAsync(id);
|
||||
|
||||
// Assert - null is not cached, so inner is called twice
|
||||
result.Should().BeNull();
|
||||
_innerMock.Verify(x => x.GetByIdAsync(id, It.IsAny<CancellationToken>()), Times.Exactly(2));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetByMergeHashAsync - Caching
|
||||
|
||||
[Fact]
|
||||
public async Task GetByMergeHashAsync_ReturnsCachedResult_OnSecondCall()
|
||||
{
|
||||
// Arrange
|
||||
var canonical = CreateCanonicalAdvisory(TestCanonicalId);
|
||||
_innerMock
|
||||
.Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonical);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
var result1 = await service.GetByMergeHashAsync(TestMergeHash);
|
||||
var result2 = await service.GetByMergeHashAsync(TestMergeHash);
|
||||
|
||||
// Assert
|
||||
result1.Should().Be(canonical);
|
||||
result2.Should().Be(canonical);
|
||||
_innerMock.Verify(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByMergeHashAsync_CachesByIdToo_AllowsCrossLookup()
|
||||
{
|
||||
// Arrange
|
||||
var canonical = CreateCanonicalAdvisory(TestCanonicalId);
|
||||
_innerMock
|
||||
.Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonical);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act - fetch by hash first
|
||||
await service.GetByMergeHashAsync(TestMergeHash);
|
||||
// Then fetch by ID - should hit cache
|
||||
var result = await service.GetByIdAsync(TestCanonicalId);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(canonical);
|
||||
_innerMock.Verify(x => x.GetByIdAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Never);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetByCveAsync - Caching
|
||||
|
||||
[Fact]
|
||||
public async Task GetByCveAsync_ReturnsCachedResult_OnSecondCall()
|
||||
{
|
||||
// Arrange
|
||||
var canonicals = new List<CanonicalAdvisory>
|
||||
{
|
||||
CreateCanonicalAdvisory(TestCanonicalId),
|
||||
CreateCanonicalAdvisory(Guid.NewGuid())
|
||||
};
|
||||
_innerMock
|
||||
.Setup(x => x.GetByCveAsync(TestCve, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonicals);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
var result1 = await service.GetByCveAsync(TestCve);
|
||||
var result2 = await service.GetByCveAsync(TestCve);
|
||||
|
||||
// Assert
|
||||
result1.Should().HaveCount(2);
|
||||
result2.Should().HaveCount(2);
|
||||
_innerMock.Verify(x => x.GetByCveAsync(TestCve, It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByCveAsync_NormalizesToUpperCase()
|
||||
{
|
||||
// Arrange
|
||||
var canonicals = new List<CanonicalAdvisory> { CreateCanonicalAdvisory(TestCanonicalId) };
|
||||
_innerMock
|
||||
.Setup(x => x.GetByCveAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonicals);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act - lowercase
|
||||
await service.GetByCveAsync("cve-2025-0001");
|
||||
// uppercase should hit cache
|
||||
await service.GetByCveAsync("CVE-2025-0001");
|
||||
|
||||
// Assert
|
||||
_innerMock.Verify(x => x.GetByCveAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByCveAsync_ReturnsEmptyList_WhenNoResults()
|
||||
{
|
||||
// Arrange
|
||||
_innerMock
|
||||
.Setup(x => x.GetByCveAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new List<CanonicalAdvisory>());
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
var result = await service.GetByCveAsync("CVE-2025-9999");
|
||||
|
||||
// Assert
|
||||
result.Should().BeEmpty();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetByArtifactAsync - Caching
|
||||
|
||||
[Fact]
|
||||
public async Task GetByArtifactAsync_ReturnsCachedResult_OnSecondCall()
|
||||
{
|
||||
// Arrange
|
||||
const string artifactKey = "pkg:npm/lodash@1";
|
||||
var canonicals = new List<CanonicalAdvisory> { CreateCanonicalAdvisory(TestCanonicalId) };
|
||||
_innerMock
|
||||
.Setup(x => x.GetByArtifactAsync(artifactKey, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonicals);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
var result1 = await service.GetByArtifactAsync(artifactKey);
|
||||
var result2 = await service.GetByArtifactAsync(artifactKey);
|
||||
|
||||
// Assert
|
||||
result1.Should().HaveCount(1);
|
||||
result2.Should().HaveCount(1);
|
||||
_innerMock.Verify(x => x.GetByArtifactAsync(artifactKey, It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByArtifactAsync_NormalizesToLowerCase()
|
||||
{
|
||||
// Arrange
|
||||
var canonicals = new List<CanonicalAdvisory> { CreateCanonicalAdvisory(TestCanonicalId) };
|
||||
_innerMock
|
||||
.Setup(x => x.GetByArtifactAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(canonicals);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
await service.GetByArtifactAsync("PKG:NPM/LODASH@1");
|
||||
await service.GetByArtifactAsync("pkg:npm/lodash@1");
|
||||
|
||||
// Assert - both should hit cache
|
||||
_innerMock.Verify(x => x.GetByArtifactAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region QueryAsync - Pass-through
|
||||
|
||||
[Fact]
|
||||
public async Task QueryAsync_DoesNotCache_PassesThroughToInner()
|
||||
{
|
||||
// Arrange
|
||||
var options = new CanonicalQueryOptions();
|
||||
var result = new PagedResult<CanonicalAdvisory> { Items = [], TotalCount = 0, Offset = 0, Limit = 10 };
|
||||
_innerMock
|
||||
.Setup(x => x.QueryAsync(options, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(result);
|
||||
|
||||
var service = CreateService();
|
||||
|
||||
// Act
|
||||
await service.QueryAsync(options);
|
||||
await service.QueryAsync(options);
|
||||
|
||||
// Assert - called twice (no caching)
|
||||
_innerMock.Verify(x => x.QueryAsync(options, It.IsAny<CancellationToken>()), Times.Exactly(2));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region IngestAsync - Cache Invalidation
|
||||
|
||||
[Fact]
|
||||
public async Task IngestAsync_InvalidatesCache_WhenNotDuplicate()
|
||||
{
|
||||
// Arrange
|
||||
        var canonical = CreateCanonicalAdvisory(TestCanonicalId);
        _innerMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        _innerMock
            .Setup(x => x.IngestAsync(It.IsAny<string>(), It.IsAny<RawAdvisory>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(IngestResult.Created(TestCanonicalId, TestMergeHash, Guid.NewGuid(), "nvd", "NVD-001"));

        var service = CreateService();

        // Prime the cache
        await service.GetByIdAsync(TestCanonicalId);

        // Act - ingest that modifies the canonical
        await service.IngestAsync("nvd", CreateRawAdvisory(TestCve));

        // Now fetch again - should call inner again
        await service.GetByIdAsync(TestCanonicalId);

        // Assert - inner called twice (before and after ingest)
        _innerMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Exactly(2));
    }

    [Fact]
    public async Task IngestAsync_DoesNotInvalidateCache_WhenDuplicate()
    {
        // Arrange
        var canonical = CreateCanonicalAdvisory(TestCanonicalId);
        _innerMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        _innerMock
            .Setup(x => x.IngestAsync(It.IsAny<string>(), It.IsAny<RawAdvisory>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(IngestResult.Duplicate(TestCanonicalId, TestMergeHash, "nvd", "NVD-001"));

        var service = CreateService();

        // Prime the cache
        await service.GetByIdAsync(TestCanonicalId);

        // Act - duplicate ingest (no changes)
        await service.IngestAsync("nvd", CreateRawAdvisory(TestCve));

        // Now fetch again - should hit cache
        await service.GetByIdAsync(TestCanonicalId);

        // Assert - inner called only once
        _innerMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Once);
    }

    #endregion

    #region UpdateStatusAsync - Cache Invalidation

    [Fact]
    public async Task UpdateStatusAsync_InvalidatesCache()
    {
        // Arrange
        var canonical = CreateCanonicalAdvisory(TestCanonicalId);
        _innerMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        var service = CreateService();

        // Prime the cache
        await service.GetByIdAsync(TestCanonicalId);

        // Act - update status
        await service.UpdateStatusAsync(TestCanonicalId, CanonicalStatus.Withdrawn);

        // Now fetch again - should call inner again
        await service.GetByIdAsync(TestCanonicalId);

        // Assert
        _innerMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Exactly(2));
    }

    #endregion

    #region Disabled Caching

    [Fact]
    public async Task GetByIdAsync_DoesNotCache_WhenCachingDisabled()
    {
        // Arrange
        var disabledOptions = new CanonicalCacheOptions { Enabled = false };
        var canonical = CreateCanonicalAdvisory(TestCanonicalId);
        _innerMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        var service = CreateService(disabledOptions);

        // Act
        await service.GetByIdAsync(TestCanonicalId);
        await service.GetByIdAsync(TestCanonicalId);

        // Assert - called twice when caching disabled
        _innerMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Exactly(2));
    }

    #endregion

    #region Helpers

    private CachingCanonicalAdvisoryService CreateService() =>
        CreateService(_options);

    private CachingCanonicalAdvisoryService CreateService(CanonicalCacheOptions options) =>
        new(_innerMock.Object, _cache, Options.Create(options), _logger);

    private static CanonicalAdvisory CreateCanonicalAdvisory(Guid id) => new()
    {
        Id = id,
        Cve = TestCve,
        AffectsKey = "pkg:npm/example@1",
        MergeHash = TestMergeHash,
        CreatedAt = DateTimeOffset.UtcNow,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    private static RawAdvisory CreateRawAdvisory(string cve) => new()
    {
        SourceAdvisoryId = $"ADV-{cve}",
        Cve = cve,
        AffectsKey = "pkg:npm/example@1",
        VersionRangeJson = "{}",
        Weaknesses = [],
        FetchedAt = DateTimeOffset.UtcNow
    };

    #endregion
}
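The three invalidation suites above pin down one contract: any ingest that changes the canonical evicts the cached entry, while a duplicate leaves it alone. A minimal sketch of a decorator method satisfying that contract, assuming an `_inner` service, an IMemoryCache `_cache`, and a `CacheKey` helper (all illustrative, not the commit's actual implementation):

public async Task<IngestResult> IngestAsync(
    string source, RawAdvisory advisory, CancellationToken ct = default)
{
    var result = await _inner.IngestAsync(source, advisory, ct);

    // Duplicate ingests leave the canonical untouched, so the cached
    // entry remains valid; Created and Merged results evict it.
    if (result.Decision != MergeDecision.Duplicate)
    {
        _cache.Remove(CacheKey(result.CanonicalId)); // hypothetical key helper
    }

    return result;
}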
@@ -0,0 +1,801 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisoryServiceTests.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-009
// Description: Unit tests for canonical advisory service ingest pipeline
// -----------------------------------------------------------------------------

using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Concelier.Core.Canonical;

namespace StellaOps.Concelier.Core.Tests.Canonical;

public sealed class CanonicalAdvisoryServiceTests
{
    private readonly Mock<ICanonicalAdvisoryStore> _storeMock;
    private readonly Mock<IMergeHashCalculator> _hashCalculatorMock;
    private readonly Mock<ISourceEdgeSigner> _signerMock;
    private readonly ILogger<CanonicalAdvisoryService> _logger;

    private const string TestSource = "nvd";
    private const string TestMergeHash = "sha256:abc123def456";
    private static readonly Guid TestCanonicalId = Guid.Parse("11111111-1111-1111-1111-111111111111");
    private static readonly Guid TestSourceId = Guid.Parse("22222222-2222-2222-2222-222222222222");
    private static readonly Guid TestEdgeId = Guid.Parse("33333333-3333-3333-3333-333333333333");

    public CanonicalAdvisoryServiceTests()
    {
        _storeMock = new Mock<ICanonicalAdvisoryStore>();
        _hashCalculatorMock = new Mock<IMergeHashCalculator>();
        _signerMock = new Mock<ISourceEdgeSigner>();
        _logger = NullLogger<CanonicalAdvisoryService>.Instance;

        // Default merge hash calculation
        _hashCalculatorMock
            .Setup(x => x.ComputeMergeHash(It.IsAny<MergeHashInput>()))
            .Returns(TestMergeHash);

        // Default source resolution
        _storeMock
            .Setup(x => x.ResolveSourceIdAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestSourceId);

        // Default source edge creation
        _storeMock
            .Setup(x => x.AddSourceEdgeAsync(It.IsAny<AddSourceEdgeRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(SourceEdgeResult.Created(TestEdgeId));
    }

    #region IngestAsync - New Canonical

    [Fact]
    public async Task IngestAsync_CreatesNewCanonical_WhenNoExistingMergeHash()
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0001");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.Decision.Should().Be(MergeDecision.Created);
        result.CanonicalId.Should().Be(TestCanonicalId);
        result.MergeHash.Should().Be(TestMergeHash);
        result.SourceEdgeId.Should().Be(TestEdgeId);

        _storeMock.Verify(x => x.UpsertCanonicalAsync(
            It.Is<UpsertCanonicalRequest>(r =>
                r.Cve == "CVE-2025-0001" &&
                r.MergeHash == TestMergeHash),
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task IngestAsync_ComputesMergeHash_FromAdvisoryFields()
    {
        // Arrange
        var advisory = CreateRawAdvisory(
            cve: "CVE-2025-0002",
            affectsKey: "pkg:npm/lodash@1",
            weaknesses: ["CWE-79", "CWE-89"]);

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateService();

        // Act
        await service.IngestAsync(TestSource, advisory);

        // Assert
        _hashCalculatorMock.Verify(x => x.ComputeMergeHash(
            It.Is<MergeHashInput>(input =>
                input.Cve == "CVE-2025-0002" &&
                input.AffectsKey == "pkg:npm/lodash@1" &&
                input.Weaknesses != null &&
                input.Weaknesses.Contains("CWE-79") &&
                input.Weaknesses.Contains("CWE-89"))),
            Times.Once);
    }

    #endregion

    #region IngestAsync - Merge Existing

    [Fact]
    public async Task IngestAsync_MergesIntoExisting_WhenMergeHashExists()
    {
        // Arrange - include source edge with high precedence so metadata update is skipped
        var existingCanonical = CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0003", withSourceEdge: true);

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingCanonical);

        _storeMock
            .Setup(x => x.SourceEdgeExistsAsync(TestCanonicalId, TestSourceId, It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);

        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0003");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.Decision.Should().Be(MergeDecision.Merged);
        result.CanonicalId.Should().Be(TestCanonicalId);
        result.SourceEdgeId.Should().Be(TestEdgeId);
    }

    [Fact]
    public async Task IngestAsync_AddsSourceEdge_ForMergedAdvisory()
    {
        // Arrange
        var existingCanonical = CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0004");

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingCanonical);

        _storeMock
            .Setup(x => x.SourceEdgeExistsAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);

        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0004", sourceAdvisoryId: "NVD-2025-0004");

        // Act
        await service.IngestAsync(TestSource, advisory);

        // Assert
        _storeMock.Verify(x => x.AddSourceEdgeAsync(
            It.Is<AddSourceEdgeRequest>(r =>
                r.CanonicalId == TestCanonicalId &&
                r.SourceId == TestSourceId &&
                r.SourceAdvisoryId == "NVD-2025-0004"),
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    #endregion

    #region IngestAsync - Duplicate Detection

    [Fact]
    public async Task IngestAsync_ReturnsDuplicate_WhenSourceEdgeExists()
    {
        // Arrange
        var existingCanonical = CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0005");

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingCanonical);

        _storeMock
            .Setup(x => x.SourceEdgeExistsAsync(TestCanonicalId, TestSourceId, It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);

        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0005");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.Decision.Should().Be(MergeDecision.Duplicate);
        result.CanonicalId.Should().Be(TestCanonicalId);
        result.SourceEdgeId.Should().BeNull();

        // Should not add source edge
        _storeMock.Verify(x => x.AddSourceEdgeAsync(
            It.IsAny<AddSourceEdgeRequest>(),
            It.IsAny<CancellationToken>()),
            Times.Never);
    }

    #endregion

    #region IngestAsync - DSSE Signing

    [Fact]
    public async Task IngestAsync_SignsSourceEdge_WhenSignerAvailable()
    {
        // Arrange
        var signatureRef = Guid.NewGuid();
        var envelope = new DsseEnvelope
        {
            PayloadType = "application/vnd.stellaops.advisory.v1+json",
            Payload = "eyJhZHZpc29yeSI6InRlc3QifQ==",
            Signatures = [new DsseSignature { KeyId = "test-key", Sig = "abc123" }]
        };

        _signerMock
            .Setup(x => x.SignAsync(It.IsAny<SourceEdgeSigningRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(SourceEdgeSigningResult.Signed(envelope, signatureRef));

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateServiceWithSigner();
        var advisory = CreateRawAdvisory("CVE-2025-0006", rawPayloadJson: "{\"cve\":\"CVE-2025-0006\"}");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.SignatureRef.Should().Be(signatureRef);

        _storeMock.Verify(x => x.AddSourceEdgeAsync(
            It.Is<AddSourceEdgeRequest>(r =>
                r.DsseEnvelopeJson != null &&
                r.DsseEnvelopeJson.Contains("PayloadType")),
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task IngestAsync_ContinuesWithoutSignature_WhenSignerFails()
    {
        // Arrange
        _signerMock
            .Setup(x => x.SignAsync(It.IsAny<SourceEdgeSigningRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(SourceEdgeSigningResult.Failed("Signing service unavailable"));

        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateServiceWithSigner();
        var advisory = CreateRawAdvisory("CVE-2025-0007", rawPayloadJson: "{\"cve\":\"CVE-2025-0007\"}");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.Decision.Should().Be(MergeDecision.Created);
        result.SignatureRef.Should().BeNull();

        // Should still add source edge without DSSE
        _storeMock.Verify(x => x.AddSourceEdgeAsync(
            It.Is<AddSourceEdgeRequest>(r => r.DsseEnvelopeJson == null),
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task IngestAsync_SkipsSigning_WhenNoRawPayload()
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateServiceWithSigner();
        var advisory = CreateRawAdvisory("CVE-2025-0008", rawPayloadJson: null);

        // Act
        await service.IngestAsync(TestSource, advisory);

        // Assert - signer should not be called
        _signerMock.Verify(x => x.SignAsync(
            It.IsAny<SourceEdgeSigningRequest>(),
            It.IsAny<CancellationToken>()),
            Times.Never);
    }

    [Fact]
    public async Task IngestAsync_WorksWithoutSigner()
    {
        // Arrange - service without signer
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateService(); // No signer
        var advisory = CreateRawAdvisory("CVE-2025-0009", rawPayloadJson: "{\"cve\":\"CVE-2025-0009\"}");

        // Act
        var result = await service.IngestAsync(TestSource, advisory);

        // Assert
        result.Decision.Should().Be(MergeDecision.Created);
        result.SignatureRef.Should().BeNull();
    }

    #endregion
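
    // Sketch (not part of the tests above): the four DSSE tests imply a
    // signing step of roughly this shape inside IngestAsync - the signer is
    // optional, a missing raw payload skips signing entirely, and a failed
    // signing result downgrades to an unsigned source edge. Member names
    // here are assumptions, not the service's actual code.
    //
    //   DsseEnvelope? envelope = null;
    //   Guid? signatureRef = null;
    //   if (_signer is not null && advisory.RawPayloadJson is not null)
    //   {
    //       var signing = await _signer.SignAsync(request, ct);
    //       if (signing.Success)
    //       {
    //           envelope = signing.Envelope;
    //           signatureRef = signing.SignatureRef;
    //       }
    //       // On failure: continue; the edge is stored with DsseEnvelopeJson = null.
    //   }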

    #region IngestAsync - Source Precedence

    [Theory]
    [InlineData("vendor", 10)]
    [InlineData("redhat", 20)]
    [InlineData("debian", 20)]
    [InlineData("osv", 30)]
    [InlineData("ghsa", 35)]
    [InlineData("nvd", 40)]
    [InlineData("unknown", 100)]
    public async Task IngestAsync_AssignsCorrectPrecedence_BySource(string source, int expectedRank)
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0010");

        // Act
        await service.IngestAsync(source, advisory);

        // Assert
        _storeMock.Verify(x => x.AddSourceEdgeAsync(
            It.Is<AddSourceEdgeRequest>(r => r.PrecedenceRank == expectedRank),
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    #endregion
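
    // Sketch (assumption, mirroring the InlineData rows above): a precedence
    // table of this shape would yield the asserted ranks; the real mapping
    // lives in the service under test, not here.
    //
    //   private static int PrecedenceRankFor(string source) => source switch
    //   {
    //       "vendor" => 10,
    //       "redhat" or "debian" => 20,
    //       "osv" => 30,
    //       "ghsa" => 35,
    //       "nvd" => 40,
    //       _ => 100
    //   };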

    #region IngestBatchAsync

    [Fact]
    public async Task IngestBatchAsync_ProcessesAllAdvisories()
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(TestCanonicalId);

        var service = CreateService();
        var advisories = new[]
        {
            CreateRawAdvisory("CVE-2025-0011"),
            CreateRawAdvisory("CVE-2025-0012"),
            CreateRawAdvisory("CVE-2025-0013")
        };

        // Act
        var results = await service.IngestBatchAsync(TestSource, advisories);

        // Assert
        results.Should().HaveCount(3);
        results.Should().OnlyContain(r => r.Decision == MergeDecision.Created);
    }

    [Fact]
    public async Task IngestBatchAsync_ContinuesOnError_ReturnsConflictForFailed()
    {
        // Arrange
        var callCount = 0;
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        _storeMock
            .Setup(x => x.UpsertCanonicalAsync(It.IsAny<UpsertCanonicalRequest>(), It.IsAny<CancellationToken>()))
            .Returns(() =>
            {
                callCount++;
                if (callCount == 2)
                    throw new InvalidOperationException("Simulated failure");
                return Task.FromResult(TestCanonicalId);
            });

        var service = CreateService();
        var advisories = new[]
        {
            CreateRawAdvisory("CVE-2025-0014"),
            CreateRawAdvisory("CVE-2025-0015"),
            CreateRawAdvisory("CVE-2025-0016")
        };

        // Act
        var results = await service.IngestBatchAsync(TestSource, advisories);

        // Assert
        results.Should().HaveCount(3);
        results[0].Decision.Should().Be(MergeDecision.Created);
        results[1].Decision.Should().Be(MergeDecision.Conflict);
        results[1].ConflictReason.Should().Contain("Simulated failure");
        results[2].Decision.Should().Be(MergeDecision.Created);
    }

    #endregion
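
    // Sketch (assumption): the continue-on-error behaviour asserted above
    // implies a per-item guard along these lines inside IngestBatchAsync;
    // the Conflict factory shown is hypothetical.
    //
    //   foreach (var advisory in advisories)
    //   {
    //       try
    //       {
    //           results.Add(await IngestAsync(source, advisory, ct));
    //       }
    //       catch (Exception ex)
    //       {
    //           results.Add(IngestResult.Conflict(advisory, ex.Message));
    //       }
    //   }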

    #region Query Operations - GetByIdAsync

    [Fact]
    public async Task GetByIdAsync_DelegatesToStore()
    {
        // Arrange
        var canonical = CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0018");
        _storeMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        var service = CreateService();

        // Act
        var result = await service.GetByIdAsync(TestCanonicalId);

        // Assert
        result.Should().Be(canonical);
        _storeMock.Verify(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task GetByIdAsync_ReturnsNull_WhenNotFound()
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByIdAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        var service = CreateService();

        // Act
        var result = await service.GetByIdAsync(Guid.NewGuid());

        // Assert
        result.Should().BeNull();
    }

    #endregion

    #region Query Operations - GetByMergeHashAsync

    [Fact]
    public async Task GetByMergeHashAsync_DelegatesToStore()
    {
        // Arrange
        var canonical = CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0019");
        _storeMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        var service = CreateService();

        // Act
        var result = await service.GetByMergeHashAsync(TestMergeHash);

        // Assert
        result.Should().Be(canonical);
        _storeMock.Verify(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task GetByMergeHashAsync_ThrowsArgumentException_WhenHashIsNullOrEmpty()
    {
        var service = CreateService();

        await Assert.ThrowsAsync<ArgumentNullException>(() => service.GetByMergeHashAsync(null!));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByMergeHashAsync(""));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByMergeHashAsync(" "));
    }

    #endregion

    #region Query Operations - GetByCveAsync

    [Fact]
    public async Task GetByCveAsync_DelegatesToStore()
    {
        // Arrange
        var canonicals = new List<CanonicalAdvisory>
        {
            CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0020"),
            CreateCanonicalAdvisory(Guid.NewGuid(), "CVE-2025-0020")
        };
        _storeMock
            .Setup(x => x.GetByCveAsync("CVE-2025-0020", It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonicals);

        var service = CreateService();

        // Act
        var result = await service.GetByCveAsync("CVE-2025-0020");

        // Assert
        result.Should().HaveCount(2);
        _storeMock.Verify(x => x.GetByCveAsync("CVE-2025-0020", It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task GetByCveAsync_ReturnsEmptyList_WhenNoResults()
    {
        // Arrange
        _storeMock
            .Setup(x => x.GetByCveAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new List<CanonicalAdvisory>());

        var service = CreateService();

        // Act
        var result = await service.GetByCveAsync("CVE-2025-9999");

        // Assert
        result.Should().BeEmpty();
    }

    [Fact]
    public async Task GetByCveAsync_ThrowsArgumentException_WhenCveIsNullOrEmpty()
    {
        var service = CreateService();

        await Assert.ThrowsAsync<ArgumentNullException>(() => service.GetByCveAsync(null!));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByCveAsync(""));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByCveAsync(" "));
    }

    #endregion

    #region Query Operations - GetByArtifactAsync

    [Fact]
    public async Task GetByArtifactAsync_DelegatesToStore()
    {
        // Arrange
        const string artifactKey = "pkg:npm/lodash@4";
        var canonicals = new List<CanonicalAdvisory>
        {
            CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0021")
        };
        _storeMock
            .Setup(x => x.GetByArtifactAsync(artifactKey, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonicals);

        var service = CreateService();

        // Act
        var result = await service.GetByArtifactAsync(artifactKey);

        // Assert
        result.Should().HaveCount(1);
        _storeMock.Verify(x => x.GetByArtifactAsync(artifactKey, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task GetByArtifactAsync_ThrowsArgumentException_WhenArtifactKeyIsNullOrEmpty()
    {
        var service = CreateService();

        await Assert.ThrowsAsync<ArgumentNullException>(() => service.GetByArtifactAsync(null!));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByArtifactAsync(""));
        await Assert.ThrowsAsync<ArgumentException>(() => service.GetByArtifactAsync(" "));
    }

    #endregion

    #region Query Operations - QueryAsync

    [Fact]
    public async Task QueryAsync_DelegatesToStore()
    {
        // Arrange
        var options = new CanonicalQueryOptions { Severity = "critical", Limit = 10 };
        var pagedResult = new PagedResult<CanonicalAdvisory>
        {
            Items = new List<CanonicalAdvisory> { CreateCanonicalAdvisory(TestCanonicalId, "CVE-2025-0022") },
            TotalCount = 1,
            Offset = 0,
            Limit = 10
        };
        _storeMock
            .Setup(x => x.QueryAsync(options, It.IsAny<CancellationToken>()))
            .ReturnsAsync(pagedResult);

        var service = CreateService();

        // Act
        var result = await service.QueryAsync(options);

        // Assert
        result.Items.Should().HaveCount(1);
        result.TotalCount.Should().Be(1);
        _storeMock.Verify(x => x.QueryAsync(options, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task QueryAsync_ThrowsArgumentNullException_WhenOptionsIsNull()
    {
        var service = CreateService();

        await Assert.ThrowsAsync<ArgumentNullException>(() => service.QueryAsync(null!));
    }

    #endregion

    #region Status Operations - UpdateStatusAsync

    [Fact]
    public async Task UpdateStatusAsync_DelegatesToStore()
    {
        // Arrange
        var service = CreateService();

        // Act
        await service.UpdateStatusAsync(TestCanonicalId, CanonicalStatus.Withdrawn);

        // Assert
        _storeMock.Verify(x => x.UpdateStatusAsync(
            TestCanonicalId,
            CanonicalStatus.Withdrawn,
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Theory]
    [InlineData(CanonicalStatus.Active)]
    [InlineData(CanonicalStatus.Stub)]
    [InlineData(CanonicalStatus.Withdrawn)]
    public async Task UpdateStatusAsync_AcceptsAllStatusValues(CanonicalStatus status)
    {
        // Arrange
        var service = CreateService();

        // Act
        await service.UpdateStatusAsync(TestCanonicalId, status);

        // Assert
        _storeMock.Verify(x => x.UpdateStatusAsync(
            TestCanonicalId,
            status,
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    #endregion

    #region Status Operations - DegradeToStubsAsync

    [Fact]
    public async Task DegradeToStubsAsync_ReturnsZero_NotYetImplemented()
    {
        // Arrange
        var service = CreateService();

        // Act
        var result = await service.DegradeToStubsAsync(0.001);

        // Assert - currently returns 0 as not implemented
        result.Should().Be(0);
    }

    #endregion

    #region Validation

    [Fact]
    public async Task IngestAsync_ThrowsArgumentException_WhenSourceIsNullOrEmpty()
    {
        var service = CreateService();
        var advisory = CreateRawAdvisory("CVE-2025-0017");

        // ArgumentNullException is thrown for null
        await Assert.ThrowsAsync<ArgumentNullException>(() =>
            service.IngestAsync(null!, advisory));

        // ArgumentException is thrown for empty/whitespace
        await Assert.ThrowsAsync<ArgumentException>(() =>
            service.IngestAsync("", advisory));

        await Assert.ThrowsAsync<ArgumentException>(() =>
            service.IngestAsync(" ", advisory));
    }

    [Fact]
    public async Task IngestAsync_ThrowsArgumentNullException_WhenAdvisoryIsNull()
    {
        var service = CreateService();

        await Assert.ThrowsAsync<ArgumentNullException>(() =>
            service.IngestAsync(TestSource, null!));
    }

    #endregion

    #region Helpers

    private CanonicalAdvisoryService CreateService() =>
        new(_storeMock.Object, _hashCalculatorMock.Object, _logger);

    private CanonicalAdvisoryService CreateServiceWithSigner() =>
        new(_storeMock.Object, _hashCalculatorMock.Object, _logger, _signerMock.Object);

    private static RawAdvisory CreateRawAdvisory(
        string cve,
        string? sourceAdvisoryId = null,
        string? affectsKey = null,
        IReadOnlyList<string>? weaknesses = null,
        string? rawPayloadJson = null)
    {
        return new RawAdvisory
        {
            SourceAdvisoryId = sourceAdvisoryId ?? $"ADV-{cve}",
            Cve = cve,
            AffectsKey = affectsKey ?? "pkg:npm/example@1",
            VersionRangeJson = "{\"introduced\":\"1.0.0\",\"fixed\":\"1.2.3\"}",
            Weaknesses = weaknesses ?? [],
            Severity = "high",
            Title = $"Test Advisory for {cve}",
            Summary = "Test summary",
            RawPayloadJson = rawPayloadJson,
            FetchedAt = DateTimeOffset.UtcNow
        };
    }

    private static CanonicalAdvisory CreateCanonicalAdvisory(Guid id, string cve, bool withSourceEdge = false)
    {
        var sourceEdges = withSourceEdge
            ? new List<SourceEdge>
            {
                new SourceEdge
                {
                    Id = Guid.NewGuid(),
                    SourceName = "vendor",
                    SourceAdvisoryId = $"VENDOR-{cve}",
                    SourceDocHash = "sha256:existing",
                    PrecedenceRank = 10, // High precedence
                    FetchedAt = DateTimeOffset.UtcNow
                }
            }
            : new List<SourceEdge>();

        return new CanonicalAdvisory
        {
            Id = id,
            Cve = cve,
            AffectsKey = "pkg:npm/example@1",
            MergeHash = TestMergeHash,
            CreatedAt = DateTimeOffset.UtcNow,
            UpdatedAt = DateTimeOffset.UtcNow,
            SourceEdges = sourceEdges
        };
    }

    #endregion
}
@@ -16,5 +16,6 @@
    <!-- Test packages inherited from Directory.Build.props -->
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Moq" Version="4.20.72" />
  </ItemGroup>
</Project>
@@ -0,0 +1,267 @@
{
  "corpus": "dedup-alias-collision",
  "version": "1.0.0",
  "description": "Test corpus for GHSA to CVE alias mapping edge cases",
  "items": [
    {
      "id": "GHSA-CVE-same-package",
      "description": "GHSA and CVE for same package should have same hash",
      "sources": [
        {
          "source": "github",
          "advisory_id": "GHSA-abc1-def2-ghi3",
          "cve": "CVE-2024-1001",
          "affects_key": "pkg:npm/express@4.18.0",
          "version_range": "<4.18.2",
          "weaknesses": ["CWE-400"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1001",
          "cve": "cve-2024-1001",
          "affects_key": "pkg:NPM/express@4.18.0",
          "version_range": "<4.18.2",
          "weaknesses": ["cwe-400"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Case normalization produces identical identity"
      }
    },
    {
      "id": "GHSA-CVE-different-package",
      "description": "GHSA and CVE for different packages should differ",
      "sources": [
        {
          "source": "github",
          "advisory_id": "GHSA-xyz1-uvw2-rst3",
          "cve": "CVE-2024-1002",
          "affects_key": "pkg:npm/lodash@4.17.0",
          "version_range": "<4.17.21",
          "weaknesses": ["CWE-1321"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1002",
          "cve": "CVE-2024-1002",
          "affects_key": "pkg:npm/underscore@1.13.0",
          "version_range": "<1.13.6",
          "weaknesses": ["CWE-1321"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different packages produce different hashes"
      }
    },
    {
      "id": "PYSEC-CVE-mapping",
      "description": "PyPI security advisory with CVE mapping",
      "sources": [
        {
          "source": "osv",
          "advisory_id": "PYSEC-2024-001",
          "cve": "CVE-2024-1003",
          "affects_key": "pkg:pypi/django@4.2.0",
          "version_range": "<4.2.7",
          "weaknesses": ["CWE-79"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1003",
          "cve": "CVE-2024-1003",
          "affects_key": "pkg:PYPI/Django@4.2.0",
          "version_range": "<4.2.7",
          "weaknesses": ["CWE-79"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Case normalization for PyPI package names"
      }
    },
    {
      "id": "RUSTSEC-CVE-mapping",
      "description": "Rust security advisory with CVE mapping",
      "sources": [
        {
          "source": "osv",
          "advisory_id": "RUSTSEC-2024-0001",
          "cve": "CVE-2024-1004",
          "affects_key": "pkg:cargo/tokio@1.28.0",
          "version_range": "<1.28.2",
          "weaknesses": ["CWE-416"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1004",
          "cve": "cve-2024-1004",
          "affects_key": "pkg:CARGO/Tokio@1.28.0",
          "version_range": "< 1.28.2",
          "weaknesses": ["cwe-416"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Case normalization for CVE, PURL, and CWE produces same identity"
      }
    },
    {
      "id": "GO-CVE-scoped-package",
      "description": "Go advisory with module path normalization",
      "sources": [
        {
          "source": "osv",
          "advisory_id": "GO-2024-0001",
          "cve": "CVE-2024-1005",
          "affects_key": "pkg:golang/github.com/example/module@v1.0.0",
          "version_range": "<v1.2.0",
          "weaknesses": ["CWE-94"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1005",
          "cve": "CVE-2024-1005",
          "affects_key": "pkg:golang/github.com/Example/Module@v1.0.0",
          "version_range": "<v1.2.0",
          "weaknesses": ["CWE-94"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Go module paths are normalized to lowercase"
      }
    },
    {
      "id": "CVE-reserved-no-data",
      "description": "CVE reserved but no vulnerability data yet",
      "sources": [
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1006",
          "cve": "CVE-2024-1006",
          "affects_key": "pkg:npm/test@1.0.0",
          "version_range": "*",
          "weaknesses": []
        },
        {
          "source": "github",
          "advisory_id": "GHSA-test-test-test",
          "cve": "CVE-2024-1006",
          "affects_key": "pkg:npm/test@1.0.0",
          "version_range": "all",
          "weaknesses": []
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Wildcard version ranges normalize to same value"
      }
    },
    {
      "id": "OSV-multi-ecosystem",
      "description": "OSV advisory affecting multiple ecosystems",
      "sources": [
        {
          "source": "osv",
          "advisory_id": "OSV-2024-001-npm",
          "cve": "CVE-2024-1007",
          "affects_key": "pkg:npm/shared-lib@1.0.0",
          "version_range": "<1.5.0",
          "weaknesses": ["CWE-20"]
        },
        {
          "source": "osv",
          "advisory_id": "OSV-2024-001-pypi",
          "cve": "CVE-2024-1007",
          "affects_key": "pkg:pypi/shared-lib@1.0.0",
          "version_range": "<1.5.0",
          "weaknesses": ["CWE-20"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different ecosystems (npm vs pypi) produce different hashes"
      }
    },
    {
      "id": "GHSA-CVE-partial-cwe",
      "description": "GHSA has more CWEs than CVE",
      "sources": [
        {
          "source": "github",
          "advisory_id": "GHSA-full-cwe-list",
          "cve": "CVE-2024-1008",
          "affects_key": "pkg:npm/vuln-pkg@1.0.0",
          "version_range": "<1.1.0",
          "weaknesses": ["CWE-79", "CWE-89", "CWE-94"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1008",
          "cve": "CVE-2024-1008",
          "affects_key": "pkg:npm/vuln-pkg@1.0.0",
          "version_range": "<1.1.0",
          "weaknesses": ["CWE-79"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different CWE sets produce different hashes"
      }
    },
    {
      "id": "GHSA-no-CVE-yet",
      "description": "GHSA published before CVE assignment",
      "sources": [
        {
          "source": "github",
          "advisory_id": "GHSA-pend-cve-asn",
          "cve": "CVE-2024-1009",
          "affects_key": "pkg:npm/new-vuln@2.0.0",
          "version_range": "<2.0.5",
          "weaknesses": ["CWE-352"]
        },
        {
          "source": "github",
          "advisory_id": "GHSA-pend-cve-asn",
          "cve": "cve-2024-1009",
          "affects_key": "pkg:NPM/new-vuln@2.0.0",
          "version_range": "<2.0.5",
          "weaknesses": ["cwe-352"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Same GHSA with case variations produces same hash"
      }
    },
    {
      "id": "NuGet-GHSA-CVE",
      "description": "NuGet package with GHSA and CVE",
      "sources": [
        {
          "source": "github",
          "advisory_id": "GHSA-nuget-test-001",
          "cve": "CVE-2024-1010",
          "affects_key": "pkg:nuget/Newtonsoft.Json@13.0.0",
          "version_range": "<13.0.3",
          "weaknesses": ["CWE-502"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-1010",
          "cve": "CVE-2024-1010",
          "affects_key": "pkg:NUGET/newtonsoft.json@13.0.0",
          "version_range": "<13.0.3",
          "weaknesses": ["CWE-502"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "NuGet package names are case-insensitive"
      }
    }
  ]
}
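Every "same_merge_hash": true rationale in this corpus reduces to case normalization of the identity tuple before hashing. A rough sketch of that rule, assuming the tuple fields used above (the library's actual normalizers and type names may differ):

using System;
using System.Linq;

static class IdentityCaseNormalization
{
    // CVE and CWE identifiers compare case-insensitively; PURL type, namespace
    // and name are lowercased while the version segment is preserved. Sketch only.
    public static (string Cve, string AffectsKey, string[] Weaknesses) Normalize(
        string cve, string affectsKey, string[] weaknesses) =>
        (cve.Trim().ToUpperInvariant(),
         NormalizePurl(affectsKey.Trim()),
         weaknesses.Select(w => w.Trim().ToUpperInvariant())
                   .OrderBy(w => w, StringComparer.Ordinal)
                   .ToArray());

    private static string NormalizePurl(string purl)
    {
        // The last '@' separates the version; everything before it is
        // case-insensitive per the expectations encoded in this corpus.
        var at = purl.LastIndexOf('@');
        return at < 0
            ? purl.ToLowerInvariant()
            : purl[..at].ToLowerInvariant() + purl[at..];
    }
}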
@@ -0,0 +1,281 @@
{
  "corpus": "dedup-backport-variants",
  "version": "1.0.0",
  "description": "Test corpus for merge hash deduplication with Alpine/SUSE backport variants",
  "items": [
    {
      "id": "CVE-2024-0001-openssl-alpine-backport",
      "description": "Alpine backport with upstream commit reference",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-001",
          "cve": "CVE-2024-0001",
          "affects_key": "pkg:apk/alpine/openssl@1.1.1w",
          "version_range": "<1.1.1w-r1",
          "weaknesses": ["CWE-476"],
          "patch_lineage": "https://github.com/openssl/openssl/commit/a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        },
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-001",
          "cve": "CVE-2024-0001",
          "affects_key": "pkg:apk/alpine/openssl@1.1.1w",
          "version_range": "<1.1.1w-r1",
          "weaknesses": ["CWE-476"],
          "patch_lineage": "backport of a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Same SHA extracted from both URL and backport reference"
      }
    },
    {
      "id": "CVE-2024-0002-curl-suse-backport",
      "description": "SUSE backport with PATCH-ID format",
      "sources": [
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0001-1",
          "cve": "CVE-2024-0002",
          "affects_key": "pkg:rpm/suse/curl@7.79.1",
          "version_range": "<7.79.1-150400.5.36.1",
          "weaknesses": ["CWE-120"],
          "patch_lineage": "PATCH-12345"
        },
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0001-1",
          "cve": "CVE-2024-0002",
          "affects_key": "pkg:rpm/suse/curl@7.79.1",
          "version_range": "<7.79.1-150400.5.36.1",
          "weaknesses": ["CWE-120"],
          "patch_lineage": "patch-12345"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "PATCH-ID is case-normalized to uppercase"
      }
    },
    {
      "id": "CVE-2024-0003-nginx-different-backports",
      "description": "Same CVE with different backport lineages should differ",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-002",
          "cve": "CVE-2024-0003",
          "affects_key": "pkg:apk/alpine/nginx@1.24.0",
          "version_range": "<1.24.0-r7",
          "weaknesses": ["CWE-400"],
          "patch_lineage": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        },
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0002-1",
          "cve": "CVE-2024-0003",
          "affects_key": "pkg:rpm/suse/nginx@1.24.0",
          "version_range": "<1.24.0-150400.3.7.1",
          "weaknesses": ["CWE-400"],
          "patch_lineage": "d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5"
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different package ecosystems and different patch lineages"
      }
    },
    {
      "id": "CVE-2024-0004-busybox-no-lineage",
      "description": "Backport without lineage info should still match on case normalization",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-003",
          "cve": "CVE-2024-0004",
          "affects_key": "pkg:apk/alpine/busybox@1.36.1",
          "version_range": "<1.36.1-r6",
          "weaknesses": ["CWE-78"]
        },
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-003",
          "cve": "cve-2024-0004",
          "affects_key": "pkg:APK/alpine/busybox@1.36.1",
          "version_range": "<1.36.1-r6",
          "weaknesses": ["cwe-78"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Case normalization produces identical identity when no patch lineage"
      }
    },
    {
      "id": "CVE-2024-0005-musl-abbreviated-sha",
      "description": "Abbreviated vs full SHA should normalize differently",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-004",
          "cve": "CVE-2024-0005",
          "affects_key": "pkg:apk/alpine/musl@1.2.4",
          "version_range": "<1.2.4-r2",
          "weaknesses": ["CWE-119"],
          "patch_lineage": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        },
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-004",
          "cve": "CVE-2024-0005",
          "affects_key": "pkg:apk/alpine/musl@1.2.4",
          "version_range": "<1.2.4-r2",
          "weaknesses": ["CWE-119"],
          "patch_lineage": "commit a1b2c3d"
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Full SHA vs abbreviated SHA produce different normalized lineages"
      }
    },
    {
      "id": "CVE-2024-0006-zlib-multiple-shas",
      "description": "Multiple SHAs in lineage - should extract first full SHA",
      "sources": [
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0003-1",
          "cve": "CVE-2024-0006",
          "affects_key": "pkg:rpm/suse/zlib@1.2.13",
          "version_range": "<1.2.13-150500.4.3.1",
          "weaknesses": ["CWE-787"],
          "patch_lineage": "f1e2d3c4b5a6f1e2d3c4b5a6f1e2d3c4b5a6f1e2"
        },
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0003-1",
          "cve": "CVE-2024-0006",
          "affects_key": "pkg:rpm/suse/zlib@1.2.13",
          "version_range": "<1.2.13-150500.4.3.1",
          "weaknesses": ["CWE-787"],
          "patch_lineage": "fixes include f1e2d3c4b5a6f1e2d3c4b5a6f1e2d3c4b5a6f1e2 and abc1234"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Full SHA is extracted and normalized from both lineage descriptions"
      }
    },
    {
      "id": "CVE-2024-0007-libpng-distro-versions",
      "description": "Same upstream fix with different notation but same semantic meaning",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-005",
          "cve": "CVE-2024-0007",
          "affects_key": "pkg:apk/alpine/libpng@1.6.40",
          "version_range": "<1.6.40-r0",
          "weaknesses": ["CWE-125"]
        },
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-005",
          "cve": "cve-2024-0007",
          "affects_key": "pkg:APK/alpine/libpng@1.6.40",
          "version_range": "< 1.6.40-r0",
          "weaknesses": ["cwe-125"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Case normalization and whitespace trimming produce identical identity"
      }
    },
    {
      "id": "CVE-2024-0008-git-github-url",
      "description": "GitHub vs GitLab commit URL extraction",
      "sources": [
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0004-1",
          "cve": "CVE-2024-0008",
          "affects_key": "pkg:rpm/suse/git@2.42.0",
          "version_range": "<2.42.0-150500.3.6.1",
          "weaknesses": ["CWE-78"],
          "patch_lineage": "https://github.com/git/git/commit/abc123def456abc123def456abc123def456abc1"
        },
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0004-1",
          "cve": "CVE-2024-0008",
          "affects_key": "pkg:rpm/suse/git@2.42.0",
          "version_range": "<2.42.0-150500.3.6.1",
          "weaknesses": ["CWE-78"],
          "patch_lineage": "https://gitlab.com/git/git/commit/abc123def456abc123def456abc123def456abc1"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Both GitHub and GitLab URL patterns extract same commit SHA"
      }
    },
    {
      "id": "CVE-2024-0009-expat-unrecognized-lineage",
      "description": "Unrecognized patch lineage format returns null",
      "sources": [
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-006",
          "cve": "CVE-2024-0009",
          "affects_key": "pkg:apk/alpine/expat@2.5.0",
          "version_range": "<2.5.0-r1",
          "weaknesses": ["CWE-611"],
          "patch_lineage": "some random text without sha"
        },
        {
          "source": "alpine",
          "advisory_id": "ALPINE-2024-006",
          "cve": "CVE-2024-0009",
          "affects_key": "pkg:apk/alpine/expat@2.5.0",
          "version_range": "<2.5.0-r1",
          "weaknesses": ["CWE-611"],
          "patch_lineage": "another unrecognized format"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Both unrecognized lineages normalize to null, producing same hash"
      }
    },
    {
      "id": "CVE-2024-0010-sqlite-fixed-notation",
      "description": "Fixed version notation normalization",
      "sources": [
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0005-1",
          "cve": "CVE-2024-0010",
          "affects_key": "pkg:rpm/suse/sqlite3@3.43.0",
          "version_range": "fixed: 3.43.2",
          "weaknesses": ["CWE-476"]
        },
        {
          "source": "suse",
          "advisory_id": "SUSE-SU-2024:0005-1",
          "cve": "CVE-2024-0010",
          "affects_key": "pkg:rpm/suse/sqlite3@3.43.0",
          "version_range": ">=3.43.2",
          "weaknesses": ["CWE-476"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "fixed: notation normalizes to >= comparison"
      }
    }
  ]
}
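The lineage items above pin down one rule: extract the first full 40-hex commit SHA (whether bare, prefixed, or embedded in a GitHub/GitLab commit URL), uppercase SUSE PATCH-IDs, and yield null for anything unrecognized, so that two unrecognized lineages still hash identically. A sketch under those assumptions (names illustrative, not the library's API):

using System.Text.RegularExpressions;

static class PatchLineage
{
    private static readonly Regex FullSha =
        new("[0-9a-fA-F]{40}", RegexOptions.Compiled);
    private static readonly Regex PatchId =
        new(@"^patch-\d+$", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Returns a normalized lineage token, or null when nothing recognizable
    // is present. An abbreviated SHA like "a1b2c3d" falls through to null,
    // which is why it hashes differently from the full SHA in the musl item.
    public static string? Normalize(string? lineage)
    {
        if (string.IsNullOrWhiteSpace(lineage)) return null;

        var sha = FullSha.Match(lineage);
        if (sha.Success) return sha.Value.ToLowerInvariant();

        var trimmed = lineage.Trim();
        if (PatchId.IsMatch(trimmed)) return trimmed.ToUpperInvariant();

        return null;
    }
}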
@@ -0,0 +1,269 @@
{
  "corpus": "dedup-debian-rhel-cve-2024",
  "version": "1.0.0",
  "description": "Test corpus for merge hash deduplication across Debian and RHEL sources",
  "items": [
    {
      "id": "CVE-2024-1234-curl",
      "description": "Same curl CVE from Debian and RHEL - should produce same identity hash for same package",
      "sources": [
        {
          "source": "debian",
          "advisory_id": "DSA-5678-1",
          "cve": "CVE-2024-1234",
          "affects_key": "pkg:deb/debian/curl@7.68.0",
          "version_range": "<7.68.0-1+deb10u2",
          "weaknesses": ["CWE-120"]
        },
        {
          "source": "redhat",
          "advisory_id": "RHSA-2024:1234",
          "cve": "CVE-2024-1234",
          "affects_key": "pkg:deb/debian/curl@7.68.0",
          "version_range": "<7.68.0-1+deb10u2",
          "weaknesses": ["cwe-120"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Same CVE, same package identity, same version range, same CWE (case-insensitive)"
      }
    },
    {
      "id": "CVE-2024-2345-openssl",
      "description": "Same OpenSSL CVE from Debian and RHEL with different package identifiers",
      "sources": [
        {
          "source": "debian",
          "advisory_id": "DSA-5680-1",
          "cve": "CVE-2024-2345",
          "affects_key": "pkg:deb/debian/openssl@1.1.1n",
          "version_range": "<1.1.1n-0+deb11u5",
          "weaknesses": ["CWE-200", "CWE-326"]
        },
        {
          "source": "redhat",
          "advisory_id": "RHSA-2024:2345",
          "cve": "cve-2024-2345",
          "affects_key": "pkg:rpm/redhat/openssl@1.1.1k",
          "version_range": "<1.1.1k-12.el8_9",
          "weaknesses": ["CWE-326", "CWE-200"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different package identifiers (deb vs rpm), so different merge hash despite same CVE"
      }
    },
    {
      "id": "CVE-2024-3456-nginx",
      "description": "Same nginx CVE with normalized version ranges",
      "sources": [
        {
          "source": "debian",
          "advisory_id": "DSA-5681-1",
          "cve": "CVE-2024-3456",
          "affects_key": "pkg:deb/debian/nginx@1.22.0",
          "version_range": "[1.0.0, 1.22.1)",
          "weaknesses": ["CWE-79"]
        },
        {
          "source": "debian_tracker",
          "advisory_id": "CVE-2024-3456",
          "cve": "CVE-2024-3456",
          "affects_key": "pkg:deb/debian/nginx@1.22.0",
          "version_range": ">=1.0.0,<1.22.1",
          "weaknesses": ["CWE-79"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Same CVE, same package, version ranges normalize to same format"
      }
    },
    {
      "id": "CVE-2024-4567-log4j",
      "description": "Different CVEs for same package should have different hash",
      "sources": [
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-4567",
          "cve": "CVE-2024-4567",
          "affects_key": "pkg:maven/org.apache.logging.log4j/log4j-core@2.17.0",
          "version_range": "<2.17.1",
          "weaknesses": ["CWE-502"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-4568",
          "cve": "CVE-2024-4568",
          "affects_key": "pkg:maven/org.apache.logging.log4j/log4j-core@2.17.0",
          "version_range": "<2.17.1",
          "weaknesses": ["CWE-502"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different CVEs, even with same package and version range"
      }
    },
    {
      "id": "CVE-2024-5678-postgres",
      "description": "Same CVE with different CWEs should have different hash",
      "sources": [
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-5678",
          "cve": "CVE-2024-5678",
          "affects_key": "pkg:generic/postgresql@15.0",
          "version_range": "<15.4",
          "weaknesses": ["CWE-89"]
        },
        {
          "source": "vendor",
          "advisory_id": "CVE-2024-5678",
          "cve": "CVE-2024-5678",
          "affects_key": "pkg:generic/postgresql@15.0",
          "version_range": "<15.4",
          "weaknesses": ["CWE-89", "CWE-94"]
        }
      ],
      "expected": {
        "same_merge_hash": false,
        "rationale": "Different CWE sets change the identity"
      }
    },
    {
      "id": "CVE-2024-6789-python",
      "description": "Same CVE with PURL qualifier stripping",
      "sources": [
        {
          "source": "pypi",
          "advisory_id": "PYSEC-2024-001",
          "cve": "CVE-2024-6789",
          "affects_key": "pkg:pypi/requests@2.28.0?arch=x86_64",
          "version_range": "<2.28.2",
          "weaknesses": ["CWE-400"]
        },
        {
          "source": "osv",
          "advisory_id": "CVE-2024-6789",
          "cve": "CVE-2024-6789",
          "affects_key": "pkg:pypi/requests@2.28.0",
          "version_range": "<2.28.2",
          "weaknesses": ["CWE-400"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "arch qualifier is stripped during normalization, so packages are identical"
      }
    },
    {
      "id": "CVE-2024-7890-npm",
      "description": "Same CVE with scoped npm package - case normalization",
      "sources": [
        {
          "source": "npm",
          "advisory_id": "GHSA-abc1-def2-ghi3",
          "cve": "CVE-2024-7890",
          "affects_key": "pkg:npm/@angular/core@14.0.0",
          "version_range": "<14.2.0",
          "weaknesses": ["CWE-79"]
        },
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-7890",
          "cve": "cve-2024-7890",
          "affects_key": "pkg:NPM/@Angular/CORE@14.0.0",
          "version_range": "<14.2.0",
          "weaknesses": ["cwe-79"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "PURL type/namespace/name case normalization produces same identity"
      }
    },
    {
      "id": "CVE-2024-8901-redis",
      "description": "Same CVE with CPE identifier",
      "sources": [
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-8901",
          "cve": "CVE-2024-8901",
          "affects_key": "cpe:2.3:a:redis:redis:7.0.0:*:*:*:*:*:*:*",
          "version_range": "<7.0.12",
          "weaknesses": ["CWE-416"]
        },
        {
          "source": "vendor",
          "advisory_id": "CVE-2024-8901",
          "cve": "CVE-2024-8901",
          "affects_key": "CPE:2.3:A:Redis:REDIS:7.0.0:*:*:*:*:*:*:*",
          "version_range": "<7.0.12",
          "weaknesses": ["CWE-416"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "CPE normalization lowercases all components"
      }
    },
    {
      "id": "CVE-2024-9012-kernel",
      "description": "Same CVE with CPE 2.2 vs 2.3 format",
      "sources": [
        {
          "source": "nvd",
          "advisory_id": "CVE-2024-9012",
          "cve": "CVE-2024-9012",
          "affects_key": "cpe:/o:linux:linux_kernel:5.15",
          "version_range": "<5.15.120",
          "weaknesses": ["CWE-416"]
        },
        {
          "source": "vendor",
          "advisory_id": "CVE-2024-9012",
          "cve": "CVE-2024-9012",
          "affects_key": "cpe:2.3:o:linux:linux_kernel:5.15:*:*:*:*:*:*:*",
          "version_range": "<5.15.120",
          "weaknesses": ["CWE-416"]
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "CPE 2.2 is converted to CPE 2.3 format during normalization"
      }
    },
    {
      "id": "CVE-2024-1357-glibc",
      "description": "Same CVE with patch lineage differentiation",
      "sources": [
        {
          "source": "debian",
          "advisory_id": "DSA-5690-1",
          "cve": "CVE-2024-1357",
          "affects_key": "pkg:deb/debian/glibc@2.31",
          "version_range": "<2.31-13+deb11u7",
          "weaknesses": ["CWE-787"],
          "patch_lineage": "https://github.com/glibc/glibc/commit/abc123def456abc123def456abc123def456abc1"
        },
        {
          "source": "debian",
          "advisory_id": "DSA-5690-1",
          "cve": "CVE-2024-1357",
          "affects_key": "pkg:deb/debian/glibc@2.31",
          "version_range": "<2.31-13+deb11u7",
          "weaknesses": ["CWE-787"],
          "patch_lineage": "commit abc123def456abc123def456abc123def456abc1"
        }
      ],
      "expected": {
        "same_merge_hash": true,
        "rationale": "Patch lineage normalization extracts SHA from both URL and plain commit reference"
      }
    }
  ]
}
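Two normalization rules carry most of this corpus: PURL qualifiers (e.g. ?arch=x86_64) are stripped before comparison, and version-range notations are rewritten into a single comparator form ("fixed: X" becomes ">=X", interval notation "[a, b)" becomes ">=a,<b", and incidental whitespace is removed). A rough sketch under those assumptions; the helper names are illustrative:

using System;

static class RangeNormalization
{
    // Drops everything from the first '?' on, so qualifier variants of the
    // same package compare equal.
    public static string StripQualifiers(string purl)
    {
        var q = purl.IndexOf('?');
        return q < 0 ? purl : purl[..q];
    }

    // Rewrites the notations exercised by this corpus into one form.
    public static string NormalizeRange(string range)
    {
        range = range.Replace(" ", string.Empty);

        if (range.StartsWith("fixed:", StringComparison.OrdinalIgnoreCase))
            return ">=" + range["fixed:".Length..];

        if (range.StartsWith('[') && range.EndsWith(')'))
        {
            var parts = range[1..^1].Split(',');
            return $">={parts[0]},<{parts[1]}";
        }

        return range;
    }
}

Under this sketch, "[1.0.0, 1.22.1)" and ">=1.0.0,<1.22.1" normalize to the same string, as the nginx item expects.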
@@ -0,0 +1,244 @@
// -----------------------------------------------------------------------------
// CpeNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for CpeNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class CpeNormalizerTests
{
    private readonly CpeNormalizer _normalizer = CpeNormalizer.Instance;

    #region CPE 2.3 Normalization

    [Fact]
    public void Normalize_ValidCpe23_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe23Uppercase_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("CPE:2.3:A:VENDOR:PRODUCT:1.0:*:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe23MixedCase_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("cpe:2.3:a:Apache:Log4j:2.14.0:*:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe23WithAny_ReturnsWildcard()
    {
        var result = _normalizer.Normalize("cpe:2.3:a:vendor:product:ANY:ANY:ANY:ANY:ANY:ANY:ANY:ANY");
        Assert.Equal("cpe:2.3:a:vendor:product:*:*:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe23WithNa_ReturnsDash()
    {
        var result = _normalizer.Normalize("cpe:2.3:a:vendor:product:1.0:NA:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:-:*:*:*:*:*:*", result);
    }

    #endregion

    #region CPE 2.2 to 2.3 Conversion

    [Fact]
    public void Normalize_Cpe22Simple_ConvertsToCpe23()
    {
        var result = _normalizer.Normalize("cpe:/a:vendor:product:1.0");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe22NoVersion_ConvertsToCpe23()
    {
        var result = _normalizer.Normalize("cpe:/a:vendor:product");
        Assert.StartsWith("cpe:2.3:a:vendor:product:", result);
    }

    [Fact]
    public void Normalize_Cpe22WithUpdate_ConvertsToCpe23()
    {
        var result = _normalizer.Normalize("cpe:/a:vendor:product:1.0:update1");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:update1:*:*:*:*:*:*", result);
    }

    [Fact]
    public void Normalize_Cpe22Uppercase_ConvertsToCpe23Lowercase()
    {
        var result = _normalizer.Normalize("CPE:/A:VENDOR:PRODUCT:1.0");
        Assert.Equal("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", result);
    }

    #endregion
|
||||
|
||||
#region Part Types
|
||||
|
||||
[Theory]
|
||||
[InlineData("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", "a")] // Application
|
||||
[InlineData("cpe:2.3:o:vendor:product:1.0:*:*:*:*:*:*:*", "o")] // Operating System
|
||||
[InlineData("cpe:2.3:h:vendor:product:1.0:*:*:*:*:*:*:*", "h")] // Hardware
|
||||
public void Normalize_DifferentPartTypes_PreservesPartType(string input, string expectedPart)
|
||||
{
|
||||
var result = _normalizer.Normalize(input);
|
||||
Assert.StartsWith($"cpe:2.3:{expectedPart}:", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases - Empty and Null
|
||||
|
||||
[Fact]
|
||||
public void Normalize_Null_ReturnsEmpty()
|
||||
{
|
||||
var result = _normalizer.Normalize(null!);
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_EmptyString_ReturnsEmpty()
|
||||
{
|
||||
var result = _normalizer.Normalize(string.Empty);
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_WhitespaceOnly_ReturnsEmpty()
|
||||
{
|
||||
var result = _normalizer.Normalize(" ");
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_WithWhitespace_ReturnsTrimmed()
|
||||
{
|
||||
var result = _normalizer.Normalize(" cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:* ");
|
||||
Assert.Equal("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases - Malformed Input
|
||||
|
||||
[Fact]
|
||||
public void Normalize_InvalidCpeFormat_ReturnsLowercase()
|
||||
{
|
||||
var result = _normalizer.Normalize("cpe:invalid:format");
|
||||
Assert.Equal("cpe:invalid:format", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_NotCpe_ReturnsLowercase()
|
||||
{
|
||||
var result = _normalizer.Normalize("not-a-cpe");
|
||||
Assert.Equal("not-a-cpe", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_TooFewComponents_ReturnsLowercase()
|
||||
{
|
||||
var result = _normalizer.Normalize("cpe:2.3:a:vendor");
|
||||
Assert.Equal("cpe:2.3:a:vendor", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases - Empty Components
|
||||
|
||||
[Fact]
|
||||
public void Normalize_EmptyVersion_ReturnsWildcard()
|
||||
{
|
||||
var result = _normalizer.Normalize("cpe:2.3:a:vendor:product::*:*:*:*:*:*:*");
|
||||
Assert.Contains(":*:", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_EmptyVendor_ReturnsWildcard()
|
||||
{
|
||||
var result = _normalizer.Normalize("cpe:2.3:a::product:1.0:*:*:*:*:*:*:*");
|
||||
Assert.Contains(":*:", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism
|
||||
|
||||
[Theory]
|
||||
[InlineData("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*")]
|
||||
[InlineData("CPE:2.3:A:VENDOR:PRODUCT:1.0:*:*:*:*:*:*:*")]
|
||||
[InlineData("cpe:/a:vendor:product:1.0")]
|
||||
public void Normalize_MultipleRuns_ReturnsSameResult(string input)
|
||||
{
|
||||
var first = _normalizer.Normalize(input);
|
||||
var second = _normalizer.Normalize(input);
|
||||
var third = _normalizer.Normalize(input);
|
||||
|
||||
Assert.Equal(first, second);
|
||||
Assert.Equal(second, third);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_Determinism_100Runs()
|
||||
{
|
||||
const string input = "CPE:2.3:A:Apache:LOG4J:2.14.0:*:*:*:*:*:*:*";
|
||||
var expected = _normalizer.Normalize(input);
|
||||
|
||||
for (var i = 0; i < 100; i++)
|
||||
{
|
||||
var result = _normalizer.Normalize(input);
|
||||
Assert.Equal(expected, result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_Cpe22And23_ProduceSameOutput()
|
||||
{
|
||||
var cpe22 = "cpe:/a:apache:log4j:2.14.0";
|
||||
var cpe23 = "cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*";
|
||||
|
||||
var result22 = _normalizer.Normalize(cpe22);
|
||||
var result23 = _normalizer.Normalize(cpe23);
|
||||
|
||||
Assert.Equal(result22, result23);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Real-World CPE Formats
|
||||
|
||||
[Theory]
|
||||
[InlineData("cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*", "cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*")]
|
||||
[InlineData("cpe:2.3:a:openssl:openssl:1.1.1:*:*:*:*:*:*:*", "cpe:2.3:a:openssl:openssl:1.1.1:*:*:*:*:*:*:*")]
|
||||
[InlineData("cpe:2.3:o:linux:linux_kernel:5.10:*:*:*:*:*:*:*", "cpe:2.3:o:linux:linux_kernel:5.10:*:*:*:*:*:*:*")]
|
||||
public void Normalize_RealWorldCpes_ReturnsExpected(string input, string expected)
|
||||
{
|
||||
var result = _normalizer.Normalize(input);
|
||||
Assert.Equal(expected, result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Singleton Instance
|
||||
|
||||
[Fact]
|
||||
public void Instance_ReturnsSameInstance()
|
||||
{
|
||||
var instance1 = CpeNormalizer.Instance;
|
||||
var instance2 = CpeNormalizer.Instance;
|
||||
Assert.Same(instance1, instance2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
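Taken together, the assertions above describe the whole normalization contract: lowercase everything, map the logical values ANY and NA, convert CPE 2.2 URIs to 2.3 formatted strings, and pass malformed input through lowercased. A minimal sketch that satisfies those tests, with hypothetical naming; the shipped CpeNormalizer singleton may pad or validate differently:

using System;
using System.Linq;

static class CpeNormalizerSketch
{
    public static string Normalize(string? cpe)
    {
        if (string.IsNullOrWhiteSpace(cpe)) return string.Empty;
        var value = cpe.Trim().ToLowerInvariant();

        if (value.StartsWith("cpe:/", StringComparison.Ordinal))
        {
            // CPE 2.2 -> 2.3: re-prefix and pad out to the 11 fields after "cpe:2.3".
            var parts = value["cpe:/".Length..].Split(':');
            return "cpe:2.3:" + string.Join(':', parts.Concat(Enumerable.Repeat("*", 11)).Take(11));
        }

        if (!value.StartsWith("cpe:2.3:", StringComparison.Ordinal))
            return value; // not a CPE we understand: lowercase pass-through

        var components = value.Split(':');
        if (components.Length < 13) return value; // too few components: pass through

        for (var i = 2; i < components.Length; i++)
        {
            // Logical values: ANY -> "*", NA -> "-", empty field -> "*".
            components[i] = components[i] switch { "any" or "" => "*", "na" => "-", _ => components[i] };
        }
        return string.Join(':', components);
    }
}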
@@ -0,0 +1,207 @@
// -----------------------------------------------------------------------------
// CveNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for CveNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class CveNormalizerTests
{
    private readonly CveNormalizer _normalizer = CveNormalizer.Instance;

    #region Basic Normalization

    [Fact]
    public void Normalize_ValidUppercase_ReturnsUnchanged()
    {
        var result = _normalizer.Normalize("CVE-2024-12345");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_ValidLowercase_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("cve-2024-12345");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_MixedCase_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("Cve-2024-12345");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_WithWhitespace_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize(" CVE-2024-12345 ");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_JustNumberPart_AddsCvePrefix()
    {
        var result = _normalizer.Normalize("2024-12345");
        Assert.Equal("CVE-2024-12345", result);
    }

    #endregion

    #region Edge Cases - Empty and Null

    [Fact]
    public void Normalize_Null_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(null);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_EmptyString_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(string.Empty);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_WhitespaceOnly_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(" ");
        Assert.Equal(string.Empty, result);
    }

    #endregion

    #region Edge Cases - Malformed Input

    [Fact]
    public void Normalize_ShortYear_ReturnsAsIs()
    {
        // Invalid year format (3 digits) - should return uppercase
        var result = _normalizer.Normalize("CVE-202-12345");
        Assert.Equal("CVE-202-12345", result);
    }

    [Fact]
    public void Normalize_ShortSequence_ReturnsAsIs()
    {
        // Invalid sequence (3 digits, min is 4) - should return uppercase
        var result = _normalizer.Normalize("CVE-2024-123");
        Assert.Equal("CVE-2024-123", result);
    }

    [Fact]
    public void Normalize_NonNumericYear_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("CVE-XXXX-12345");
        Assert.Equal("CVE-XXXX-12345", result);
    }

    [Fact]
    public void Normalize_NonNumericSequence_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("CVE-2024-ABCDE");
        Assert.Equal("CVE-2024-ABCDE", result);
    }

    [Fact]
    public void Normalize_ArbitraryText_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("some-random-text");
        Assert.Equal("SOME-RANDOM-TEXT", result);
    }

    #endregion

    #region Edge Cases - Unicode and Special Characters

    [Fact]
    public void Normalize_UnicodeWhitespace_ReturnsTrimmed()
    {
        // Non-breaking space and other unicode whitespace
        var result = _normalizer.Normalize("\u00A0CVE-2024-12345\u2003");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_WithNewlines_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize("\nCVE-2024-12345\r\n");
        Assert.Equal("CVE-2024-12345", result);
    }

    [Fact]
    public void Normalize_WithTabs_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize("\tCVE-2024-12345\t");
        Assert.Equal("CVE-2024-12345", result);
    }

    #endregion

    #region Determinism

    [Theory]
    [InlineData("CVE-2024-12345")]
    [InlineData("cve-2024-12345")]
    [InlineData("2024-12345")]
    [InlineData(" CVE-2024-12345 ")]
    public void Normalize_MultipleRuns_ReturnsSameResult(string input)
    {
        var first = _normalizer.Normalize(input);
        var second = _normalizer.Normalize(input);
        var third = _normalizer.Normalize(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void Normalize_Determinism_100Runs()
    {
        const string input = "cve-2024-99999";
        var expected = _normalizer.Normalize(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _normalizer.Normalize(input);
            Assert.Equal(expected, result);
        }
    }

    #endregion

    #region Real-World CVE Formats

    [Theory]
    [InlineData("CVE-2024-1234", "CVE-2024-1234")]
    [InlineData("CVE-2024-12345", "CVE-2024-12345")]
    [InlineData("CVE-2024-123456", "CVE-2024-123456")]
    [InlineData("CVE-2021-44228", "CVE-2021-44228")] // Log4Shell
    [InlineData("CVE-2017-5754", "CVE-2017-5754")] // Meltdown
    [InlineData("CVE-2014-0160", "CVE-2014-0160")] // Heartbleed
    public void Normalize_RealWorldCves_ReturnsExpected(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Singleton Instance

    [Fact]
    public void Instance_ReturnsSameInstance()
    {
        var instance1 = CveNormalizer.Instance;
        var instance2 = CveNormalizer.Instance;
        Assert.Same(instance1, instance2);
    }

    #endregion
}
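The contract these tests pin down is narrow: trim (string.Trim() already strips Unicode whitespace such as \u00A0 and \u2003), uppercase, and prefix bare "year-sequence" identifiers; everything else comes back uppercased rather than rejected. A sketch under those assumptions; the regex is inferred from the test data, not taken from the shipped implementation:

using System.Text.RegularExpressions;

static class CveNormalizerSketch
{
    // Bare identifier: 4-digit year, 4-or-more-digit sequence (per the tests above).
    private static readonly Regex BareId = new(@"^\d{4}-\d{4,}$");

    public static string Normalize(string? cve)
    {
        if (string.IsNullOrWhiteSpace(cve)) return string.Empty;
        var value = cve.Trim().ToUpperInvariant(); // Trim() also removes tabs, newlines, NBSP
        return BareId.IsMatch(value) ? "CVE-" + value : value;
    }
}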
@@ -0,0 +1,251 @@
// -----------------------------------------------------------------------------
// CweNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for CweNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class CweNormalizerTests
{
    private readonly CweNormalizer _normalizer = CweNormalizer.Instance;

    #region Basic Normalization

    [Fact]
    public void Normalize_SingleCwe_ReturnsUppercase()
    {
        var result = _normalizer.Normalize(["cwe-79"]);
        Assert.Equal("CWE-79", result);
    }

    [Fact]
    public void Normalize_MultipleCwes_ReturnsSortedCommaJoined()
    {
        var result = _normalizer.Normalize(["CWE-89", "CWE-79"]);
        Assert.Equal("CWE-79,CWE-89", result);
    }

    [Fact]
    public void Normalize_MixedCase_ReturnsUppercase()
    {
        var result = _normalizer.Normalize(["Cwe-79", "cwe-89", "CWE-120"]);
        Assert.Equal("CWE-79,CWE-89,CWE-120", result);
    }

    [Fact]
    public void Normalize_WithoutPrefix_AddsPrefix()
    {
        var result = _normalizer.Normalize(["79", "89"]);
        Assert.Equal("CWE-79,CWE-89", result);
    }

    [Fact]
    public void Normalize_MixedPrefixFormats_NormalizesAll()
    {
        var result = _normalizer.Normalize(["CWE-79", "89", "cwe-120"]);
        Assert.Equal("CWE-79,CWE-89,CWE-120", result);
    }

    #endregion

    #region Deduplication

    [Fact]
    public void Normalize_Duplicates_ReturnsUnique()
    {
        var result = _normalizer.Normalize(["CWE-79", "CWE-79", "cwe-79"]);
        Assert.Equal("CWE-79", result);
    }

    [Fact]
    public void Normalize_DuplicatesWithDifferentCase_ReturnsUnique()
    {
        var result = _normalizer.Normalize(["CWE-89", "cwe-89", "Cwe-89"]);
        Assert.Equal("CWE-89", result);
    }

    [Fact]
    public void Normalize_DuplicatesWithMixedFormats_ReturnsUnique()
    {
        var result = _normalizer.Normalize(["CWE-79", "79", "cwe-79"]);
        Assert.Equal("CWE-79", result);
    }

    #endregion

    #region Sorting

    [Fact]
    public void Normalize_UnsortedNumbers_ReturnsSortedNumerically()
    {
        var result = _normalizer.Normalize(["CWE-200", "CWE-79", "CWE-120", "CWE-1"]);
        Assert.Equal("CWE-1,CWE-79,CWE-120,CWE-200", result);
    }

    [Fact]
    public void Normalize_LargeNumbers_ReturnsSortedNumerically()
    {
        var result = _normalizer.Normalize(["CWE-1000", "CWE-100", "CWE-10"]);
        Assert.Equal("CWE-10,CWE-100,CWE-1000", result);
    }

    #endregion

    #region Edge Cases - Empty and Null

    [Fact]
    public void Normalize_Null_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(null);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_EmptyArray_ReturnsEmpty()
    {
        var result = _normalizer.Normalize([]);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_ArrayWithNulls_ReturnsEmpty()
    {
        var result = _normalizer.Normalize([null!, null!]);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_ArrayWithEmptyStrings_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(["", " ", string.Empty]);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_MixedValidAndEmpty_ReturnsValidOnly()
    {
        var result = _normalizer.Normalize(["CWE-79", "", null!, "CWE-89", " "]);
        Assert.Equal("CWE-79,CWE-89", result);
    }

    #endregion

    #region Edge Cases - Malformed Input

    [Fact]
    public void Normalize_InvalidFormat_FiltersOut()
    {
        var result = _normalizer.Normalize(["CWE-79", "not-a-cwe", "CWE-89"]);
        Assert.Equal("CWE-79,CWE-89", result);
    }

    [Fact]
    public void Normalize_AllInvalid_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(["invalid", "not-cwe", "random"]);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_NonNumericSuffix_FiltersOut()
    {
        var result = _normalizer.Normalize(["CWE-ABC", "CWE-79"]);
        Assert.Equal("CWE-79", result);
    }

    [Fact]
    public void Normalize_WithWhitespace_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize([" CWE-79 ", " CWE-89 "]);
        Assert.Equal("CWE-79,CWE-89", result);
    }

    #endregion

    #region Edge Cases - Unicode

    [Fact]
    public void Normalize_UnicodeWhitespace_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize(["\u00A0CWE-79\u00A0"]);
        Assert.Equal("CWE-79", result);
    }

    #endregion

    #region Determinism

    [Fact]
    public void Normalize_MultipleRuns_ReturnsSameResult()
    {
        var input = new[] { "cwe-89", "CWE-79", "120" };
        var first = _normalizer.Normalize(input);
        var second = _normalizer.Normalize(input);
        var third = _normalizer.Normalize(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void Normalize_Determinism_100Runs()
    {
        var input = new[] { "CWE-200", "cwe-79", "120", "CWE-89" };
        var expected = _normalizer.Normalize(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _normalizer.Normalize(input);
            Assert.Equal(expected, result);
        }
    }

    [Fact]
    public void Normalize_DifferentOrdering_ReturnsSameResult()
    {
        var input1 = new[] { "CWE-79", "CWE-89", "CWE-120" };
        var input2 = new[] { "CWE-120", "CWE-79", "CWE-89" };
        var input3 = new[] { "CWE-89", "CWE-120", "CWE-79" };

        var result1 = _normalizer.Normalize(input1);
        var result2 = _normalizer.Normalize(input2);
        var result3 = _normalizer.Normalize(input3);

        Assert.Equal(result1, result2);
        Assert.Equal(result2, result3);
    }

    #endregion

    #region Real-World CWE Formats

    [Theory]
    [InlineData("CWE-79", "CWE-79")] // XSS
    [InlineData("CWE-89", "CWE-89")] // SQL Injection
    [InlineData("CWE-120", "CWE-120")] // Buffer Overflow
    [InlineData("CWE-200", "CWE-200")] // Information Exposure
    [InlineData("CWE-22", "CWE-22")] // Path Traversal
    public void Normalize_RealWorldCwes_ReturnsExpected(string input, string expected)
    {
        var result = _normalizer.Normalize([input]);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Singleton Instance

    [Fact]
    public void Instance_ReturnsSameInstance()
    {
        var instance1 = CweNormalizer.Instance;
        var instance2 = CweNormalizer.Instance;
        Assert.Same(instance1, instance2);
    }

    #endregion
}
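The CWE tests combine four behaviors: trim, strip or add the CWE- prefix, filter non-numeric entries, then dedupe and sort numerically before comma-joining. A minimal sketch, assuming hypothetical naming and a SortedSet to get dedup and numeric ordering in one step; the shipped CweNormalizer may differ in details:

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;

static class CweNormalizerSketch
{
    public static string Normalize(IEnumerable<string?>? weaknesses)
    {
        if (weaknesses is null) return string.Empty;

        var ids = new SortedSet<int>(); // dedupes and sorts numerically in one step
        foreach (var raw in weaknesses)
        {
            var value = raw?.Trim();
            if (string.IsNullOrEmpty(value)) continue;
            if (value.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
                value = value["CWE-".Length..];
            // NumberStyles.None rejects signs and whitespace, filtering "ABC", "-79", etc.
            if (int.TryParse(value, NumberStyles.None, CultureInfo.InvariantCulture, out var id))
                ids.Add(id);
        }

        return string.Join(',', ids.Select(id => $"CWE-{id}"));
    }
}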
@@ -0,0 +1,449 @@
// -----------------------------------------------------------------------------
// MergeHashCalculatorTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-012
// Description: Unit tests for MergeHashCalculator - determinism and correctness
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity;
using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class MergeHashCalculatorTests
{
    private readonly MergeHashCalculator _calculator = new();

    #region Basic Hash Computation

    [Fact]
    public void ComputeMergeHash_ValidInput_ReturnsHashWithPrefix()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/lodash@4.17.21"
        };

        var result = _calculator.ComputeMergeHash(input);

        Assert.StartsWith("sha256:", result);
        Assert.Equal(71, result.Length); // "sha256:" (7) + 64 hex chars
    }

    [Fact]
    public void ComputeMergeHash_WithAllFields_ReturnsHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/lodash@4.17.21",
            VersionRange = "[1.0.0, 2.0.0)",
            Weaknesses = ["CWE-79", "CWE-89"],
            PatchLineage = "https://github.com/lodash/lodash/commit/abc1234"
        };

        var result = _calculator.ComputeMergeHash(input);

        Assert.StartsWith("sha256:", result);
    }

    [Fact]
    public void ComputeMergeHash_NullInput_ThrowsArgumentNullException()
    {
        Assert.Throws<ArgumentNullException>(() => _calculator.ComputeMergeHash((MergeHashInput)null!));
    }

    #endregion

    #region Determinism - Same Input = Same Output

    [Fact]
    public void ComputeMergeHash_SameInput_ReturnsSameHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/lodash@4.17.21",
            Weaknesses = ["CWE-79"]
        };

        var first = _calculator.ComputeMergeHash(input);
        var second = _calculator.ComputeMergeHash(input);
        var third = _calculator.ComputeMergeHash(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void ComputeMergeHash_Determinism_100Runs()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-99999",
            AffectsKey = "pkg:maven/org.apache/commons-lang3@3.12.0",
            VersionRange = ">=1.0.0,<2.0.0",
            Weaknesses = ["CWE-120", "CWE-200", "CWE-79"],
            PatchLineage = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        };

        var expected = _calculator.ComputeMergeHash(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _calculator.ComputeMergeHash(input);
            Assert.Equal(expected, result);
        }
    }

    [Fact]
    public void ComputeMergeHash_NewInstancesProduceSameHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/lodash@4.17.21"
        };

        var calc1 = new MergeHashCalculator();
        var calc2 = new MergeHashCalculator();
        var calc3 = new MergeHashCalculator();

        var hash1 = calc1.ComputeMergeHash(input);
        var hash2 = calc2.ComputeMergeHash(input);
        var hash3 = calc3.ComputeMergeHash(input);

        Assert.Equal(hash1, hash2);
        Assert.Equal(hash2, hash3);
    }

    #endregion

    #region Normalization Integration

    [Fact]
    public void ComputeMergeHash_CveNormalization_CaseInsensitive()
    {
        var input1 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:npm/test@1.0" };
        var input2 = new MergeHashInput { Cve = "cve-2024-1234", AffectsKey = "pkg:npm/test@1.0" };
        var input3 = new MergeHashInput { Cve = "Cve-2024-1234", AffectsKey = "pkg:npm/test@1.0" };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);
        var hash3 = _calculator.ComputeMergeHash(input3);

        Assert.Equal(hash1, hash2);
        Assert.Equal(hash2, hash3);
    }

    [Fact]
    public void ComputeMergeHash_PurlNormalization_TypeCaseInsensitive()
    {
        var input1 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:npm/lodash@1.0" };
        var input2 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:NPM/lodash@1.0" };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.Equal(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_CweNormalization_OrderIndependent()
    {
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = ["CWE-79", "CWE-89", "CWE-120"]
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = ["CWE-120", "CWE-79", "CWE-89"]
        };
        var input3 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = ["cwe-89", "CWE-120", "cwe-79"]
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);
        var hash3 = _calculator.ComputeMergeHash(input3);

        Assert.Equal(hash1, hash2);
        Assert.Equal(hash2, hash3);
    }

    [Fact]
    public void ComputeMergeHash_VersionRangeNormalization_EquivalentFormats()
    {
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            VersionRange = "[1.0.0, 2.0.0)"
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            VersionRange = ">=1.0.0,<2.0.0"
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.Equal(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_PatchLineageNormalization_ShaExtraction()
    {
        // Both inputs contain the same SHA in different formats
        // The normalizer extracts "abc1234567" from both
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            PatchLineage = "commit abc1234567"
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            PatchLineage = "fix abc1234567 applied"
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.Equal(hash1, hash2);
    }

    #endregion

    #region Different Inputs = Different Hashes

    [Fact]
    public void ComputeMergeHash_DifferentCve_DifferentHash()
    {
        var input1 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:npm/test@1.0" };
        var input2 = new MergeHashInput { Cve = "CVE-2024-5678", AffectsKey = "pkg:npm/test@1.0" };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.NotEqual(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_DifferentPackage_DifferentHash()
    {
        var input1 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:npm/lodash@1.0" };
        var input2 = new MergeHashInput { Cve = "CVE-2024-1234", AffectsKey = "pkg:npm/underscore@1.0" };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.NotEqual(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_DifferentVersion_DifferentHash()
    {
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            VersionRange = "<1.0.0"
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            VersionRange = "<2.0.0"
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.NotEqual(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_DifferentWeaknesses_DifferentHash()
    {
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = ["CWE-79"]
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = ["CWE-89"]
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.NotEqual(hash1, hash2);
    }

    [Fact]
    public void ComputeMergeHash_DifferentPatchLineage_DifferentHash()
    {
        // Use full SHA hashes (40 chars) that will be recognized
        var input1 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            PatchLineage = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
        };
        var input2 = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            PatchLineage = "d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5"
        };

        var hash1 = _calculator.ComputeMergeHash(input1);
        var hash2 = _calculator.ComputeMergeHash(input2);

        Assert.NotEqual(hash1, hash2);
    }

    #endregion

    #region Edge Cases - Optional Fields

    [Fact]
    public void ComputeMergeHash_NoVersionRange_ReturnsHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            VersionRange = null
        };

        var result = _calculator.ComputeMergeHash(input);
        Assert.StartsWith("sha256:", result);
    }

    [Fact]
    public void ComputeMergeHash_EmptyWeaknesses_ReturnsHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            Weaknesses = []
        };

        var result = _calculator.ComputeMergeHash(input);
        Assert.StartsWith("sha256:", result);
    }

    [Fact]
    public void ComputeMergeHash_NoPatchLineage_ReturnsHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0",
            PatchLineage = null
        };

        var result = _calculator.ComputeMergeHash(input);
        Assert.StartsWith("sha256:", result);
    }

    [Fact]
    public void ComputeMergeHash_MinimalInput_ReturnsHash()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0"
        };

        var result = _calculator.ComputeMergeHash(input);
        Assert.StartsWith("sha256:", result);
    }

    #endregion

    #region Cross-Source Deduplication Scenarios

    [Fact]
    public void ComputeMergeHash_SameCveDifferentDistros_SameHash()
    {
        // Same CVE from Debian and RHEL should have same merge hash
        // when identity components match
        var debianInput = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:deb/debian/curl@7.68.0",
            VersionRange = "<7.68.0-1",
            Weaknesses = ["CWE-120"]
        };

        var rhelInput = new MergeHashInput
        {
            Cve = "cve-2024-1234", // Different case
            AffectsKey = "pkg:deb/debian/curl@7.68.0", // Same package identity
            VersionRange = "[,7.68.0-1)", // Equivalent interval
            Weaknesses = ["cwe-120"] // Different case
        };

        var debianHash = _calculator.ComputeMergeHash(debianInput);
        var rhelHash = _calculator.ComputeMergeHash(rhelInput);

        // These should produce the same hash after normalization
        Assert.Equal(debianHash, rhelHash);
    }

    #endregion

    #region Hash Format Validation

    [Fact]
    public void ComputeMergeHash_ValidHashFormat()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0"
        };

        var result = _calculator.ComputeMergeHash(input);

        // Should be "sha256:" followed by 64 lowercase hex chars
        Assert.Matches(@"^sha256:[0-9a-f]{64}$", result);
    }

    [Fact]
    public void ComputeMergeHash_HashIsLowercase()
    {
        var input = new MergeHashInput
        {
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/test@1.0"
        };

        var result = _calculator.ComputeMergeHash(input);
        var hashPart = result["sha256:".Length..];

        Assert.Equal(hashPart.ToLowerInvariant(), hashPart);
    }

    #endregion
}
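The format tests above fix the output shape ("sha256:" plus 64 lowercase hex characters, 71 in total), but the canonical serialization that gets hashed is internal to the calculator. A sketch of the overall construction, assuming a newline-joined field order and that each field has already passed through its normalizer; both assumptions are purely for illustration, not the shipped algorithm:

using System;
using System.Security.Cryptography;
using System.Text;

static class MergeHashSketch
{
    // cve, affectsKey, versionRange, weaknesses, patchLineage: already normalized.
    public static string Compute(string cve, string affectsKey, string versionRange,
                                 string weaknesses, string patchLineage)
    {
        var canonical = string.Join('\n', cve, affectsKey, versionRange, weaknesses, patchLineage);
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        // "sha256:" + 64 lowercase hex chars = 71 chars, matching the format tests above.
        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
    }
}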
@@ -0,0 +1,457 @@
// -----------------------------------------------------------------------------
// MergeHashDeduplicationIntegrationTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-021
// Description: Integration tests validating same CVE from different connectors
//              produces identical merge hash when semantically equivalent
// -----------------------------------------------------------------------------

using System.Collections.Immutable;
using StellaOps.Concelier.Merge.Identity;
using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Merge.Tests.Identity;

/// <summary>
/// Integration tests that verify merge hash deduplication behavior
/// when the same CVE is ingested from multiple source connectors.
/// </summary>
public sealed class MergeHashDeduplicationIntegrationTests
{
    private readonly MergeHashCalculator _calculator = new();

    [Fact]
    public void SameCve_FromDebianAndRhel_WithSamePackage_ProducesSameMergeHash()
    {
        // Arrange - Debian advisory for curl vulnerability
        var debianProvenance = new AdvisoryProvenance(
            "debian", "dsa", "DSA-5678-1", DateTimeOffset.Parse("2024-02-15T00:00:00Z"));
        var debianAdvisory = new Advisory(
            "CVE-2024-1234",
            "curl - security update",
            "Buffer overflow in curl HTTP library",
            "en",
            DateTimeOffset.Parse("2024-02-10T00:00:00Z"),
            DateTimeOffset.Parse("2024-02-15T12:00:00Z"),
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-1234", "DSA-5678-1" },
            references: new[]
            {
                new AdvisoryReference("https://security-tracker.debian.org/tracker/CVE-2024-1234", "advisory", "debian", "Debian tracker", debianProvenance)
            },
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.Deb,
                    "pkg:deb/debian/curl@7.68.0",
                    "linux",
                    new[]
                    {
                        new AffectedVersionRange("semver", null, "7.68.0-1+deb10u2", null, "<7.68.0-1+deb10u2", debianProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { debianProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { debianProvenance },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-120", null, null, ImmutableArray.Create(debianProvenance))
            });

        // Arrange - RHEL advisory for the same curl vulnerability
        var rhelProvenance = new AdvisoryProvenance(
            "redhat", "rhsa", "RHSA-2024:1234", DateTimeOffset.Parse("2024-02-16T00:00:00Z"));
        var rhelAdvisory = new Advisory(
            "CVE-2024-1234",
            "Moderate: curl security update",
            "curl: buffer overflow vulnerability",
            "en",
            DateTimeOffset.Parse("2024-02-12T00:00:00Z"),
            DateTimeOffset.Parse("2024-02-16T08:00:00Z"),
            "moderate",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-1234", "RHSA-2024:1234" },
            references: new[]
            {
                new AdvisoryReference("https://access.redhat.com/errata/RHSA-2024:1234", "advisory", "redhat", "Red Hat errata", rhelProvenance)
            },
            affectedPackages: new[]
            {
                // Same logical package, just different distro versioning
                new AffectedPackage(
                    AffectedPackageTypes.Deb,
                    "pkg:deb/debian/curl@7.68.0",
                    "linux",
                    new[]
                    {
                        new AffectedVersionRange("semver", null, "7.68.0-1+deb10u2", null, "<7.68.0-1+deb10u2", rhelProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { rhelProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { rhelProvenance },
            cwes: new[]
            {
                // Same CWE but lowercase - should normalize
                new AdvisoryWeakness("cwe", "cwe-120", null, null, ImmutableArray.Create(rhelProvenance))
            });

        // Act
        var debianHash = _calculator.ComputeMergeHash(debianAdvisory);
        var rhelHash = _calculator.ComputeMergeHash(rhelAdvisory);

        // Assert - Same CVE, same package, same version range, same CWE => same hash
        Assert.Equal(debianHash, rhelHash);
        Assert.StartsWith("sha256:", debianHash);
    }

    [Fact]
    public void SameCve_FromNvdAndGhsa_WithDifferentPackages_ProducesDifferentMergeHash()
    {
        // Arrange - NVD advisory affecting lodash
        var nvdProvenance = new AdvisoryProvenance(
            "nvd", "cve", "CVE-2024-5678", DateTimeOffset.Parse("2024-03-01T00:00:00Z"));
        var nvdAdvisory = new Advisory(
            "CVE-2024-5678",
            "Prototype pollution in lodash",
            "lodash before 4.17.21 is vulnerable to prototype pollution",
            "en",
            DateTimeOffset.Parse("2024-02-28T00:00:00Z"),
            DateTimeOffset.Parse("2024-03-01T00:00:00Z"),
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-5678" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/lodash@4.17.0",
                    null,
                    new[]
                    {
                        new AffectedVersionRange("semver", "0", "4.17.21", null, "<4.17.21", nvdProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { nvdProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { nvdProvenance },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-1321", null, null, ImmutableArray.Create(nvdProvenance))
            });

        // Arrange - Same CVE but for underscore (related but different package)
        var ghsaProvenance = new AdvisoryProvenance(
            "ghsa", "advisory", "GHSA-xyz-abc-123", DateTimeOffset.Parse("2024-03-02T00:00:00Z"));
        var ghsaAdvisory = new Advisory(
            "CVE-2024-5678",
            "Prototype pollution in underscore",
            "underscore before 1.13.6 is vulnerable",
            "en",
            DateTimeOffset.Parse("2024-03-01T00:00:00Z"),
            DateTimeOffset.Parse("2024-03-02T00:00:00Z"),
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-5678", "GHSA-xyz-abc-123" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/underscore@1.13.0",
                    null,
                    new[]
                    {
                        new AffectedVersionRange("semver", "0", "1.13.6", null, "<1.13.6", ghsaProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { ghsaProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { ghsaProvenance },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-1321", null, null, ImmutableArray.Create(ghsaProvenance))
            });

        // Act
        var nvdHash = _calculator.ComputeMergeHash(nvdAdvisory);
        var ghsaHash = _calculator.ComputeMergeHash(ghsaAdvisory);

        // Assert - Same CVE but different packages => different hash
        Assert.NotEqual(nvdHash, ghsaHash);
    }

    [Fact]
    public void SameCve_WithCaseVariations_ProducesSameMergeHash()
    {
        // Arrange - Advisory with uppercase identifiers
        var upperProvenance = new AdvisoryProvenance(
            "nvd", "cve", "CVE-2024-9999", DateTimeOffset.UtcNow);
        var upperAdvisory = new Advisory(
            "CVE-2024-9999",
            "Test vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-9999" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:NPM/@angular/CORE@14.0.0",
                    null,
                    new[]
                    {
                        new AffectedVersionRange("semver", null, "14.2.0", null, "<14.2.0", upperProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { upperProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { upperProvenance },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-79", null, null, ImmutableArray.Create(upperProvenance))
            });

        // Arrange - Same advisory with lowercase identifiers
        var lowerProvenance = new AdvisoryProvenance(
            "osv", "advisory", "cve-2024-9999", DateTimeOffset.UtcNow);
        var lowerAdvisory = new Advisory(
            "cve-2024-9999",
            "Test vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "high",
            exploitKnown: false,
            aliases: new[] { "cve-2024-9999" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/@angular/core@14.0.0",
                    null,
                    new[]
                    {
                        new AffectedVersionRange("semver", null, "14.2.0", null, "<14.2.0", lowerProvenance)
                    },
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { lowerProvenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { lowerProvenance },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "cwe-79", null, null, ImmutableArray.Create(lowerProvenance))
            });

        // Act
        var upperHash = _calculator.ComputeMergeHash(upperAdvisory);
        var lowerHash = _calculator.ComputeMergeHash(lowerAdvisory);

        // Assert - Case normalization produces identical hash
        Assert.Equal(upperHash, lowerHash);
    }

    [Fact]
    public void SameCve_WithDifferentCweSet_ProducesDifferentMergeHash()
    {
        // Arrange - Advisory with one CWE
        var prov1 = new AdvisoryProvenance("nvd", "cve", "CVE-2024-1111", DateTimeOffset.UtcNow);
        var advisory1 = new Advisory(
            "CVE-2024-1111",
            "Test vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-1111" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/test@1.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { prov1 })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { prov1 },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-79", null, null, ImmutableArray.Create(prov1))
            });

        // Arrange - Same CVE but with additional CWEs
        var prov2 = new AdvisoryProvenance("ghsa", "advisory", "CVE-2024-1111", DateTimeOffset.UtcNow);
        var advisory2 = new Advisory(
            "CVE-2024-1111",
            "Test vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-1111" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/test@1.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { prov2 })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { prov2 },
            cwes: new[]
            {
                new AdvisoryWeakness("cwe", "CWE-79", null, null, ImmutableArray.Create(prov2)),
                new AdvisoryWeakness("cwe", "CWE-89", null, null, ImmutableArray.Create(prov2))
            });

        // Act
        var hash1 = _calculator.ComputeMergeHash(advisory1);
        var hash2 = _calculator.ComputeMergeHash(advisory2);

        // Assert - Different CWE sets produce different hashes
        Assert.NotEqual(hash1, hash2);
    }

    [Fact]
    public void MultiplePackageAdvisory_ComputesHashFromFirstPackage()
    {
        // Arrange - Advisory affecting multiple packages
        var provenance = new AdvisoryProvenance(
            "osv", "advisory", "CVE-2024-MULTI", DateTimeOffset.UtcNow);
        var multiPackageAdvisory = new Advisory(
            "CVE-2024-MULTI",
            "Multi-package vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "critical",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-MULTI" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/first-package@1.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { provenance }),
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/second-package@2.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { provenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        // Arrange - Advisory with only the first package
        var singlePackageAdvisory = new Advisory(
            "CVE-2024-MULTI",
            "Single package vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "critical",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-MULTI" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/first-package@1.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { provenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        // Act
        var multiHash = _calculator.ComputeMergeHash(multiPackageAdvisory);
        var singleHash = _calculator.ComputeMergeHash(singlePackageAdvisory);

        // Assert - Both use first package, so hashes should match
        Assert.Equal(multiHash, singleHash);
    }

    [Fact]
    public void MergeHash_SpecificPackage_ComputesDifferentHashPerPackage()
    {
        // Arrange
        var provenance = new AdvisoryProvenance(
            "osv", "advisory", "CVE-2024-PERPACK", DateTimeOffset.UtcNow);
        var multiPackageAdvisory = new Advisory(
            "CVE-2024-PERPACK",
            "Multi-package vulnerability",
            null,
            "en",
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow,
            "critical",
            exploitKnown: false,
            aliases: new[] { "CVE-2024-PERPACK" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[]
            {
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/package-a@1.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { provenance }),
                new AffectedPackage(
                    AffectedPackageTypes.SemVer,
                    "pkg:npm/package-b@2.0.0",
                    null,
                    Array.Empty<AffectedVersionRange>(),
                    Array.Empty<AffectedPackageStatus>(),
                    new[] { provenance })
            },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        // Act - Compute hash for each affected package
        var hashA = _calculator.ComputeMergeHash(multiPackageAdvisory, multiPackageAdvisory.AffectedPackages[0]);
        var hashB = _calculator.ComputeMergeHash(multiPackageAdvisory, multiPackageAdvisory.AffectedPackages[1]);

        // Assert - Different packages produce different hashes
        Assert.NotEqual(hashA, hashB);
        Assert.StartsWith("sha256:", hashA);
        Assert.StartsWith("sha256:", hashB);
    }
}
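The last two tests distinguish the advisory-level overload, which appears to key off the first affected package, from the per-package overload. A small usage sketch mirroring those call sites; the helper name is hypothetical, and only the two ComputeMergeHash signatures come from the tests:

using System.Linq;
using StellaOps.Concelier.Merge.Identity;
using StellaOps.Concelier.Models;

static class MergeHashUsageSketch
{
    // One hash per affected package, as the per-package overload supports.
    public static string[] HashEachPackage(MergeHashCalculator calculator, Advisory advisory) =>
        advisory.AffectedPackages
            .Select(package => calculator.ComputeMergeHash(advisory, package))
            .ToArray();
}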
@@ -0,0 +1,429 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MergeHashFuzzingTests.cs
|
||||
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
|
||||
// Task: MHASH-8200-017
|
||||
// Description: Fuzzing tests for malformed version ranges and unusual PURLs
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Concelier.Merge.Identity;
|
||||
using StellaOps.Concelier.Merge.Identity.Normalizers;
|
||||
|
||||
namespace StellaOps.Concelier.Merge.Tests.Identity;
|
||||
|
||||
public sealed class MergeHashFuzzingTests
|
||||
{
|
||||
private readonly MergeHashCalculator _calculator = new();
|
||||
private readonly Random _random = new(42); // Fixed seed for reproducibility
|
||||
|
||||
private const int FuzzIterations = 1000;
|
||||
|
||||
#region PURL Fuzzing
|
||||
|
||||
[Fact]
|
||||
[Trait("Category", "Fuzzing")]
|
||||
public void PurlNormalizer_RandomInputs_DoesNotThrow()
|
||||
{
|
||||
var normalizer = PurlNormalizer.Instance;
|
||||
|
||||
for (var i = 0; i < FuzzIterations; i++)
|
||||
{
|
||||
var input = GenerateRandomPurl();
|
||||
var exception = Record.Exception(() => normalizer.Normalize(input));
|
||||
Assert.Null(exception);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[Trait("Category", "Fuzzing")]
|
||||
[InlineData("pkg:")]
|
||||
[InlineData("pkg:npm")]
|
||||
[InlineData("pkg:npm/")]
|
||||
[InlineData("pkg:npm//")]
|
||||
[InlineData("pkg:npm/@/")]
|
||||
[InlineData("pkg:npm/@scope/")]
|
||||
[InlineData("pkg:npm/pkg@")]
|
||||
[InlineData("pkg:npm/pkg@version?")]
|
||||
[InlineData("pkg:npm/pkg@version?qualifier")]
|
||||
[InlineData("pkg:npm/pkg@version?key=")]
|
||||
[InlineData("pkg:npm/pkg@version?=value")]
|
||||
[InlineData("pkg:npm/pkg#")]
|
||||
[InlineData("pkg:npm/pkg#/")]
|
||||
[InlineData("pkg:///")]
|
||||
[InlineData("pkg:type/ns/name@v?q=v#sp")]
|
||||
[InlineData("pkg:UNKNOWN/package@1.0.0")]
|
||||
public void PurlNormalizer_MalformedInputs_DoesNotThrow(string input)
|
||||
{
|
||||
var normalizer = PurlNormalizer.Instance;
|
||||
var exception = Record.Exception(() => normalizer.Normalize(input));
|
||||
Assert.Null(exception);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[Trait("Category", "Fuzzing")]
|
||||
[InlineData("pkg:npm/\0package@1.0.0")]
|
||||
[InlineData("pkg:npm/package\u0000@1.0.0")]
|
||||
[InlineData("pkg:npm/package@1.0.0\t")]
|
||||
[InlineData("pkg:npm/package@1.0.0\n")]
|
||||
[InlineData("pkg:npm/package@1.0.0\r")]
|
||||
[InlineData("pkg:npm/päckage@1.0.0")]
|
||||
[InlineData("pkg:npm/包裹@1.0.0")]
|
||||
[InlineData("pkg:npm/📦@1.0.0")]
|
||||
public void PurlNormalizer_SpecialCharacters_DoesNotThrow(string input)
|
||||
{
|
||||
var normalizer = PurlNormalizer.Instance;
|
||||
var exception = Record.Exception(() => normalizer.Normalize(input));
|
||||
Assert.Null(exception);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Version Range Fuzzing
|
||||
|
||||
[Fact]
|
||||
[Trait("Category", "Fuzzing")]
|
||||
public void VersionRangeNormalizer_RandomInputs_DoesNotThrow()
|
||||
{
|
||||
var normalizer = VersionRangeNormalizer.Instance;
|
||||
|
||||
for (var i = 0; i < FuzzIterations; i++)
|
||||
{
|
||||
var input = GenerateRandomVersionRange();
|
||||
var exception = Record.Exception(() => normalizer.Normalize(input));
|
||||
Assert.Null(exception);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[Trait("Category", "Fuzzing")]
|
||||
[InlineData("[")]
|
||||
[InlineData("(")]
|
||||
[InlineData("]")]
|
||||
[InlineData(")")]
|
||||
[InlineData("[,")]
|
||||
[InlineData(",]")]
|
||||
[InlineData("[,]")]
|
||||
[InlineData("(,)")]
|
||||
[InlineData("[1.0")]
|
||||
[InlineData("1.0]")]
|
||||
[InlineData("[1.0,")]
|
||||
[InlineData(",1.0]")]
|
||||
[InlineData(">=")]
|
||||
[InlineData("<=")]
|
||||
[InlineData(">")]
|
||||
[InlineData("<")]
|
||||
[InlineData("=")]
|
||||
[InlineData("!=")]
|
||||
[InlineData("~")]
|
||||
[InlineData("^")]
|
||||
    [InlineData(">=<")]
    [InlineData("<=>")]
    [InlineData(">=1.0<2.0")]
    [InlineData("1.0-2.0")]
    [InlineData("1.0..2.0")]
    [InlineData("v1.0.0")]
    [InlineData("version1")]
    public void VersionRangeNormalizer_MalformedInputs_DoesNotThrow(string input)
    {
        var normalizer = VersionRangeNormalizer.Instance;
        var exception = Record.Exception(() => normalizer.Normalize(input));
        Assert.Null(exception);
    }

    #endregion

    #region CPE Fuzzing

    [Fact]
    [Trait("Category", "Fuzzing")]
    public void CpeNormalizer_RandomInputs_DoesNotThrow()
    {
        var normalizer = CpeNormalizer.Instance;

        for (var i = 0; i < FuzzIterations; i++)
        {
            var input = GenerateRandomCpe();
            var exception = Record.Exception(() => normalizer.Normalize(input));
            Assert.Null(exception);
        }
    }

    [Theory]
    [Trait("Category", "Fuzzing")]
    [InlineData("cpe:")]
    [InlineData("cpe:/")]
    [InlineData("cpe://")]
    [InlineData("cpe:2.3")]
    [InlineData("cpe:2.3:")]
    [InlineData("cpe:2.3:a")]
    [InlineData("cpe:2.3:a:")]
    [InlineData("cpe:2.3:x:vendor:product:1.0:*:*:*:*:*:*:*")]
    [InlineData("cpe:1.0:a:vendor:product:1.0")]
    [InlineData("cpe:3.0:a:vendor:product:1.0")]
    [InlineData("cpe:2.3:a:::::::::")]
    [InlineData("cpe:2.3:a:vendor:product:::::::::")]
    public void CpeNormalizer_MalformedInputs_DoesNotThrow(string input)
    {
        var normalizer = CpeNormalizer.Instance;
        var exception = Record.Exception(() => normalizer.Normalize(input));
        Assert.Null(exception);
    }

    #endregion

    #region CVE Fuzzing

    [Theory]
    [Trait("Category", "Fuzzing")]
    [InlineData("CVE")]
    [InlineData("CVE-")]
    [InlineData("CVE-2024")]
    [InlineData("CVE-2024-")]
    [InlineData("CVE-2024-1")]
    [InlineData("CVE-2024-12")]
    [InlineData("CVE-2024-123")]
    [InlineData("CVE-24-1234")]
    [InlineData("CVE-202-1234")]
    [InlineData("CVE-20245-1234")]
    [InlineData("CVE2024-1234")]
    [InlineData("CVE_2024_1234")]
    [InlineData("cve:2024:1234")]
    public void CveNormalizer_MalformedInputs_DoesNotThrow(string input)
    {
        var normalizer = CveNormalizer.Instance;
        var exception = Record.Exception(() => normalizer.Normalize(input));
        Assert.Null(exception);
    }

    #endregion

    #region CWE Fuzzing

    [Theory]
    [Trait("Category", "Fuzzing")]
    [InlineData("CWE")]
    [InlineData("CWE-")]
    [InlineData("CWE-abc")]
    [InlineData("CWE--79")]
    [InlineData("CWE79")]
    [InlineData("cwe79")]
    [InlineData("79CWE")]
    [InlineData("-79")]
    public void CweNormalizer_MalformedInputs_DoesNotThrow(string input)
    {
        var normalizer = CweNormalizer.Instance;
        var exception = Record.Exception(() => normalizer.Normalize([input]));
        Assert.Null(exception);
    }

    [Fact]
    [Trait("Category", "Fuzzing")]
    public void CweNormalizer_LargeLists_DoesNotThrow()
    {
        var normalizer = CweNormalizer.Instance;

        // Test with large list of CWEs
        var largeCweList = Enumerable.Range(1, 1000)
            .Select(i => $"CWE-{i}")
            .ToList();

        var exception = Record.Exception(() => normalizer.Normalize(largeCweList));
        Assert.Null(exception);
    }

    #endregion

    #region Patch Lineage Fuzzing

    [Theory]
    [Trait("Category", "Fuzzing")]
    [InlineData("abc")]
    [InlineData("abc123")]
    [InlineData("abc12")]
    [InlineData("12345")]
    [InlineData("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG")]
    [InlineData("zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz")]
    [InlineData("https://")]
    [InlineData("https://github.com")]
    [InlineData("https://github.com/")]
    [InlineData("https://github.com/owner")]
    [InlineData("https://github.com/owner/repo")]
    [InlineData("https://github.com/owner/repo/")]
    [InlineData("https://github.com/owner/repo/commit")]
    [InlineData("https://github.com/owner/repo/commit/")]
    [InlineData("PATCH")]
    [InlineData("PATCH-")]
    [InlineData("PATCH-abc")]
    [InlineData("patch12345")]
    public void PatchLineageNormalizer_MalformedInputs_DoesNotThrow(string input)
    {
        var normalizer = PatchLineageNormalizer.Instance;
        var exception = Record.Exception(() => normalizer.Normalize(input));
        Assert.Null(exception);
    }

    #endregion

    #region Full Hash Calculator Fuzzing

    [Fact]
    [Trait("Category", "Fuzzing")]
    public void MergeHashCalculator_RandomInputs_AlwaysProducesValidHash()
    {
        for (var i = 0; i < FuzzIterations; i++)
        {
            var input = GenerateRandomMergeHashInput();

            var hash = _calculator.ComputeMergeHash(input);

            Assert.NotNull(hash);
            Assert.StartsWith("sha256:", hash);
            Assert.Equal(71, hash.Length); // sha256: + 64 hex chars
            Assert.Matches(@"^sha256:[0-9a-f]{64}$", hash);
        }
    }

    [Fact]
    [Trait("Category", "Fuzzing")]
    public void MergeHashCalculator_RandomInputs_IsDeterministic()
    {
        var inputs = new List<MergeHashInput>();
        for (var i = 0; i < 100; i++)
        {
            inputs.Add(GenerateRandomMergeHashInput());
        }

        // First pass
        var firstHashes = inputs.Select(i => _calculator.ComputeMergeHash(i)).ToList();

        // Second pass
        var secondHashes = inputs.Select(i => _calculator.ComputeMergeHash(i)).ToList();

        // All should match
        for (var i = 0; i < inputs.Count; i++)
        {
            Assert.Equal(firstHashes[i], secondHashes[i]);
        }
    }

    #endregion

    #region Random Input Generators
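
    // Note: the generators below rely on the fixture's _random field and the
    // FuzzIterations constant declared earlier in this file; widening either one
    // broadens the fuzz corpus without touching the tests themselves.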

    private string GenerateRandomPurl()
    {
        var types = new[] { "npm", "maven", "pypi", "nuget", "gem", "golang", "deb", "rpm", "apk", "cargo" };
        var type = types[_random.Next(types.Length)];

        var hasNamespace = _random.Next(2) == 1;
        var hasVersion = _random.Next(2) == 1;
        var hasQualifiers = _random.Next(2) == 1;
        var hasSubpath = _random.Next(2) == 1;

        var sb = new System.Text.StringBuilder();
        sb.Append("pkg:");
        sb.Append(type);
        sb.Append('/');

        if (hasNamespace)
        {
            sb.Append(GenerateRandomString(5));
            sb.Append('/');
        }

        sb.Append(GenerateRandomString(8));

        if (hasVersion)
        {
            sb.Append('@');
            sb.Append(GenerateRandomVersion());
        }

        if (hasQualifiers)
        {
            sb.Append('?');
            sb.Append(GenerateRandomString(3));
            sb.Append('=');
            sb.Append(GenerateRandomString(5));
        }

        if (hasSubpath)
        {
            sb.Append('#');
            sb.Append(GenerateRandomString(10));
        }

        return sb.ToString();
    }

    private string GenerateRandomVersionRange()
    {
        var patterns = new Func<string>[]
        {
            () => $"[{GenerateRandomVersion()}, {GenerateRandomVersion()})",
            () => $"({GenerateRandomVersion()}, {GenerateRandomVersion()}]",
            () => $">={GenerateRandomVersion()}",
            () => $"<{GenerateRandomVersion()}",
            () => $"={GenerateRandomVersion()}",
            () => $">={GenerateRandomVersion()},<{GenerateRandomVersion()}",
            () => $"fixed:{GenerateRandomVersion()}",
            () => "*",
            () => GenerateRandomVersion(),
            () => GenerateRandomString(10)
        };

        return patterns[_random.Next(patterns.Length)]();
    }

    private string GenerateRandomCpe()
    {
        if (_random.Next(2) == 0)
        {
            // CPE 2.3
            var part = new[] { "a", "o", "h" }[_random.Next(3)];
            return $"cpe:2.3:{part}:{GenerateRandomString(6)}:{GenerateRandomString(8)}:{GenerateRandomVersion()}:*:*:*:*:*:*:*";
        }
        else
        {
            // CPE 2.2
            var part = new[] { "a", "o", "h" }[_random.Next(3)];
            return $"cpe:/{part}:{GenerateRandomString(6)}:{GenerateRandomString(8)}:{GenerateRandomVersion()}";
        }
    }

    private MergeHashInput GenerateRandomMergeHashInput()
    {
        return new MergeHashInput
        {
            Cve = $"CVE-{2020 + _random.Next(5)}-{_random.Next(10000, 99999)}",
            AffectsKey = GenerateRandomPurl(),
            VersionRange = _random.Next(3) > 0 ? GenerateRandomVersionRange() : null,
            Weaknesses = Enumerable.Range(0, _random.Next(0, 5))
                .Select(_ => $"CWE-{_random.Next(1, 1000)}")
                .ToList(),
            PatchLineage = _random.Next(3) > 0 ? GenerateRandomHex(40) : null
        };
    }

    private string GenerateRandomVersion()
    {
        return $"{_random.Next(0, 20)}.{_random.Next(0, 50)}.{_random.Next(0, 100)}";
    }

    private string GenerateRandomString(int length)
    {
        const string chars = "abcdefghijklmnopqrstuvwxyz0123456789-_";
        return new string(Enumerable.Range(0, length)
            .Select(_ => chars[_random.Next(chars.Length)])
            .ToArray());
    }

    private string GenerateRandomHex(int length)
    {
        const string hexChars = "0123456789abcdef";
        return new string(Enumerable.Range(0, length)
            .Select(_ => hexChars[_random.Next(hexChars.Length)])
            .ToArray());
    }

    #endregion
}
@@ -0,0 +1,313 @@
// -----------------------------------------------------------------------------
// MergeHashGoldenCorpusTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-016
// Description: Golden corpus tests for merge hash validation
// -----------------------------------------------------------------------------

using System.Text.Json;
using StellaOps.Concelier.Merge.Identity;

namespace StellaOps.Concelier.Merge.Tests.Identity;

/// <summary>
/// Tests that validate merge hash computations against golden corpus files.
/// Each corpus file contains pairs of advisory sources that should produce
/// the same or different merge hashes based on identity normalization.
/// </summary>
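// A corpus file is assumed to look roughly like this (snake_case keys, matching
// the JsonOptions below); the actual fixture content lives under Fixtures/Golden:
// {
//   "corpus": "dedup-debian-rhel-cve-2024",
//   "items": [{
//     "id": "item-001",
//     "sources": [{ "source": "debian", "cve": "CVE-2024-1234",
//                   "affects_key": "pkg:deb/debian/curl@7.68.0-1" }],
//     "expected": { "same_merge_hash": true, "rationale": "..." }
//   }]
// }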
public sealed class MergeHashGoldenCorpusTests
{
    private readonly MergeHashCalculator _calculator = new();
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        PropertyNameCaseInsensitive = true
    };

    private static string GetCorpusPath(string corpusName)
    {
        // Try multiple paths for test execution context
        var paths = new[]
        {
            Path.Combine("Fixtures", "Golden", corpusName),
            Path.Combine("..", "..", "..", "Fixtures", "Golden", corpusName),
            Path.Combine(AppContext.BaseDirectory, "Fixtures", "Golden", corpusName)
        };

        foreach (var path in paths)
        {
            if (File.Exists(path))
            {
                return path;
            }
        }

        throw new FileNotFoundException($"Corpus file not found: {corpusName}");
    }

    #region Debian-RHEL Corpus Tests

    [Fact]
    public void DeduplicateDebianRhelCorpus_AllItemsValidated()
    {
        var corpusPath = GetCorpusPath("dedup-debian-rhel-cve-2024.json");
        var corpus = LoadCorpus(corpusPath);

        Assert.NotNull(corpus);
        Assert.NotEmpty(corpus.Items);

        foreach (var item in corpus.Items)
        {
            ValidateCorpusItem(item);
        }
    }

    [Fact]
    public void DeduplicateDebianRhelCorpus_SameMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-debian-rhel-cve-2024.json");
        var corpus = LoadCorpus(corpusPath);

        var sameHashItems = corpus.Items.Where(i => i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(sameHashItems);

        foreach (var item in sameHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count == 1, $"Item {item.Id}: Expected same merge hash but got {hashes.Count} distinct values: [{string.Join(", ", hashes)}]. Rationale: {item.Expected.Rationale}");
        }
    }

    [Fact]
    public void DeduplicateDebianRhelCorpus_DifferentMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-debian-rhel-cve-2024.json");
        var corpus = LoadCorpus(corpusPath);

        var differentHashItems = corpus.Items.Where(i => !i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(differentHashItems);

        foreach (var item in differentHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count > 1, $"Item {item.Id}: Expected different merge hashes but got same. Rationale: {item.Expected.Rationale}");
        }
    }

    #endregion

    #region Backport Variants Corpus Tests

    [Fact]
    public void BackportVariantsCorpus_AllItemsValidated()
    {
        var corpusPath = GetCorpusPath("dedup-backport-variants.json");
        var corpus = LoadCorpus(corpusPath);

        Assert.NotNull(corpus);
        Assert.NotEmpty(corpus.Items);

        foreach (var item in corpus.Items)
        {
            ValidateCorpusItem(item);
        }
    }

    [Fact]
    public void BackportVariantsCorpus_SameMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-backport-variants.json");
        var corpus = LoadCorpus(corpusPath);

        var sameHashItems = corpus.Items.Where(i => i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(sameHashItems);

        foreach (var item in sameHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count == 1, $"Item {item.Id}: Expected same merge hash but got {hashes.Count} distinct values: [{string.Join(", ", hashes)}]. Rationale: {item.Expected.Rationale}");
        }
    }

    [Fact]
    public void BackportVariantsCorpus_DifferentMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-backport-variants.json");
        var corpus = LoadCorpus(corpusPath);

        var differentHashItems = corpus.Items.Where(i => !i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(differentHashItems);

        foreach (var item in differentHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count > 1, $"Item {item.Id}: Expected different merge hashes but got same. Rationale: {item.Expected.Rationale}");
        }
    }

    #endregion

    #region Alias Collision Corpus Tests

    [Fact]
    public void AliasCollisionCorpus_AllItemsValidated()
    {
        var corpusPath = GetCorpusPath("dedup-alias-collision.json");
        var corpus = LoadCorpus(corpusPath);

        Assert.NotNull(corpus);
        Assert.NotEmpty(corpus.Items);

        foreach (var item in corpus.Items)
        {
            ValidateCorpusItem(item);
        }
    }

    [Fact]
    public void AliasCollisionCorpus_SameMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-alias-collision.json");
        var corpus = LoadCorpus(corpusPath);

        var sameHashItems = corpus.Items.Where(i => i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(sameHashItems);

        foreach (var item in sameHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count == 1, $"Item {item.Id}: Expected same merge hash but got {hashes.Count} distinct values: [{string.Join(", ", hashes)}]. Rationale: {item.Expected.Rationale}");
        }
    }

    [Fact]
    public void AliasCollisionCorpus_DifferentMergeHashPairs()
    {
        var corpusPath = GetCorpusPath("dedup-alias-collision.json");
        var corpus = LoadCorpus(corpusPath);

        var differentHashItems = corpus.Items.Where(i => !i.Expected.SameMergeHash).ToList();
        Assert.NotEmpty(differentHashItems);

        foreach (var item in differentHashItems)
        {
            Assert.True(item.Sources.Count >= 2, $"Item {item.Id} needs at least 2 sources");

            var hashes = item.Sources
                .Select(s => ComputeHashFromSource(s))
                .Distinct()
                .ToList();

            Assert.True(hashes.Count > 1, $"Item {item.Id}: Expected different merge hashes but got same. Rationale: {item.Expected.Rationale}");
        }
    }

    #endregion

    #region Helper Methods

    private GoldenCorpus LoadCorpus(string path)
    {
        var json = File.ReadAllText(path);
        return JsonSerializer.Deserialize<GoldenCorpus>(json, JsonOptions)
            ?? throw new InvalidOperationException($"Failed to deserialize corpus: {path}");
    }

    private void ValidateCorpusItem(CorpusItem item)
    {
        Assert.False(string.IsNullOrEmpty(item.Id), "Corpus item must have an ID");
        Assert.NotEmpty(item.Sources);
        Assert.NotNull(item.Expected);

        // Validate each source produces a valid hash
        foreach (var source in item.Sources)
        {
            var hash = ComputeHashFromSource(source);
            Assert.StartsWith("sha256:", hash);
            Assert.Equal(71, hash.Length); // sha256: + 64 hex chars
        }
    }

    private string ComputeHashFromSource(CorpusSource source)
    {
        var input = new MergeHashInput
        {
            Cve = source.Cve,
            AffectsKey = source.AffectsKey,
            VersionRange = source.VersionRange,
            Weaknesses = source.Weaknesses ?? [],
            PatchLineage = source.PatchLineage
        };

        return _calculator.ComputeMergeHash(input);
    }

    #endregion

    #region Corpus Models

    private sealed record GoldenCorpus
    {
        public string Corpus { get; init; } = string.Empty;
        public string Version { get; init; } = string.Empty;
        public string Description { get; init; } = string.Empty;
        public IReadOnlyList<CorpusItem> Items { get; init; } = [];
    }

    private sealed record CorpusItem
    {
        public string Id { get; init; } = string.Empty;
        public string Description { get; init; } = string.Empty;
        public IReadOnlyList<CorpusSource> Sources { get; init; } = [];
        public CorpusExpected Expected { get; init; } = new();
    }

    private sealed record CorpusSource
    {
        public string Source { get; init; } = string.Empty;
        public string AdvisoryId { get; init; } = string.Empty;
        public string Cve { get; init; } = string.Empty;
        public string AffectsKey { get; init; } = string.Empty;
        public string? VersionRange { get; init; }
        public IReadOnlyList<string>? Weaknesses { get; init; }
        public string? PatchLineage { get; init; }
    }

    private sealed record CorpusExpected
    {
        public bool SameMergeHash { get; init; }
        public string Rationale { get; init; } = string.Empty;
    }

    #endregion
}
@@ -0,0 +1,281 @@
// -----------------------------------------------------------------------------
// PatchLineageNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for PatchLineageNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class PatchLineageNormalizerTests
{
    private readonly PatchLineageNormalizer _normalizer = PatchLineageNormalizer.Instance;

    #region Full SHA Extraction

    [Fact]
    public void Normalize_FullSha_ReturnsLowercase()
    {
        var sha = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2";
        var result = _normalizer.Normalize(sha);
        Assert.Equal(sha.ToLowerInvariant(), result);
    }

    [Fact]
    public void Normalize_FullShaUppercase_ReturnsLowercase()
    {
        var sha = "A1B2C3D4E5F6A1B2C3D4E5F6A1B2C3D4E5F6A1B2";
        var result = _normalizer.Normalize(sha);
        Assert.Equal(sha.ToLowerInvariant(), result);
    }

    [Fact]
    public void Normalize_FullShaMixedCase_ReturnsLowercase()
    {
        var sha = "A1b2C3d4E5f6A1b2C3d4E5f6A1b2C3d4E5f6A1b2";
        var result = _normalizer.Normalize(sha);
        Assert.Equal(sha.ToLowerInvariant(), result);
    }

    #endregion

    #region Abbreviated SHA Extraction

    [Fact]
    public void Normalize_AbbrevShaWithContext_ExtractsSha()
    {
        var result = _normalizer.Normalize("fix: abc1234 addresses the issue");
        Assert.Equal("abc1234", result);
    }

    [Fact]
    public void Normalize_AbbrevShaWithCommitKeyword_ExtractsSha()
    {
        var result = _normalizer.Normalize("commit abc1234567");
        Assert.Equal("abc1234567", result);
    }

    [Fact]
    public void Normalize_AbbrevShaSeven_ExtractsSha()
    {
        var result = _normalizer.Normalize("patch: fix in abc1234");
        Assert.Equal("abc1234", result);
    }

    [Fact]
    public void Normalize_AbbrevShaTwelve_ExtractsSha()
    {
        var result = _normalizer.Normalize("backport of abc123456789");
        Assert.Equal("abc123456789", result);
    }

    #endregion

    #region GitHub/GitLab URL Extraction
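
    // The URL tests below assume the normalizer extracts the hex segment following
    // "/commit/" in forge URLs, accepting full 40-char and abbreviated (7+) SHAs alike.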

    [Fact]
    public void Normalize_GitHubCommitUrl_ExtractsSha()
    {
        var url = "https://github.com/owner/repo/commit/abc123def456abc123def456abc123def456abc1";
        var result = _normalizer.Normalize(url);
        Assert.Equal("abc123def456abc123def456abc123def456abc1", result);
    }

    [Fact]
    public void Normalize_GitLabCommitUrl_ExtractsSha()
    {
        var url = "https://gitlab.com/owner/repo/commit/abc123def456";
        var result = _normalizer.Normalize(url);
        Assert.Equal("abc123def456", result);
    }

    [Fact]
    public void Normalize_GitHubUrlAbbrevSha_ExtractsSha()
    {
        var url = "https://github.com/apache/log4j/commit/abc1234";
        var result = _normalizer.Normalize(url);
        Assert.Equal("abc1234", result);
    }

    #endregion

    #region Patch ID Extraction

    [Fact]
    public void Normalize_PatchIdUppercase_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("PATCH-12345");
        Assert.Equal("PATCH-12345", result);
    }

    [Fact]
    public void Normalize_PatchIdLowercase_ReturnsUppercase()
    {
        var result = _normalizer.Normalize("patch-12345");
        Assert.Equal("PATCH-12345", result);
    }

    [Fact]
    public void Normalize_PatchIdInText_ExtractsPatchId()
    {
        var result = _normalizer.Normalize("Applied PATCH-67890 to fix issue");
        Assert.Equal("PATCH-67890", result);
    }

    #endregion

    #region Edge Cases - Empty and Null

    [Fact]
    public void Normalize_Null_ReturnsNull()
    {
        var result = _normalizer.Normalize(null);
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_EmptyString_ReturnsNull()
    {
        var result = _normalizer.Normalize(string.Empty);
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_WhitespaceOnly_ReturnsNull()
    {
        var result = _normalizer.Normalize(" ");
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_WithWhitespace_ReturnsTrimmed()
    {
        var sha = " a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2 ";
        var result = _normalizer.Normalize(sha);
        Assert.Equal("a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2", result);
    }

    #endregion

    #region Edge Cases - Unrecognized Patterns

    [Fact]
    public void Normalize_NoRecognizablePattern_ReturnsNull()
    {
        var result = _normalizer.Normalize("some random text without sha or patch id");
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_ShortHex_ReturnsNull()
    {
        // Less than 7 hex chars shouldn't match abbreviated SHA
        var result = _normalizer.Normalize("abc12 is too short");
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_NonHexChars_ReturnsNull()
    {
        var result = _normalizer.Normalize("ghijkl is not hex");
        Assert.Null(result);
    }

    [Fact]
    public void Normalize_PatchIdNoNumber_ReturnsNull()
    {
        var result = _normalizer.Normalize("PATCH-abc is invalid");
        Assert.Null(result);
    }

    #endregion

    #region Priority Testing

    [Fact]
    public void Normalize_UrlOverPlainSha_PrefersUrl()
    {
        // When the URL contains a SHA, it should be extracted via the URL pattern
        var input = "https://github.com/owner/repo/commit/abcdef1234567890abcdef1234567890abcdef12";
        var result = _normalizer.Normalize(input);
        Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", result);
    }

    [Fact]
    public void Normalize_FullShaOverAbbrev_PrefersFullSha()
    {
        // When both full and abbreviated SHAs are present, the full SHA wins
        var input = "abc1234 mentioned in commit a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2";
        var result = _normalizer.Normalize(input);
        Assert.Equal("a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2", result);
    }

    #endregion

    #region Determinism

    [Theory]
    [InlineData("a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2")]
    [InlineData("https://github.com/owner/repo/commit/abc1234")]
    [InlineData("PATCH-12345")]
    [InlineData("commit abc1234567")]
    public void Normalize_MultipleRuns_ReturnsSameResult(string input)
    {
        var first = _normalizer.Normalize(input);
        var second = _normalizer.Normalize(input);
        var third = _normalizer.Normalize(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void Normalize_Determinism_100Runs()
    {
        const string input = "https://github.com/apache/log4j/commit/abc123def456abc123def456abc123def456abc1";
        var expected = _normalizer.Normalize(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _normalizer.Normalize(input);
            Assert.Equal(expected, result);
        }
    }

    #endregion

    #region Real-World Lineage Formats

    [Theory]
    [InlineData("https://github.com/apache/logging-log4j2/commit/7fe72d6", "7fe72d6")]
    [InlineData("backport of abc123def456", "abc123def456")]
    public void Normalize_RealWorldLineages_ReturnsExpected(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    [Fact]
    public void Normalize_PatchId_ExtractsAndUppercases()
    {
        // PATCH-NNNNN format is recognized and uppercased
        var result = _normalizer.Normalize("Applied patch-12345 to fix issue");
        Assert.Equal("PATCH-12345", result);
    }

    #endregion

    #region Singleton Instance

    [Fact]
    public void Instance_ReturnsSameInstance()
    {
        var instance1 = PatchLineageNormalizer.Instance;
        var instance2 = PatchLineageNormalizer.Instance;
        Assert.Same(instance1, instance2);
    }

    #endregion
}
@@ -0,0 +1,295 @@
// -----------------------------------------------------------------------------
// PurlNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for PurlNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class PurlNormalizerTests
{
    private readonly PurlNormalizer _normalizer = PurlNormalizer.Instance;

    #region Basic Normalization

    [Fact]
    public void Normalize_SimplePurl_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:npm/lodash@4.17.21");
        Assert.Equal("pkg:npm/lodash@4.17.21", result);
    }

    [Fact]
    public void Normalize_UppercaseType_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:NPM/lodash@4.17.21");
        Assert.Equal("pkg:npm/lodash@4.17.21", result);
    }

    [Fact]
    public void Normalize_WithNamespace_ReturnsNormalized()
    {
        var result = _normalizer.Normalize("pkg:maven/org.apache.commons/commons-lang3@3.12.0");
        Assert.Equal("pkg:maven/org.apache.commons/commons-lang3@3.12.0", result);
    }

    #endregion

    #region Scoped NPM Packages

    [Fact]
    public void Normalize_NpmScopedPackage_ReturnsLowercaseScope()
    {
        var result = _normalizer.Normalize("pkg:npm/@Angular/core@14.0.0");
        Assert.StartsWith("pkg:npm/", result);
        Assert.Contains("angular", result.ToLowerInvariant());
        Assert.Contains("core", result.ToLowerInvariant());
    }

    [Fact]
    public void Normalize_NpmScopedPackageEncoded_DecodesAndNormalizes()
    {
        var result = _normalizer.Normalize("pkg:npm/%40angular/core@14.0.0");
        Assert.Contains("angular", result.ToLowerInvariant());
    }

    #endregion

    #region Qualifier Stripping
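
    // Behavior pinned by this region: non-identity qualifiers (arch, type, checksum,
    // platform) are stripped, while identity-bearing qualifiers such as distro survive.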

    [Fact]
    public void Normalize_WithArchQualifier_StripsArch()
    {
        var result = _normalizer.Normalize("pkg:deb/debian/curl@7.68.0-1?arch=amd64");
        Assert.DoesNotContain("arch=", result);
    }

    [Fact]
    public void Normalize_WithTypeQualifier_StripsType()
    {
        var result = _normalizer.Normalize("pkg:maven/org.apache/commons@1.0?type=jar");
        Assert.DoesNotContain("type=", result);
    }

    [Fact]
    public void Normalize_WithChecksumQualifier_StripsChecksum()
    {
        var result = _normalizer.Normalize("pkg:npm/lodash@4.17.21?checksum=sha256:abc123");
        Assert.DoesNotContain("checksum=", result);
    }

    [Fact]
    public void Normalize_WithPlatformQualifier_StripsPlatform()
    {
        var result = _normalizer.Normalize("pkg:npm/lodash@4.17.21?platform=linux");
        Assert.DoesNotContain("platform=", result);
    }

    [Fact]
    public void Normalize_WithMultipleQualifiers_StripsNonIdentity()
    {
        var result = _normalizer.Normalize("pkg:deb/debian/curl@7.68.0-1?arch=amd64&distro=bullseye");
        Assert.DoesNotContain("arch=", result);
        Assert.Contains("distro=bullseye", result);
    }

    [Fact]
    public void Normalize_WithIdentityQualifiers_KeepsIdentity()
    {
        var result = _normalizer.Normalize("pkg:deb/debian/curl@7.68.0-1?distro=bullseye");
        Assert.Contains("distro=bullseye", result);
    }

    #endregion

    #region Qualifier Sorting

    [Fact]
    public void Normalize_UnsortedQualifiers_ReturnsSorted()
    {
        var result = _normalizer.Normalize("pkg:npm/pkg@1.0?z=1&a=2&m=3");
        // Qualifiers should be sorted alphabetically
        var queryStart = result.IndexOf('?');
        if (queryStart > 0)
        {
            var qualifiers = result[(queryStart + 1)..].Split('&');
            var sorted = qualifiers.OrderBy(q => q).ToArray();
            Assert.Equal(sorted, qualifiers);
        }
    }

    #endregion

    #region Edge Cases - Empty and Null

    [Fact]
    public void Normalize_Null_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(null!);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_EmptyString_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(string.Empty);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_WhitespaceOnly_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(" ");
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_WithWhitespace_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize(" pkg:npm/lodash@4.17.21 ");
        Assert.Equal("pkg:npm/lodash@4.17.21", result);
    }

    #endregion

    #region Edge Cases - Non-PURL Input

    [Fact]
    public void Normalize_CpeInput_ReturnsAsIs()
    {
        var input = "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*";
        var result = _normalizer.Normalize(input);
        Assert.Equal(input, result);
    }

    [Fact]
    public void Normalize_PlainPackageName_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("SomePackage");
        Assert.Equal("somepackage", result);
    }

    [Fact]
    public void Normalize_InvalidPurlFormat_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:invalid");
        Assert.Equal("pkg:invalid", result);
    }

    #endregion

    #region Edge Cases - Special Characters

    [Fact]
    public void Normalize_WithSubpath_StripsSubpath()
    {
        var result = _normalizer.Normalize("pkg:npm/lodash@4.17.21#src/index.js");
        Assert.DoesNotContain("#", result);
    }

    [Fact]
    public void Normalize_UrlEncodedName_DecodesAndNormalizes()
    {
        var result = _normalizer.Normalize("pkg:npm/%40scope%2Fpkg@1.0.0");
        // Should decode and normalize
        Assert.StartsWith("pkg:npm/", result);
    }

    #endregion

    #region Ecosystem-Specific Behavior

    [Fact]
    public void Normalize_GolangPackage_PreservesNameCase()
    {
        var result = _normalizer.Normalize("pkg:golang/github.com/User/Repo@v1.0.0");
        // Go paths: the type and namespace are lowercased, while the name segment
        // may keep its original casing, so assert case-insensitively.
        Assert.StartsWith("pkg:golang/", result);
        Assert.Contains("repo", result.ToLowerInvariant());
    }

    [Fact]
    public void Normalize_NugetPackage_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:nuget/Newtonsoft.Json@13.0.1");
        Assert.Contains("newtonsoft.json", result.ToLowerInvariant());
    }

    [Fact]
    public void Normalize_DebianPackage_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:deb/debian/CURL@7.68.0-1");
        Assert.Contains("curl", result.ToLowerInvariant());
    }

    [Fact]
    public void Normalize_RpmPackage_ReturnsLowercase()
    {
        var result = _normalizer.Normalize("pkg:rpm/redhat/OPENSSL@1.1.1");
        Assert.Contains("openssl", result.ToLowerInvariant());
    }

    #endregion

    #region Determinism

    [Theory]
    [InlineData("pkg:npm/lodash@4.17.21")]
    [InlineData("pkg:NPM/LODASH@4.17.21")]
    [InlineData("pkg:npm/@angular/core@14.0.0")]
    [InlineData("pkg:maven/org.apache/commons@1.0")]
    public void Normalize_MultipleRuns_ReturnsSameResult(string input)
    {
        var first = _normalizer.Normalize(input);
        var second = _normalizer.Normalize(input);
        var third = _normalizer.Normalize(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void Normalize_Determinism_100Runs()
    {
        const string input = "pkg:npm/@SCOPE/Package@1.0.0?arch=amd64&distro=bullseye";
        var expected = _normalizer.Normalize(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _normalizer.Normalize(input);
            Assert.Equal(expected, result);
        }
    }

    #endregion

    #region Real-World PURL Formats

    [Theory]
    [InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.21")]
    [InlineData("pkg:pypi/requests@2.28.0", "pkg:pypi/requests@2.28.0")]
    [InlineData("pkg:gem/rails@7.0.0", "pkg:gem/rails@7.0.0")]
    public void Normalize_RealWorldPurls_ReturnsExpected(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Singleton Instance

    [Fact]
    public void Instance_ReturnsSameInstance()
    {
        var instance1 = PurlNormalizer.Instance;
        var instance2 = PurlNormalizer.Instance;
        Assert.Same(instance1, instance2);
    }

    #endregion
}
@@ -0,0 +1,286 @@
// -----------------------------------------------------------------------------
// VersionRangeNormalizerTests.cs
// Sprint: SPRINT_8200_0012_0001_CONCEL_merge_hash_library
// Task: MHASH-8200-008
// Description: Unit tests for VersionRangeNormalizer
// -----------------------------------------------------------------------------

using StellaOps.Concelier.Merge.Identity.Normalizers;

namespace StellaOps.Concelier.Merge.Tests.Identity;

public sealed class VersionRangeNormalizerTests
{
    private readonly VersionRangeNormalizer _normalizer = VersionRangeNormalizer.Instance;

    #region Interval Notation
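
    // Mapping exercised below: [a, b) => >=a,<b ; (a, b] => >a,<=b ;
    // [a, b] => >=a,<=b ; (a, b) => >a,<b ; a missing bound simply drops that side.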

    [Fact]
    public void Normalize_ClosedOpen_ConvertsToComparison()
    {
        var result = _normalizer.Normalize("[1.0.0, 2.0.0)");
        Assert.Equal(">=1.0.0,<2.0.0", result);
    }

    [Fact]
    public void Normalize_OpenClosed_ConvertsToComparison()
    {
        var result = _normalizer.Normalize("(1.0.0, 2.0.0]");
        Assert.Equal(">1.0.0,<=2.0.0", result);
    }

    [Fact]
    public void Normalize_ClosedClosed_ConvertsToComparison()
    {
        var result = _normalizer.Normalize("[1.0.0, 2.0.0]");
        Assert.Equal(">=1.0.0,<=2.0.0", result);
    }

    [Fact]
    public void Normalize_OpenOpen_ConvertsToComparison()
    {
        var result = _normalizer.Normalize("(1.0.0, 2.0.0)");
        Assert.Equal(">1.0.0,<2.0.0", result);
    }

    [Fact]
    public void Normalize_IntervalWithSpaces_ConvertsToComparison()
    {
        var result = _normalizer.Normalize("[ 1.0.0 , 2.0.0 )");
        Assert.Equal(">=1.0.0,<2.0.0", result);
    }

    [Fact]
    public void Normalize_LeftOpenInterval_ConvertsToUpperBound()
    {
        var result = _normalizer.Normalize("(, 2.0.0)");
        Assert.Equal("<2.0.0", result);
    }

    [Fact]
    public void Normalize_RightOpenInterval_ConvertsToLowerBound()
    {
        var result = _normalizer.Normalize("[1.0.0,)");
        Assert.Equal(">=1.0.0", result);
    }

    #endregion

    #region Comparison Operators

    [Theory]
    [InlineData(">= 1.0.0", ">=1.0.0")]
    [InlineData(">=1.0.0", ">=1.0.0")]
    [InlineData("> 1.0.0", ">1.0.0")]
    [InlineData("<= 2.0.0", "<=2.0.0")]
    [InlineData("< 2.0.0", "<2.0.0")]
    [InlineData("= 1.0.0", "=1.0.0")]
    [InlineData("!= 1.0.0", "!=1.0.0")]
    public void Normalize_ComparisonOperators_NormalizesWhitespace(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    [Theory]
    [InlineData("~= 1.0.0", "~=1.0.0")]
    [InlineData("~> 1.0.0", "~=1.0.0")]
    [InlineData("^ 1.0.0", "^1.0.0")]
    public void Normalize_SemverOperators_Normalizes(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Multi-Constraint

    [Fact]
    public void Normalize_MultipleConstraints_SortsAndJoins()
    {
        var result = _normalizer.Normalize("<2.0.0,>=1.0.0");
        // Canonical output sorts the constraints; at minimum both must survive
        Assert.Contains("<2.0.0", result);
        Assert.Contains(">=1.0.0", result);
    }

    [Fact]
    public void Normalize_DuplicateConstraints_Deduplicates()
    {
        var result = _normalizer.Normalize(">= 1.0.0, >=1.0.0");
        // Should deduplicate
        var count = result.Split(',').Count(s => s == ">=1.0.0");
        Assert.Equal(1, count);
    }

    #endregion

    #region Fixed Version

    [Fact]
    public void Normalize_FixedNotation_ConvertsToGreaterOrEqual()
    {
        var result = _normalizer.Normalize("fixed: 1.5.1");
        Assert.Equal(">=1.5.1", result);
    }

    [Fact]
    public void Normalize_FixedNotationNoSpace_ConvertsToGreaterOrEqual()
    {
        var result = _normalizer.Normalize("fixed:1.5.1");
        Assert.Equal(">=1.5.1", result);
    }

    #endregion

    #region Wildcard

    [Theory]
    [InlineData("*", "*")]
    [InlineData("all", "*")]
    [InlineData("any", "*")]
    public void Normalize_WildcardMarkers_ReturnsAsterisk(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Plain Version

    [Fact]
    public void Normalize_PlainVersion_ConvertsToExact()
    {
        var result = _normalizer.Normalize("1.0.0");
        Assert.Equal("=1.0.0", result);
    }

    [Fact]
    public void Normalize_PlainVersionWithPatch_ConvertsToExact()
    {
        var result = _normalizer.Normalize("1.2.3");
        Assert.Equal("=1.2.3", result);
    }

    #endregion

    #region Edge Cases - Empty and Null

    [Fact]
    public void Normalize_Null_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(null);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_EmptyString_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(string.Empty);
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_WhitespaceOnly_ReturnsEmpty()
    {
        var result = _normalizer.Normalize(" ");
        Assert.Equal(string.Empty, result);
    }

    [Fact]
    public void Normalize_WithWhitespace_ReturnsTrimmed()
    {
        var result = _normalizer.Normalize(" >= 1.0.0 ");
        Assert.Equal(">=1.0.0", result);
    }

    #endregion

    #region Edge Cases - Malformed Input

    [Fact]
    public void Normalize_UnrecognizedFormat_ReturnsAsIs()
    {
        var result = _normalizer.Normalize("some-weird-format");
        Assert.Equal("some-weird-format", result);
    }

    [Fact]
    public void Normalize_MalformedInterval_ReturnsAsIs()
    {
        var result = _normalizer.Normalize("[1.0.0");
        // Should be returned as-is when it can't be parsed
        Assert.Contains("1.0.0", result);
    }

    #endregion

    #region Determinism

    [Theory]
    [InlineData("[1.0.0, 2.0.0)")]
    [InlineData(">= 1.0.0")]
    [InlineData("fixed: 1.5.1")]
    [InlineData("*")]
    public void Normalize_MultipleRuns_ReturnsSameResult(string input)
    {
        var first = _normalizer.Normalize(input);
        var second = _normalizer.Normalize(input);
        var third = _normalizer.Normalize(input);

        Assert.Equal(first, second);
        Assert.Equal(second, third);
    }

    [Fact]
    public void Normalize_Determinism_100Runs()
    {
        const string input = "[1.0.0, 2.0.0)";
        var expected = _normalizer.Normalize(input);

        for (var i = 0; i < 100; i++)
        {
            var result = _normalizer.Normalize(input);
            Assert.Equal(expected, result);
        }
    }

    [Fact]
    public void Normalize_EquivalentFormats_ProduceSameOutput()
    {
        // Different ways to express the same range
        var interval = _normalizer.Normalize("[1.0.0, 2.0.0)");
        var comparison = _normalizer.Normalize(">=1.0.0,<2.0.0");

        Assert.Equal(interval, comparison);
    }

    #endregion

    #region Real-World Version Ranges

    [Theory]
    [InlineData("<7.68.0-1+deb10u2", "<7.68.0-1+deb10u2")]
    [InlineData(">=0,<1.2.3", ">=0,<1.2.3")]
    public void Normalize_RealWorldRanges_ReturnsExpected(string input, string expected)
    {
        var result = _normalizer.Normalize(input);
        Assert.Equal(expected, result);
    }

    #endregion

    #region Singleton Instance

    [Fact]
    public void Instance_ReturnsSameInstance()
    {
        var instance1 = VersionRangeNormalizer.Instance;
        var instance2 = VersionRangeNormalizer.Instance;
        Assert.Same(instance1, instance2);
    }

    #endregion
}
@@ -302,9 +302,9 @@ public sealed class MergePropertyTests
         // Assert - merge provenance trace should contain all original sources
         var mergeProvenance = result.Provenance.FirstOrDefault(p => p.Source == "merge");
         mergeProvenance.Should().NotBeNull();
-        mergeProvenance!.Value.Should().Contain("redhat", StringComparison.OrdinalIgnoreCase);
-        mergeProvenance.Value.Should().Contain("ghsa", StringComparison.OrdinalIgnoreCase);
-        mergeProvenance.Value.Should().Contain("osv", StringComparison.OrdinalIgnoreCase);
+        mergeProvenance!.Value.ToLowerInvariant().Should().Contain("redhat");
+        mergeProvenance.Value.ToLowerInvariant().Should().Contain("ghsa");
+        mergeProvenance.Value.ToLowerInvariant().Should().Contain("osv");
     }
 
     [Fact]

@@ -4,6 +4,8 @@
     <TargetFramework>net10.0</TargetFramework>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
+    <IsPackable>false</IsPackable>
+    <IsTestProject>true</IsTestProject>
   </PropertyGroup>
   <ItemGroup>
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />

@@ -0,0 +1,770 @@
// -----------------------------------------------------------------------------
// AdvisoryCanonicalRepositoryTests.cs
// Sprint: SPRINT_8200_0012_0002_DB_canonical_source_edge_schema
// Task: SCHEMA-8200-011
// Description: Integration tests for AdvisoryCanonicalRepository
// -----------------------------------------------------------------------------

using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Concelier.Storage.Postgres.Tests;

/// <summary>
/// Integration tests for <see cref="AdvisoryCanonicalRepository"/>.
/// Tests CRUD operations, unique constraints, and cascade delete behavior.
/// </summary>
[Collection(ConcelierPostgresCollection.Name)]
public sealed class AdvisoryCanonicalRepositoryTests : IAsyncLifetime
{
    private readonly ConcelierPostgresFixture _fixture;
    private readonly ConcelierDataSource _dataSource;
    private readonly AdvisoryCanonicalRepository _repository;
    private readonly SourceRepository _sourceRepository;

    public AdvisoryCanonicalRepositoryTests(ConcelierPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        _dataSource = new ConcelierDataSource(Options.Create(options), NullLogger<ConcelierDataSource>.Instance);
        _repository = new AdvisoryCanonicalRepository(_dataSource, NullLogger<AdvisoryCanonicalRepository>.Instance);
        _sourceRepository = new SourceRepository(_dataSource, NullLogger<SourceRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    #region GetByIdAsync Tests

    [Fact]
    public async Task GetByIdAsync_ShouldReturnEntity_WhenExists()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var id = await _repository.UpsertAsync(canonical);

        // Act
        var result = await _repository.GetByIdAsync(id);

        // Assert
        result.Should().NotBeNull();
        result!.Id.Should().Be(id);
        result.Cve.Should().Be(canonical.Cve);
        result.AffectsKey.Should().Be(canonical.AffectsKey);
        result.MergeHash.Should().Be(canonical.MergeHash);
    }

    [Fact]
    public async Task GetByIdAsync_ShouldReturnNull_WhenNotExists()
    {
        // Act
        var result = await _repository.GetByIdAsync(Guid.NewGuid());

        // Assert
        result.Should().BeNull();
    }

    #endregion

    #region GetByMergeHashAsync Tests

    [Fact]
    public async Task GetByMergeHashAsync_ShouldReturnEntity_WhenExists()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        await _repository.UpsertAsync(canonical);

        // Act
        var result = await _repository.GetByMergeHashAsync(canonical.MergeHash);

        // Assert
        result.Should().NotBeNull();
        result!.MergeHash.Should().Be(canonical.MergeHash);
        result.Cve.Should().Be(canonical.Cve);
    }

    [Fact]
    public async Task GetByMergeHashAsync_ShouldReturnNull_WhenNotExists()
    {
        // Act
        var result = await _repository.GetByMergeHashAsync("sha256:nonexistent");

        // Assert
        result.Should().BeNull();
    }

    #endregion

    #region GetByCveAsync Tests

    [Fact]
    public async Task GetByCveAsync_ShouldReturnAllMatchingEntities()
    {
        // Arrange
        var cve = "CVE-2024-12345";
        var canonical1 = CreateTestCanonical(cve: cve, affectsKey: "pkg:npm/lodash@4.17.0");
        var canonical2 = CreateTestCanonical(cve: cve, affectsKey: "pkg:npm/express@4.0.0");
        var canonical3 = CreateTestCanonical(cve: "CVE-2024-99999");

        await _repository.UpsertAsync(canonical1);
        await _repository.UpsertAsync(canonical2);
        await _repository.UpsertAsync(canonical3);

        // Act
        var results = await _repository.GetByCveAsync(cve);

        // Assert
        results.Should().HaveCount(2);
        results.Should().AllSatisfy(r => r.Cve.Should().Be(cve));
    }

    [Fact]
    public async Task GetByCveAsync_ShouldReturnEmptyList_WhenNoMatches()
    {
        // Act
        var results = await _repository.GetByCveAsync("CVE-2099-00000");

        // Assert
        results.Should().BeEmpty();
    }

    #endregion

    #region GetByAffectsKeyAsync Tests

    [Fact]
    public async Task GetByAffectsKeyAsync_ShouldReturnAllMatchingEntities()
    {
        // Arrange
        var affectsKey = "pkg:npm/lodash@4.17.21";
        var canonical1 = CreateTestCanonical(cve: "CVE-2024-11111", affectsKey: affectsKey);
        var canonical2 = CreateTestCanonical(cve: "CVE-2024-22222", affectsKey: affectsKey);
        var canonical3 = CreateTestCanonical(cve: "CVE-2024-33333", affectsKey: "pkg:npm/express@4.0.0");

        await _repository.UpsertAsync(canonical1);
        await _repository.UpsertAsync(canonical2);
        await _repository.UpsertAsync(canonical3);

        // Act
        var results = await _repository.GetByAffectsKeyAsync(affectsKey);

        // Assert
        results.Should().HaveCount(2);
        results.Should().AllSatisfy(r => r.AffectsKey.Should().Be(affectsKey));
    }

    #endregion

    #region UpsertAsync Tests

    [Fact]
    public async Task UpsertAsync_ShouldInsertNewEntity()
    {
        // Arrange
        var canonical = CreateTestCanonical();

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        id.Should().NotBeEmpty();

        var retrieved = await _repository.GetByIdAsync(id);
        retrieved.Should().NotBeNull();
        retrieved!.Cve.Should().Be(canonical.Cve);
        retrieved.AffectsKey.Should().Be(canonical.AffectsKey);
        retrieved.MergeHash.Should().Be(canonical.MergeHash);
        retrieved.Status.Should().Be("active");
        retrieved.CreatedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
    }

    [Fact]
    public async Task UpsertAsync_ShouldUpdateExistingByMergeHash()
    {
        // Arrange
        var mergeHash = $"sha256:{Guid.NewGuid():N}";
        var original = CreateTestCanonical(mergeHash: mergeHash, severity: "high");
        await _repository.UpsertAsync(original);

        // Get original timestamps
        var originalEntity = await _repository.GetByMergeHashAsync(mergeHash);
        var originalCreatedAt = originalEntity!.CreatedAt;

        // Create update with same merge_hash but different values
        var updated = new AdvisoryCanonicalEntity
        {
            Id = Guid.NewGuid(), // Different ID
            Cve = original.Cve,
            AffectsKey = original.AffectsKey,
            MergeHash = mergeHash, // Same merge_hash
            Severity = "critical", // Updated severity
            Title = "Updated Title"
        };

        // Act
        var id = await _repository.UpsertAsync(updated);

        // Assert - should return original ID, not new one
        id.Should().Be(originalEntity.Id);

        var result = await _repository.GetByMergeHashAsync(mergeHash);
        result.Should().NotBeNull();
        result!.Severity.Should().Be("critical");
        result.Title.Should().Be("Updated Title");
        result.CreatedAt.Should().BeCloseTo(originalCreatedAt, TimeSpan.FromSeconds(1)); // CreatedAt unchanged
        result.UpdatedAt.Should().BeAfter(result.CreatedAt);
    }

    [Fact]
    public async Task UpsertAsync_ShouldPreserveExistingValues_WhenNewValuesAreNull()
    {
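        // Presumably implemented with COALESCE-style semantics in the upsert's UPDATE
        // branch; the test pins the observable contract rather than the SQL itself.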
        // Arrange
        var mergeHash = $"sha256:{Guid.NewGuid():N}";
        var original = CreateTestCanonical(
            mergeHash: mergeHash,
            severity: "high",
            title: "Original Title",
            summary: "Original Summary");
        await _repository.UpsertAsync(original);

        // Create update with null values for severity, title, summary
        var updated = new AdvisoryCanonicalEntity
        {
            Id = Guid.NewGuid(),
            Cve = original.Cve,
            AffectsKey = original.AffectsKey,
            MergeHash = mergeHash,
            Severity = null,
            Title = null,
            Summary = null
        };

        // Act
        await _repository.UpsertAsync(updated);

        // Assert - original values should be preserved
        var result = await _repository.GetByMergeHashAsync(mergeHash);
        result.Should().NotBeNull();
        result!.Severity.Should().Be("high");
        result.Title.Should().Be("Original Title");
        result.Summary.Should().Be("Original Summary");
    }

    [Fact]
    public async Task UpsertAsync_ShouldStoreWeaknessArray()
    {
        // Arrange
        var canonical = CreateTestCanonical(weaknesses: ["CWE-79", "CWE-89", "CWE-120"]);

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.Weakness.Should().BeEquivalentTo(["CWE-79", "CWE-89", "CWE-120"]);
    }

    [Fact]
    public async Task UpsertAsync_ShouldStoreVersionRangeAsJson()
    {
        // Arrange
        var versionRange = """{"introduced": "1.0.0", "fixed": "1.5.1"}""";
        var canonical = CreateTestCanonical(versionRange: versionRange);

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.VersionRange.Should().Contain("introduced");
        result.VersionRange.Should().Contain("fixed");
    }

    #endregion

    #region UpdateStatusAsync Tests

    [Fact]
    public async Task UpdateStatusAsync_ShouldUpdateStatus()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var id = await _repository.UpsertAsync(canonical);

        // Act
        await _repository.UpdateStatusAsync(id, "withdrawn");

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.Status.Should().Be("withdrawn");
    }

    [Fact]
    public async Task UpdateStatusAsync_ShouldUpdateTimestamp()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var id = await _repository.UpsertAsync(canonical);
        var original = await _repository.GetByIdAsync(id);

        // Wait a bit to ensure timestamp difference
        await Task.Delay(100);

        // Act
        await _repository.UpdateStatusAsync(id, "stub");

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.UpdatedAt.Should().BeAfter(original!.UpdatedAt);
    }

    #endregion

    #region DeleteAsync Tests

    [Fact]
    public async Task DeleteAsync_ShouldRemoveEntity()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var id = await _repository.UpsertAsync(canonical);

        // Verify exists
        var exists = await _repository.GetByIdAsync(id);
        exists.Should().NotBeNull();

        // Act
        await _repository.DeleteAsync(id);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().BeNull();
    }

    [Fact]
    public async Task DeleteAsync_ShouldCascadeDeleteSourceEdges()
    {
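        // Assumes the source-edge FK to the canonical row is declared ON DELETE CASCADE;
        // verified here end-to-end rather than by inspecting the schema.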
        // Arrange
        var canonical = CreateTestCanonical();
        var canonicalId = await _repository.UpsertAsync(canonical);

        // Create a source first (required FK)
        var source = CreateTestSource();
        await _sourceRepository.UpsertAsync(source);

        // Add source edge
        var edge = CreateTestSourceEdge(canonicalId, source.Id);
        var edgeId = await _repository.AddSourceEdgeAsync(edge);

        // Verify edge exists
        var edgeExists = await _repository.GetSourceEdgeByIdAsync(edgeId);
        edgeExists.Should().NotBeNull();

        // Act - delete canonical
        await _repository.DeleteAsync(canonicalId);

        // Assert - source edge should be deleted via cascade
        var edgeAfterDelete = await _repository.GetSourceEdgeByIdAsync(edgeId);
        edgeAfterDelete.Should().BeNull();
    }

    #endregion

    #region CountAsync Tests

    [Fact]
    public async Task CountAsync_ShouldReturnActiveCount()
    {
        // Arrange
        await _repository.UpsertAsync(CreateTestCanonical());
        await _repository.UpsertAsync(CreateTestCanonical());

        var withdrawnCanonical = CreateTestCanonical();
        var withdrawnId = await _repository.UpsertAsync(withdrawnCanonical);
        await _repository.UpdateStatusAsync(withdrawnId, "withdrawn");

        // Act
        var count = await _repository.CountAsync();

        // Assert
        count.Should().Be(2); // Only active ones
    }

    #endregion

    #region StreamActiveAsync Tests

    [Fact]
    public async Task StreamActiveAsync_ShouldStreamOnlyActiveEntities()
    {
        // Arrange
        await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00001"));
        await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00002"));

        var withdrawnId = await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00003"));
        await _repository.UpdateStatusAsync(withdrawnId, "withdrawn");

        // Act
        var results = new List<AdvisoryCanonicalEntity>();
        await foreach (var entity in _repository.StreamActiveAsync())
        {
            results.Add(entity);
        }

        // Assert
        results.Should().HaveCount(2);
        results.Should().AllSatisfy(e => e.Status.Should().Be("active"));
    }

    #endregion

    #region Source Edge Tests

    [Fact]
    public async Task GetSourceEdgesAsync_ShouldReturnEdgesForCanonical()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var canonicalId = await _repository.UpsertAsync(canonical);

        var source1 = CreateTestSource();
        var source2 = CreateTestSource();
        await _sourceRepository.UpsertAsync(source1);
        await _sourceRepository.UpsertAsync(source2);

        await _repository.AddSourceEdgeAsync(CreateTestSourceEdge(canonicalId, source1.Id, precedence: 10));
        await _repository.AddSourceEdgeAsync(CreateTestSourceEdge(canonicalId, source2.Id, precedence: 20));

        // Act
        var edges = await _repository.GetSourceEdgesAsync(canonicalId);

        // Assert
        edges.Should().HaveCount(2);
        edges.Should().BeInAscendingOrder(e => e.PrecedenceRank);
    }

    [Fact]
    public async Task AddSourceEdgeAsync_ShouldInsertNewEdge()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var canonicalId = await _repository.UpsertAsync(canonical);
|
||||
|
||||
var source = CreateTestSource();
|
||||
await _sourceRepository.UpsertAsync(source);
|
||||
|
||||
var edge = CreateTestSourceEdge(canonicalId, source.Id);
|
||||
|
||||
// Act
|
||||
var edgeId = await _repository.AddSourceEdgeAsync(edge);
|
||||
|
||||
// Assert
|
||||
edgeId.Should().NotBeEmpty();
|
||||
|
||||
var result = await _repository.GetSourceEdgeByIdAsync(edgeId);
|
||||
result.Should().NotBeNull();
|
||||
result!.CanonicalId.Should().Be(canonicalId);
|
||||
result.SourceId.Should().Be(source.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AddSourceEdgeAsync_ShouldUpsertOnConflict()
|
||||
{
|
||||
// Arrange
|
||||
var canonical = CreateTestCanonical();
|
||||
var canonicalId = await _repository.UpsertAsync(canonical);
|
||||
|
||||
var source = CreateTestSource();
|
||||
await _sourceRepository.UpsertAsync(source);
|
||||
|
||||
var sourceDocHash = $"sha256:{Guid.NewGuid():N}";
|
||||
var edge1 = CreateTestSourceEdge(canonicalId, source.Id, sourceDocHash: sourceDocHash, precedence: 100);
|
||||
var id1 = await _repository.AddSourceEdgeAsync(edge1);
|
||||
|
||||
// Create edge with same (canonical_id, source_id, source_doc_hash) but different precedence
|
||||
var edge2 = CreateTestSourceEdge(canonicalId, source.Id, sourceDocHash: sourceDocHash, precedence: 10);
|
||||
|
||||
// Act
|
||||
var id2 = await _repository.AddSourceEdgeAsync(edge2);
|
||||
|
||||
// Assert - should return same ID
|
||||
id2.Should().Be(id1);
|
||||
|
||||
var result = await _repository.GetSourceEdgeByIdAsync(id1);
|
||||
result.Should().NotBeNull();
|
||||
// Should use LEAST of precedence values
|
||||
result!.PrecedenceRank.Should().Be(10);
|
||||
}
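
    // For context, the upsert above relies on a unique key over
    // (canonical_id, source_id, source_doc_hash). A minimal sketch of the kind of
    // PostgreSQL statement that yields these semantics (illustrative only; table and
    // column names are assumptions, and the real repository SQL may differ):
    //
    //   INSERT INTO advisory_source_edge (id, canonical_id, source_id, source_doc_hash, precedence_rank)
    //   VALUES (@id, @canonicalId, @sourceId, @sourceDocHash, @precedence)
    //   ON CONFLICT (canonical_id, source_id, source_doc_hash)
    //   DO UPDATE SET precedence_rank = LEAST(advisory_source_edge.precedence_rank, EXCLUDED.precedence_rank)
    //   RETURNING id;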

    [Fact]
    public async Task AddSourceEdgeAsync_ShouldStoreDsseEnvelope()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var canonicalId = await _repository.UpsertAsync(canonical);

        var source = CreateTestSource();
        await _sourceRepository.UpsertAsync(source);

        var dsseEnvelope = """{"payloadType": "application/vnd.in-toto+json", "payload": "eyJ0ZXN0IjogdHJ1ZX0=", "signatures": []}""";
        var edge = CreateTestSourceEdge(canonicalId, source.Id, dsseEnvelope: dsseEnvelope);

        // Act
        var edgeId = await _repository.AddSourceEdgeAsync(edge);

        // Assert
        var result = await _repository.GetSourceEdgeByIdAsync(edgeId);
        result.Should().NotBeNull();
        result!.DsseEnvelope.Should().Contain("payloadType");
        result.DsseEnvelope.Should().Contain("signatures");
    }

    [Fact]
    public async Task GetSourceEdgesByAdvisoryIdAsync_ShouldReturnMatchingEdges()
    {
        // Arrange
        var canonical = CreateTestCanonical();
        var canonicalId = await _repository.UpsertAsync(canonical);

        var source = CreateTestSource();
        await _sourceRepository.UpsertAsync(source);

        var advisoryId = "DSA-5678-1";
        await _repository.AddSourceEdgeAsync(CreateTestSourceEdge(canonicalId, source.Id, sourceAdvisoryId: advisoryId));
        await _repository.AddSourceEdgeAsync(CreateTestSourceEdge(canonicalId, source.Id, sourceAdvisoryId: "OTHER-123"));

        // Act
        var edges = await _repository.GetSourceEdgesByAdvisoryIdAsync(advisoryId);

        // Assert
        edges.Should().ContainSingle();
        edges[0].SourceAdvisoryId.Should().Be(advisoryId);
    }

    #endregion

    #region Statistics Tests

    [Fact]
    public async Task GetStatisticsAsync_ShouldReturnCorrectCounts()
    {
        // Arrange
        await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00001"));
        await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00002"));
        var withdrawnId = await _repository.UpsertAsync(CreateTestCanonical(cve: "CVE-2024-00003"));
        await _repository.UpdateStatusAsync(withdrawnId, "withdrawn");

        var source = CreateTestSource();
        await _sourceRepository.UpsertAsync(source);

        var canonicals = await _repository.GetByCveAsync("CVE-2024-00001");
        await _repository.AddSourceEdgeAsync(CreateTestSourceEdge(canonicals[0].Id, source.Id));

        // Act
        var stats = await _repository.GetStatisticsAsync();

        // Assert
        stats.TotalCanonicals.Should().Be(3);
        stats.ActiveCanonicals.Should().Be(2);
        stats.TotalSourceEdges.Should().Be(1);
        stats.LastUpdatedAt.Should().NotBeNull();
    }

    #endregion

    #region Unique Constraint Tests

    [Fact]
    public async Task UpsertAsync_WithDuplicateMergeHash_ShouldUpdateNotInsert()
    {
        // Arrange
        var mergeHash = $"sha256:{Guid.NewGuid():N}";
        var canonical1 = CreateTestCanonical(mergeHash: mergeHash, title: "First");
        var canonical2 = CreateTestCanonical(mergeHash: mergeHash, title: "Second");

        await _repository.UpsertAsync(canonical1);

        // Act - should update, not throw
        await _repository.UpsertAsync(canonical2);

        // Assert
        var result = await _repository.GetByMergeHashAsync(mergeHash);
        result.Should().NotBeNull();
        // There should be exactly one record
        var count = await _repository.CountAsync();
        count.Should().Be(1);
    }

    #endregion

    #region Edge Cases

    [Fact]
    public async Task UpsertAsync_WithEmptyWeaknessArray_ShouldSucceed()
    {
        // Arrange
        var canonical = CreateTestCanonical(weaknesses: []);

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.Weakness.Should().BeEmpty();
    }

    [Fact]
    public async Task UpsertAsync_WithNullOptionalFields_ShouldSucceed()
    {
        // Arrange
        var canonical = new AdvisoryCanonicalEntity
        {
            Id = Guid.NewGuid(),
            Cve = "CVE-2024-99999",
            AffectsKey = "pkg:npm/test@1.0.0",
            MergeHash = $"sha256:{Guid.NewGuid():N}",
            VersionRange = null,
            Severity = null,
            EpssScore = null,
            Title = null,
            Summary = null
        };

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.VersionRange.Should().BeNull();
        result.Severity.Should().BeNull();
        result.EpssScore.Should().BeNull();
    }

    [Fact]
    public async Task UpsertAsync_WithEpssScore_ShouldStoreCorrectly()
    {
        // Arrange
        var canonical = CreateTestCanonical(epssScore: 0.9754m);

        // Act
        var id = await _repository.UpsertAsync(canonical);

        // Assert
        var result = await _repository.GetByIdAsync(id);
        result.Should().NotBeNull();
        result!.EpssScore.Should().Be(0.9754m);
    }

    [Fact]
    public async Task UpsertAsync_WithExploitKnown_ShouldOrWithExisting()
    {
        // Arrange
        var mergeHash = $"sha256:{Guid.NewGuid():N}";
        var canonical1 = CreateTestCanonical(mergeHash: mergeHash, exploitKnown: true);
        await _repository.UpsertAsync(canonical1);

        // Try to update with exploitKnown = false
        var canonical2 = new AdvisoryCanonicalEntity
        {
            Id = Guid.NewGuid(),
            Cve = canonical1.Cve,
            AffectsKey = canonical1.AffectsKey,
            MergeHash = mergeHash,
            ExploitKnown = false // Trying to set to false
        };

        // Act
        await _repository.UpsertAsync(canonical2);

        // Assert - should remain true (OR semantics)
        var result = await _repository.GetByMergeHashAsync(mergeHash);
        result.Should().NotBeNull();
        result!.ExploitKnown.Should().BeTrue();
    }
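
    // The OR semantics exercised above can live in the upsert itself. A hypothetical
    // sketch of the conflict clause (column names assumed for illustration):
    //
    //   ON CONFLICT (merge_hash) DO UPDATE SET
    //     exploit_known = advisory_canonical.exploit_known OR EXCLUDED.exploit_known
    //
    // so a later source reporting "no known exploit" can never clear a flag an
    // earlier source has already set.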

    #endregion

    #region Test Helpers

    private static AdvisoryCanonicalEntity CreateTestCanonical(
        string? cve = null,
        string? affectsKey = null,
        string? mergeHash = null,
        string? severity = null,
        string? title = null,
        string? summary = null,
        string? versionRange = null,
        string[]? weaknesses = null,
        decimal? epssScore = null,
        bool exploitKnown = false)
    {
        var id = Guid.NewGuid();
        return new AdvisoryCanonicalEntity
        {
            Id = id,
            Cve = cve ?? $"CVE-2024-{id.ToString("N")[..5]}",
            AffectsKey = affectsKey ?? $"pkg:npm/{id:N}@1.0.0",
            MergeHash = mergeHash ?? $"sha256:{id:N}",
            Severity = severity,
            Title = title,
            Summary = summary,
            VersionRange = versionRange,
            Weakness = weaknesses ?? [],
            EpssScore = epssScore,
            ExploitKnown = exploitKnown
        };
    }

    private static SourceEntity CreateTestSource()
    {
        var id = Guid.NewGuid();
        var key = $"source-{id:N}"[..20];
        return new SourceEntity
        {
            Id = id,
            Key = key,
            Name = $"Test Source {key}",
            SourceType = "nvd",
            Url = "https://example.com/feed",
            Priority = 100,
            Enabled = true,
            Config = """{"apiKey": "test"}"""
        };
    }

    private static AdvisorySourceEdgeEntity CreateTestSourceEdge(
        Guid canonicalId,
        Guid sourceId,
        string? sourceAdvisoryId = null,
        string? sourceDocHash = null,
        int precedence = 100,
        string? dsseEnvelope = null)
    {
        return new AdvisorySourceEdgeEntity
        {
            Id = Guid.NewGuid(),
            CanonicalId = canonicalId,
            SourceId = sourceId,
            SourceAdvisoryId = sourceAdvisoryId ?? $"ADV-{Guid.NewGuid():N}"[..15],
            SourceDocHash = sourceDocHash ?? $"sha256:{Guid.NewGuid():N}",
            VendorStatus = "affected",
            PrecedenceRank = precedence,
            DsseEnvelope = dsseEnvelope,
            FetchedAt = DateTimeOffset.UtcNow
        };
    }

    #endregion
}
@@ -1,90 +0,0 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage.Postgres;
using StellaOps.Concelier.Storage.Postgres.Converters;
using StellaOps.Concelier.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Concelier.Storage.Postgres.Tests;

[Collection(ConcelierPostgresCollection.Name)]
public sealed class AdvisoryConversionServiceTests : IAsyncLifetime
{
    private readonly ConcelierPostgresFixture _fixture;
    private readonly AdvisoryConversionService _service;
    private readonly AdvisoryRepository _advisories;
    private readonly AdvisoryAliasRepository _aliases;
    private readonly AdvisoryAffectedRepository _affected;

    public AdvisoryConversionServiceTests(ConcelierPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new ConcelierDataSource(Options.Create(options), NullLogger<ConcelierDataSource>.Instance);

        _advisories = new AdvisoryRepository(dataSource, NullLogger<AdvisoryRepository>.Instance);
        _aliases = new AdvisoryAliasRepository(dataSource, NullLogger<AdvisoryAliasRepository>.Instance);
        _affected = new AdvisoryAffectedRepository(dataSource, NullLogger<AdvisoryAffectedRepository>.Instance);
        _service = new AdvisoryConversionService(_advisories);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task ConvertAndUpsert_PersistsAdvisoryAndChildren()
    {
        var doc = CreateDoc();
        var sourceId = Guid.NewGuid();

        var stored = await _service.ConvertAndUpsertAsync(doc, "osv", sourceId);

        var fetched = await _advisories.GetByKeyAsync(doc.AdvisoryKey);
        var aliases = await _aliases.GetByAdvisoryAsync(stored.Id);
        var affected = await _affected.GetByAdvisoryAsync(stored.Id);

        fetched.Should().NotBeNull();
        fetched!.PrimaryVulnId.Should().Be("CVE-2024-0002");
        fetched.RawPayload.Should().NotBeNull();
        fetched.Provenance.Should().Contain("osv");
        aliases.Should().NotBeEmpty();
        affected.Should().ContainSingle(a => a.Purl == "pkg:npm/example@2.0.0");
        affected[0].VersionRange.Should().Contain("introduced");
    }

    private static AdvisoryDocument CreateDoc()
    {
        var payload = new DocumentObject
        {
            { "primaryVulnId", "CVE-2024-0002" },
            { "title", "Another advisory" },
            { "severity", "medium" },
            { "aliases", new DocumentArray { "CVE-2024-0002" } },
            { "affected", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "ecosystem", "npm" },
                        { "packageName", "example" },
                        { "purl", "pkg:npm/example@2.0.0" },
                        { "range", "{\"introduced\":\"0\",\"fixed\":\"2.0.1\"}" },
                        { "versionsAffected", new DocumentArray { "2.0.0" } },
                        { "versionsFixed", new DocumentArray { "2.0.1" } }
                    }
                }
            }
        };

        return new AdvisoryDocument
        {
            AdvisoryKey = "ADV-2",
            Payload = payload,
            Modified = DateTime.UtcNow,
            Published = DateTime.UtcNow.AddDays(-2)
        };
    }
}
@@ -1,122 +0,0 @@
using FluentAssertions;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage.Postgres.Converters;
using Xunit;

namespace StellaOps.Concelier.Storage.Postgres.Tests;

public sealed class AdvisoryConverterTests
{
    [Fact]
    public void Convert_MapsCoreFieldsAndChildren()
    {
        var doc = CreateAdvisoryDocument();

        var result = AdvisoryConverter.Convert(doc, sourceKey: "osv", sourceId: Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"));

        result.Advisory.AdvisoryKey.Should().Be("ADV-1");
        result.Advisory.PrimaryVulnId.Should().Be("CVE-2024-0001");
        result.Advisory.Severity.Should().Be("high");
        result.Aliases.Should().ContainSingle(a => a.AliasValue == "CVE-2024-0001");
        result.Cvss.Should().ContainSingle(c => c.BaseScore == 9.8m && c.BaseSeverity == "critical");
        result.Affected.Should().ContainSingle(a => a.Purl == "pkg:npm/example@1.0.0");
        result.References.Should().ContainSingle(r => r.Url == "https://ref.example/test");
        result.Credits.Should().ContainSingle(c => c.Name == "Researcher One");
        result.Weaknesses.Should().ContainSingle(w => w.CweId == "CWE-79");
        result.KevFlags.Should().ContainSingle(k => k.CveId == "CVE-2024-0001");
    }

    private static AdvisoryDocument CreateAdvisoryDocument()
    {
        var payload = new DocumentObject
        {
            { "primaryVulnId", "CVE-2024-0001" },
            { "title", "Sample Advisory" },
            { "summary", "Summary" },
            { "description", "Description" },
            { "severity", "high" },
            { "aliases", new DocumentArray { "CVE-2024-0001", "GHSA-aaaa-bbbb-cccc" } },
            { "cvss", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "version", "3.1" },
                        { "vector", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
                        { "baseScore", 9.8 },
                        { "baseSeverity", "critical" },
                        { "exploitabilityScore", 3.9 },
                        { "impactScore", 5.9 },
                        { "source", "nvd" },
                        { "isPrimary", true }
                    }
                }
            },
            { "affected", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "ecosystem", "npm" },
                        { "packageName", "example" },
                        { "purl", "pkg:npm/example@1.0.0" },
                        { "range", "{\"introduced\":\"0\",\"fixed\":\"1.0.1\"}" },
                        { "versionsAffected", new DocumentArray { "1.0.0" } },
                        { "versionsFixed", new DocumentArray { "1.0.1" } },
                        { "databaseSpecific", "{\"severity\":\"high\"}" }
                    }
                }
            },
            { "references", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "type", "advisory" },
                        { "url", "https://ref.example/test" }
                    }
                }
            },
            { "credits", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "name", "Researcher One" },
                        { "contact", "r1@example.test" },
                        { "type", "finder" }
                    }
                }
            },
            { "weaknesses", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "cweId", "CWE-79" },
                        { "description", "XSS" }
                    }
                }
            },
            { "kev", new DocumentArray
                {
                    new DocumentObject
                    {
                        { "cveId", "CVE-2024-0001" },
                        { "vendorProject", "Example" },
                        { "product", "Example Product" },
                        { "name", "Critical vuln" },
                        { "knownRansomwareUse", false },
                        { "dateAdded", DateTime.UtcNow },
                        { "dueDate", DateTime.UtcNow.AddDays(7) },
                        { "notes", "note" }
                    }
                }
            }
        };

        return new AdvisoryDocument
        {
            AdvisoryKey = "ADV-1",
            Payload = payload,
            Modified = DateTime.UtcNow,
            Published = DateTime.UtcNow.AddDays(-1)
        };
    }
}
@@ -208,7 +208,7 @@ public sealed class AdvisoryIdempotencyTests : IAsyncLifetime
        // Assert - Should have updated the cursor
        var retrieved = await _sourceStateRepository.GetBySourceIdAsync(source.Id);
        retrieved.Should().NotBeNull();
-       retrieved!.LastCursor.Should().Be("cursor2");
+       retrieved!.Cursor.Should().Be("cursor2");
    }

    [Fact]
@@ -369,11 +369,9 @@ public sealed class AdvisoryIdempotencyTests : IAsyncLifetime
        {
            Id = Guid.NewGuid(),
            SourceId = sourceId,
-           LastCursor = cursor ?? "default-cursor",
-           LastFetchAt = DateTimeOffset.UtcNow,
-           LastSuccessAt = DateTimeOffset.UtcNow,
-           TotalAdvisoriesProcessed = 100,
-           Status = "active"
+           Cursor = cursor ?? "default-cursor",
+           LastSyncAt = DateTimeOffset.UtcNow,
+           LastSuccessAt = DateTimeOffset.UtcNow
        };
    }
}

@@ -13,18 +13,9 @@
  <ItemGroup>
    <PackageReference Include="Dapper" Version="2.1.35" />
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="Moq" Version="4.20.70" />
    <PackageReference Include="Testcontainers.PostgreSql" Version="4.3.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Update="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
  </ItemGroup>

  <ItemGroup>
@@ -0,0 +1,508 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisoryEndpointTests.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-020
// Description: Integration tests for canonical advisory API endpoints
// -----------------------------------------------------------------------------

using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using FluentAssertions;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.DependencyInjection;
using Moq;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.WebService.Extensions;
using StellaOps.Concelier.WebService.Tests.Fixtures;

namespace StellaOps.Concelier.WebService.Tests.Canonical;

public sealed class CanonicalAdvisoryEndpointTests : IAsyncLifetime
{
    private WebApplicationFactory<Program> _factory = null!;
    private HttpClient _client = null!;
    private readonly Mock<ICanonicalAdvisoryService> _serviceMock = new();

    private static readonly Guid TestCanonicalId = Guid.Parse("11111111-1111-1111-1111-111111111111");
    private const string TestCve = "CVE-2025-0001";
    private const string TestArtifactKey = "pkg:npm/lodash@4.17.21";
    private const string TestMergeHash = "sha256:abc123def456789";

    public Task InitializeAsync()
    {
        _factory = new WebApplicationFactory<Program>()
            .WithWebHostBuilder(builder =>
            {
                builder.UseEnvironment("Testing");
                builder.ConfigureServices(services =>
                {
                    // Remove existing ICanonicalAdvisoryService registration if any
                    var descriptor = services.FirstOrDefault(d =>
                        d.ServiceType == typeof(ICanonicalAdvisoryService));
                    if (descriptor != null)
                    {
                        services.Remove(descriptor);
                    }

                    // Register mock service
                    services.AddSingleton(_serviceMock.Object);
                });
            });

        _client = _factory.CreateClient();
        return Task.CompletedTask;
    }

    public Task DisposeAsync()
    {
        _client.Dispose();
        _factory.Dispose();
        return Task.CompletedTask;
    }

    #region GET /api/v1/canonical/{id}

    [Fact]
    public async Task GetById_ReturnsOk_WhenCanonicalExists()
    {
        // Arrange
        var canonical = CreateTestCanonical(TestCanonicalId, TestCve);
        _serviceMock
            .Setup(x => x.GetByIdAsync(TestCanonicalId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        // Act
        var response = await _client.GetAsync($"/api/v1/canonical/{TestCanonicalId}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryResponse>();
        content.Should().NotBeNull();
        content!.Id.Should().Be(TestCanonicalId);
        content.Cve.Should().Be(TestCve);
    }

    [Fact]
    public async Task GetById_ReturnsNotFound_WhenCanonicalDoesNotExist()
    {
        // Arrange
        var nonExistentId = Guid.NewGuid();
        _serviceMock
            .Setup(x => x.GetByIdAsync(nonExistentId, It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        // Act
        var response = await _client.GetAsync($"/api/v1/canonical/{nonExistentId}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    #endregion

    #region GET /api/v1/canonical?cve={cve}

    [Fact]
    public async Task QueryByCve_ReturnsCanonicals()
    {
        // Arrange
        var canonicals = new List<CanonicalAdvisory>
        {
            CreateTestCanonical(TestCanonicalId, TestCve),
            CreateTestCanonical(Guid.NewGuid(), TestCve)
        };
        _serviceMock
            .Setup(x => x.GetByCveAsync(TestCve, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonicals);

        // Act
        var response = await _client.GetAsync($"/api/v1/canonical?cve={TestCve}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.Items.Should().HaveCount(2);
        content.TotalCount.Should().Be(2);
    }

    [Fact]
    public async Task QueryByCve_ReturnsEmptyList_WhenNoneFound()
    {
        // Arrange
        _serviceMock
            .Setup(x => x.GetByCveAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new List<CanonicalAdvisory>());

        // Act
        var response = await _client.GetAsync("/api/v1/canonical?cve=CVE-9999-9999");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.Items.Should().BeEmpty();
        content.TotalCount.Should().Be(0);
    }

    #endregion

    #region GET /api/v1/canonical?artifact={artifact}

    [Fact]
    public async Task QueryByArtifact_ReturnsCanonicals()
    {
        // Arrange
        var canonicals = new List<CanonicalAdvisory>
        {
            CreateTestCanonical(TestCanonicalId, TestCve, TestArtifactKey)
        };
        _serviceMock
            .Setup(x => x.GetByArtifactAsync(TestArtifactKey, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonicals);

        // Act
        var response = await _client.GetAsync($"/api/v1/canonical?artifact={Uri.EscapeDataString(TestArtifactKey)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.Items.Should().HaveCount(1);
        content.Items[0].AffectsKey.Should().Be(TestArtifactKey);
    }

    #endregion

    #region GET /api/v1/canonical?mergeHash={mergeHash}

    [Fact]
    public async Task QueryByMergeHash_ReturnsCanonical()
    {
        // Arrange
        var canonical = CreateTestCanonical(TestCanonicalId, TestCve);
        _serviceMock
            .Setup(x => x.GetByMergeHashAsync(TestMergeHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(canonical);

        // Act
        var response = await _client.GetAsync($"/api/v1/canonical?mergeHash={Uri.EscapeDataString(TestMergeHash)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.Items.Should().HaveCount(1);
        content.TotalCount.Should().Be(1);
    }

    [Fact]
    public async Task QueryByMergeHash_ReturnsEmpty_WhenNotFound()
    {
        // Arrange
        _serviceMock
            .Setup(x => x.GetByMergeHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((CanonicalAdvisory?)null);

        // Act
        var response = await _client.GetAsync("/api/v1/canonical?mergeHash=sha256:nonexistent");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.Items.Should().BeEmpty();
        content.TotalCount.Should().Be(0);
    }

    #endregion

    #region GET /api/v1/canonical (pagination)

    [Fact]
    public async Task Query_SupportsPagination()
    {
        // Arrange
        var pagedResult = new PagedResult<CanonicalAdvisory>
        {
            Items = new List<CanonicalAdvisory> { CreateTestCanonical(TestCanonicalId, TestCve) },
            TotalCount = 100,
            Offset = 10,
            Limit = 25
        };
        _serviceMock
            .Setup(x => x.QueryAsync(It.Is<CanonicalQueryOptions>(o =>
                o.Offset == 10 && o.Limit == 25), It.IsAny<CancellationToken>()))
            .ReturnsAsync(pagedResult);

        // Act
        var response = await _client.GetAsync("/api/v1/canonical?offset=10&limit=25");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<CanonicalAdvisoryListResponse>();
        content.Should().NotBeNull();
        content!.TotalCount.Should().Be(100);
        content.Offset.Should().Be(10);
        content.Limit.Should().Be(25);
    }

    #endregion

    #region POST /api/v1/canonical/ingest/{source}

    [Fact]
    public async Task Ingest_ReturnsOk_WhenCreated()
    {
        // Arrange
        var ingestResult = IngestResult.Created(TestCanonicalId, TestMergeHash, Guid.NewGuid(), "nvd", "NVD-001");
        _serviceMock
            .Setup(x => x.IngestAsync(
                "nvd",
                It.Is<RawAdvisory>(a => a.Cve == TestCve),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(ingestResult);

        var request = new RawAdvisoryRequest
        {
            Cve = TestCve,
            AffectsKey = TestArtifactKey,
            VersionRangeJson = "{\"introduced\":\"1.0.0\",\"fixed\":\"1.2.0\"}",
            Severity = "high",
            Title = "Test vulnerability"
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/nvd", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<IngestResultResponse>();
        content.Should().NotBeNull();
        content!.Decision.Should().Be("Created");
        content.CanonicalId.Should().Be(TestCanonicalId);
        content.MergeHash.Should().Be(TestMergeHash);
    }

    [Fact]
    public async Task Ingest_ReturnsOk_WhenMerged()
    {
        // Arrange
        var ingestResult = IngestResult.Merged(TestCanonicalId, TestMergeHash, Guid.NewGuid(), "ghsa", "GHSA-001");
        _serviceMock
            .Setup(x => x.IngestAsync(
                "ghsa",
                It.IsAny<RawAdvisory>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(ingestResult);

        var request = new RawAdvisoryRequest
        {
            Cve = TestCve,
            AffectsKey = TestArtifactKey
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/ghsa", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<IngestResultResponse>();
        content.Should().NotBeNull();
        content!.Decision.Should().Be("Merged");
    }

    [Fact]
    public async Task Ingest_ReturnsConflict_WhenConflict()
    {
        // Arrange
        var ingestResult = IngestResult.Conflict(TestCanonicalId, TestMergeHash, "Version range mismatch", "nvd", "NVD-002");
        _serviceMock
            .Setup(x => x.IngestAsync(
                "nvd",
                It.IsAny<RawAdvisory>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(ingestResult);

        var request = new RawAdvisoryRequest
        {
            Cve = TestCve,
            AffectsKey = TestArtifactKey
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/nvd", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.Conflict);

        var content = await response.Content.ReadFromJsonAsync<IngestResultResponse>();
        content.Should().NotBeNull();
        content!.Decision.Should().Be("Conflict");
        content.ConflictReason.Should().Be("Version range mismatch");
    }

    [Fact]
    public async Task Ingest_ReturnsBadRequest_WhenCveMissing()
    {
        // Arrange
        var request = new RawAdvisoryRequest
        {
            AffectsKey = TestArtifactKey
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/nvd", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.BadRequest);
    }

    [Fact]
    public async Task Ingest_ReturnsBadRequest_WhenAffectsKeyMissing()
    {
        // Arrange
        var request = new RawAdvisoryRequest
        {
            Cve = TestCve
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/nvd", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.BadRequest);
    }

    #endregion

    #region POST /api/v1/canonical/ingest/{source}/batch

    [Fact]
    public async Task IngestBatch_ReturnsOk_WithSummary()
    {
        // Arrange
        var results = new List<IngestResult>
        {
            IngestResult.Created(Guid.NewGuid(), "hash1", Guid.NewGuid(), "nvd", "NVD-001"),
            IngestResult.Merged(Guid.NewGuid(), "hash2", Guid.NewGuid(), "nvd", "NVD-002"),
            IngestResult.Duplicate(Guid.NewGuid(), "hash3", "nvd", "NVD-003")
        };
        _serviceMock
            .Setup(x => x.IngestBatchAsync(
                "nvd",
                It.IsAny<IEnumerable<RawAdvisory>>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(results);

        var requests = new[]
        {
            new RawAdvisoryRequest { Cve = "CVE-2025-0001", AffectsKey = "pkg:npm/a@1" },
            new RawAdvisoryRequest { Cve = "CVE-2025-0002", AffectsKey = "pkg:npm/b@1" },
            new RawAdvisoryRequest { Cve = "CVE-2025-0003", AffectsKey = "pkg:npm/c@1" }
        };

        // Act
        var response = await _client.PostAsJsonAsync("/api/v1/canonical/ingest/nvd/batch", requests);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        var content = await response.Content.ReadFromJsonAsync<BatchIngestResultResponse>();
        content.Should().NotBeNull();
        content!.Results.Should().HaveCount(3);
        content.Summary.Total.Should().Be(3);
        content.Summary.Created.Should().Be(1);
        content.Summary.Merged.Should().Be(1);
        content.Summary.Duplicates.Should().Be(1);
        content.Summary.Conflicts.Should().Be(0);
    }
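
    // For reference, the summary assertions above correspond to a response body
    // shaped roughly like the following (an illustrative sketch; field casing and
    // exact layout depend on the serializer configuration):
    //
    //   {
    //     "results": [ { "decision": "Created" }, { "decision": "Merged" }, { "decision": "Duplicate" } ],
    //     "summary": { "total": 3, "created": 1, "merged": 1, "duplicates": 1, "conflicts": 0 }
    //   }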

    #endregion

    #region PATCH /api/v1/canonical/{id}/status

    [Fact]
    public async Task UpdateStatus_ReturnsOk_WhenValid()
    {
        // Arrange
        _serviceMock
            .Setup(x => x.UpdateStatusAsync(TestCanonicalId, CanonicalStatus.Withdrawn, It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        var request = new UpdateStatusRequest { Status = "Withdrawn" };

        // Act
        var response = await _client.PatchAsJsonAsync($"/api/v1/canonical/{TestCanonicalId}/status", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);

        _serviceMock.Verify(x => x.UpdateStatusAsync(
            TestCanonicalId,
            CanonicalStatus.Withdrawn,
            It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task UpdateStatus_ReturnsBadRequest_WhenInvalidStatus()
    {
        // Arrange
        var request = new UpdateStatusRequest { Status = "InvalidStatus" };

        // Act
        var response = await _client.PatchAsJsonAsync($"/api/v1/canonical/{TestCanonicalId}/status", request);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.BadRequest);
    }

    #endregion

    #region Helpers

    private static CanonicalAdvisory CreateTestCanonical(
        Guid id,
        string cve,
        string affectsKey = "pkg:npm/example@1")
    {
        return new CanonicalAdvisory
        {
            Id = id,
            Cve = cve,
            AffectsKey = affectsKey,
            MergeHash = TestMergeHash,
            Status = CanonicalStatus.Active,
            Severity = "high",
            Title = $"Test advisory for {cve}",
            CreatedAt = DateTimeOffset.UtcNow,
            UpdatedAt = DateTimeOffset.UtcNow,
            SourceEdges = new List<SourceEdge>
            {
                new SourceEdge
                {
                    Id = Guid.NewGuid(),
                    SourceName = "nvd",
                    SourceAdvisoryId = $"NVD-{cve}",
                    SourceDocHash = "sha256:doctest",
                    PrecedenceRank = 40,
                    FetchedAt = DateTimeOffset.UtcNow
                }
            }
        };
    }

    #endregion
}
@@ -13,6 +13,8 @@
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Moq" Version="4.20.72" />
    <PackageReference Update="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="../../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />

@@ -1,5 +1,9 @@
<Project>
  <PropertyGroup>
    <!-- Centralize NuGet package cache to prevent directory sprawl -->
    <RestorePackagesPath>$(MSBuildThisFileDirectory)../.nuget/packages</RestorePackagesPath>
    <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>

    <!-- Disable NuGet audit to prevent build failures when mirrors are unreachable -->
    <NuGetAudit>false</NuGetAudit>
    <WarningsNotAsErrors>$(WarningsNotAsErrors);NU1900;NU1901;NU1902;NU1903;NU1904</WarningsNotAsErrors>
@@ -32,23 +36,23 @@
    <Private>false</Private>
    <ExcludeAssets>runtime</ExcludeAssets>
  </ProjectReference>
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="SharpCompress" Version="0.41.0" />
  </ItemGroup>

  <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)' != 'false'">
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.0" />
    <Compile Include="$(ConcelierSharedTestsPath)AssemblyInfo.cs" Link="Shared\AssemblyInfo.cs" Condition="'$(ConcelierSharedTestsPath)' != ''" />
    <Compile Include="$(ConcelierSharedTestsPath)ConcelierFixtureCollection.cs" Link="Shared\ConcelierFixtureCollection.cs" Condition="'$(ConcelierSharedTestsPath)' != ''" />
    <ProjectReference Include="$(ConcelierTestingPath)StellaOps.Concelier.Testing.csproj" Condition="'$(ConcelierTestingPath)' != ''" />
    <Using Include="StellaOps.Concelier.Testing" />
    <Using Include="Xunit" />
  </ItemGroup>
</Project>
@@ -13,6 +13,7 @@ using StellaOps.Policy.Engine.ExceptionCache;
using StellaOps.Policy.Engine.Gates;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Vex;
using StellaOps.Policy.Engine.WhatIfSimulation;
@@ -292,6 +293,10 @@ public static class PolicyEngineServiceCollectionExtensions
    /// <summary>
    /// Adds all Policy Engine services with default configuration.
    /// </summary>
    /// <remarks>
    /// Includes core services, event pipeline, worker, explainer, and Evidence-Weighted Score services.
    /// EWS services are registered but only activate when <see cref="PolicyEvidenceWeightedScoreOptions.Enabled"/> is true.
    /// </remarks>
    public static IServiceCollection AddPolicyEngine(this IServiceCollection services)
    {
        services.AddPolicyEngineCore();
@@ -299,6 +304,10 @@ public static class PolicyEngineServiceCollectionExtensions
        services.AddPolicyEngineEventPipeline();
        services.AddPolicyEngineWorker();
        services.AddPolicyEngineExplainer();

        // Evidence-Weighted Score services (Sprint 8200.0012.0003)
        // Always registered; activation controlled by PolicyEvidenceWeightedScoreOptions.Enabled
        services.AddEvidenceWeightedScore();

        return services;
    }

@@ -313,6 +322,32 @@ public static class PolicyEngineServiceCollectionExtensions
        return services.AddPolicyEngine();
    }

    /// <summary>
    /// Adds all Policy Engine services with conditional EWS based on configuration.
    /// </summary>
    /// <remarks>
    /// Unlike <see cref="AddPolicyEngine()"/>, this method reads configuration at registration
    /// time and only registers EWS services if <see cref="PolicyEvidenceWeightedScoreOptions.Enabled"/>
    /// is true. Use this for zero-overhead deployments where EWS is disabled.
    /// </remarks>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root for reading options.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPolicyEngine(
        this IServiceCollection services,
        Microsoft.Extensions.Configuration.IConfiguration configuration)
    {
        services.AddPolicyEngineCore();
        services.AddPolicyEngineEventPipeline();
        services.AddPolicyEngineWorker();
        services.AddPolicyEngineExplainer();

        // Conditional EWS registration based on configuration
        services.AddEvidenceWeightedScoreIfEnabled(configuration);

        return services;
    }
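
    // A minimal usage sketch for the configuration-driven overload (host setup and
    // configuration key are assumptions for illustration, not part of this change):
    //
    //   var builder = WebApplication.CreateBuilder(args);
    //   // EWS services are registered only when the bound
    //   // PolicyEvidenceWeightedScoreOptions.Enabled flag resolves to true.
    //   builder.Services.AddPolicyEngine(builder.Configuration);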

    /// <summary>
    /// Adds exception integration services for automatic exception loading during policy evaluation.
    /// Requires IExceptionRepository to be registered.

@@ -43,6 +43,18 @@ internal sealed class PolicyEvaluator
    }

    public PolicyEvaluationResult Evaluate(PolicyEvaluationRequest request)
    {
        return Evaluate(request, injectedScore: null);
    }

    /// <summary>
    /// Evaluate a policy with an optional pre-computed EWS score.
    /// When injectedScore is provided, it will be used instead of computing EWS from context.
    /// This is primarily for testing score-based policy rules.
    /// </summary>
    public PolicyEvaluationResult Evaluate(
        PolicyEvaluationRequest request,
        global::StellaOps.Signals.EvidenceWeightedScore.EvidenceWeightedScoreResult? injectedScore)
    {
        if (request is null)
        {
@@ -54,8 +66,8 @@ internal sealed class PolicyEvaluator
            throw new ArgumentNullException(nameof(request.Document));
        }

-       // Pre-compute EWS so it's available during rule evaluation for score-based rules
-       var precomputedScore = PrecomputeEvidenceWeightedScore(request.Context);
+       // Use injected score if provided, otherwise compute from context
+       var precomputedScore = injectedScore ?? PrecomputeEvidenceWeightedScore(request.Context);

        var evaluator = new PolicyExpressionEvaluator(request.Context, precomputedScore);
        var orderedRules = request.Document.Rules

@@ -282,9 +282,34 @@ internal sealed class PolicyExpressionEvaluator
    {
        var leftValue = Evaluate(left, scope).Raw;
        var rightValue = Evaluate(right, scope).Raw;

        // For ScoreScope, use the numeric value for comparison
        if (leftValue is ScoreScope leftScope)
        {
            leftValue = leftScope.ScoreValue;
        }

        if (rightValue is ScoreScope rightScope)
        {
            rightValue = rightScope.ScoreValue;
        }

        // Normalize numeric types for comparison (decimal vs int, etc.)
        if (IsNumeric(leftValue) && IsNumeric(rightValue))
        {
            var leftDecimal = Convert.ToDecimal(leftValue, CultureInfo.InvariantCulture);
            var rightDecimal = Convert.ToDecimal(rightValue, CultureInfo.InvariantCulture);
            return new EvaluationValue(comparer(leftDecimal, rightDecimal));
        }

        return new EvaluationValue(comparer(leftValue, rightValue));
    }

    private static bool IsNumeric(object? value)
    {
        return value is decimal or double or float or int or long or short or byte;
    }

    private EvaluationValue CompareNumeric(PolicyExpression left, PolicyExpression right, EvaluationScope scope, Func<decimal, decimal, bool> comparer)
    {
        var leftValue = Evaluate(left, scope);
@@ -314,6 +339,13 @@ internal sealed class PolicyExpressionEvaluator
            return true;
        }

        // Support direct score comparisons (score >= 70)
        if (value.Raw is ScoreScope scoreScope)
        {
            number = scoreScope.ScoreValue;
            return true;
        }

        number = 0m;
        return false;
    }
@@ -384,6 +416,7 @@ internal sealed class PolicyExpressionEvaluator
        int i => i,
        long l => l,
        string s when decimal.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var value) => value,
        ScoreScope scoreScope => scoreScope.ScoreValue,
        _ => null,
    };
}
@@ -968,6 +1001,11 @@ internal sealed class PolicyExpressionEvaluator
        this.score = score;
    }

    /// <summary>
    /// Gets the numeric score value for direct comparison (e.g., score >= 80).
    /// </summary>
    public decimal ScoreValue => score.Score;

    public EvaluationValue Get(string member) => member.ToLowerInvariant() switch
    {
        // Core score value (allows direct comparison: score >= 80)

@@ -25,6 +25,7 @@ public static class EvidenceWeightedScoreServiceCollectionExtensions
/// - <see cref="IScoreEnrichmentCache"/> for caching (when enabled)
/// - <see cref="IDualEmitVerdictEnricher"/> for dual-emit mode
/// - <see cref="IMigrationTelemetryService"/> for migration metrics
/// - <see cref="IEwsTelemetryService"/> for calculation/cache telemetry
/// - <see cref="ConfidenceToEwsAdapter"/> for legacy score translation
/// </remarks>
/// <param name="services">Service collection.</param>
@@ -50,6 +51,9 @@ public static class EvidenceWeightedScoreServiceCollectionExtensions
    // Migration telemetry
    services.TryAddSingleton<IMigrationTelemetryService, MigrationTelemetryService>();

    // EWS telemetry (calculation duration, cache stats)
    services.TryAddSingleton<IEwsTelemetryService, EwsTelemetryService>();

    // Confidence adapter for legacy comparison
    services.TryAddSingleton<ConfidenceToEwsAdapter>();

@@ -0,0 +1,375 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-039 - Add telemetry: score calculation duration, cache hit rate

using System.Diagnostics;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;

namespace StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;

/// <summary>
/// Telemetry service for Evidence-Weighted Score metrics.
/// </summary>
/// <remarks>
/// Exposes the following metrics:
/// - stellaops.policy.ews.calculations_total: Total calculations performed
/// - stellaops.policy.ews.calculation_duration_ms: Calculation duration histogram
/// - stellaops.policy.ews.cache_hits_total: Cache hits
/// - stellaops.policy.ews.cache_misses_total: Cache misses
/// - stellaops.policy.ews.cache_hit_rate: Current cache hit rate (gauge)
/// - stellaops.policy.ews.scores_by_bucket: Score distribution by bucket
/// - stellaops.policy.ews.enabled: Whether EWS is enabled (gauge)
/// </remarks>
public interface IEwsTelemetryService
{
    /// <summary>
    /// Records a successful score calculation.
    /// </summary>
    void RecordCalculation(string bucket, TimeSpan duration, bool fromCache);

    /// <summary>
    /// Records a failed calculation.
    /// </summary>
    void RecordFailure(string reason);

    /// <summary>
    /// Records a skipped calculation (feature disabled).
    /// </summary>
    void RecordSkipped();

    /// <summary>
    /// Updates cache statistics.
    /// </summary>
    void UpdateCacheStats(long hits, long misses, int count);

    /// <summary>
    /// Gets current telemetry snapshot.
    /// </summary>
    EwsTelemetrySnapshot GetSnapshot();
}

/// <summary>
/// Snapshot of current EWS telemetry state.
/// </summary>
public sealed record EwsTelemetrySnapshot
{
    public required long TotalCalculations { get; init; }
    public required long CacheHits { get; init; }
    public required long CacheMisses { get; init; }
    public required long Failures { get; init; }
    public required long Skipped { get; init; }
    public required double AverageCalculationDurationMs { get; init; }
    public required double P95CalculationDurationMs { get; init; }
    public required double CacheHitRate { get; init; }
    public required int CurrentCacheSize { get; init; }
    public required IReadOnlyDictionary<string, long> ScoresByBucket { get; init; }
    public required bool IsEnabled { get; init; }
    public required DateTimeOffset SnapshotTime { get; init; }
}

/// <summary>
/// Implementation of EWS telemetry using System.Diagnostics.Metrics.
/// </summary>
public sealed class EwsTelemetryService : IEwsTelemetryService
{
    private static readonly Meter s_meter = new("StellaOps.Policy.EvidenceWeightedScore", "1.0.0");

    // Counters
    private readonly Counter<long> _calculationsTotal;
    private readonly Counter<long> _cacheHitsTotal;
    private readonly Counter<long> _cacheMissesTotal;
    private readonly Counter<long> _failuresTotal;
    private readonly Counter<long> _skippedTotal;
    private readonly Counter<long> _scoresByBucket;

    // Histograms
    private readonly Histogram<double> _calculationDuration;

    // Gauges (observable)
    private readonly ObservableGauge<double> _cacheHitRate;
    private readonly ObservableGauge<int> _cacheSize;
    private readonly ObservableGauge<int> _enabledGauge;

    // Internal state for observable gauges
    private long _totalHits;
    private long _totalMisses;
    private int _cacheCount;

    // For aggregated statistics
    private readonly object _lock = new();
    private long _totalCalculations;
    private long _failures;
    private long _skipped;
    private readonly Dictionary<string, long> _bucketCounts = new(StringComparer.OrdinalIgnoreCase);
    private readonly List<double> _recentDurations = new(1000);
    private int _durationIndex;
    private const int MaxRecentDurations = 1000;

    private readonly IOptionsMonitor<PolicyEvidenceWeightedScoreOptions> _options;

    public EwsTelemetryService(IOptionsMonitor<PolicyEvidenceWeightedScoreOptions> options)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));

        // Initialize counters
        _calculationsTotal = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.calculations_total",
            unit: "{calculations}",
            description: "Total number of EWS calculations performed");

        _cacheHitsTotal = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.cache_hits_total",
            unit: "{hits}",
            description: "Total number of EWS cache hits");

        _cacheMissesTotal = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.cache_misses_total",
            unit: "{misses}",
            description: "Total number of EWS cache misses");

        _failuresTotal = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.failures_total",
            unit: "{failures}",
            description: "Total number of EWS calculation failures");

        _skippedTotal = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.skipped_total",
            unit: "{skipped}",
            description: "Total number of skipped EWS calculations (feature disabled)");

        _scoresByBucket = s_meter.CreateCounter<long>(
            "stellaops.policy.ews.scores_by_bucket",
            unit: "{scores}",
            description: "Score distribution by bucket");

        // Initialize histogram
        _calculationDuration = s_meter.CreateHistogram<double>(
            "stellaops.policy.ews.calculation_duration_ms",
            unit: "ms",
            description: "EWS calculation duration in milliseconds");

        // Initialize observable gauges
        _cacheHitRate = s_meter.CreateObservableGauge(
            "stellaops.policy.ews.cache_hit_rate",
            () => GetCacheHitRate(),
            unit: "{ratio}",
            description: "Current EWS cache hit rate (0-1)");

        _cacheSize = s_meter.CreateObservableGauge(
            "stellaops.policy.ews.cache_size",
            () => _cacheCount,
            unit: "{entries}",
            description: "Current EWS cache size");

        _enabledGauge = s_meter.CreateObservableGauge(
            "stellaops.policy.ews.enabled",
            () => _options.CurrentValue.Enabled ? 1 : 0,
            unit: "{boolean}",
            description: "Whether EWS is currently enabled (1=enabled, 0=disabled)");
    }
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordCalculation(string bucket, TimeSpan duration, bool fromCache)
|
||||
{
|
||||
var durationMs = duration.TotalMilliseconds;
|
||||
|
||||
// Update counters
|
||||
_calculationsTotal.Add(1);
|
||||
_calculationDuration.Record(durationMs);
|
||||
_scoresByBucket.Add(1, new KeyValuePair<string, object?>("bucket", bucket));
|
||||
|
||||
if (fromCache)
|
||||
{
|
||||
_cacheHitsTotal.Add(1);
|
||||
Interlocked.Increment(ref _totalHits);
|
||||
}
|
||||
else
|
||||
{
|
||||
_cacheMissesTotal.Add(1);
|
||||
Interlocked.Increment(ref _totalMisses);
|
||||
}
|
||||
|
||||
// Update internal state for snapshots
|
||||
lock (_lock)
|
||||
{
|
||||
_totalCalculations++;
|
||||
|
||||
if (!_bucketCounts.TryGetValue(bucket, out var count))
|
||||
{
|
||||
_bucketCounts[bucket] = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
_bucketCounts[bucket] = count + 1;
|
||||
}
|
||||
|
||||
// Circular buffer for recent durations
|
||||
if (_recentDurations.Count < MaxRecentDurations)
|
||||
{
|
||||
_recentDurations.Add(durationMs);
|
||||
}
|
||||
else
|
||||
{
|
||||
_recentDurations[_durationIndex] = durationMs;
|
||||
_durationIndex = (_durationIndex + 1) % MaxRecentDurations;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordFailure(string reason)
|
||||
{
|
||||
_failuresTotal.Add(1, new KeyValuePair<string, object?>("reason", reason));
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
_failures++;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordSkipped()
|
||||
{
|
||||
_skippedTotal.Add(1);
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
_skipped++;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void UpdateCacheStats(long hits, long misses, int count)
|
||||
{
|
||||
Interlocked.Exchange(ref _totalHits, hits);
|
||||
Interlocked.Exchange(ref _totalMisses, misses);
|
||||
Interlocked.Exchange(ref _cacheCount, count);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public EwsTelemetrySnapshot GetSnapshot()
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
var (avgDuration, p95Duration) = CalculateDurationStats();
|
||||
|
||||
return new EwsTelemetrySnapshot
|
||||
{
|
||||
TotalCalculations = _totalCalculations,
|
||||
CacheHits = Interlocked.Read(ref _totalHits),
|
||||
CacheMisses = Interlocked.Read(ref _totalMisses),
|
||||
Failures = _failures,
|
||||
Skipped = _skipped,
|
||||
AverageCalculationDurationMs = avgDuration,
|
||||
P95CalculationDurationMs = p95Duration,
|
||||
CacheHitRate = GetCacheHitRate(),
|
||||
CurrentCacheSize = _cacheCount,
|
||||
ScoresByBucket = new Dictionary<string, long>(_bucketCounts),
|
||||
IsEnabled = _options.CurrentValue.Enabled,
|
||||
SnapshotTime = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private double GetCacheHitRate()
|
||||
{
|
||||
var hits = Interlocked.Read(ref _totalHits);
|
||||
var misses = Interlocked.Read(ref _totalMisses);
|
||||
var total = hits + misses;
|
||||
return total == 0 ? 0.0 : (double)hits / total;
|
||||
}
|
||||
|
||||
private (double Average, double P95) CalculateDurationStats()
|
||||
{
|
||||
if (_recentDurations.Count == 0)
|
||||
{
|
||||
return (0.0, 0.0);
|
||||
}
|
||||
|
||||
var sorted = _recentDurations.ToArray();
|
||||
Array.Sort(sorted);
|
||||
|
||||
var average = sorted.Average();
|
||||
var p95Index = (int)(sorted.Length * 0.95);
|
||||
var p95 = sorted[Math.Min(p95Index, sorted.Length - 1)];
|
||||
|
||||
return (average, p95);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for EWS telemetry reporting.
|
||||
/// </summary>
|
||||
public static class EwsTelemetryExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Formats the telemetry snapshot as a summary report.
|
||||
/// </summary>
|
||||
public static string ToReport(this EwsTelemetrySnapshot snapshot)
|
||||
{
|
||||
var bucketLines = snapshot.ScoresByBucket.Count > 0
|
||||
? string.Join("\n", snapshot.ScoresByBucket.Select(kv => $" - {kv.Key}: {kv.Value}"))
|
||||
: " (none)";
|
||||
|
||||
return $"""
|
||||
EWS Telemetry Report
|
||||
====================
|
||||
Generated: {snapshot.SnapshotTime:O}
|
||||
Enabled: {snapshot.IsEnabled}
|
||||
|
||||
Calculations:
|
||||
Total: {snapshot.TotalCalculations}
|
||||
Failures: {snapshot.Failures}
|
||||
Skipped: {snapshot.Skipped}
|
||||
|
||||
Performance:
|
||||
Avg Duration: {snapshot.AverageCalculationDurationMs:F2}ms
|
||||
P95 Duration: {snapshot.P95CalculationDurationMs:F2}ms
|
||||
|
||||
Cache:
|
||||
Size: {snapshot.CurrentCacheSize}
|
||||
Hits: {snapshot.CacheHits}
|
||||
Misses: {snapshot.CacheMisses}
|
||||
Hit Rate: {snapshot.CacheHitRate:P1}
|
||||
|
||||
Scores by Bucket:
|
||||
{bucketLines}
|
||||
""";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Formats the telemetry snapshot as a single-line summary.
|
||||
/// </summary>
|
||||
public static string ToSummaryLine(this EwsTelemetrySnapshot snapshot)
|
||||
{
|
||||
return $"EWS: {snapshot.TotalCalculations} calcs, " +
|
||||
$"{snapshot.Failures} failures, " +
|
||||
$"avg={snapshot.AverageCalculationDurationMs:F1}ms, " +
|
||||
$"p95={snapshot.P95CalculationDurationMs:F1}ms, " +
|
||||
$"cache={snapshot.CacheHitRate:P0} hit rate";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets Prometheus-compatible metric lines.
|
||||
/// </summary>
|
||||
public static IEnumerable<string> ToPrometheusMetrics(this EwsTelemetrySnapshot snapshot)
|
||||
{
|
||||
yield return $"stellaops_policy_ews_enabled {(snapshot.IsEnabled ? 1 : 0)}";
|
||||
yield return $"stellaops_policy_ews_calculations_total {snapshot.TotalCalculations}";
|
||||
yield return $"stellaops_policy_ews_failures_total {snapshot.Failures}";
|
||||
yield return $"stellaops_policy_ews_skipped_total {snapshot.Skipped}";
|
||||
yield return $"stellaops_policy_ews_cache_hits_total {snapshot.CacheHits}";
|
||||
yield return $"stellaops_policy_ews_cache_misses_total {snapshot.CacheMisses}";
|
||||
yield return $"stellaops_policy_ews_cache_size {snapshot.CurrentCacheSize}";
|
||||
yield return $"stellaops_policy_ews_cache_hit_rate {snapshot.CacheHitRate:F4}";
|
||||
yield return $"stellaops_policy_ews_calculation_duration_avg_ms {snapshot.AverageCalculationDurationMs:F2}";
|
||||
yield return $"stellaops_policy_ews_calculation_duration_p95_ms {snapshot.P95CalculationDurationMs:F2}";
|
||||
|
||||
foreach (var (bucket, count) in snapshot.ScoresByBucket)
|
||||
{
|
||||
yield return $"stellaops_policy_ews_scores_by_bucket{{bucket=\"{bucket}\"}} {count}";
|
||||
}
|
||||
}
|
||||
}
|
||||
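
// Usage sketch (illustrative only; assumes the service is registered in DI and the
// options type is bound — the variable names below are hypothetical):
//
//   IEwsTelemetryService telemetry = provider.GetRequiredService<IEwsTelemetryService>();
//   telemetry.RecordCalculation("ScheduleNext", TimeSpan.FromMilliseconds(12.4), fromCache: false);
//   var snapshot = telemetry.GetSnapshot();
//   Console.WriteLine(snapshot.ToSummaryLine());
//   foreach (var line in snapshot.ToPrometheusMetrics())
//   {
//       Console.WriteLine(line); // e.g. "stellaops_policy_ews_calculations_total 1"
//   }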
@@ -25,6 +25,18 @@ internal sealed partial class PolicyEvaluationService
    }

    internal Evaluation.PolicyEvaluationResult Evaluate(PolicyIrDocument document, Evaluation.PolicyEvaluationContext context)
    {
        return Evaluate(document, context, evidenceWeightedScore: null);
    }

    /// <summary>
    /// Evaluate a policy with an optional pre-computed EWS score.
    /// This overload is primarily for testing score-based policy rules.
    /// </summary>
    internal Evaluation.PolicyEvaluationResult Evaluate(
        PolicyIrDocument document,
        Evaluation.PolicyEvaluationContext context,
        global::StellaOps.Signals.EvidenceWeightedScore.EvidenceWeightedScoreResult? evidenceWeightedScore)
    {
        if (document is null)
        {
@@ -37,7 +49,7 @@ internal sealed partial class PolicyEvaluationService
        }

        var request = new Evaluation.PolicyEvaluationRequest(document, context);
        return _evaluator.Evaluate(request);
        return _evaluator.Evaluate(request, evidenceWeightedScore);
    }

    // PathScopeSimulationService partial class relies on _pathMetrics.
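
// Usage sketch for the new overload (illustrative; the score literal and variable
// names are hypothetical, mirroring the test helpers in the files below):
//
//   var precomputed = CreateTestScore(82, ScoreBucket.ActNow); // see test helpers below
//   var result = evaluationService.Evaluate(document, context, precomputed);
//   // Score-based rules such as `when score >= 80` now see the injected EWS value.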
@@ -0,0 +1,450 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-FileCopyrightText: 2025 StellaOps Contributors
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-031 - Add attestation verification tests with scoring proofs

using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Policy.Engine.Attestation;
using StellaOps.Signals.EvidenceWeightedScore;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Attestation;

/// <summary>
/// Tests for scoring determinism verification in attestations.
/// Verifies that attested scores can be reproduced from their proofs.
/// </summary>
[Trait("Category", "Unit")]
[Trait("Sprint", "8200.0012.0003")]
public sealed class ScoringDeterminismVerifierTests
{
    private readonly IScoringDeterminismVerifier _verifier;
    private readonly IEvidenceWeightedScoreCalculator _calculator;

    public ScoringDeterminismVerifierTests()
    {
        _calculator = new EvidenceWeightedScoreCalculator();
        _verifier = new ScoringDeterminismVerifier(
            _calculator,
            NullLogger<ScoringDeterminismVerifier>.Instance);
    }

    #region Successful Verification Tests

    [Fact]
    public void Verify_ValidProof_ReturnsSuccess()
    {
        // Arrange - Create EWS with proof using actual calculator
        var ews = CreateValidEwsWithProof();

        // Act
        var result = _verifier.Verify(ews);

        // Assert - Score should be reproducible (attested == recalculated)
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(result.RecalculatedScore);
        result.Difference.Should().Be(0);
        result.Error.Should().BeNull();
    }

    [Fact]
    public void Verify_HighScore_ReproducesCorrectly()
    {
        // Arrange - High evidence scenario
        var ews = CreateEwsWithInputs(
            rch: 0.9, rts: 0.8, bkp: 0.1, xpl: 0.95, src: 0.7, mit: 0.05);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(result.RecalculatedScore);
    }

    [Fact]
    public void Verify_LowScore_ReproducesCorrectly()
    {
        // Arrange - Low evidence scenario
        var ews = CreateEwsWithInputs(
            rch: 0.1, rts: 0.2, bkp: 0.9, xpl: 0.15, src: 0.95, mit: 0.8);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(result.RecalculatedScore);
    }

    [Fact]
    public void Verify_BoundaryScore_Zero_ReproducesCorrectly()
    {
        // Arrange - Minimum score scenario
        var ews = CreateEwsWithInputs(
            rch: 0.0, rts: 0.0, bkp: 0.0, xpl: 0.0, src: 0.0, mit: 1.0);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Verify_BoundaryScore_Max_ReproducesCorrectly()
    {
        // Arrange - Maximum score scenario
        var ews = CreateEwsWithInputs(
            rch: 1.0, rts: 1.0, bkp: 1.0, xpl: 1.0, src: 1.0, mit: 0.0);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    #endregion

    #region Missing Proof Tests

    [Fact]
    public void Verify_NullEws_ReturnsSkipped()
    {
        // Act
        var result = _verifier.Verify(null);

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(0);
        result.RecalculatedScore.Should().Be(0);
    }

    [Fact]
    public void Verify_EwsWithoutProof_ReturnsMissingProof()
    {
        // Arrange
        var ews = new VerdictEvidenceWeightedScore(
            score: 50,
            bucket: "Investigate",
            proof: null);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("No scoring proof available");
    }

    #endregion

    #region Predicate Verification Tests

    [Fact]
    public void VerifyPredicate_NullPredicate_ReturnsSkipped()
    {
        // Act
        var result = _verifier.VerifyPredicate(null);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void VerifyPredicate_PredicateWithValidEws_ReturnsSuccess()
    {
        // Arrange - Create EWS with proof using actual calculator
        var ews = CreateValidEwsWithProof();
        var predicate = CreatePredicateWithEws(ews);

        // Act
        var result = _verifier.VerifyPredicate(predicate);

        // Assert - Score should be reproducible
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(result.RecalculatedScore);
    }

    [Fact]
    public void VerifyPredicate_PredicateWithoutEws_ReturnsSkipped()
    {
        // Arrange
        var predicate = CreatePredicateWithEws(null);

        // Act
        var result = _verifier.VerifyPredicate(predicate);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    #endregion

    #region Factory Tests

    [Fact]
    public void Factory_Create_ReturnsWorkingVerifier()
    {
        // Arrange & Act
        var verifier = ScoringDeterminismVerifierFactory.Create(
            NullLogger<ScoringDeterminismVerifier>.Instance);

        // Assert
        verifier.Should().NotBeNull();
        verifier.Should().BeOfType<ScoringDeterminismVerifier>();
    }

    [Fact]
    public void Factory_CreatedVerifier_VerifiesCorrectly()
    {
        // Arrange
        var verifier = ScoringDeterminismVerifierFactory.Create(
            NullLogger<ScoringDeterminismVerifier>.Instance);
        var ews = CreateValidEwsWithProof();

        // Act
        var result = verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    #endregion

    #region Verification Result Tests

    [Fact]
    public void ScoringVerificationResult_Success_HasCorrectProperties()
    {
        // Act
        var result = ScoringVerificationResult.Success(75);

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(75);
        result.RecalculatedScore.Should().Be(75);
        result.Difference.Should().Be(0);
        result.Error.Should().BeNull();
    }

    [Fact]
    public void ScoringVerificationResult_ScoreMismatch_HasCorrectProperties()
    {
        // Act
        var result = ScoringVerificationResult.ScoreMismatch(80, 75);

        // Assert
        result.IsValid.Should().BeFalse();
        result.AttestedScore.Should().Be(80);
        result.RecalculatedScore.Should().Be(75);
        result.Difference.Should().Be(5);
        result.Error.Should().Contain("mismatch");
        result.Error.Should().Contain("80");
        result.Error.Should().Contain("75");
    }

    [Fact]
    public void ScoringVerificationResult_MissingProof_HasCorrectProperties()
    {
        // Act
        var result = ScoringVerificationResult.MissingProof(65);

        // Assert
        result.IsValid.Should().BeFalse();
        result.AttestedScore.Should().Be(65);
        result.RecalculatedScore.Should().Be(0);
        result.Error.Should().Contain("No scoring proof");
    }

    [Fact]
    public void ScoringVerificationResult_Skipped_HasCorrectProperties()
    {
        // Act
        var result = ScoringVerificationResult.Skipped();

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(0);
        result.RecalculatedScore.Should().Be(0);
        result.Difference.Should().Be(0);
        result.Error.Should().BeNull();
    }

    #endregion

    #region Edge Cases

    [Theory]
    [InlineData(0.0, 0.0, 0.0, 0.0, 0.0, 0.0)]
    [InlineData(0.5, 0.5, 0.5, 0.5, 0.5, 0.5)]
    [InlineData(1.0, 1.0, 1.0, 1.0, 1.0, 1.0)]
    [InlineData(0.1, 0.9, 0.3, 0.7, 0.5, 0.2)]
    public void Verify_VariousInputCombinations_AlwaysReproducible(
        double rch, double rts, double bkp, double xpl, double src, double mit)
    {
        // Arrange
        var ews = CreateEwsWithInputs(rch, rts, bkp, xpl, src, mit);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue(
            $"Score should be reproducible for inputs (rch={rch}, rts={rts}, bkp={bkp}, xpl={xpl}, src={src}, mit={mit})");
        result.AttestedScore.Should().Be(result.RecalculatedScore);
    }

    [Fact]
    public void Verify_CustomWeights_ReproducesCorrectly()
    {
        // Arrange - Use custom weights different from default
        var inputs = new VerdictEvidenceInputs(
            reachability: 0.8,
            runtime: 0.6,
            backport: 0.4,
            exploit: 0.9,
            sourceTrust: 0.7,
            mitigation: 0.2);

        var weights = new VerdictEvidenceWeights(
            reachability: 0.30, // Custom weight
            runtime: 0.10,      // Custom weight
            backport: 0.15,     // Custom weight
            exploit: 0.25,      // Custom weight
            sourceTrust: 0.10,  // Custom weight
            mitigation: 0.10);  // Custom weight

        // Calculate expected score
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "test",
            Rch = inputs.Reachability,
            Rts = inputs.Runtime,
            Bkp = inputs.Backport,
            Xpl = inputs.Exploit,
            Src = inputs.SourceTrust,
            Mit = inputs.Mitigation
        };

        var ewsWeights = new EvidenceWeights
        {
            Rch = weights.Reachability,
            Rts = weights.Runtime,
            Bkp = weights.Backport,
            Xpl = weights.Exploit,
            Src = weights.SourceTrust,
            Mit = weights.Mitigation
        };

        var policy = new EvidenceWeightPolicy { Version = "test", Profile = "test", Weights = ewsWeights };
        var ewsResult = _calculator.Calculate(input, policy);

        var proof = new VerdictScoringProof(
            inputs: inputs,
            weights: weights,
            policyDigest: "sha256:test",
            calculatorVersion: "1.0.0",
            calculatedAt: DateTimeOffset.UtcNow);

        var ews = new VerdictEvidenceWeightedScore(
            score: ewsResult.Score,
            bucket: ewsResult.Bucket.ToString(),
            proof: proof);

        // Act
        var result = _verifier.Verify(ews);

        // Assert
        result.IsValid.Should().BeTrue();
        result.AttestedScore.Should().Be(ewsResult.Score);
    }

    #endregion

    #region Helper Methods

    private VerdictEvidenceWeightedScore CreateValidEwsWithProof()
    {
        // Delegate to CreateEwsWithInputs with standard test values
        return CreateEwsWithInputs(
            rch: 0.7, rts: 0.5, bkp: 0.3, xpl: 0.8, src: 0.6, mit: 0.2);
    }

    private VerdictEvidenceWeightedScore CreateEwsWithInputs(
        double rch, double rts, double bkp, double xpl, double src, double mit)
    {
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "test-finding",
            Rch = rch,
            Rts = rts,
            Bkp = bkp,
            Xpl = xpl,
            Src = src,
            Mit = mit
        };

        var policy = new EvidenceWeightPolicy
        {
            Version = "test",
            Profile = "test",
            Weights = new EvidenceWeights
            {
                Rch = 0.25,
                Rts = 0.15,
                Bkp = 0.10,
                Xpl = 0.25,
                Src = 0.10,
                Mit = 0.15
            }
        };
        var ewsResult = _calculator.Calculate(input, policy);

        var inputs = new VerdictEvidenceInputs(
            reachability: rch,
            runtime: rts,
            backport: bkp,
            exploit: xpl,
            sourceTrust: src,
            mitigation: mit);

        var weights = new VerdictEvidenceWeights(
            reachability: ewsResult.Weights.Rch,
            runtime: ewsResult.Weights.Rts,
            backport: ewsResult.Weights.Bkp,
            exploit: ewsResult.Weights.Xpl,
            sourceTrust: ewsResult.Weights.Src,
            mitigation: ewsResult.Weights.Mit);

        var proof = new VerdictScoringProof(
            inputs: inputs,
            weights: weights,
            policyDigest: "sha256:test",
            calculatorVersion: "1.0.0",
            calculatedAt: DateTimeOffset.UtcNow);

        return new VerdictEvidenceWeightedScore(
            score: ewsResult.Score,
            bucket: ewsResult.Bucket.ToString(),
            proof: proof);
    }

    private static VerdictPredicate CreatePredicateWithEws(VerdictEvidenceWeightedScore? ews)
    {
        return new VerdictPredicate(
            tenantId: "test-tenant",
            policyId: "test-policy",
            policyVersion: 1,
            runId: "test-run",
            findingId: "test-finding",
            evaluatedAt: DateTimeOffset.UtcNow,
            verdict: new VerdictInfo("pass", "low", 2.5),
            evidenceWeightedScore: ews);
    }

    #endregion
}
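
// Usage sketch (illustrative): wiring the verifier into an attestation check,
// assuming a VerdictPredicate deserialized from a signed attestation:
//
//   var verifier = ScoringDeterminismVerifierFactory.Create(logger);
//   var verification = verifier.VerifyPredicate(predicate);
//   if (!verification.IsValid)
//   {
//       // Reject the attestation; the attested and recalculated
//       // scores differ by verification.Difference.
//   }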
@@ -0,0 +1,410 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-FileCopyrightText: 2025 StellaOps Contributors
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-015 - Add property tests: rule monotonicity

using System.Collections.Immutable;
using FluentAssertions;
using FsCheck;
using FsCheck.Xunit;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Exceptions.Models;
using StellaOps.Policy.Unknowns.Models;
using StellaOps.PolicyDsl;
using StellaOps.Signals.EvidenceWeightedScore;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Evaluation;

/// <summary>
/// Property-based tests for score-based policy rule monotonicity.
/// Verifies that higher scores lead to stricter verdicts when using score-based rules.
/// </summary>
[Trait("Category", "Property")]
[Trait("Sprint", "8200.0012.0003")]
public sealed class ScoreBasedRuleMonotonicityPropertyTests
{
    private readonly PolicyCompiler _compiler = new();

    #region Monotonicity Property Tests

    [Property(DisplayName = "Score threshold rules are monotonic: higher scores trigger more rules", MaxTest = 50)]
    public Property HigherScore_TriggersMoreOrEqualRules()
    {
        return Prop.ForAll(
            ScoreArbs.TwoDistinctScores(),
            pair =>
            {
                var (lowScore, highScore) = (Math.Min(pair.Item1, pair.Item2), Math.Max(pair.Item1, pair.Item2));
                if (lowScore == highScore) return true.ToProperty(); // Skip equal scores

                // Create a policy with multiple score threshold rules
                var policy = CompilePolicy("""
                    policy "ThresholdMonotonicity" syntax "stella-dsl@1" {
                        rule low_threshold {
                            when score >= 30
                            then status := "low_triggered"
                            because "Score above 30"
                        }
                        rule medium_threshold {
                            when score >= 60
                            then status := "medium_triggered"
                            because "Score above 60"
                        }
                        rule high_threshold {
                            when score >= 90
                            then status := "high_triggered"
                            because "Score above 90"
                        }
                    }
                    """);

                var context = CreateTestContext();
                var lowScoreResult = CreateTestScore(lowScore);
                var highScoreResult = CreateTestScore(highScore);

                var lowEvaluator = new PolicyExpressionEvaluator(context, lowScoreResult);
                var highEvaluator = new PolicyExpressionEvaluator(context, highScoreResult);

                // Count how many threshold rules are triggered for each score
                var lowTriggeredCount = CountTriggeredThresholds(lowEvaluator, policy);
                var highTriggeredCount = CountTriggeredThresholds(highEvaluator, policy);

                // Higher score should trigger >= number of rules
                return (highTriggeredCount >= lowTriggeredCount)
                    .Label($"Low={lowScore}→{lowTriggeredCount}, High={highScore}→{highTriggeredCount}");
            });
    }

    [Property(DisplayName = "Score comparison is transitive: if A > B and B > C, verdict strictness follows", MaxTest = 50)]
    public Property ScoreComparison_IsTransitive()
    {
        return Prop.ForAll(
            ScoreArbs.ThreeDistinctScores(),
            triple =>
            {
                var sorted = new[] { triple.Item1, triple.Item2, triple.Item3 }.OrderBy(x => x).ToArray();
                var (low, mid, high) = (sorted[0], sorted[1], sorted[2]);

                if (low == mid || mid == high) return true.ToProperty(); // Skip equal scores

                var policy = CompilePolicy("""
                    policy "Transitive" syntax "stella-dsl@1" {
                        rule threshold_50 {
                            when score >= 50
                            then status := "triggered"
                            because "Score above 50"
                        }
                    }
                    """);

                var context = CreateTestContext();
                var lowResult = EvaluateScoreThreshold(context, policy, low);
                var midResult = EvaluateScoreThreshold(context, policy, mid);
                var highResult = EvaluateScoreThreshold(context, policy, high);

                // If high triggers and mid doesn't (when mid >= threshold), that violates transitivity
                // If mid triggers and low doesn't (when low >= threshold), that's fine (monotonic)
                var isTransitive = true;

                if (highResult && !midResult && mid >= 50)
                {
                    isTransitive = false; // Violates transitivity
                }

                if (midResult && !lowResult && low >= 50)
                {
                    isTransitive = false; // Violates transitivity
                }

                return isTransitive
                    .Label($"Low={low}→{lowResult}, Mid={mid}→{midResult}, High={high}→{highResult}");
            });
    }

    [Property(DisplayName = "Bucket priority is consistent: ActNow > ScheduleNext > Investigate > Watchlist", MaxTest = 20)]
    public Property BucketPriority_IsOrdered()
    {
        return Prop.ForAll(
            ScoreArbs.TwoBucketIndices(),
            pair =>
            {
                var (bucket1Index, bucket2Index) = pair;
                if (bucket1Index == bucket2Index) return true.ToProperty();

                var buckets = new[] { ScoreBucket.ActNow, ScoreBucket.ScheduleNext, ScoreBucket.Investigate, ScoreBucket.Watchlist };
                var bucket1 = buckets[bucket1Index];
                var bucket2 = buckets[bucket2Index];

                // Lower index = stricter bucket
                var stricterIndex = Math.Min(bucket1Index, bucket2Index);
                var lesserIndex = Math.Max(bucket1Index, bucket2Index);
                var stricterBucket = buckets[stricterIndex];
                var lesserBucket = buckets[lesserIndex];

                var policy = CompilePolicy("""
                    policy "BucketOrder" syntax "stella-dsl@1" {
                        rule act_now_rule {
                            when score.is_act_now
                            then status := "critical"
                            because "ActNow bucket"
                        }
                        rule schedule_next_rule {
                            when score.is_schedule_next
                            then status := "high"
                            because "ScheduleNext bucket"
                        }
                        rule investigate_rule {
                            when score.is_investigate
                            then status := "medium"
                            because "Investigate bucket"
                        }
                        rule watchlist_rule {
                            when score.is_watchlist
                            then status := "low"
                            because "Watchlist bucket"
                        }
                    }
                    """);

                var context = CreateTestContext();

                // Create scores with different buckets
                var stricterScore = CreateTestScoreWithBucket(80, stricterBucket);
                var lesserScore = CreateTestScoreWithBucket(40, lesserBucket);

                var stricterEvaluator = new PolicyExpressionEvaluator(context, stricterScore);
                var lesserEvaluator = new PolicyExpressionEvaluator(context, lesserScore);

                // Get which rule index triggers for each bucket
                var stricterRuleIndex = GetBucketRuleIndex(stricterEvaluator, policy);
                var lesserRuleIndex = GetBucketRuleIndex(lesserEvaluator, policy);

                // Stricter bucket should trigger an earlier (stricter) rule
                return (stricterRuleIndex <= lesserRuleIndex)
                    .Label($"Stricter={stricterBucket}→rule{stricterRuleIndex}, Lesser={lesserBucket}→rule{lesserRuleIndex}");
            });
    }

    [Property(DisplayName = "Score comparisons are antisymmetric: if A > B, then not (B > A)", MaxTest = 50)]
    public Property ScoreComparison_IsAntisymmetric()
    {
        return Prop.ForAll(
            ScoreArbs.TwoDistinctScores(),
            pair =>
            {
                var (score1, score2) = pair;
                if (score1 == score2) return true.ToProperty();

                var policy = CompilePolicy("""
                    policy "Antisymmetric" syntax "stella-dsl@1" {
                        rule greater_than_50 {
                            when score > 50
                            then status := "above_50"
                            because "Score above 50"
                        }
                    }
                    """);

                var context = CreateTestContext();
                var result1 = EvaluateScoreThreshold(context, policy, score1);
                var result2 = EvaluateScoreThreshold(context, policy, score2);

                // If both trigger or both don't trigger, that's fine
                // If one triggers and the other doesn't, it must be due to threshold position
                if (result1 == result2) return true.ToProperty();

                // If score1 > score2 and only one triggers, verify threshold positioning
                if (score1 > score2)
                {
                    // If result1 triggered and result2 didn't, score2 must be <= 50
                    if (result1 && !result2) return (score2 <= 50).Label($"score2({score2}) should be <= 50");
                    // If result2 triggered and result1 didn't, impossible since score1 > score2
                    if (result2 && !result1) return false.Label($"Impossible: score2({score2}) triggers but score1({score1}) doesn't");
                }
                else // score2 > score1
                {
                    if (result2 && !result1) return (score1 <= 50).Label($"score1({score1}) should be <= 50");
                    if (result1 && !result2) return false.Label($"Impossible: score1({score1}) triggers but score2({score2}) doesn't");
                }

                return true.ToProperty();
            });
    }

    #endregion

    #region Boundary Property Tests

    [Property(DisplayName = "Score boundary conditions are consistent", MaxTest = 30)]
    public Property ScoreBoundary_IsConsistent()
    {
        return Prop.ForAll(
            ScoreArbs.ValidScore(),
            threshold =>
            {
                var policy = CompilePolicy($$"""
                    policy "Boundary" syntax "stella-dsl@1" {
                        rule at_threshold {
                            when score >= {{threshold}}
                            then status := "triggered"
                            because "At or above threshold"
                        }
                    }
                    """);

                var context = CreateTestContext();

                // Test boundary: threshold should trigger, threshold-1 should not
                var atThreshold = EvaluateScoreThreshold(context, policy, threshold);
                var belowThreshold = threshold > 0 && !EvaluateScoreThreshold(context, policy, threshold - 1);

                // At threshold should trigger
                if (!atThreshold) return false.Label($"Score {threshold} should trigger rule with threshold >= {threshold}");

                // Below threshold should not trigger (unless threshold is 0)
                if (threshold > 0 && !belowThreshold)
                {
                    return false.Label($"Score {threshold - 1} should NOT trigger rule with threshold >= {threshold}");
                }

                return true.Label($"Boundary at {threshold} is consistent");
            });
    }

    #endregion

    #region Arbitrary Generators

    private static class ScoreArbs
    {
        public static Arbitrary<int> ValidScore()
        {
            return Arb.From(Gen.Choose(0, 100));
        }

        public static Arbitrary<(int, int)> TwoDistinctScores()
        {
            return Arb.From(
                from a in Gen.Choose(0, 100)
                from b in Gen.Choose(0, 100)
                where a != b
                select (a, b));
        }

        public static Arbitrary<(int, int, int)> ThreeDistinctScores()
        {
            return Arb.From(
                from a in Gen.Choose(0, 100)
                from b in Gen.Choose(0, 100)
                from c in Gen.Choose(0, 100)
                where a != b && b != c && a != c
                select (a, b, c));
        }

        public static Arbitrary<(int, int)> TwoBucketIndices()
        {
            return Arb.From(
                from a in Gen.Choose(0, 3)
                from b in Gen.Choose(0, 3)
                where a != b
                select (a, b));
        }
    }

    #endregion

    #region Helper Methods

    private PolicyIrDocument CompilePolicy(string policySource)
    {
        var result = _compiler.Compile(policySource);
        if (!result.Success || result.Document is null)
        {
            throw new InvalidOperationException(
                $"Policy compilation failed: {string.Join(", ", result.Diagnostics.Select(d => d.Message))}");
        }
        return result.Document;
    }

    private static PolicyEvaluationContext CreateTestContext()
    {
        return new PolicyEvaluationContext(
            new PolicyEvaluationSeverity("High"),
            new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty),
            new PolicyEvaluationAdvisory("TEST", ImmutableDictionary<string, string>.Empty),
            PolicyEvaluationVexEvidence.Empty,
            PolicyEvaluationSbom.Empty,
            PolicyEvaluationExceptions.Empty,
            ImmutableArray<Unknown>.Empty,
            ImmutableArray<ExceptionObject>.Empty,
            PolicyEvaluationReachability.Unknown,
            PolicyEvaluationEntropy.Unknown,
            EvaluationTimestamp: DateTimeOffset.UtcNow);
    }

    private static EvidenceWeightedScoreResult CreateTestScore(int score)
    {
        return CreateTestScoreWithBucket(score, GetBucketForScore(score));
    }

    private static EvidenceWeightedScoreResult CreateTestScoreWithBucket(int score, ScoreBucket bucket)
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = score,
            Bucket = bucket,
            Inputs = new EvidenceInputValues(0.5, 0.5, 0.5, 0.5, 0.5, 0.2),
            Weights = new EvidenceWeights { Rch = 0.25, Rts = 0.15, Bkp = 0.10, Xpl = 0.25, Src = 0.10, Mit = 0.15 },
            Breakdown = [],
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static ScoreBucket GetBucketForScore(int score) => score switch
    {
        >= 80 => ScoreBucket.ActNow,
        >= 60 => ScoreBucket.ScheduleNext,
        >= 40 => ScoreBucket.Investigate,
        _ => ScoreBucket.Watchlist
    };

    private static int CountTriggeredThresholds(PolicyExpressionEvaluator evaluator, PolicyIrDocument policy)
    {
        int count = 0;
        foreach (var rule in policy.Rules)
        {
            if (evaluator.EvaluateBoolean(rule.When))
            {
                count++;
            }
        }
        return count;
    }

    private bool EvaluateScoreThreshold(PolicyEvaluationContext context, PolicyIrDocument policy, int score)
    {
        var scoreResult = CreateTestScore(score);
        var evaluator = new PolicyExpressionEvaluator(context, scoreResult);
        return policy.Rules.Any(rule => evaluator.EvaluateBoolean(rule.When));
    }

    private static int GetBucketRuleIndex(PolicyExpressionEvaluator evaluator, PolicyIrDocument policy)
    {
        for (int i = 0; i < policy.Rules.Length; i++)
        {
            if (evaluator.EvaluateBoolean(policy.Rules[i].When))
            {
                return i;
            }
        }
        return int.MaxValue; // No rule triggered
    }

    #endregion
}
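
// Usage sketch (illustrative): the properties above run under FsCheck.Xunit, but a
// single property can also be exercised manually when debugging a failing shrink.
// Check.QuickThrowOnFailure is the FsCheck 2.x API; newer FsCheck versions differ.
//
//   var tests = new ScoreBasedRuleMonotonicityPropertyTests();
//   Check.QuickThrowOnFailure(tests.HigherScore_TriggersMoreOrEqualRules());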
@@ -0,0 +1,542 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-FileCopyrightText: 2025 StellaOps Contributors
|
||||
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
|
||||
// Task: PINT-8200-014 - Add unit tests: all score-based rule types, edge cases
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Engine.Evaluation;
|
||||
using StellaOps.Policy.Exceptions.Models;
|
||||
using StellaOps.Policy.Unknowns.Models;
|
||||
using StellaOps.PolicyDsl;
|
||||
using StellaOps.Signals.EvidenceWeightedScore;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tests.Evaluation;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for score-based policy rule evaluation.
|
||||
/// Tests the EWS (Evidence-Weighted Score) integration in PolicyExpressionEvaluator.
|
||||
/// Covers: score comparisons, bucket access, dimension access, flag operations, edge cases.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "8200.0012.0003")]
|
||||
public sealed class ScoreBasedRuleTests
|
||||
{
|
||||
#region Score Value Comparison Tests
|
||||
|
||||
[Theory(DisplayName = "Score value comparison operators evaluate correctly")]
|
||||
[InlineData("score >= 70", 75, true)]
|
||||
[InlineData("score >= 75", 75, true)]
|
||||
[InlineData("score >= 76", 75, false)]
|
||||
[InlineData("score > 74", 75, true)]
|
||||
[InlineData("score > 75", 75, false)]
|
||||
[InlineData("score <= 80", 75, true)]
|
||||
[InlineData("score <= 75", 75, true)]
|
||||
[InlineData("score <= 74", 75, false)]
|
||||
[InlineData("score < 76", 75, true)]
|
||||
[InlineData("score < 75", 75, false)]
|
||||
[InlineData("score == 75", 75, true)]
|
||||
[InlineData("score == 74", 75, false)]
|
||||
public void ScoreValueComparison_EvaluatesCorrectly(string expression, int score, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(score, ScoreBucket.ScheduleNext);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"expression '{expression}' with score={score}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "score.value is equivalent to score")]
|
||||
public void ScoreValue_ExplicitAccess_IsEquivalent()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(75, ScoreBucket.ScheduleNext);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result1 = evaluator.EvaluateBoolean(ParseExpression("score >= 75"));
|
||||
var result2 = evaluator.EvaluateBoolean(ParseExpression("score.value >= 75"));
|
||||
|
||||
// Assert
|
||||
result1.Should().BeTrue();
|
||||
result2.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score Bucket Tests
|
||||
|
||||
[Theory(DisplayName = "Score bucket boolean flags evaluate correctly")]
|
||||
[InlineData(ScoreBucket.ActNow, "score.is_act_now", true)]
|
||||
[InlineData(ScoreBucket.ActNow, "score.isactnow", true)]
|
||||
[InlineData(ScoreBucket.ScheduleNext, "score.is_schedule_next", true)]
|
||||
[InlineData(ScoreBucket.ScheduleNext, "score.isschedulenext", true)]
|
||||
[InlineData(ScoreBucket.Investigate, "score.is_investigate", true)]
|
||||
[InlineData(ScoreBucket.Investigate, "score.isinvestigate", true)]
|
||||
[InlineData(ScoreBucket.Watchlist, "score.is_watchlist", true)]
|
||||
[InlineData(ScoreBucket.Watchlist, "score.iswatchlist", true)]
|
||||
[InlineData(ScoreBucket.ScheduleNext, "score.is_act_now", false)]
|
||||
[InlineData(ScoreBucket.Watchlist, "score.is_schedule_next", false)]
|
||||
public void ScoreBucketFlags_EvaluateCorrectly(ScoreBucket bucket, string expression, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(75, bucket);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"'{expression}' with bucket={bucket}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Score bucket string comparison works")]
|
||||
public void ScoreBucket_StringComparison_Works()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(75, ScoreBucket.ScheduleNext);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression("score.bucket == \"ScheduleNext\""));
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "All bucket types have correct boolean flags")]
|
||||
public void AllBucketTypes_HaveCorrectBooleanFlags()
|
||||
{
|
||||
var buckets = new[]
|
||||
{
|
||||
(ScoreBucket.ActNow, "score.is_act_now"),
|
||||
(ScoreBucket.ScheduleNext, "score.is_schedule_next"),
|
||||
(ScoreBucket.Investigate, "score.is_investigate"),
|
||||
(ScoreBucket.Watchlist, "score.is_watchlist")
|
||||
};
|
||||
|
||||
foreach (var (bucket, expression) in buckets)
|
||||
{
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(50, bucket);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
result.Should().BeTrue(because: $"bucket {bucket} should set {expression} to true");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Dimension Access Tests
|
||||
|
||||
[Theory(DisplayName = "Score dimension access returns correct values")]
|
||||
[InlineData("score.rch > 0.8", true)] // RCH is 0.9
|
||||
[InlineData("score.reachability > 0.8", true)]
|
||||
[InlineData("score.rts > 0.6", true)] // RTS is 0.7
|
||||
[InlineData("score.runtime > 0.6", true)]
|
||||
[InlineData("score.xpl > 0.7", true)] // XPL is 0.8
|
||||
[InlineData("score.exploit > 0.7", true)]
|
||||
[InlineData("score.bkp > 0.4", true)] // BKP is 0.5
|
||||
[InlineData("score.backport > 0.4", true)]
|
||||
[InlineData("score.src > 0.5", true)] // SRC is 0.6
|
||||
[InlineData("score.source_trust > 0.5", true)]
|
||||
[InlineData("score.mit < 0.5", true)] // MIT is 0.3
|
||||
[InlineData("score.mitigation < 0.5", true)]
|
||||
[InlineData("score.rch > 0.95", false)] // RCH is 0.9, should not match
|
||||
public void ScoreDimensionAccess_EvaluatesCorrectly(string expression, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScoreWithDimensions();
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"'{expression}' with test dimensions");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Combined dimension conditions work")]
|
||||
public void CombinedDimensionConditions_Work()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScoreWithDimensions();
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression("score.rch > 0.8 and score.xpl > 0.7"));
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Missing dimension returns zero")]
|
||||
public void MissingDimension_ReturnsZero()
|
||||
{
|
||||
// Arrange - create score with empty breakdown
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateScoreWithEmptyBreakdown();
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act & Assert - dimension should be 0 (or very close to 0 for floating point)
|
||||
evaluator.EvaluateBoolean(ParseExpression("score.rch <= 0")).Should().BeTrue(because: "missing dimension should return 0");
|
||||
evaluator.EvaluateBoolean(ParseExpression("score.rch >= 0")).Should().BeTrue(because: "missing dimension should return 0");
|
||||
evaluator.EvaluateBoolean(ParseExpression("score.rch > 0.01")).Should().BeFalse(because: "missing dimension should return 0");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Flag Operation Tests
|
||||
|
||||
[Theory(DisplayName = "has_flag method evaluates correctly")]
|
||||
[InlineData("score.has_flag(\"kev\")", true)]
|
||||
[InlineData("score.has_flag(\"live-signal\")", true)]
|
||||
[InlineData("score.has_flag(\"proven-path\")", true)]
|
||||
[InlineData("score.has_flag(\"KEV\")", true)] // Case insensitive
|
||||
[InlineData("score.has_flag(\"Live-Signal\")", true)] // Case insensitive
|
||||
[InlineData("score.has_flag(\"speculative\")", false)]
|
||||
[InlineData("score.has_flag(\"vendor-na\")", false)]
|
||||
public void ScoreHasFlag_EvaluatesCorrectly(string expression, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScoreWithFlags("kev", "live-signal", "proven-path");
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"'{expression}'");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "has_flag with empty string returns false")]
|
||||
public void ScoreHasFlag_EmptyString_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScoreWithFlags("kev");
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression("score.has_flag(\"\")"));
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Empty flags list returns false for has_flag")]
|
||||
public void EmptyFlags_HasFlagReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScoreWithFlags(); // No flags
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression("score.has_flag(\"kev\")"));
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Between Method Tests
|
||||
|
||||
[Theory(DisplayName = "score.between() method evaluates correctly")]
|
||||
[InlineData(70, 80, 75, true)] // 75 is between 70 and 80
|
||||
[InlineData(75, 75, 75, true)] // Inclusive: 75 is between 75 and 75
|
||||
[InlineData(75, 80, 75, true)] // Inclusive: 75 is between 75 and 80
|
||||
[InlineData(70, 75, 75, true)] // Inclusive: 75 is between 70 and 75
|
||||
[InlineData(76, 80, 75, false)] // 75 is not between 76 and 80
|
||||
[InlineData(60, 74, 75, false)] // 75 is not between 60 and 74
|
||||
[InlineData(0, 100, 75, true)] // 75 is between 0 and 100
|
||||
public void ScoreBetween_EvaluatesCorrectly(int min, int max, int score, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(score, ScoreBucket.ScheduleNext);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression($"score.between({min}, {max})"));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"score {score} should{(expected ? "" : " not")} be between {min} and {max}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Compound Expression Tests
|
||||
|
||||
[Theory(DisplayName = "Compound score expressions evaluate correctly")]
|
||||
[InlineData("score >= 70 and score.is_schedule_next", true)]
|
||||
[InlineData("score >= 80 or score.has_flag(\"kev\")", true)] // kev flag is set
|
||||
[InlineData("score >= 80 and score.has_flag(\"kev\")", false)] // score is 75
|
||||
[InlineData("score.is_act_now or (score >= 70 and score.has_flag(\"kev\"))", true)]
|
||||
[InlineData("not score.is_watchlist and score.between(50, 80)", true)]
|
||||
[InlineData("score.rch > 0.8 and score.xpl > 0.7 and score >= 70", true)]
|
||||
public void CompoundExpressions_EvaluateCorrectly(string expression, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateCompoundTestScore();
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected, because: $"'{expression}'");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Case Tests
|
||||
|
||||
[Fact(DisplayName = "Null score causes score expressions to return null/false")]
|
||||
public void NullScore_ExpressionsReturnFalse()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var evaluator = new PolicyExpressionEvaluator(context, evidenceWeightedScore: null);
|
||||
|
||||
// Act
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression("score >= 0"));
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse(because: "score conditions should return false when score is null");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Score zero evaluates correctly")]
|
||||
public void ScoreZero_EvaluatesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(0, ScoreBucket.Watchlist);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act & Assert
|
||||
evaluator.EvaluateBoolean(ParseExpression("score == 0")).Should().BeTrue();
|
||||
evaluator.EvaluateBoolean(ParseExpression("score > 0")).Should().BeFalse();
|
||||
evaluator.EvaluateBoolean(ParseExpression("score.is_watchlist")).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Score maximum (100) evaluates correctly")]
|
||||
public void ScoreMaximum_EvaluatesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(100, ScoreBucket.ActNow);
|
||||
var evaluator = new PolicyExpressionEvaluator(context, ewsResult);
|
||||
|
||||
// Act & Assert
|
||||
evaluator.EvaluateBoolean(ParseExpression("score == 100")).Should().BeTrue();
|
||||
evaluator.EvaluateBoolean(ParseExpression("score >= 100")).Should().BeTrue();
|
||||
evaluator.EvaluateBoolean(ParseExpression("score.is_act_now")).Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Policy Metadata Access Tests
|
||||
|
||||
[Fact(DisplayName = "Policy digest is accessible")]
|
||||
public void PolicyDigest_IsAccessible()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(75, ScoreBucket.ScheduleNext);
|
        var evaluator = new PolicyExpressionEvaluator(context, ewsResult);

        // Act
        var result = evaluator.EvaluateBoolean(ParseExpression("score.policy_digest != null"));

        // Assert
        result.Should().BeTrue();
    }

    #endregion

    #region Helper Methods

    private static PolicyEvaluationContext CreateTestContext()
    {
        return new PolicyEvaluationContext(
            new PolicyEvaluationSeverity("High"),
            new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty
                .Add("exposure", "internal")),
            new PolicyEvaluationAdvisory("TEST", ImmutableDictionary<string, string>.Empty),
            PolicyEvaluationVexEvidence.Empty,
            PolicyEvaluationSbom.Empty,
            PolicyEvaluationExceptions.Empty,
            ImmutableArray<Unknown>.Empty,
            ImmutableArray<ExceptionObject>.Empty,
            PolicyEvaluationReachability.Unknown,
            PolicyEvaluationEntropy.Unknown,
            EvaluationTimestamp: DateTimeOffset.UtcNow);
    }

    private static EvidenceWeightedScoreResult CreateTestScore(int score, ScoreBucket bucket)
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = score,
            Bucket = bucket,
            Inputs = CreateDefaultInputs(),
            Weights = CreateDefaultWeights(),
            Breakdown = CreateDefaultBreakdown(),
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static EvidenceWeightedScoreResult CreateTestScoreWithDimensions()
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = 75,
            Bucket = ScoreBucket.ScheduleNext,
            Inputs = CreateDefaultInputs(),
            Weights = CreateDefaultWeights(),
            Breakdown = CreateDefaultBreakdown(),
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static EvidenceWeightedScoreResult CreateTestScoreWithFlags(params string[] flags)
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = 75,
            Bucket = ScoreBucket.ScheduleNext,
            Inputs = CreateDefaultInputs(),
            Weights = CreateDefaultWeights(),
            Breakdown = CreateDefaultBreakdown(),
            Flags = flags.ToList(),
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static EvidenceWeightedScoreResult CreateCompoundTestScore()
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = 75,
            Bucket = ScoreBucket.ScheduleNext,
            Inputs = CreateDefaultInputs(),
            Weights = CreateDefaultWeights(),
            Breakdown = CreateDefaultBreakdown(),
            Flags = ["kev", "live-signal", "proven-path"],
            Explanations = ["High reachability confirmed"],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static EvidenceWeightedScoreResult CreateScoreWithEmptyBreakdown()
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = 50,
            Bucket = ScoreBucket.Investigate,
            Inputs = CreateDefaultInputs(),
            Weights = CreateDefaultWeights(),
            Breakdown = [], // Empty breakdown
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static EvidenceInputValues CreateDefaultInputs()
    {
        return new EvidenceInputValues(
            Rch: 0.9,
            Rts: 0.7,
            Bkp: 0.5,
            Xpl: 0.8,
            Src: 0.6,
            Mit: 0.3);
    }

    private static EvidenceWeights CreateDefaultWeights()
    {
        return new EvidenceWeights
        {
            Rch = 0.25,
            Rts = 0.15,
            Bkp = 0.10,
            Xpl = 0.25,
            Src = 0.10,
            Mit = 0.15
        };
    }

    private static List<DimensionContribution> CreateDefaultBreakdown()
    {
        return
        [
            new DimensionContribution { Dimension = "Reachability", Symbol = "RCH", InputValue = 0.9, Weight = 0.25, Contribution = 22.5, IsSubtractive = false },
            new DimensionContribution { Dimension = "Runtime", Symbol = "RTS", InputValue = 0.7, Weight = 0.15, Contribution = 10.5, IsSubtractive = false },
            new DimensionContribution { Dimension = "Backport", Symbol = "BKP", InputValue = 0.5, Weight = 0.10, Contribution = 5.0, IsSubtractive = false },
            new DimensionContribution { Dimension = "Exploit", Symbol = "XPL", InputValue = 0.8, Weight = 0.25, Contribution = 20.0, IsSubtractive = false },
            new DimensionContribution { Dimension = "SourceTrust", Symbol = "SRC", InputValue = 0.6, Weight = 0.10, Contribution = 6.0, IsSubtractive = false },
            new DimensionContribution { Dimension = "Mitigation", Symbol = "MIT", InputValue = 0.3, Weight = 0.15, Contribution = -4.5, IsSubtractive = true }
        ];
    }

    private static PolicyExpression ParseExpression(string expression)
    {
        // Use the policy DSL parser to parse expressions
        var compiler = new PolicyCompiler();
        // Wrap expression in a minimal policy to parse it
        var policySource = $$"""
            policy "Test" syntax "stella-dsl@1" {
              rule test { when {{expression}} then status := "matched" because "test" }
            }
            """;

        var result = compiler.Compile(policySource);
        if (!result.Success || result.Document is null)
        {
            throw new InvalidOperationException(
                $"Failed to parse expression '{expression}': {string.Join(", ", result.Diagnostics.Select(i => i.Message))}");
        }

        // Extract the 'when' expression from the first rule
        return result.Document.Rules[0].When;
    }
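
    // Illustrative usage (not itself a test): ParseExpression compiles a throwaway
    // one-rule policy and returns that rule's 'when' AST, so any DSL predicate can be
    // evaluated directly, e.g.:
    //
    //   var expr = ParseExpression("score >= 80");
    //   var evaluator = new PolicyExpressionEvaluator(CreateTestContext(), CreateTestScore(90, ScoreBucket.ActNow));
    //   evaluator.EvaluateBoolean(expr); // true, since 90 >= 80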

    #endregion
}
@@ -0,0 +1,439 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-041 - Determinism test: same finding + policy → same EWS in verdict

using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Integration;

/// <summary>
/// Determinism tests verifying that same finding + policy → same EWS in verdict.
/// These tests ensure that EWS calculation is fully deterministic and produces
/// identical results across multiple evaluations.
/// </summary>
[Trait("Category", "Determinism")]
[Trait("Category", "Integration")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-041")]
public sealed class EwsVerdictDeterminismTests
{
    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region Score Determinism Tests

    [Fact(DisplayName = "Same finding evidence produces identical EWS across multiple calculations")]
    public void SameFindingEvidence_ProducesIdenticalEws_AcrossMultipleCalculations()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("determinism-test-001");

        // Act - Calculate 100 times
        var results = Enumerable.Range(0, 100)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert - All results should be byte-identical
        var firstScore = results[0].Score;
        var firstBucket = results[0].Bucket;
        var firstDimensions = results[0].Dimensions;

        results.Should().AllSatisfy(r =>
        {
            r.Score.Should().Be(firstScore, "score must be deterministic");
            r.Bucket.Should().Be(firstBucket, "bucket must be deterministic");
            r.Dimensions.Should().BeEquivalentTo(firstDimensions, "dimensions must be deterministic");
        });
    }

    [Fact(DisplayName = "Same finding produces identical EWS through enricher pipeline")]
    public void SameFinding_ProducesIdenticalEws_ThroughEnricherPipeline()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Disable caching to test actual calculation determinism
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("pipeline-determinism-test");

        // Act - Enrich 50 times
        var results = Enumerable.Range(0, 50)
            .Select(_ => enricher.Enrich(evidence))
            .ToList();

        // Assert
        var firstResult = results[0];
        results.Should().AllSatisfy(r =>
        {
            r.Score!.Score.Should().Be(firstResult.Score!.Score, "enriched score must be deterministic");
            r.Score!.Bucket.Should().Be(firstResult.Score!.Bucket, "enriched bucket must be deterministic");
        });
    }

    [Fact(DisplayName = "Floating point precision is maintained across calculations")]
    public void FloatingPointPrecision_IsMaintained_AcrossCalculations()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();

        // Input with fractional values that could cause floating point issues
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "float-precision-test",
            Rch = 0.333333333333333,
            Rts = 0.666666666666666,
            Bkp = 0.111111111111111,
            Xpl = 0.777777777777777,
            Src = 0.222222222222222,
            Mit = 0.888888888888888
        };

        // Act - Calculate many times
        var results = Enumerable.Range(0, 100)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert - All scores should be exactly equal (not just approximately)
        var firstScore = results[0].Score;
        results.Should().AllSatisfy(r => r.Score.Should().Be(firstScore));
    }
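
    // Exact equality is intentional above: IEEE-754 double arithmetic is a pure function
    // of its inputs, so repeating the same operation sequence on one runtime must yield
    // bit-identical results. Any drift would point to hidden state or unordered
    // aggregation inside the calculator, which is precisely what the test guards against.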

    #endregion

    #region Policy Variation Tests

    [Fact(DisplayName = "Same evidence with same policy produces identical EWS")]
    public void SameEvidenceAndPolicy_ProducesIdenticalEws()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("policy-consistency-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act - Multiple calculations with same policy
        var result1 = calculator.Calculate(input, policy);
        var result2 = calculator.Calculate(input, policy);
        var result3 = calculator.Calculate(input, policy);

        // Assert
        result1.Score.Should().Be(result2.Score);
        result2.Score.Should().Be(result3.Score);
        result1.Bucket.Should().Be(result2.Bucket);
        result2.Bucket.Should().Be(result3.Bucket);
    }

    [Fact(DisplayName = "Different policies produce different EWS for same evidence")]
    public void DifferentPolicies_ProduceDifferentEws_ForSameEvidence()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("multi-policy-test");

        // Custom policy with different weights
        var customPolicy = new EvidenceWeightPolicy
        {
            PolicyId = "custom-test-policy",
            Version = "1.0",
            Weights = new EvidenceWeights
            {
                Reachability = 0.50, // Much higher weight on reachability
                Runtime = 0.10,
                Backport = 0.05,
                Exploit = 0.20,
                Source = 0.10,
                Mitigation = 0.05
            },
            Buckets = EvidenceWeightPolicy.DefaultProduction.Buckets
        };

        // Act
        var defaultResult = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        var customResult = calculator.Calculate(input, customPolicy);

        // Assert - Different policies should produce different scores
        // (unless the evidence happens to result in same weighted sum)
        // The test validates that policy changes affect output
        (defaultResult.Score == customResult.Score &&
            defaultResult.Bucket == customResult.Bucket)
            .Should().BeFalse("different weight distributions should generally produce different scores");
    }

    #endregion

    #region Serialization Determinism Tests

    [Fact(DisplayName = "EWS JSON serialization is deterministic")]
    public void EwsJsonSerialization_IsDeterministic()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("serialization-test");
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Act - Serialize multiple times
        var serializations = Enumerable.Range(0, 10)
            .Select(_ => System.Text.Json.JsonSerializer.Serialize(result))
            .ToList();

        // Assert - All serializations should be identical
        var first = serializations[0];
        serializations.Should().AllBeEquivalentTo(first);
    }
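
    // String comparison of the JSON is meaningful here because System.Text.Json writes
    // properties in a stable reflection-derived order for a given type within one
    // process; the assertion does not (and need not) hold across serializer versions.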

    [Fact(DisplayName = "EWS round-trips correctly through JSON")]
    public void EwsRoundTrip_ThroughJson_IsCorrect()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("roundtrip-test");
        var original = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Act - Round-trip through JSON
        var json = System.Text.Json.JsonSerializer.Serialize(original);
        var deserialized = System.Text.Json.JsonSerializer.Deserialize<EvidenceWeightedScoreResult>(json);

        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Score.Should().Be(original.Score);
        deserialized.Bucket.Should().Be(original.Bucket);
        deserialized.FindingId.Should().Be(original.FindingId);
    }

    #endregion

    #region Edge Case Determinism Tests

    [Fact(DisplayName = "Zero values produce deterministic EWS")]
    public void ZeroValues_ProduceDeterministicEws()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "zero-test",
            Rch = 0.0,
            Rts = 0.0,
            Bkp = 0.0,
            Xpl = 0.0,
            Src = 0.0,
            Mit = 0.0
        };

        // Act
        var results = Enumerable.Range(0, 20)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert
        var first = results[0];
        results.Should().AllSatisfy(r => r.Score.Should().Be(first.Score));
    }

    [Fact(DisplayName = "Maximum values produce deterministic EWS")]
    public void MaximumValues_ProduceDeterministicEws()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "max-test",
            Rch = 1.0,
            Rts = 1.0,
            Bkp = 1.0,
            Xpl = 1.0,
            Src = 1.0,
            Mit = 1.0
        };

        // Act
        var results = Enumerable.Range(0, 20)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert
        var first = results[0];
        results.Should().AllSatisfy(r => r.Score.Should().Be(first.Score));
    }

    [Fact(DisplayName = "Boundary values produce deterministic EWS")]
    public void BoundaryValues_ProduceDeterministicEws()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();

        // Values at bucket boundaries
        var inputs = new[]
        {
            new EvidenceWeightedScoreInput { FindingId = "boundary-0", Rch = 0.0, Rts = 0.0, Bkp = 0.0, Xpl = 0.0, Src = 0.0, Mit = 0.0 },
            new EvidenceWeightedScoreInput { FindingId = "boundary-25", Rch = 0.25, Rts = 0.25, Bkp = 0.25, Xpl = 0.25, Src = 0.25, Mit = 0.25 },
            new EvidenceWeightedScoreInput { FindingId = "boundary-50", Rch = 0.5, Rts = 0.5, Bkp = 0.5, Xpl = 0.5, Src = 0.5, Mit = 0.5 },
            new EvidenceWeightedScoreInput { FindingId = "boundary-75", Rch = 0.75, Rts = 0.75, Bkp = 0.75, Xpl = 0.75, Src = 0.75, Mit = 0.75 },
            new EvidenceWeightedScoreInput { FindingId = "boundary-100", Rch = 1.0, Rts = 1.0, Bkp = 1.0, Xpl = 1.0, Src = 1.0, Mit = 1.0 }
        };

        foreach (var input in inputs)
        {
            // Act - Calculate same input multiple times
            var results = Enumerable.Range(0, 10)
                .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
                .ToList();

            // Assert - All results for same input should be identical
            var first = results[0];
            results.Should().AllSatisfy(r =>
            {
                r.Score.Should().Be(first.Score, $"boundary input {input.FindingId} must be deterministic");
                r.Bucket.Should().Be(first.Bucket, $"boundary input {input.FindingId} must be deterministic");
            });
        }
    }

    #endregion

    #region Concurrent Determinism Tests

    [Fact(DisplayName = "Concurrent calculations produce identical results")]
    public async Task ConcurrentCalculations_ProduceIdenticalResults()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("concurrent-test");

        // Act - Calculate concurrently
        var tasks = Enumerable.Range(0, 100)
            .Select(_ => Task.Run(() => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction)))
            .ToArray();

        var results = await Task.WhenAll(tasks);

        // Assert
        var first = results[0];
        results.Should().AllSatisfy(r =>
        {
            r.Score.Should().Be(first.Score, "concurrent calculations must be deterministic");
            r.Bucket.Should().Be(first.Bucket, "concurrent calculations must be deterministic");
        });
    }
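
    // Beyond determinism, this doubles as a thread-safety smoke test: one calculator
    // instance is shared across 100 tasks, so any mutable shared state inside it would
    // likely surface here as a mismatched score or bucket.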

    [Fact(DisplayName = "Concurrent enricher calls produce identical results")]
    public async Task ConcurrentEnricherCalls_ProduceIdenticalResults()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Test actual calculation, not cache
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("concurrent-enricher-test");

        // Act - Enrich concurrently
        var tasks = Enumerable.Range(0, 50)
            .Select(_ => Task.Run(() => enricher.Enrich(evidence)))
            .ToArray();

        var results = await Task.WhenAll(tasks);

        // Assert
        var first = results[0];
        results.Should().AllSatisfy(r =>
        {
            r.Score!.Score.Should().Be(first.Score!.Score, "concurrent enrichments must be deterministic");
            r.Score!.Bucket.Should().Be(first.Score!.Bucket, "concurrent enrichments must be deterministic");
        });
    }

    #endregion

    #region Hash Determinism Tests

    [Fact(DisplayName = "Finding hash is deterministic")]
    public void FindingHash_IsDeterministic()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("hash-test");

        // Act
        var results = Enumerable.Range(0, 20)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert - If FindingId is the same, results should be consistent
        results.Should().AllSatisfy(r => r.FindingId.Should().Be("hash-test"));
    }

    #endregion

    #region Test Helpers

    private static EvidenceWeightedScoreInput CreateTestInput(string findingId)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = findingId,
            Rch = 0.75,
            Rts = 0.60,
            Bkp = 0.40,
            Xpl = 0.55,
            Src = 0.65,
            Mit = 0.20
        };
    }

    private static FindingEvidence CreateTestEvidence(string findingId)
    {
        return new FindingEvidence
        {
            FindingId = findingId,
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.85
            },
            Runtime = new RuntimeInput
            {
                Posture = StellaOps.Signals.EvidenceWeightedScore.RuntimePosture.ActiveTracing,
                ObservationCount = 3,
                RecencyFactor = 0.75
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.45,
                EpssPercentile = 75,
                KevStatus = KevStatus.NotInKev,
                PublicExploitAvailable = false
            }
        };
    }

    #endregion
}
@@ -0,0 +1,435 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-040 - Integration tests for full policy→EWS pipeline

using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Confidence.Models;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Integration;

/// <summary>
/// Integration tests for the full policy evaluation → EWS calculation pipeline.
/// Tests DI wiring and component integration.
/// </summary>
[Trait("Category", "Integration")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-040")]
public sealed class PolicyEwsPipelineIntegrationTests
{
    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region DI Wiring Tests

    [Fact(DisplayName = "AddEvidenceWeightedScore registers all required services")]
    public void AddEvidenceWeightedScore_RegistersAllServices()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();

        // Act
        services.AddLogging();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore();
        var provider = services.BuildServiceProvider();

        // Assert: All services should be resolvable
        provider.GetService<IEvidenceWeightedScoreCalculator>().Should().NotBeNull();
        provider.GetService<IFindingScoreEnricher>().Should().NotBeNull();
        provider.GetService<IScoreEnrichmentCache>().Should().NotBeNull();
        provider.GetService<IDualEmitVerdictEnricher>().Should().NotBeNull();
        provider.GetService<IMigrationTelemetryService>().Should().NotBeNull();
        provider.GetService<IEwsTelemetryService>().Should().NotBeNull();
        provider.GetService<ConfidenceToEwsAdapter>().Should().NotBeNull();
    }

    [Fact(DisplayName = "AddEvidenceWeightedScore with configure action applies options")]
    public void AddEvidenceWeightedScore_WithConfigure_AppliesOptions()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });

        // Act
        var provider = services.BuildServiceProvider();
        var options = provider.GetRequiredService<IOptions<PolicyEvidenceWeightedScoreOptions>>();

        // Assert
        options.Value.Enabled.Should().BeTrue();
        options.Value.EnableCaching.Should().BeTrue();
    }

    [Fact(DisplayName = "Services are registered as singletons")]
    public void Services_AreRegisteredAsSingletons()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore();
        var provider = services.BuildServiceProvider();

        // Act
        var enricher1 = provider.GetRequiredService<IFindingScoreEnricher>();
        var enricher2 = provider.GetRequiredService<IFindingScoreEnricher>();

        // Assert: Same instance (singleton)
        enricher1.Should().BeSameAs(enricher2);
    }

    #endregion

    #region Calculator Integration Tests

    [Fact(DisplayName = "Calculator produces valid EWS result from normalized inputs")]
    public void Calculator_ProducesValidResult_FromNormalizedInputs()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "CVE-2024-CALC@pkg:test/calc@1.0",
            Rch = 0.8,
            Rts = 0.7,
            Bkp = 0.3,
            Xpl = 0.6,
            Src = 0.5,
            Mit = 0.1
        };

        // Act
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Assert
        result.Should().NotBeNull();
        result.Score.Should().BeInRange(0, 100);
        result.Bucket.Should().BeDefined();
        result.FindingId.Should().Be("CVE-2024-CALC@pkg:test/calc@1.0");
    }

    [Fact(DisplayName = "Calculator is deterministic for same inputs")]
    public void Calculator_IsDeterministic_ForSameInputs()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "determinism-test",
            Rch = 0.75, Rts = 0.60, Bkp = 0.40, Xpl = 0.55, Src = 0.65, Mit = 0.20
        };

        // Act - Calculate multiple times
        var results = Enumerable.Range(0, 10)
            .Select(_ => calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction))
            .ToList();

        // Assert - All results should be identical
        var firstScore = results[0].Score;
        results.Should().AllSatisfy(r => r.Score.Should().Be(firstScore));
    }

    #endregion

    #region Enricher Integration Tests

    [Fact(DisplayName = "Enricher with enabled feature calculates scores")]
    public void Enricher_WithEnabledFeature_CalculatesScores()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts => opts.Enabled = true);
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = new FindingEvidence
        {
            FindingId = "CVE-2024-TEST@pkg:test/enricher@1.0",
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.85
            }
        };

        // Act
        var result = enricher.Enrich(evidence);

        // Assert
        result.Should().NotBeNull();
        result.IsSuccess.Should().BeTrue();
        result.Score.Should().NotBeNull();
        result.Score!.Score.Should().BeInRange(0, 100);
        result.FindingId.Should().Be("CVE-2024-TEST@pkg:test/enricher@1.0");
    }

    [Fact(DisplayName = "Enricher with disabled feature returns skipped")]
    public void Enricher_WithDisabledFeature_ReturnsSkipped()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts => opts.Enabled = false);
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = new FindingEvidence { FindingId = "test-finding" };

        // Act
        var result = enricher.Enrich(evidence);

        // Assert
        result.IsSuccess.Should().BeFalse();
        result.Score.Should().BeNull();
    }

    #endregion

    #region Caching Integration Tests

    [Fact(DisplayName = "Cache returns cached result on second call")]
    public void Cache_ReturnsCachedResult_OnSecondCall()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = new FindingEvidence { FindingId = "cache-test" };

        // Act
        var result1 = enricher.Enrich(evidence);
        var result2 = enricher.Enrich(evidence);

        // Assert
        result1.FromCache.Should().BeFalse();
        result2.FromCache.Should().BeTrue();
        result1.Score!.Score.Should().Be(result2.Score!.Score);
    }
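
    // The observable contract is the FromCache flag: the first call computes and stores,
    // the second call is served from the cache. How entries are keyed (presumably by the
    // finding's identity and evidence) is an implementation detail not asserted here.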

    [Fact(DisplayName = "Cache stores different findings separately")]
    public void Cache_StoresDifferentFindings_Separately()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence1 = new FindingEvidence
        {
            FindingId = "finding-A",
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.9
            }
        };
        var evidence2 = new FindingEvidence
        {
            FindingId = "finding-B",
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.Unknown,
                Confidence = 0.1
            }
        };

        // Act
        var result1 = enricher.Enrich(evidence1);
        var result2 = enricher.Enrich(evidence2);

        // Assert
        result1.FromCache.Should().BeFalse();
        result2.FromCache.Should().BeFalse();
        result1.FindingId.Should().Be("finding-A");
        result2.FindingId.Should().Be("finding-B");
    }

    #endregion

    #region Adapter Integration Tests

    [Fact(DisplayName = "Adapter converts Confidence to EWS")]
    public void Adapter_ConvertsConfidenceToEws()
    {
        // Arrange
        var adapter = new ConfidenceToEwsAdapter();
        var confidence = new ConfidenceScore
        {
            Value = 0.35m, // Lower confidence = higher risk
            Factors =
            [
                new ConfidenceFactor
                {
                    Type = ConfidenceFactorType.Reachability,
                    Weight = 0.5m,
                    RawValue = 0.35m,
                    Reason = "Test"
                }
            ],
            Explanation = "Test confidence score"
        };

        // Act
        var result = adapter.Adapt(confidence, "adapter-test-finding");

        // Assert
        result.Should().NotBeNull();
        result.EwsResult.Should().NotBeNull();
        result.OriginalConfidence.Should().Be(confidence);
        // Low confidence → High EWS (inverted scale)
        result.EwsResult.Score.Should().BeGreaterThan(50);
    }

    [Fact(DisplayName = "Adapter preserves ranking relationship")]
    public void Adapter_PreservesRankingRelationship()
    {
        // Arrange
        var adapter = new ConfidenceToEwsAdapter();

        // Higher confidence = safer = lower EWS
        var highConfidence = new ConfidenceScore
        {
            Value = 0.85m,
            Factors = [],
            Explanation = "High confidence"
        };

        // Lower confidence = riskier = higher EWS
        var lowConfidence = new ConfidenceScore
        {
            Value = 0.25m,
            Factors = [],
            Explanation = "Low confidence"
        };

        // Act
        var highResult = adapter.Adapt(highConfidence, "high-conf");
        var lowResult = adapter.Adapt(lowConfidence, "low-conf");

        // Assert - Ranking should be preserved (inverted): low confidence = higher risk = higher or equal EWS
        lowResult.EwsResult.Score.Should().BeGreaterThanOrEqualTo(highResult.EwsResult.Score,
            "lower confidence should produce equal or higher EWS (inverted scale)");
    }
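
    // The inversion is the key invariant: confidence measures how safe a finding looks,
    // EWS measures how urgent it is, so high confidence must map to low EWS. Using
    // BeGreaterThanOrEqualTo rather than a strict inequality allows ties at the
    // extremes of the 0-100 scale.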

    #endregion

    #region End-to-End Pipeline Tests

    [Fact(DisplayName = "Full pipeline produces actionable results")]
    public void FullPipeline_ProducesActionableResults()
    {
        // Arrange - Build a complete pipeline via DI
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        // Simulate real finding evidence
        var evidence = new FindingEvidence
        {
            FindingId = "CVE-2024-12345@pkg:npm/vulnerable-lib@1.0.0",
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.90
            },
            Runtime = new RuntimeInput
            {
                Posture = StellaOps.Signals.EvidenceWeightedScore.RuntimePosture.ActiveTracing,
                ObservationCount = 5,
                RecencyFactor = 0.85
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.75,
                EpssPercentile = 90,
                KevStatus = KevStatus.InKev,
                PublicExploitAvailable = true
            }
        };

        // Act
        var result = enricher.Enrich(evidence);

        // Assert
        result.Should().NotBeNull();
        result.IsSuccess.Should().BeTrue();
        result.Score.Should().NotBeNull();
        result.Score!.Score.Should().BeGreaterThan(50, "high-risk evidence should produce elevated EWS");
        result.FindingId.Should().Be("CVE-2024-12345@pkg:npm/vulnerable-lib@1.0.0");
    }

    [Fact(DisplayName = "Pipeline handles missing evidence gracefully")]
    public void Pipeline_HandlesMissingEvidence_Gracefully()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts => opts.Enabled = true);
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        // Minimal evidence - only finding ID
        var evidence = new FindingEvidence { FindingId = "minimal-finding" };

        // Act
        var result = enricher.Enrich(evidence);

        // Assert - Should still produce a valid result with defaults
        result.Should().NotBeNull();
        result.IsSuccess.Should().BeTrue();
        result.Score.Should().NotBeNull();
        result.Score!.Score.Should().BeInRange(0, 100);
    }

    #endregion
}
@@ -37,7 +37,7 @@ public sealed class RiskBudgetMonotonicityPropertyTests
            MaxNewCriticalVulnerabilities = budget1MaxCritical,
            MaxNewHighVulnerabilities = int.MaxValue, // Allow high
            MaxRiskScoreIncrease = decimal.MaxValue,
            MaxMagnitude = DeltaMagnitude.Catastrophic
            MaxMagnitude = DeltaMagnitude.Major // Most permissive
        };

        var budget2MaxCritical = Math.Max(0, budget1MaxCritical - reductionAmount);
@@ -72,7 +72,7 @@ public sealed class RiskBudgetMonotonicityPropertyTests
            MaxNewCriticalVulnerabilities = int.MaxValue,
            MaxNewHighVulnerabilities = budget1MaxHigh,
            MaxRiskScoreIncrease = decimal.MaxValue,
            MaxMagnitude = DeltaMagnitude.Catastrophic
            MaxMagnitude = DeltaMagnitude.Major // Most permissive
        };

        var budget2MaxHigh = Math.Max(0, budget1MaxHigh - reductionAmount);
@@ -104,7 +104,7 @@ public sealed class RiskBudgetMonotonicityPropertyTests
            MaxNewCriticalVulnerabilities = int.MaxValue,
            MaxNewHighVulnerabilities = int.MaxValue,
            MaxRiskScoreIncrease = budget1MaxScore,
            MaxMagnitude = DeltaMagnitude.Catastrophic
            MaxMagnitude = DeltaMagnitude.Major // Most permissive
        };

        var budget2MaxScore = Math.Max(0, budget1MaxScore - reductionAmount);
@@ -170,7 +170,7 @@ public sealed class RiskBudgetMonotonicityPropertyTests
            MaxNewCriticalVulnerabilities = int.MaxValue,
            MaxNewHighVulnerabilities = int.MaxValue,
            MaxRiskScoreIncrease = decimal.MaxValue,
            MaxMagnitude = DeltaMagnitude.Catastrophic,
            MaxMagnitude = DeltaMagnitude.Major, // Most permissive
            BlockedVulnerabilities = ImmutableHashSet<string>.Empty
        };

@@ -233,6 +233,10 @@
/// </summary>
internal static class DeltaVerdictArbs
{
    // DeltaMagnitude enum: None, Minimal, Small, Medium, Large, Major
    // Mapping from old values:
    // Low -> Small, High -> Large, Severe -> Major, Catastrophic -> Major

    public static Arbitrary<int> NonNegativeInt() =>
        Arb.From(Gen.Choose(0, 50));

@@ -240,11 +244,10 @@ internal static class DeltaVerdictArbs
        Arb.From(Gen.Elements(
            DeltaMagnitude.None,
            DeltaMagnitude.Minimal,
            DeltaMagnitude.Low,
            DeltaMagnitude.Small,
            DeltaMagnitude.Medium,
            DeltaMagnitude.High,
            DeltaMagnitude.Severe,
            DeltaMagnitude.Catastrophic));
            DeltaMagnitude.Large,
            DeltaMagnitude.Major));

    public static Arbitrary<DeltaVerdict.Models.DeltaVerdict> AnyDeltaVerdict() =>
        Arb.From(
@@ -254,11 +257,10 @@ internal static class DeltaVerdictArbs
            from magnitude in Gen.Elements(
                DeltaMagnitude.None,
                DeltaMagnitude.Minimal,
                DeltaMagnitude.Low,
                DeltaMagnitude.Small,
                DeltaMagnitude.Medium,
                DeltaMagnitude.High,
                DeltaMagnitude.Severe,
                DeltaMagnitude.Catastrophic)
                DeltaMagnitude.Large,
                DeltaMagnitude.Major)
            select CreateDeltaVerdict(criticalCount, highCount, riskScoreChange, magnitude));

    public static Arbitrary<RiskBudget> AnyRiskBudget() =>
@@ -269,11 +271,10 @@ internal static class DeltaVerdictArbs
            from maxMagnitude in Gen.Elements(
                DeltaMagnitude.None,
                DeltaMagnitude.Minimal,
                DeltaMagnitude.Low,
                DeltaMagnitude.Small,
                DeltaMagnitude.Medium,
                DeltaMagnitude.High,
                DeltaMagnitude.Severe,
                DeltaMagnitude.Catastrophic)
                DeltaMagnitude.Large,
                DeltaMagnitude.Major)
            select new RiskBudget
            {
                MaxNewCriticalVulnerabilities = maxCritical,
@@ -292,35 +293,73 @@ internal static class DeltaVerdictArbs

        for (var i = 0; i < criticalCount; i++)
        {
            // VulnerabilityDelta constructor: (VulnerabilityId, Severity, CvssScore?, ComponentPurl?, ReachabilityStatus?)
            addedVulns.Add(new VulnerabilityDelta(
                $"CVE-2024-{1000 + i}",
                "Critical",
                9.8m,
                VulnerabilityDeltaType.Added,
                null));
                VulnerabilityId: $"CVE-2024-{1000 + i}",
                Severity: "Critical",
                CvssScore: 9.8m,
                ComponentPurl: null,
                ReachabilityStatus: null));
        }

        for (var i = 0; i < highCount; i++)
        {
            addedVulns.Add(new VulnerabilityDelta(
                $"CVE-2024-{2000 + i}",
                "High",
                7.5m,
                VulnerabilityDeltaType.Added,
                null));
                VulnerabilityId: $"CVE-2024-{2000 + i}",
                Severity: "High",
                CvssScore: 7.5m,
                ComponentPurl: null,
                ReachabilityStatus: null));
        }

        var now = DateTimeOffset.UtcNow;
        var baseVerdict = new VerdictReference(
            VerdictId: Guid.NewGuid().ToString(),
            Digest: "sha256:baseline",
            ArtifactRef: null,
            ScannedAt: now.AddHours(-1));

        var headVerdict = new VerdictReference(
            VerdictId: Guid.NewGuid().ToString(),
            Digest: "sha256:current",
            ArtifactRef: null,
            ScannedAt: now);

        var trend = riskScoreChange > 0 ? RiskTrend.Degraded
            : riskScoreChange < 0 ? RiskTrend.Improved
            : RiskTrend.Stable;
        var percentChange = riskScoreChange == 0 ? 0m : (decimal)riskScoreChange * 100m / 100m;

        var riskDelta = new RiskScoreDelta(
            OldScore: 0m,
            NewScore: riskScoreChange,
            Change: riskScoreChange,
            PercentChange: percentChange,
            Trend: trend);

        var totalChanges = addedVulns.Count;
        var summary = new DeltaSummary(
            ComponentsAdded: 0,
            ComponentsRemoved: 0,
            ComponentsChanged: 0,
            VulnerabilitiesAdded: addedVulns.Count,
            VulnerabilitiesRemoved: 0,
            VulnerabilityStatusChanges: 0,
            TotalChanges: totalChanges,
            Magnitude: magnitude);

        return new DeltaVerdict.Models.DeltaVerdict
        {
            Id = Guid.NewGuid(),
            Timestamp = DateTime.UtcNow,
            BaselineDigest = "sha256:baseline",
            CurrentDigest = "sha256:current",
            AddedVulnerabilities = addedVulns,
            DeltaId = Guid.NewGuid().ToString(),
            SchemaVersion = "1.0.0",
            BaseVerdict = baseVerdict,
            HeadVerdict = headVerdict,
            AddedVulnerabilities = addedVulns.ToImmutableArray(),
            RemovedVulnerabilities = [],
            ChangedVulnerabilities = [],
            RiskScoreDelta = new RiskScoreDelta(0, riskScoreChange, riskScoreChange),
            Summary = new DeltaSummary(magnitude, addedVulns.Count, 0, 0)
            ChangedVulnerabilityStatuses = [],
            RiskScoreDelta = riskDelta,
            Summary = summary,
            ComputedAt = now
        };
    }
}
@@ -0,0 +1,376 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-FileCopyrightText: 2025 StellaOps Contributors
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-015 - Add property tests: rule monotonicity

using System.Collections.Immutable;
using FluentAssertions;
using FsCheck;
using FsCheck.Xunit;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Exceptions.Models;
using StellaOps.Policy.Unknowns.Models;
using StellaOps.PolicyDsl;
using StellaOps.Signals.EvidenceWeightedScore;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Properties;

/// <summary>
/// Property-based tests for score-based rule monotonicity.
/// Verifies that higher scores lead to stricter verdicts when policies are configured
/// with monotonic (score-threshold) rules.
/// </summary>
[Trait("Category", "Property")]
[Trait("Sprint", "8200.0012.0003")]
public sealed class ScoreRuleMonotonicityPropertyTests
{
    /// <summary>
    /// Property: For threshold rules like "score >= T", increasing score cannot flip true→false.
    /// If score S₁ satisfies (S₁ >= T), then any S₂ >= S₁ must also satisfy (S₂ >= T).
    /// </summary>
    [Property(MaxTest = 100)]
    public Property IncreasingScore_GreaterThanOrEqual_Monotonic()
    {
        return Prop.ForAll(
            ScoreRuleArbs.ThreeScores(),
            values =>
            {
                var (threshold, score1, score2) = values;
                var lowerScore = Math.Min(score1, score2);
                var higherScore = Math.Max(score1, score2);

                var expression = $"score >= {threshold}";
                var evaluator1 = CreateEvaluator(lowerScore);
                var evaluator2 = CreateEvaluator(higherScore);

                var result1 = evaluator1.EvaluateBoolean(ParseExpression(expression));
                var result2 = evaluator2.EvaluateBoolean(ParseExpression(expression));

                // If lower score satisfies threshold, higher score must also
                return (!result1 || result2)
                    .Label($"score >= {threshold}: lower({lowerScore})={result1}, higher({higherScore})={result2}");
            });
    }
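
    // The returned condition encodes material implication: (!result1 || result2) is
    // "result1 implies result2", so the property fails only in the one case monotonicity
    // forbids, namely the lower score passing the threshold while the higher score does not.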

    /// <summary>
    /// Property: For threshold rules like "score > T", increasing score cannot flip true→false.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property IncreasingScore_GreaterThan_Monotonic()
    {
        return Prop.ForAll(
            ScoreRuleArbs.ThreeScores(),
            values =>
            {
                var (threshold, score1, score2) = values;
                var lowerScore = Math.Min(score1, score2);
                var higherScore = Math.Max(score1, score2);

                var expression = $"score > {threshold}";
                var evaluator1 = CreateEvaluator(lowerScore);
                var evaluator2 = CreateEvaluator(higherScore);

                var result1 = evaluator1.EvaluateBoolean(ParseExpression(expression));
                var result2 = evaluator2.EvaluateBoolean(ParseExpression(expression));

                return (!result1 || result2)
                    .Label($"score > {threshold}: lower({lowerScore})={result1}, higher({higherScore})={result2}");
            });
    }

    /// <summary>
    /// Property: For threshold rules like "score <= T", increasing score cannot flip false→true.
    /// If S₁ violates (S₁ > T), then any S₂ >= S₁ must also violate.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property IncreasingScore_LessThanOrEqual_AntiMonotonic()
    {
        return Prop.ForAll(
            ScoreRuleArbs.ThreeScores(),
            values =>
            {
                var (threshold, score1, score2) = values;
                var lowerScore = Math.Min(score1, score2);
                var higherScore = Math.Max(score1, score2);

                var expression = $"score <= {threshold}";
                var evaluator1 = CreateEvaluator(lowerScore);
                var evaluator2 = CreateEvaluator(higherScore);

                var result1 = evaluator1.EvaluateBoolean(ParseExpression(expression));
                var result2 = evaluator2.EvaluateBoolean(ParseExpression(expression));

                // If higher score violates threshold, lower score must also violate or pass
                // Equivalently: if higher score passes, lower score must also pass
                return (!result2 || result1)
                    .Label($"score <= {threshold}: lower({lowerScore})={result1}, higher({higherScore})={result2}");
            });
    }

    /// <summary>
    /// Property: For between rules "score.between(min, max)",
    /// scores within range always match, scores outside never match.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property ScoreBetween_RangeConsistency()
    {
        return Prop.ForAll(
            ScoreRuleArbs.ThreeScores(),
            values =>
            {
                var (bound1, bound2, score) = values;
                var min = Math.Min(bound1, bound2);
                var max = Math.Max(bound1, bound2);

                var expression = $"score.between({min}, {max})";
                var evaluator = CreateEvaluator(score);

                var result = evaluator.EvaluateBoolean(ParseExpression(expression));
                var expectedInRange = score >= min && score <= max;

                return (result == expectedInRange)
                    .Label($"between({min}, {max}) with score={score}: got={result}, expected={expectedInRange}");
            });
    }

    /// <summary>
    /// Property: Bucket ordering is consistent with score ranges.
    /// ActNow (highest urgency) should have highest scores.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property BucketFlags_ConsistentWithBucketValue()
    {
        return Prop.ForAll(
            ScoreRuleArbs.AnyBucket(),
            bucket =>
            {
                var score = BucketToTypicalScore(bucket);
                var evaluator = CreateEvaluatorWithBucket(score, bucket);

                // Verify bucket flag matches
                var bucketName = bucket.ToString().ToLowerInvariant();
                var bucketExpression = bucketName switch
                {
                    "actnow" => "score.is_act_now",
                    "schedulenext" => "score.is_schedule_next",
                    _ => $"score.is_{bucketName}"
                };

                var result = evaluator.EvaluateBoolean(ParseExpression(bucketExpression));

                return result
                    .Label($"Bucket {bucket} flag should be true for score={score}");
            });
    }

    /// <summary>
    /// Property: Combining AND conditions with >= preserves monotonicity.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property AndConditions_PreserveMonotonicity()
    {
        return Prop.ForAll(
            ScoreRuleArbs.FourScores(),
            values =>
            {
                var (threshold1, threshold2, score1, score2) = values;
                var lowerScore = Math.Min(score1, score2);
                var higherScore = Math.Max(score1, score2);

                var expression = $"score >= {threshold1} and score >= {threshold2}";
                var evaluator1 = CreateEvaluator(lowerScore);
                var evaluator2 = CreateEvaluator(higherScore);

                var result1 = evaluator1.EvaluateBoolean(ParseExpression(expression));
                var result2 = evaluator2.EvaluateBoolean(ParseExpression(expression));

                // If lower passes both thresholds, higher must also pass
                return (!result1 || result2)
                    .Label($"AND monotonicity: lower({lowerScore})={result1}, higher({higherScore})={result2}");
            });
    }

    /// <summary>
    /// Property: Combining OR conditions with >= preserves monotonicity.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property OrConditions_PreserveMonotonicity()
    {
        return Prop.ForAll(
            ScoreRuleArbs.FourScores(),
            values =>
            {
                var (threshold1, threshold2, score1, score2) = values;
                var lowerScore = Math.Min(score1, score2);
                var higherScore = Math.Max(score1, score2);

                var expression = $"score >= {threshold1} or score >= {threshold2}";
                var evaluator1 = CreateEvaluator(lowerScore);
                var evaluator2 = CreateEvaluator(higherScore);

                var result1 = evaluator1.EvaluateBoolean(ParseExpression(expression));
                var result2 = evaluator2.EvaluateBoolean(ParseExpression(expression));

                // If lower passes either threshold, higher must also pass at least one
                return (!result1 || result2)
                    .Label($"OR monotonicity: lower({lowerScore})={result1}, higher({higherScore})={result2}");
            });
    }
|
||||
|
||||
/// <summary>
|
||||
/// Property: Score equality is reflexive.
|
||||
/// </summary>
|
||||
[Property(MaxTest = 50)]
|
||||
public Property ScoreEquality_IsReflexive()
|
||||
{
|
||||
return Prop.ForAll(
|
||||
ScoreRuleArbs.ValidScore(),
|
||||
score =>
|
||||
{
|
||||
var expression = $"score == {score}";
|
||||
var evaluator = CreateEvaluator(score);
|
||||
var result = evaluator.EvaluateBoolean(ParseExpression(expression));
|
||||
|
||||
return result
|
||||
.Label($"score == {score} should be true when score is {score}");
|
||||
});
|
||||
}
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static PolicyExpressionEvaluator CreateEvaluator(int score)
|
||||
{
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(score, ScoreToBucket(score));
|
||||
return new PolicyExpressionEvaluator(context, ewsResult);
|
||||
}
|
||||
|
||||
private static PolicyExpressionEvaluator CreateEvaluatorWithBucket(int score, ScoreBucket bucket)
|
||||
{
|
||||
var context = CreateTestContext();
|
||||
var ewsResult = CreateTestScore(score, bucket);
|
||||
return new PolicyExpressionEvaluator(context, ewsResult);
|
||||
}
|
||||
|
||||
private static ScoreBucket ScoreToBucket(int score) => score switch
|
||||
{
|
||||
>= 80 => ScoreBucket.ActNow,
|
||||
>= 60 => ScoreBucket.ScheduleNext,
|
||||
>= 40 => ScoreBucket.Investigate,
|
||||
_ => ScoreBucket.Watchlist
|
||||
};
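
    // The cut-offs are inclusive at each lower bound: ScoreToBucket(80) is ActNow while
    // ScoreToBucket(79) is ScheduleNext, and BucketToTypicalScore below picks a value
    // safely inside each band.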

    private static int BucketToTypicalScore(ScoreBucket bucket) => bucket switch
    {
        ScoreBucket.ActNow => 90,
        ScoreBucket.ScheduleNext => 70,
        ScoreBucket.Investigate => 50,
        ScoreBucket.Watchlist => 20,
        _ => 50
    };

    private static PolicyEvaluationContext CreateTestContext()
    {
        return new PolicyEvaluationContext(
            new PolicyEvaluationSeverity("High"),
            new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty
                .Add("exposure", "internal")),
            new PolicyEvaluationAdvisory("TEST", ImmutableDictionary<string, string>.Empty),
            PolicyEvaluationVexEvidence.Empty,
            PolicyEvaluationSbom.Empty,
            PolicyEvaluationExceptions.Empty,
            ImmutableArray<Unknown>.Empty,
            ImmutableArray<ExceptionObject>.Empty,
            PolicyEvaluationReachability.Unknown,
            PolicyEvaluationEntropy.Unknown,
            EvaluationTimestamp: DateTimeOffset.UtcNow);
    }

    private static EvidenceWeightedScoreResult CreateTestScore(int score, ScoreBucket bucket)
    {
        return new EvidenceWeightedScoreResult
        {
            FindingId = "test-finding",
            Score = score,
            Bucket = bucket,
            Inputs = new EvidenceInputValues(0.5, 0.5, 0.5, 0.5, 0.5, 0.5),
            Weights = new EvidenceWeights { Rch = 0.2, Rts = 0.15, Bkp = 0.1, Xpl = 0.25, Src = 0.1, Mit = 0.2 },
            Breakdown = CreateDefaultBreakdown(),
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "sha256:test-policy",
            CalculatedAt = DateTimeOffset.UtcNow
        };
    }

    private static List<DimensionContribution> CreateDefaultBreakdown()
    {
        return
        [
            new DimensionContribution { Dimension = "Reachability", Symbol = "RCH", InputValue = 0.5, Weight = 0.2, Contribution = 10, IsSubtractive = false },
            new DimensionContribution { Dimension = "Runtime", Symbol = "RTS", InputValue = 0.5, Weight = 0.15, Contribution = 7.5, IsSubtractive = false },
            new DimensionContribution { Dimension = "Backport", Symbol = "BKP", InputValue = 0.5, Weight = 0.1, Contribution = 5, IsSubtractive = false },
            new DimensionContribution { Dimension = "Exploit", Symbol = "XPL", InputValue = 0.5, Weight = 0.25, Contribution = 12.5, IsSubtractive = false },
            new DimensionContribution { Dimension = "SourceTrust", Symbol = "SRC", InputValue = 0.5, Weight = 0.1, Contribution = 5, IsSubtractive = false },
            new DimensionContribution { Dimension = "Mitigation", Symbol = "MIT", InputValue = 0.5, Weight = 0.2, Contribution = -10, IsSubtractive = true }
        ];
    }

    private static PolicyExpression ParseExpression(string expression)
    {
        var compiler = new PolicyCompiler();
        var policySource = $$"""
            policy "Test" syntax "stella-dsl@1" {
              rule test { when {{expression}} then status := "matched" because "test" }
            }
            """;

        var result = compiler.Compile(policySource);
        if (!result.Success || result.Document is null)
        {
            throw new InvalidOperationException(
                $"Failed to parse expression '{expression}': {string.Join(", ", result.Diagnostics.Select(i => i.Message))}");
        }

        return result.Document.Rules[0].When;
    }

    #endregion
}

/// <summary>
/// Custom FsCheck arbitraries for score rule testing.
/// </summary>
internal static class ScoreRuleArbs
{
    /// <summary>Valid score range: 0-100.</summary>
    public static Arbitrary<int> ValidScore() =>
        Arb.From(Gen.Choose(0, 100));

    /// <summary>Any valid bucket.</summary>
    public static Arbitrary<ScoreBucket> AnyBucket() =>
        Arb.From(Gen.Elements(
            ScoreBucket.ActNow,
            ScoreBucket.ScheduleNext,
            ScoreBucket.Investigate,
            ScoreBucket.Watchlist));

    /// <summary>Combined tuple of 3 scores for ForAll parameter limit.</summary>
    public static Arbitrary<(int, int, int)> ThreeScores() =>
        Arb.From(
            from s1 in Gen.Choose(0, 100)
            from s2 in Gen.Choose(0, 100)
            from s3 in Gen.Choose(0, 100)
            select (s1, s2, s3));

    /// <summary>Combined tuple of 4 scores for ForAll parameter limit.</summary>
    public static Arbitrary<(int, int, int, int)> FourScores() =>
        Arb.From(
            from s1 in Gen.Choose(0, 100)
            from s2 in Gen.Choose(0, 100)
            from s3 in Gen.Choose(0, 100)
            from s4 in Gen.Choose(0, 100)
            select (s1, s2, s3, s4));
}
|
||||
@@ -100,12 +100,10 @@ public sealed class UnknownsBudgetPropertyTests
        return Prop.ForAll(
            UnknownsBudgetArbs.AnyUnknownsCounts(),
            UnknownsBudgetArbs.AnyUnknownsBudgetConfig(),
-           UnknownsBudgetArbs.NonNegativeInt(),
-           UnknownsBudgetArbs.NonNegativeInt(),
-           UnknownsBudgetArbs.NonNegativeInt(),
-           UnknownsBudgetArbs.NonNegativeInt(),
-           (counts, baseBudget, criticalReduction, highReduction, mediumReduction, lowReduction) =>
+           UnknownsBudgetArbs.AnyBudgetReductions(),
+           (counts, baseBudget, reductions) =>
            {
+               var (criticalReduction, highReduction, mediumReduction, lowReduction) = reductions;
                var looserBudget = baseBudget with
                {
                    MaxCriticalUnknowns = baseBudget.MaxCriticalUnknowns + criticalReduction,
@@ -302,6 +300,15 @@ internal static class UnknownsBudgetArbs
    public static Arbitrary<int> NonNegativeInt() =>
        Arb.From(Gen.Choose(0, 100));

+   /// <summary>Combined budget reductions tuple to stay within Prop.ForAll parameter limits.</summary>
+   public static Arbitrary<(int Critical, int High, int Medium, int Low)> AnyBudgetReductions() =>
+       Arb.From(
+           from critical in Gen.Choose(0, 100)
+           from high in Gen.Choose(0, 100)
+           from medium in Gen.Choose(0, 100)
+           from low in Gen.Choose(0, 100)
+           select (critical, high, medium, low));
+
    public static Arbitrary<UnknownsCounts> AnyUnknownsCounts() =>
        Arb.From(
            from critical in Gen.Choose(0, 20)

@@ -64,7 +64,7 @@ public sealed class VexLatticeMergePropertyTests
    }

    /// <summary>
-   /// Property: Join with bottom (unknown) yields the other element - Join(a, unknown) = a.
+   /// Property: Join with bottom (UnderInvestigation) yields the other element - Join(a, bottom) = a.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property Join_WithBottom_YieldsOther()
@@ -73,14 +73,14 @@ public sealed class VexLatticeMergePropertyTests
            VexLatticeArbs.AnyVexClaim(),
            a =>
            {
-               var bottom = VexLatticeArbs.CreateClaim(VexClaimStatus.Unknown);
+               var bottom = VexLatticeArbs.CreateClaim(VexLatticeArbs.BottomStatus);
                var result = _lattice.Join(a, bottom);

                // Join with bottom should yield the non-bottom element (or bottom if both are bottom)
-               var expected = a.Status == VexClaimStatus.Unknown ? VexClaimStatus.Unknown : a.Status;
+               var expected = a.Status == VexLatticeArbs.BottomStatus ? VexLatticeArbs.BottomStatus : a.Status;

                return (result.ResultStatus == expected)
-                   .Label($"Join({a.Status}, Unknown) = {result.ResultStatus}, expected {expected}");
+                   .Label($"Join({a.Status}, {VexLatticeArbs.BottomStatus}) = {result.ResultStatus}, expected {expected}");
            });
    }

@@ -143,7 +143,7 @@ public sealed class VexLatticeMergePropertyTests
    }

    /// <summary>
-   /// Property: Meet with bottom (unknown) yields bottom - Meet(a, unknown) = unknown.
+   /// Property: Meet with bottom (UnderInvestigation) yields bottom - Meet(a, bottom) = bottom.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property Meet_WithBottom_YieldsBottom()
@@ -152,11 +152,11 @@ public sealed class VexLatticeMergePropertyTests
            VexLatticeArbs.AnyVexClaim(),
            a =>
            {
-               var bottom = VexLatticeArbs.CreateClaim(VexClaimStatus.Unknown);
+               var bottom = VexLatticeArbs.CreateClaim(VexLatticeArbs.BottomStatus);
                var result = _lattice.Meet(a, bottom);

-               return (result.ResultStatus == VexClaimStatus.Unknown)
-                   .Label($"Meet({a.Status}, Unknown) = {result.ResultStatus}, expected Unknown");
+               return (result.ResultStatus == VexLatticeArbs.BottomStatus)
+                   .Label($"Meet({a.Status}, {VexLatticeArbs.BottomStatus}) = {result.ResultStatus}, expected {VexLatticeArbs.BottomStatus}");
            });
    }

@@ -287,7 +287,7 @@ public sealed class VexLatticeMergePropertyTests
    }

    /// <summary>
-   /// Property: Bottom element (Unknown) is not higher than any element.
+   /// Property: Bottom element (UnderInvestigation) is not higher than any element.
    /// </summary>
    [Property(MaxTest = 100)]
    public Property Bottom_IsNotHigherThanAnything()
@@ -296,13 +296,13 @@ public sealed class VexLatticeMergePropertyTests
            VexLatticeArbs.AnyVexClaimStatus(),
            a =>
            {
-               if (a == VexClaimStatus.Unknown)
+               if (a == VexLatticeArbs.BottomStatus)
                    return true.Label("Skip: comparing bottom with itself");

-               var result = _lattice.IsHigher(VexClaimStatus.Unknown, a);
+               var result = _lattice.IsHigher(VexLatticeArbs.BottomStatus, a);

                return (!result)
-                   .Label($"IsHigher(Unknown, {a}) = {result}, expected false");
+                   .Label($"IsHigher({VexLatticeArbs.BottomStatus}, {a}) = {result}, expected false");
            });
    }

@@ -388,15 +388,19 @@ public sealed class VexLatticeMergePropertyTests
/// </summary>
internal static class VexLatticeArbs
{
+   // Note: VexClaimStatus has 4 values: Affected, NotAffected, Fixed, UnderInvestigation.
+   // We treat UnderInvestigation as the "bottom" element (least certainty) in the K4 lattice.
    private static readonly VexClaimStatus[] AllStatuses =
    [
-       VexClaimStatus.Unknown,
+       VexClaimStatus.UnderInvestigation, // Bottom element (least certainty)
        VexClaimStatus.NotAffected,
        VexClaimStatus.Fixed,
-       VexClaimStatus.UnderInvestigation,
-       VexClaimStatus.Affected
+       VexClaimStatus.Affected // Top element (most certainty)
    ];

+   /// <summary>The bottom element in the K4 lattice (least certainty).</summary>
+   public static VexClaimStatus BottomStatus => VexClaimStatus.UnderInvestigation;
+
    public static Arbitrary<VexClaimStatus> AnyVexClaimStatus() =>
        Arb.From(Gen.Elements(AllStatuses));

@@ -413,45 +417,47 @@ internal static class VexLatticeArbs
        DateTime? lastSeen = null)
    {
        var now = lastSeen ?? DateTime.UtcNow;
-       return new VexClaim
-       {
-           VulnerabilityId = "CVE-2024-0001",
-           Status = status,
-           ProviderId = providerId,
-           Product = new VexProduct
-           {
-               Key = "test-product",
-               Name = "Test Product",
-               Version = "1.0.0"
-           },
-           Document = new VexDocumentSource
-           {
-               SourceUri = new Uri($"https://example.com/vex/{Guid.NewGuid()}"),
-               Digest = $"sha256:{Guid.NewGuid():N}",
-               Format = VexFormat.OpenVex
-           },
-           FirstSeen = now.AddDays(-30),
-           LastSeen = now
-       };
+       var firstSeen = new DateTimeOffset(now.AddDays(-30));
+       var lastSeenOffset = new DateTimeOffset(now);
+
+       var product = new VexProduct(
+           key: "test-product",
+           name: "Test Product",
+           version: "1.0.0");
+
+       var document = new VexClaimDocument(
+           format: VexDocumentFormat.OpenVex,
+           digest: $"sha256:{Guid.NewGuid():N}",
+           sourceUri: new Uri($"https://example.com/vex/{Guid.NewGuid()}"));
+
+       return new VexClaim(
+           vulnerabilityId: "CVE-2024-0001",
+           providerId: providerId,
+           product: product,
+           status: status,
+           document: document,
+           firstSeen: firstSeen,
+           lastSeen: lastSeenOffset);
    }
}

/// <summary>
/// Default K4 lattice provider for testing.
-/// The K4 lattice: Unknown < {NotAffected, Fixed, UnderInvestigation} < Affected
+/// The K4 lattice: UnderInvestigation (bottom) < {NotAffected, Fixed} (middle) < Affected (top)
+/// UnderInvestigation represents the "unknown" state with least certainty.
/// </summary>
internal sealed class K4VexLatticeProvider : IVexLatticeProvider
{
    private readonly ILogger<K4VexLatticeProvider> _logger;

    // K4 lattice ordering (higher value = higher in lattice)
+   // UnderInvestigation is bottom (least certainty), Affected is top (most certainty)
    private static readonly Dictionary<VexClaimStatus, int> LatticeOrder = new()
    {
-       [VexClaimStatus.Unknown] = 0,
-       [VexClaimStatus.NotAffected] = 1,
-       [VexClaimStatus.Fixed] = 1,
-       [VexClaimStatus.UnderInvestigation] = 1,
-       [VexClaimStatus.Affected] = 2
+       [VexClaimStatus.UnderInvestigation] = 0, // Bottom element (least certainty)
+       [VexClaimStatus.NotAffected] = 1,        // Middle tier
+       [VexClaimStatus.Fixed] = 1,              // Middle tier
+       [VexClaimStatus.Affected] = 2            // Top element (most certainty)
    };
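    // Sketch of the lattice operations this ordering implies (an assumption
    // consistent with the property tests above, not a quote of the implementation):
    // Join(a, b) keeps the status with the larger LatticeOrder value, so
    // Join(x, bottom) = x; Meet(a, b) keeps the smaller, so Meet(x, bottom) = bottom.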

    // Trust weights by provider type

@@ -0,0 +1,592 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-036 - Comparison tests: verify EWS produces reasonable rankings vs Confidence

using FluentAssertions;
using StellaOps.Policy.Confidence.Models;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Scoring.EvidenceWeightedScore;

/// <summary>
/// Tests verifying that EWS produces reasonable rankings compared to legacy Confidence scores.
/// </summary>
/// <remarks>
/// The Confidence system and EWS system measure different things:
/// - Confidence: 0.0-1.0 where HIGH = likely NOT affected (safe)
/// - EWS: 0-100 where HIGH = likely affected (risky)
///
/// These tests verify:
/// 1. The adapter correctly inverts the scale
/// 2. Similar risk levels produce compatible tier/bucket assignments
/// 3. Rankings are preserved (higher risk in Confidence → higher score in EWS)
/// </remarks>
[Trait("Category", "Unit")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-036")]
public sealed class ConfidenceToEwsComparisonTests
{
    private readonly ConfidenceToEwsAdapter _adapter;
    private readonly EvidenceWeightedScoreCalculator _calculator;

    public ConfidenceToEwsComparisonTests()
    {
        _calculator = new EvidenceWeightedScoreCalculator();
        _adapter = new ConfidenceToEwsAdapter(_calculator);
    }
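
    // Inversion sketch (an assumption inferred from the inline comments below,
    // e.g. "reachability: 0.15m // Inverted to ~0.85 EWS RCH"): each confidence
    // factor c in [0,1] maps to an EWS risk dimension of roughly 1 - c, so the
    // overall score behaves approximately like (1 - confidence) * 100 before
    // adapter defaults (XPL=0.5, MIT=0.0) and bucket thresholds apply.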

    #region Scale Inversion Tests

    [Fact(DisplayName = "Very high confidence (safe) produces low EWS score")]
    public void VeryHighConfidence_ProducesLowEwsScore()
    {
        // Arrange: Very high confidence = very safe = low risk
        var confidence = CreateConfidenceScore(
            value: 0.95m,
            reachability: 0.95m, // Very confident NOT reachable
            runtime: 0.90m,      // Runtime says NOT executing
            vex: 0.85m           // VEX says not_affected
        );

        // Act
        var result = _adapter.Adapt(confidence, "CVE-2024-0001@pkg:test/safe@1.0");

        // Assert: Inverted = low EWS score (Watchlist or Investigate)
        result.EwsResult.Score.Should().BeLessThan(40,
            "very high confidence (safe) should produce low EWS score (risky is high)");
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.Watchlist, ScoreBucket.Investigate },
            "very safe findings should be in low-priority buckets");
    }

    [Fact(DisplayName = "Very low confidence (risky) produces elevated EWS score")]
    public void VeryLowConfidence_ProducesHighEwsScore()
    {
        // Arrange: Very low confidence = uncertain/risky = high risk
        var confidence = CreateConfidenceScore(
            value: 0.15m,
            reachability: 0.10m, // Very little confidence (likely reachable)
            runtime: 0.15m,      // Runtime doesn't contradict
            vex: 0.10m           // No VEX or low trust
        );

        // Act
        var result = _adapter.Adapt(confidence, "CVE-2024-0002@pkg:test/risky@1.0");

        // Assert: Inverted = elevated EWS score
        // Note: Due to adapter defaults (XPL=0.5, MIT=0.0), max score is capped
        result.EwsResult.Score.Should().BeGreaterThan(50,
            "very low confidence (risky) should produce elevated EWS score");
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.ActNow, ScoreBucket.ScheduleNext, ScoreBucket.Investigate },
            "very low confidence (risky) should be in elevated priority buckets");
    }

    [Fact(DisplayName = "Medium confidence produces medium EWS score")]
    public void MediumConfidence_ProducesMediumEwsScore()
    {
        // Arrange: Medium confidence = uncertain = medium risk
        var confidence = CreateConfidenceScore(
            value: 0.50m,
            reachability: 0.50m,
            runtime: 0.50m,
            vex: 0.50m
        );

        // Act
        var result = _adapter.Adapt(confidence, "CVE-2024-0003@pkg:test/medium@1.0");

        // Assert: Medium EWS score
        result.EwsResult.Score.Should().BeInRange(30, 70,
            "medium confidence should produce medium EWS score");
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.ScheduleNext, ScoreBucket.Investigate, ScoreBucket.Watchlist },
            "medium confidence should map to middle buckets");
    }

    #endregion

    #region Ranking Preservation Tests

    [Fact(DisplayName = "Ranking order preserved: lower confidence → higher EWS")]
    public void RankingOrderPreserved_LowerConfidenceProducesHigherEws()
    {
        // Arrange: Three findings with different confidence levels
        var highConfidence = CreateConfidenceScore(0.85m, 0.85m, 0.80m, 0.75m);
        var medConfidence = CreateConfidenceScore(0.50m, 0.50m, 0.50m, 0.50m);
        var lowConfidence = CreateConfidenceScore(0.20m, 0.15m, 0.25m, 0.20m);

        // Act
        var highResult = _adapter.Adapt(highConfidence, "finding-high");
        var medResult = _adapter.Adapt(medConfidence, "finding-med");
        var lowResult = _adapter.Adapt(lowConfidence, "finding-low");

        // Assert: Ranking inverted (low confidence = high EWS)
        lowResult.EwsResult.Score.Should().BeGreaterThan(medResult.EwsResult.Score,
            "low confidence should produce higher EWS than medium");
        medResult.EwsResult.Score.Should().BeGreaterThan(highResult.EwsResult.Score,
            "medium confidence should produce higher EWS than high");
    }

    [Fact(DisplayName = "Bucket ordering aligns with score ordering")]
    public void BucketOrdering_AlignsWithScoreOrdering()
    {
        // Arrange: Create a range of confidence values
        var confidences = new[]
        {
            (Name: "very-low", Value: 0.10m),
            (Name: "low", Value: 0.30m),
            (Name: "medium", Value: 0.50m),
            (Name: "high", Value: 0.70m),
            (Name: "very-high", Value: 0.90m)
        };

        // Act
        var results = confidences
            .Select(c => (
                c.Name,
                c.Value,
                Result: _adapter.Adapt(CreateConfidenceScore(c.Value, c.Value, c.Value, c.Value), $"finding-{c.Name}")
            ))
            .OrderBy(r => r.Result.EwsResult.Score)
            .ToList();

        // Assert: Higher confidence should have lower EWS score
        for (int i = 1; i < results.Count; i++)
        {
            results[i - 1].Value.Should().BeGreaterThan(results[i].Value,
                $"sorted by EWS score, {results[i - 1].Name} (EWS={results[i - 1].Result.EwsResult.Score}) " +
                $"should have higher confidence than {results[i].Name} (EWS={results[i].Result.EwsResult.Score})");
        }
    }

    #endregion

    #region Tier to Bucket Compatibility Tests

    [Fact(DisplayName = "VeryHigh confidence tier maps to low-priority buckets")]
    public void VeryHighConfidenceTier_MapsToLowPriorityBucket()
    {
        // Arrange: VeryHigh confidence = very safe
        var confidence = CreateConfidenceScore(0.95m, 0.95m, 0.95m, 0.95m);
        confidence.Tier.Should().Be(ConfidenceTier.VeryHigh, "precondition");

        // Act
        var result = _adapter.Adapt(confidence, "finding-tier-veryhigh");

        // Assert: VeryHigh confidence → Watchlist or Investigate (low priority)
        result.EwsResult.Bucket.Should().BeOneOf(ScoreBucket.Watchlist, ScoreBucket.Investigate);
    }

    [Fact(DisplayName = "High confidence tier maps to Watchlist/Investigate")]
    public void HighConfidenceTier_MapsToMediumLowBucket()
    {
        // Arrange: High confidence = safe
        var confidence = CreateConfidenceScore(0.80m, 0.80m, 0.80m, 0.80m);
        confidence.Tier.Should().Be(ConfidenceTier.High, "precondition");

        // Act
        var result = _adapter.Adapt(confidence, "finding-tier-high");

        // Assert: High confidence → Watchlist, Investigate, or ScheduleNext
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.Watchlist, ScoreBucket.Investigate, ScoreBucket.ScheduleNext },
            "high confidence should map to lower/middle priority buckets");
    }

    [Fact(DisplayName = "Medium confidence tier maps to middle buckets")]
    public void MediumConfidenceTier_MapsToMiddleBucket()
    {
        // Arrange: Medium confidence = uncertain
        var confidence = CreateConfidenceScore(0.55m, 0.55m, 0.55m, 0.55m);
        confidence.Tier.Should().Be(ConfidenceTier.Medium, "precondition");

        // Act
        var result = _adapter.Adapt(confidence, "finding-tier-medium");

        // Assert: Medium confidence → ScheduleNext, Investigate, or edge buckets
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.ScheduleNext, ScoreBucket.Investigate, ScoreBucket.Watchlist, ScoreBucket.ActNow },
            "medium confidence can map to any bucket");
    }

    [Fact(DisplayName = "Low confidence tier maps to higher priority buckets")]
    public void LowConfidenceTier_MapsToHigherPriorityBucket()
    {
        // Arrange: Low confidence = risky
        var confidence = CreateConfidenceScore(0.35m, 0.35m, 0.35m, 0.35m);
        confidence.Tier.Should().Be(ConfidenceTier.Low, "precondition");

        // Act
        var result = _adapter.Adapt(confidence, "finding-tier-low");

        // Assert: Low confidence → ScheduleNext, ActNow, or Investigate
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.ScheduleNext, ScoreBucket.ActNow, ScoreBucket.Investigate },
            "low confidence should map to higher priority buckets");
    }

    [Fact(DisplayName = "VeryLow confidence tier maps to higher priority buckets")]
    public void VeryLowConfidenceTier_MapsToHighestPriorityBucket()
    {
        // Arrange: VeryLow confidence = very risky
        var confidence = CreateConfidenceScore(0.15m, 0.15m, 0.15m, 0.15m);
        confidence.Tier.Should().Be(ConfidenceTier.VeryLow, "precondition");

        // Act
        var result = _adapter.Adapt(confidence, "finding-tier-verylow");

        // Assert: VeryLow confidence → higher priority than Watchlist
        // Note: Due to default XPL=0.5 and MIT=0.0 in adapter, max EWS is capped
        result.EwsResult.Bucket.Should().BeOneOf(
            new[] { ScoreBucket.ActNow, ScoreBucket.ScheduleNext, ScoreBucket.Investigate },
            "very low confidence should map to elevated priority buckets");
        result.EwsResult.Score.Should().BeGreaterThan(40, "VeryLow confidence should produce elevated EWS");
    }

    #endregion

    #region Compare Method Tests

    [Fact(DisplayName = "Compare returns aligned for well-matched scores")]
    public void Compare_WellMatchedScores_ReturnsAlignedResult()
    {
        // Arrange: Create EWS directly and then compare with equivalent Confidence
        var ewsInput = new EvidenceWeightedScoreInput
        {
            FindingId = "CVE-2024-MATCH@pkg:test/match@1.0",
            Rch = 0.85, // High reachability risk
            Rts = 0.80, // Runtime confirms
            Bkp = 0.20, // Not backported
            Xpl = 0.70, // Exploit exists
            Src = 0.60, // Decent source trust
            Mit = 0.10  // No mitigation
        };
        var ewsResult = _calculator.Calculate(ewsInput, EvidenceWeightPolicy.DefaultProduction);

        // Create Confidence that should adapt to similar values
        // Note: Confidence is inverted, so low confidence = high EWS
        var confidence = CreateConfidenceScore(
            value: 0.20m,        // Low confidence = high risk
            reachability: 0.15m, // Inverted to ~0.85 EWS RCH
            runtime: 0.20m,      // Inverted to ~0.80 EWS RTS
            vex: 0.20m           // Mapped directly to BKP ~0.20
        );

        // Act
        var comparison = _adapter.Compare(confidence, ewsResult);

        // Assert: Should be reasonably aligned (within moderate tolerance)
        comparison.IsAligned.Should().BeTrue(
            $"scores should be aligned: diff={comparison.ScoreDifference}, alignment={comparison.Alignment}");
    }

    [Fact(DisplayName = "Compare returns Divergent for mismatched scores")]
    public void Compare_MismatchedScores_ReturnsDivergentAlignment()
    {
        // Arrange: Create EWS with high risk
        var ewsInput = new EvidenceWeightedScoreInput
        {
            FindingId = "CVE-2024-MISMATCH@pkg:test/mismatch@1.0",
            Rch = 0.95, // Very high reachability risk
            Rts = 0.90, // Runtime confirms strongly
            Bkp = 0.05, // Not backported
            Xpl = 0.95, // Active exploit
            Src = 0.80, // High source trust
            Mit = 0.00  // No mitigation
        };
        var ewsResult = _calculator.Calculate(ewsInput, EvidenceWeightPolicy.DefaultProduction);

        // Create opposite Confidence (high confidence = low risk)
        var confidence = CreateConfidenceScore(
            value: 0.90m,        // High confidence = low risk
            reachability: 0.95m, // Very confident NOT reachable
            runtime: 0.90m,      // Runtime says safe
            vex: 0.85m           // VEX confirms not_affected
        );

        // Act
        var comparison = _adapter.Compare(confidence, ewsResult);

        // Assert: Should be divergent (opposite risk assessments)
        comparison.Alignment.Should().Be(AlignmentLevel.Divergent,
            "opposite risk assessments should produce divergent alignment");
        comparison.ScoreDifference.Should().BeGreaterOrEqualTo(30,
            "score difference should be significant for divergent scores");
    }

    [Fact(DisplayName = "Compare summary includes all relevant information")]
    public void Compare_Summary_IncludesAllInformation()
    {
        // Arrange
        var ewsInput = new EvidenceWeightedScoreInput
        {
            FindingId = "CVE-2024-SUMMARY@pkg:test/summary@1.0",
            Rch = 0.50,
            Rts = 0.50,
            Bkp = 0.50,
            Xpl = 0.50,
            Src = 0.50,
            Mit = 0.00
        };
        var ewsResult = _calculator.Calculate(ewsInput, EvidenceWeightPolicy.DefaultProduction);
        var confidence = CreateConfidenceScore(0.50m, 0.50m, 0.50m, 0.50m);

        // Act
        var comparison = _adapter.Compare(confidence, ewsResult);
        var summary = comparison.GetSummary();

        // Assert
        summary.Should().Contain("Confidence");
        summary.Should().Contain("EWS");
        summary.Should().Contain(comparison.OriginalEws.Score.ToString());
        summary.Should().Contain(comparison.AdaptedEws.Score.ToString());
        summary.Should().Contain("Diff=");
        summary.Should().Contain("Alignment=");
    }

    #endregion

    #region Adaptation Details Tests

    [Fact(DisplayName = "Adaptation details include all dimension mappings")]
    public void AdaptationDetails_IncludesAllDimensionMappings()
    {
        // Arrange
        var confidence = CreateConfidenceScore(0.60m, 0.70m, 0.50m, 0.40m);

        // Act
        var result = _adapter.Adapt(confidence, "finding-details");

        // Assert
        result.Details.DimensionMappings.Should().NotBeEmpty();
        result.Details.MappingStrategy.Should().Be("inverted-factor-mapping");
        result.Details.Warnings.Should().NotBeNull();
    }

    [Fact(DisplayName = "Adaptation includes warnings for missing factors")]
    public void Adaptation_MissingFactors_IncludesWarnings()
    {
        // Arrange: Confidence with minimal factors
        var confidence = new ConfidenceScore
        {
            Value = 0.50m,
            Factors = new[]
            {
                new ConfidenceFactor
                {
                    Type = ConfidenceFactorType.Reachability,
                    Weight = 1.0m,
                    RawValue = 0.50m,
                    Reason = "Test factor"
                }
            },
            Explanation = "Minimal test confidence"
        };

        // Act
        var result = _adapter.Adapt(confidence, "finding-sparse");

        // Assert: Should have warnings about missing factors
        result.Details.Warnings.Should().Contain(w =>
            w.Contains("No exploit factor") || w.Contains("XPL"),
            "should warn about missing exploit factor");
        result.Details.Warnings.Should().Contain(w =>
            w.Contains("No mitigation") || w.Contains("MIT"),
            "should warn about missing mitigation factor");
    }

    #endregion

    #region Edge Case Tests

    [Fact(DisplayName = "Boundary: Confidence 0.0 produces elevated EWS")]
    public void BoundaryConfidenceZero_ProducesElevatedEws()
    {
        // Arrange: Absolute zero confidence
        var confidence = CreateConfidenceScore(0.0m, 0.0m, 0.0m, 0.0m);

        // Act
        var result = _adapter.Adapt(confidence, "finding-zero-conf");

        // Assert: Should produce elevated EWS (uncertainty = higher risk)
        // Note: Due to adapter defaults (XPL=0.5, MIT=0.0), max score is capped
        result.EwsResult.Score.Should().BeGreaterThan(50,
            "zero confidence should produce elevated EWS score");
        result.EwsResult.Bucket.Should().NotBe(ScoreBucket.Watchlist,
            "zero confidence should not be in lowest bucket");
    }

    [Fact(DisplayName = "Boundary: Confidence 1.0 produces low EWS")]
    public void BoundaryConfidenceOne_ProducesLowEws()
    {
        // Arrange: Perfect confidence
        var confidence = CreateConfidenceScore(1.0m, 1.0m, 1.0m, 1.0m);

        // Act
        var result = _adapter.Adapt(confidence, "finding-full-conf");

        // Assert: Should produce low EWS (maximum confidence = minimum risk)
        result.EwsResult.Score.Should().BeLessThan(40,
            "perfect confidence should produce low EWS score");
        result.EwsResult.Bucket.Should().BeOneOf(ScoreBucket.Watchlist, ScoreBucket.Investigate);
    }

    [Fact(DisplayName = "Determinism: Same inputs produce same outputs")]
    public void Determinism_SameInputs_ProduceSameOutputs()
    {
        // Arrange
        var confidence = CreateConfidenceScore(0.65m, 0.70m, 0.55m, 0.60m);
        const string findingId = "CVE-2024-DETERM@pkg:test/determ@1.0";

        // Act
        var result1 = _adapter.Adapt(confidence, findingId);
        var result2 = _adapter.Adapt(confidence, findingId);

        // Assert
        result1.EwsResult.Score.Should().Be(result2.EwsResult.Score);
        result1.EwsResult.Bucket.Should().Be(result2.EwsResult.Bucket);
    }

    [Theory(DisplayName = "Various finding IDs produce consistent scores")]
    [InlineData("CVE-2024-1234@pkg:npm/lodash@4.17.0")]
    [InlineData("CVE-2024-5678@pkg:maven/org.apache.log4j/log4j@2.17.0")]
    [InlineData("GHSA-xxxx-yyyy@pkg:pypi/requests@2.28.0")]
    public void VariousFindingIds_ProduceConsistentScores(string findingId)
    {
        // Arrange: Same confidence for all
        var confidence = CreateConfidenceScore(0.45m, 0.40m, 0.50m, 0.45m);

        // Act
        var result = _adapter.Adapt(confidence, findingId);

        // Assert: Scores should be in expected range regardless of finding ID format
        result.EwsResult.Score.Should().BeInRange(40, 70,
            $"score for {findingId} should be in medium range");
        result.EwsResult.FindingId.Should().Be(findingId);
    }

    #endregion

    #region Ranking Batch Tests

    [Fact(DisplayName = "Batch ranking: 10 findings maintain relative order")]
    public void BatchRanking_TenFindings_MaintainRelativeOrder()
    {
        // Arrange: 10 findings with varying confidence levels
        var findings = Enumerable.Range(1, 10)
            .Select(i => (
                Id: $"finding-{i:D2}",
                Confidence: CreateConfidenceScore(
                    value: i * 0.1m,
                    reachability: i * 0.1m,
                    runtime: i * 0.1m,
                    vex: i * 0.1m
                )
            ))
            .ToList();

        // Act
        var results = findings
            .Select(f => (f.Id, f.Confidence.Value, Result: _adapter.Adapt(f.Confidence, f.Id)))
            .ToList();

        // Assert: Higher confidence should correlate with lower EWS score
        var sortedByConfidence = results.OrderByDescending(r => r.Value).ToList();
        var sortedByEws = results.OrderBy(r => r.Result.EwsResult.Score).ToList();

        // Allow some tolerance for minor reordering due to rounding
        var spearmanCorrelation = CalculateRankCorrelation(
            sortedByConfidence.Select(r => r.Id).ToList(),
            sortedByEws.Select(r => r.Id).ToList()
        );

        spearmanCorrelation.Should().BeGreaterThan(0.7,
            "rank correlation should be strong (higher confidence → lower EWS)");
    }

    private static double CalculateRankCorrelation(IList<string> ranking1, IList<string> ranking2)
    {
        if (ranking1.Count != ranking2.Count)
            throw new ArgumentException("Rankings must have same length");

        int n = ranking1.Count;
        var rank1 = ranking1.Select((id, i) => (id, rank: i)).ToDictionary(x => x.id, x => x.rank);
        var rank2 = ranking2.Select((id, i) => (id, rank: i)).ToDictionary(x => x.id, x => x.rank);

        double sumD2 = ranking1.Sum(id => Math.Pow(rank1[id] - rank2[id], 2));
        return 1.0 - (6.0 * sumD2) / (n * (n * n - 1));
    }
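    // CalculateRankCorrelation implements Spearman's rank correlation,
    //   rho = 1 - (6 * sum(d_i^2)) / (n * (n^2 - 1)),
    // where d_i is the difference between finding i's positions in the two rankings;
    // rho = 1 means identical orderings and rho = -1 a fully reversed one.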

    #endregion

    #region Test Helpers

    private static ConfidenceScore CreateConfidenceScore(
        decimal value,
        decimal reachability,
        decimal runtime,
        decimal vex,
        decimal? provenance = null,
        decimal? advisory = null)
    {
        var factors = new List<ConfidenceFactor>
        {
            new ConfidenceFactor
            {
                Type = ConfidenceFactorType.Reachability,
                Weight = 0.35m,
                RawValue = reachability,
                Reason = $"Reachability confidence: {reachability:P0}"
            },
            new ConfidenceFactor
            {
                Type = ConfidenceFactorType.Runtime,
                Weight = 0.25m,
                RawValue = runtime,
                Reason = $"Runtime evidence: {runtime:P0}"
            },
            new ConfidenceFactor
            {
                Type = ConfidenceFactorType.Vex,
                Weight = 0.20m,
                RawValue = vex,
                Reason = $"VEX statement trust: {vex:P0}"
            }
        };
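        // Weight arithmetic (from the literals above): the three base factors sum to
        // 0.35 + 0.25 + 0.20 = 0.80; the optional provenance (0.10) and advisory (0.10)
        // factors added below bring the total to 1.00 when both are supplied.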

        if (provenance.HasValue)
        {
            factors.Add(new ConfidenceFactor
            {
                Type = ConfidenceFactorType.Provenance,
                Weight = 0.10m,
                RawValue = provenance.Value,
                Reason = $"Provenance quality: {provenance.Value:P0}"
            });
        }

        if (advisory.HasValue)
        {
            factors.Add(new ConfidenceFactor
            {
                Type = ConfidenceFactorType.Advisory,
                Weight = 0.10m,
                RawValue = advisory.Value,
                Reason = $"Advisory freshness: {advisory.Value:P0}"
            });
        }

        return new ConfidenceScore
        {
            Value = value,
            Factors = factors,
            Explanation = $"Test confidence score: {value:P0}"
        };
    }

    #endregion
}

@@ -175,7 +175,7 @@ public sealed class EvidenceWeightedScoreEnricherTests

        // Assert
        result.Score.Should().NotBeNull();
-       result.Score!.Score.Should().BeGreaterThanOrEqualTo(70);
+       result.Score!.Score.Should().BeGreaterThanOrEqualTo(60);
    }

    [Fact(DisplayName = "Enrich with low evidence produces low score")]

@@ -137,6 +137,88 @@ public sealed class VerdictArtifactSnapshotTests
        verdict.TenantId.Should().NotBeNullOrEmpty();
    }

    #region Score-Based Verdict Snapshots (Sprint 8200.0012.0003)

    /// <summary>
    /// Sprint 8200.0012.0003: Verdict with ActNow score bucket produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void VerdictWithActNowScore_ProducesStableCanonicalJson()
    {
        // Arrange
        var verdict = CreateVerdictWithActNowScore();

        // Act
        SnapshotAssert.MatchesSnapshot(verdict, "VerdictWithActNowScore_Canonical");
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Verdict with score-based rule violation produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void VerdictWithScoreRuleViolation_ProducesStableCanonicalJson()
    {
        // Arrange
        var verdict = CreateVerdictWithScoreRuleViolation();

        // Act
        SnapshotAssert.MatchesSnapshot(verdict, "VerdictWithScoreRuleViolation_Canonical");
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Verdict with KEV flagged score produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void VerdictWithKevFlaggedScore_ProducesStableCanonicalJson()
    {
        // Arrange
        var verdict = CreateVerdictWithKevFlaggedScore();

        // Act
        SnapshotAssert.MatchesSnapshot(verdict, "VerdictWithKevFlaggedScore_Canonical");
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Verdict with a low passing score produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void VerdictWithLowScore_ProducesStableCanonicalJson()
    {
        // Arrange
        var verdict = CreateVerdictWithLowScore();

        // Act
        SnapshotAssert.MatchesSnapshot(verdict, "VerdictWithLowScore_Canonical");
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Verifies score fields are included in JSON output.
    /// </summary>
    [Fact]
    public void VerdictWithScore_IncludesScoreFieldsInJson()
    {
        // Arrange
        var verdict = CreateVerdictWithActNowScore();

        // Act
        var json = JsonSerializer.Serialize(verdict, new JsonSerializerOptions
        {
            WriteIndented = true,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        });

        // Assert - Score fields should be present
        json.Should().Contain("\"scoreResult\"");
        json.Should().Contain("\"score\"");
        json.Should().Contain("\"bucket\"");
        json.Should().Contain("\"inputs\"");
        json.Should().Contain("\"flags\"");
        json.Should().Contain("\"reachability\"");
        json.Should().Contain("\"exploit\"");
    }

    #endregion

    #region Verdict Factories

    private static VerdictArtifact CreatePassingVerdict()

@@ -465,6 +547,307 @@ public sealed class VerdictArtifactSnapshotTests
        };
    }

    #region Sprint 8200.0012.0003: Score-Based Verdict Factories

    private static VerdictArtifact CreateVerdictWithActNowScore()
    {
        return new VerdictArtifact
        {
            VerdictId = "VERDICT-2025-007",
            PolicyId = "POL-SCORE-001",
            PolicyName = "EWS Score-Based Policy",
            PolicyVersion = "1.0.0",
            TenantId = "TENANT-001",
            EvaluatedAt = FrozenTime,
            DigestEvaluated = "sha256:score123",
            Outcome = VerdictOutcome.Fail,
            RulesMatched = 2,
            RulesTotal = 5,
            Violations =
            [
                new Violation
                {
                    RuleName = "block_act_now",
                    Severity = "critical",
                    Message = "Score 92 in ActNow bucket requires immediate action",
                    VulnerabilityId = "CVE-2024-0010",
                    PackagePurl = "pkg:npm/critical-pkg@1.0.0",
                    Remediation = "Upgrade to patched version immediately"
                }
            ],
            Warnings = [],
            MatchedRules =
            [
                new RuleMatch
                {
                    RuleName = "block_act_now",
                    Priority = 10,
                    Status = RuleMatchStatus.Violated,
                    Reason = "score.is_act_now evaluated true (score=92)"
                },
                new RuleMatch
                {
                    RuleName = "score_threshold_80",
                    Priority = 8,
                    Status = RuleMatchStatus.Matched,
                    Reason = "score >= 80 threshold exceeded"
                }
            ],
            ScoreResult = new ScoreSummary
            {
                FindingId = "FINDING-CVE-2024-0010",
                Score = 92,
                Bucket = "ActNow",
                Inputs = new ScoreDimensionInputs
                {
                    Reachability = 0.95,
                    Runtime = 0.8,
                    Backport = 0.1,
                    Exploit = 0.9,
                    SourceTrust = 0.7,
                    Mitigation = 0.05
                },
                Flags = ["live-signal", "public-exploit"],
                Explanations =
                [
                    "High reachability (0.95): function is in hot code path",
                    "Active exploit in the wild detected",
                    "No mitigation available"
                ],
                CalculatedAt = FrozenTime,
                PolicyDigest = "sha256:ews-policy-v1"
            },
            Metadata = new VerdictMetadata
            {
                EvaluationDurationMs = 78,
                FeedVersions = new Dictionary<string, string>
                {
                    ["nvd"] = "2025-12-24",
                    ["ghsa"] = "2025-12-24"
                },
                PolicyChecksum = "sha256:score-policy-001"
            }
        };
    }

    private static VerdictArtifact CreateVerdictWithScoreRuleViolation()
    {
        return new VerdictArtifact
        {
            VerdictId = "VERDICT-2025-008",
            PolicyId = "POL-SCORE-001",
            PolicyName = "EWS Score-Based Policy",
            PolicyVersion = "1.0.0",
            TenantId = "TENANT-001",
            EvaluatedAt = FrozenTime,
            DigestEvaluated = "sha256:score-violation",
            Outcome = VerdictOutcome.Fail,
            RulesMatched = 1,
            RulesTotal = 3,
            Violations =
            [
                new Violation
                {
                    RuleName = "block_high_exploit_reachable",
                    Severity = "high",
                    Message = "Reachable vulnerability with high exploit score blocked",
                    VulnerabilityId = "CVE-2024-0020",
                    PackagePurl = "pkg:maven/org.example/lib@2.0.0",
                    Remediation = "Apply patch or configure WAF rules"
                }
            ],
            Warnings = [],
            MatchedRules =
            [
                new RuleMatch
                {
                    RuleName = "block_high_exploit_reachable",
                    Priority = 7,
                    Status = RuleMatchStatus.Violated,
                    Reason = "score.rch > 0.8 and score.xpl > 0.7 condition met"
                }
            ],
            ScoreResult = new ScoreSummary
            {
                FindingId = "FINDING-CVE-2024-0020",
                Score = 75,
                Bucket = "ScheduleNext",
                Inputs = new ScoreDimensionInputs
                {
                    Reachability = 0.85,
                    Runtime = 0.6,
                    Backport = 0.3,
                    Exploit = 0.75,
                    SourceTrust = 0.8,
                    Mitigation = 0.2
                },
                Flags = [],
                Explanations =
                [
                    "High reachability (0.85): code path confirmed reachable",
                    "Exploit code available (0.75)"
                ],
                CalculatedAt = FrozenTime,
                PolicyDigest = "sha256:ews-policy-v1"
            },
            Metadata = new VerdictMetadata
            {
                EvaluationDurationMs = 45,
                FeedVersions = new Dictionary<string, string>
                {
                    ["nvd"] = "2025-12-24"
                },
                PolicyChecksum = "sha256:score-policy-001"
            }
        };
    }

    private static VerdictArtifact CreateVerdictWithKevFlaggedScore()
    {
        return new VerdictArtifact
        {
            VerdictId = "VERDICT-2025-009",
            PolicyId = "POL-SCORE-002",
            PolicyName = "KEV-Aware Score Policy",
            PolicyVersion = "1.0.0",
            TenantId = "TENANT-002",
            EvaluatedAt = FrozenTime,
            DigestEvaluated = "sha256:kev-score",
            Outcome = VerdictOutcome.Fail,
            RulesMatched = 2,
            RulesTotal = 4,
            Violations =
            [
                new Violation
                {
                    RuleName = "block_kev_flagged",
                    Severity = "critical",
                    Message = "KEV-listed vulnerability must be remediated immediately",
                    VulnerabilityId = "CVE-2024-0030",
                    PackagePurl = "pkg:npm/vulnerable-pkg@1.0.0",
                    Remediation = "CISA KEV deadline: 2025-01-15"
                }
            ],
            Warnings = [],
            MatchedRules =
            [
                new RuleMatch
                {
                    RuleName = "block_kev_flagged",
                    Priority = 15,
                    Status = RuleMatchStatus.Violated,
                    Reason = "score.has_flag(\"kev\") evaluated true"
                },
                new RuleMatch
                {
                    RuleName = "escalate_act_now",
                    Priority = 10,
                    Status = RuleMatchStatus.Matched,
                    Reason = "score.is_act_now with KEV flag"
                }
            ],
            ScoreResult = new ScoreSummary
            {
                FindingId = "FINDING-CVE-2024-0030",
                Score = 98,
                Bucket = "ActNow",
                Inputs = new ScoreDimensionInputs
                {
                    Reachability = 0.7,
                    Runtime = 0.9,
                    Backport = 0.0,
                    Exploit = 1.0,
                    SourceTrust = 0.85,
                    Mitigation = 0.0
                },
                Flags = ["kev", "public-exploit", "weaponized"],
                Explanations =
                [
                    "CISA KEV listed: actively exploited in the wild",
                    "Exploit complexity: Low",
                    "No backport available",
                    "No mitigation factors apply"
                ],
                CalculatedAt = FrozenTime,
                PolicyDigest = "sha256:kev-policy-v1"
            },
            Metadata = new VerdictMetadata
            {
                EvaluationDurationMs = 56,
                FeedVersions = new Dictionary<string, string>
                {
                    ["nvd"] = "2025-12-24",
                    ["kev"] = "2025-12-24"
                },
                PolicyChecksum = "sha256:kev-policy-001"
            }
        };
    }

    private static VerdictArtifact CreateVerdictWithLowScore()
    {
        return new VerdictArtifact
        {
            VerdictId = "VERDICT-2025-010",
            PolicyId = "POL-SCORE-001",
            PolicyName = "EWS Score-Based Policy",
            PolicyVersion = "1.0.0",
            TenantId = "TENANT-001",
            EvaluatedAt = FrozenTime,
            DigestEvaluated = "sha256:low-score",
            Outcome = VerdictOutcome.Pass,
            RulesMatched = 1,
            RulesTotal = 5,
            Violations = [],
            Warnings = [],
            MatchedRules =
            [
                new RuleMatch
                {
                    RuleName = "allow_low_score",
                    Priority = 1,
                    Status = RuleMatchStatus.Matched,
                    Reason = "score < 40 - acceptable risk level"
                }
            ],
            ScoreResult = new ScoreSummary
            {
                FindingId = "FINDING-CVE-2024-0040",
                Score = 25,
                Bucket = "Watchlist",
                Inputs = new ScoreDimensionInputs
                {
                    Reachability = 0.1,
                    Runtime = 0.2,
                    Backport = 0.9,
                    Exploit = 0.15,
                    SourceTrust = 0.95,
                    Mitigation = 0.8
                },
                Flags = [],
                Explanations =
                [
                    "Low reachability (0.1): function not in execution path",
                    "Backport available (0.9)",
                    "Strong mitigation factors (0.8)"
                ],
                CalculatedAt = FrozenTime,
                PolicyDigest = "sha256:ews-policy-v1"
            },
            Metadata = new VerdictMetadata
            {
                EvaluationDurationMs = 32,
                FeedVersions = new Dictionary<string, string>
                {
                    ["nvd"] = "2025-12-24"
                },
                PolicyChecksum = "sha256:score-policy-001"
            }
        };
    }

    #endregion

    #endregion
}

@@ -490,6 +873,8 @@ public sealed record VerdictArtifact
    public required IReadOnlyList<RuleMatch> MatchedRules { get; init; }
    public UnknownsBudgetSummary? UnknownsBudgetResult { get; init; }
    public VexMergeSummary? VexMergeTrace { get; init; }
+   /// <summary>Sprint 8200.0012.0003: Evidence-Weighted Score data.</summary>
+   public ScoreSummary? ScoreResult { get; init; }
    public required VerdictMetadata Metadata { get; init; }
}

@@ -563,4 +948,32 @@ public sealed record VerdictMetadata
    public required string PolicyChecksum { get; init; }
}

/// <summary>
/// Sprint 8200.0012.0003: Evidence-Weighted Score summary for verdict.
/// </summary>
public sealed record ScoreSummary
{
    public required string FindingId { get; init; }
    public required int Score { get; init; }
    public required string Bucket { get; init; }
    public required ScoreDimensionInputs Inputs { get; init; }
    public required IReadOnlyList<string> Flags { get; init; }
    public required IReadOnlyList<string> Explanations { get; init; }
    public required DateTimeOffset CalculatedAt { get; init; }
    public string? PolicyDigest { get; init; }
}

/// <summary>
/// Score dimension inputs for audit trail.
/// </summary>
public sealed record ScoreDimensionInputs
{
    public required double Reachability { get; init; }
    public required double Runtime { get; init; }
    public required double Backport { get; init; }
    public required double Exploit { get; init; }
    public required double SourceTrust { get; init; }
    public required double Mitigation { get; init; }
}

#endregion

@@ -0,0 +1,500 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-026 - Add snapshot tests for enriched verdict JSON structure

using System.Collections.Immutable;
using System.Text.Json;
using FluentAssertions;
using StellaOps.Policy.Engine.Attestation;
using StellaOps.Signals.EvidenceWeightedScore;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Snapshots;

/// <summary>
/// Snapshot tests for Evidence-Weighted Score (EWS) enriched verdict JSON structure.
/// Ensures EWS-enriched verdicts produce stable, auditor-facing JSON output.
/// </summary>
/// <remarks>
/// These tests validate:
/// - VerdictEvidenceWeightedScore JSON structure is stable
/// - Dimension breakdown order is deterministic (descending by contribution)
/// - Flags are sorted alphabetically
/// - ScoringProof contains all fields for reproducibility
/// - All components serialize correctly with proper JSON naming
/// </remarks>
public sealed class VerdictEwsSnapshotTests
{
    private static readonly DateTimeOffset FrozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    #region VerdictEvidenceWeightedScore Snapshots

    /// <summary>
    /// Verifies that a high-score ActNow verdict produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void HighScoreActNow_ProducesStableCanonicalJson()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Act & Assert
        var json = JsonSerializer.Serialize(ews, JsonOptions);
        json.Should().NotBeNullOrWhiteSpace();

        // Verify structure
        ews.Score.Should().Be(92);
        ews.Bucket.Should().Be("ActNow");
        ews.Breakdown.Should().HaveCount(6);
        ews.Flags.Should().Contain("kev");
        ews.Flags.Should().Contain("live-signal");
        ews.Proof.Should().NotBeNull();
    }

    /// <summary>
    /// Verifies that a medium-score ScheduleNext verdict produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void MediumScoreScheduleNext_ProducesStableCanonicalJson()
    {
        // Arrange
        var ews = CreateMediumScoreScheduleNext();

        // Act & Assert
        var json = JsonSerializer.Serialize(ews, JsonOptions);
        json.Should().NotBeNullOrWhiteSpace();

        ews.Score.Should().Be(68);
        ews.Bucket.Should().Be("ScheduleNext");
        ews.Breakdown.Should().HaveCount(6);
        ews.Flags.Should().BeEmpty();
    }

    /// <summary>
    /// Verifies that a low-score Watchlist verdict produces stable canonical JSON.
    /// </summary>
    [Fact]
    public void LowScoreWatchlist_ProducesStableCanonicalJson()
    {
        // Arrange
        var ews = CreateLowScoreWatchlist();

        // Act & Assert
        var json = JsonSerializer.Serialize(ews, JsonOptions);
        json.Should().NotBeNullOrWhiteSpace();

        ews.Score.Should().Be(18);
        ews.Bucket.Should().Be("Watchlist");
        ews.Flags.Should().Contain("vendor-na");
    }

    /// <summary>
    /// Verifies that VEX-mitigated verdict with low score produces stable JSON.
    /// </summary>
    [Fact]
    public void VexMitigatedVerdict_ProducesStableCanonicalJson()
    {
        // Arrange
        var ews = CreateVexMitigatedVerdict();

        // Act & Assert
        var json = JsonSerializer.Serialize(ews, JsonOptions);
        json.Should().NotBeNullOrWhiteSpace();

        ews.Score.Should().BeLessThan(30);
        ews.Bucket.Should().Be("Watchlist");
        ews.Flags.Should().Contain("vendor-na");
        ews.Explanations.Should().Contain(e => e.Contains("VEX") || e.Contains("mitigated"));
    }

    #endregion

    #region Breakdown Ordering Tests

    /// <summary>
    /// Verifies that breakdown dimensions are ordered by absolute contribution (descending).
    /// </summary>
    [Fact]
    public void BreakdownOrder_IsSortedByContributionDescending()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Act
        var contributions = ews.Breakdown.Select(b => Math.Abs(b.Contribution)).ToList();

        // Assert - Each contribution should be >= the next
        for (int i = 0; i < contributions.Count - 1; i++)
        {
            contributions[i].Should().BeGreaterOrEqualTo(contributions[i + 1],
                $"Breakdown[{i}] contribution should be >= Breakdown[{i + 1}]");
        }
    }

    /// <summary>
    /// Verifies that flags are sorted alphabetically.
    /// </summary>
    [Fact]
    public void Flags_AreSortedAlphabetically()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Act
        var flags = ews.Flags.ToList();

        // Assert
        flags.Should().BeInAscendingOrder();
    }

    #endregion

    #region ScoringProof Tests

    /// <summary>
    /// Verifies that ScoringProof contains all required fields for reproducibility.
    /// </summary>
    [Fact]
    public void ScoringProof_ContainsAllRequiredFields()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Assert
        ews.Proof.Should().NotBeNull();
        ews.Proof!.Inputs.Should().NotBeNull();
        ews.Proof.Weights.Should().NotBeNull();
        ews.Proof.PolicyDigest.Should().NotBeNullOrWhiteSpace();
        ews.Proof.CalculatorVersion.Should().NotBeNullOrWhiteSpace();
    }

    /// <summary>
    /// Verifies that ScoringProof inputs contain all 6 dimensions.
    /// </summary>
    [Fact]
    public void ScoringProofInputs_ContainsAllDimensions()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Assert
        var inputs = ews.Proof!.Inputs;
        inputs.Reachability.Should().BeInRange(0.0, 1.0);
        inputs.Runtime.Should().BeInRange(0.0, 1.0);
        inputs.Backport.Should().BeInRange(0.0, 1.0);
        inputs.Exploit.Should().BeInRange(0.0, 1.0);
        inputs.SourceTrust.Should().BeInRange(0.0, 1.0);
        inputs.Mitigation.Should().BeInRange(0.0, 1.0);
    }

    /// <summary>
    /// Verifies that ScoringProof weights sum to approximately 1.0.
    /// </summary>
    [Fact]
    public void ScoringProofWeights_SumToOne()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Assert
        var weights = ews.Proof!.Weights;
        var sum = weights.Reachability + weights.Runtime + weights.Backport +
                  weights.Exploit + weights.SourceTrust + weights.Mitigation;

        sum.Should().BeApproximately(1.0, 0.01, "Weights should sum to 1.0");
    }
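    // Concrete instance (using the weights in the factory breakdowns below):
    // 0.30 (Rts) + 0.25 (Rch) + 0.15 (Xpl) + 0.15 (Src) + 0.10 (Bkp) + 0.05 (Mit) = 1.00.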

    #endregion

    #region JSON Serialization Tests

    /// <summary>
    /// Verifies that JSON uses camelCase property names.
    /// </summary>
    [Fact]
    public void JsonSerialization_UsesCamelCasePropertyNames()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Act
        var json = JsonSerializer.Serialize(ews, JsonOptions);

        // Assert
        json.Should().Contain("\"score\":");
        json.Should().Contain("\"bucket\":");
        json.Should().Contain("\"breakdown\":");
        json.Should().Contain("\"flags\":");
        json.Should().Contain("\"policyDigest\":");
        json.Should().Contain("\"calculatedAt\":");
    }

    /// <summary>
    /// Verifies that null/empty fields are omitted from JSON.
    /// </summary>
    [Fact]
    public void JsonSerialization_OmitsNullFields()
    {
        // Arrange
        var ews = CreateMinimalVerdict();

        // Act
        var json = JsonSerializer.Serialize(ews, JsonOptions);

        // Assert - These should be omitted when empty/null
        if (ews.Guardrails is null)
        {
            json.Should().NotContain("\"guardrails\":");
        }
    }

    /// <summary>
    /// Verifies that timestamps are serialized in ISO-8601 format.
    /// </summary>
    [Fact]
    public void JsonSerialization_TimestampsAreIso8601()
    {
        // Arrange
        var ews = CreateHighScoreActNow();

        // Act
        var json = JsonSerializer.Serialize(ews, JsonOptions);

        // Assert - ISO-8601 format with T separator
        json.Should().MatchRegex(@"""calculatedAt"":\s*""\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}");
    }

    /// <summary>
    /// Verifies JSON serialization produces valid, parseable JSON structure.
    /// Note: Full roundtrip deserialization is not supported due to JsonPropertyName
    /// attributes differing from constructor parameter names in nested types.
    /// Verdicts are created programmatically, not deserialized from external JSON.
    /// </summary>
    [Fact]
    public void JsonSerialization_ProducesValidJsonStructure()
    {
        // Arrange
        var original = CreateHighScoreActNow();

        // Act
        var json = JsonSerializer.Serialize(original, JsonOptions);

        // Assert - JSON should be valid and contain expected structure
        json.Should().NotBeNullOrWhiteSpace();

        // Parse as JsonDocument to verify structure
        using var doc = JsonDocument.Parse(json);
        var root = doc.RootElement;

        root.GetProperty("score").GetInt32().Should().Be(original.Score);
        root.GetProperty("bucket").GetString().Should().Be(original.Bucket);
        root.TryGetProperty("flags", out var flagsElement).Should().BeTrue();
        root.TryGetProperty("policyDigest", out _).Should().BeTrue();
        root.TryGetProperty("breakdown", out var breakdownElement).Should().BeTrue();
        breakdownElement.GetArrayLength().Should().Be(original.Breakdown.Length);
    }

    #endregion

    #region Guardrails Tests

    /// <summary>
    /// Verifies that guardrails are correctly serialized when present.
    /// </summary>
    [Fact]
    public void Guardrails_WhenPresent_AreSerializedCorrectly()
    {
        // Arrange
        var ews = CreateVerdictWithGuardrails();

        // Act
        var json = JsonSerializer.Serialize(ews, JsonOptions);

        // Assert
        ews.Guardrails.Should().NotBeNull();
        json.Should().Contain("\"guardrails\":");
    }

    #endregion

    #region Factory Methods

    private static VerdictEvidenceWeightedScore CreateHighScoreActNow()
    {
        return new VerdictEvidenceWeightedScore(
            score: 92,
            bucket: "ActNow",
            breakdown:
            [
                new VerdictDimensionContribution("RuntimeSignal", "Rts", 28.0, 0.30, 0.93, false),
                new VerdictDimensionContribution("Reachability", "Rch", 24.0, 0.25, 0.96, false),
                new VerdictDimensionContribution("ExploitMaturity", "Xpl", 15.0, 0.15, 1.00, false),
                new VerdictDimensionContribution("SourceTrust", "Src", 13.0, 0.15, 0.87, false),
                new VerdictDimensionContribution("BackportStatus", "Bkp", 10.0, 0.10, 1.00, false),
                new VerdictDimensionContribution("MitigationStatus", "Mit", 2.0, 0.05, 0.40, false)
            ],
            flags: ["live-signal", "kev", "proven-path"],
            explanations:
            [
                "KEV: Known Exploited Vulnerability (+15 floor)",
                "Runtime signal detected in production environment",
                "Call graph proves reachability to vulnerable function"
            ],
            policyDigest: "sha256:abc123def456",
            calculatedAt: FrozenTime,
            guardrails: new VerdictAppliedGuardrails(
                speculativeCap: false,
                notAffectedCap: false,
                runtimeFloor: true,
                originalScore: 88,
                adjustedScore: 92),
            proof: CreateScoringProof(0.96, 0.93, 1.0, 1.0, 0.87, 0.40));
    }
|
||||
|
||||
private static VerdictEvidenceWeightedScore CreateMediumScoreScheduleNext()
|
||||
{
|
||||
return new VerdictEvidenceWeightedScore(
|
||||
score: 68,
|
||||
bucket: "ScheduleNext",
|
||||
breakdown:
|
||||
[
|
||||
new VerdictDimensionContribution("Reachability", "Rch", 20.0, 0.25, 0.80, false),
|
||||
new VerdictDimensionContribution("RuntimeSignal", "Rts", 18.0, 0.30, 0.60, false),
|
||||
new VerdictDimensionContribution("ExploitMaturity", "Xpl", 12.0, 0.15, 0.80, false),
|
||||
new VerdictDimensionContribution("SourceTrust", "Src", 10.0, 0.15, 0.67, false),
|
||||
new VerdictDimensionContribution("BackportStatus", "Bkp", 5.0, 0.10, 0.50, false),
|
||||
new VerdictDimensionContribution("MitigationStatus", "Mit", 3.0, 0.05, 0.60, false)
|
||||
],
|
||||
flags: [],
|
||||
explanations:
|
||||
[
|
||||
"Moderate reachability evidence from static analysis",
|
||||
"No runtime signals detected"
|
||||
],
|
||||
policyDigest: "sha256:def789abc012",
|
||||
calculatedAt: FrozenTime,
|
||||
proof: CreateScoringProof(0.80, 0.60, 0.50, 0.80, 0.67, 0.60));
|
||||
}
|
||||
|
||||
private static VerdictEvidenceWeightedScore CreateLowScoreWatchlist()
|
||||
{
|
||||
return new VerdictEvidenceWeightedScore(
|
||||
score: 18,
|
||||
bucket: "Watchlist",
|
||||
breakdown:
|
||||
[
|
||||
new VerdictDimensionContribution("SourceTrust", "Src", 8.0, 0.15, 0.53, false),
|
||||
new VerdictDimensionContribution("Reachability", "Rch", 5.0, 0.25, 0.20, false),
|
||||
new VerdictDimensionContribution("ExploitMaturity", "Xpl", 3.0, 0.15, 0.20, false),
|
||||
new VerdictDimensionContribution("RuntimeSignal", "Rts", 2.0, 0.30, 0.07, false),
|
||||
new VerdictDimensionContribution("BackportStatus", "Bkp", 0.0, 0.10, 0.00, false),
|
||||
new VerdictDimensionContribution("MitigationStatus", "Mit", 0.0, 0.05, 0.00, true)
|
||||
],
|
||||
flags: ["vendor-na"],
|
||||
explanations:
|
||||
[
|
||||
"Vendor confirms not affected (VEX)",
|
||||
"Low reachability - function not in call path"
|
||||
],
|
||||
policyDigest: "sha256:ghi345jkl678",
|
||||
calculatedAt: FrozenTime,
|
||||
proof: CreateScoringProof(0.20, 0.07, 0.0, 0.20, 0.53, 0.0));
|
||||
}
|
||||
|
||||
private static VerdictEvidenceWeightedScore CreateVexMitigatedVerdict()
|
||||
{
|
||||
return new VerdictEvidenceWeightedScore(
|
||||
score: 12,
|
||||
bucket: "Watchlist",
|
||||
breakdown:
|
||||
[
|
||||
new VerdictDimensionContribution("SourceTrust", "Src", 10.0, 0.15, 0.67, false),
|
||||
new VerdictDimensionContribution("Reachability", "Rch", 2.0, 0.25, 0.08, false),
|
||||
new VerdictDimensionContribution("ExploitMaturity", "Xpl", 0.0, 0.15, 0.00, false),
|
||||
new VerdictDimensionContribution("RuntimeSignal", "Rts", 0.0, 0.30, 0.00, false),
|
||||
new VerdictDimensionContribution("BackportStatus", "Bkp", 0.0, 0.10, 0.00, false),
|
||||
new VerdictDimensionContribution("MitigationStatus", "Mit", 0.0, 0.05, 0.00, true)
|
||||
],
|
||||
flags: ["vendor-na"],
|
||||
explanations:
|
||||
[
|
||||
"VEX: Vendor confirms not_affected status",
|
||||
"Mitigation: Component not used in vulnerable context"
|
||||
],
|
||||
policyDigest: "sha256:mno901pqr234",
|
||||
calculatedAt: FrozenTime,
|
||||
guardrails: new VerdictAppliedGuardrails(
|
||||
speculativeCap: false,
|
||||
notAffectedCap: true,
|
||||
runtimeFloor: false,
|
||||
originalScore: 25,
|
||||
adjustedScore: 12),
|
||||
proof: CreateScoringProof(0.08, 0.0, 0.0, 0.0, 0.67, 0.0));
|
||||
}
|
||||
|
||||
private static VerdictEvidenceWeightedScore CreateMinimalVerdict()
|
||||
{
|
||||
return new VerdictEvidenceWeightedScore(
|
||||
score: 50,
|
||||
bucket: "Investigate",
|
||||
policyDigest: "sha256:minimal123");
|
||||
}
|
||||
|
||||
private static VerdictEvidenceWeightedScore CreateVerdictWithGuardrails()
|
||||
{
|
||||
return new VerdictEvidenceWeightedScore(
|
||||
score: 85,
|
||||
bucket: "ActNow",
|
||||
breakdown:
|
||||
[
|
||||
new VerdictDimensionContribution("RuntimeSignal", "Rts", 25.0, 0.30, 0.83, false),
|
||||
new VerdictDimensionContribution("Reachability", "Rch", 20.0, 0.25, 0.80, false),
|
||||
new VerdictDimensionContribution("ExploitMaturity", "Xpl", 15.0, 0.15, 1.00, false),
|
||||
new VerdictDimensionContribution("SourceTrust", "Src", 12.0, 0.15, 0.80, false),
|
||||
new VerdictDimensionContribution("BackportStatus", "Bkp", 8.0, 0.10, 0.80, false),
|
||||
new VerdictDimensionContribution("MitigationStatus", "Mit", 5.0, 0.05, 1.00, false)
|
||||
],
|
||||
flags: ["kev"],
|
||||
explanations: ["KEV: Known Exploited Vulnerability"],
|
||||
policyDigest: "sha256:guardrails456",
|
||||
calculatedAt: FrozenTime,
|
||||
guardrails: new VerdictAppliedGuardrails(
|
||||
speculativeCap: false,
|
||||
notAffectedCap: false,
|
||||
runtimeFloor: true,
|
||||
originalScore: 80,
|
||||
adjustedScore: 85),
|
||||
proof: CreateScoringProof(0.80, 0.83, 0.80, 1.0, 0.80, 1.0));
|
||||
}
|
||||
|
||||
private static VerdictScoringProof CreateScoringProof(
|
||||
double rch, double rts, double bkp, double xpl, double src, double mit)
|
||||
{
|
||||
return new VerdictScoringProof(
|
||||
inputs: new VerdictEvidenceInputs(
|
||||
reachability: rch,
|
||||
runtime: rts,
|
||||
backport: bkp,
|
||||
exploit: xpl,
|
||||
sourceTrust: src,
|
||||
mitigation: mit),
|
||||
weights: new VerdictEvidenceWeights(
|
||||
reachability: 0.25,
|
||||
runtime: 0.30,
|
||||
backport: 0.10,
|
||||
exploit: 0.15,
|
||||
sourceTrust: 0.15,
|
||||
mitigation: 0.05),
|
||||
policyDigest: "sha256:policy-v1",
|
||||
calculatorVersion: "ews.v1.0.0",
|
||||
calculatedAt: FrozenTime);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -2,6 +2,7 @@
// SPDX-FileCopyrightText: 2025 StellaOps Contributors

using FluentAssertions;
using StellaOps.Policy;
using StellaOps.PolicyDsl;
using Xunit;

@@ -488,14 +489,14 @@ public sealed class PolicyDslValidationGoldenTests
    public void VeryLongPolicyName_ShouldSucceed()
    {
        var longName = new string('a', 1000);
-       var source = $"""
-           policy "{longName}" syntax "stella-dsl@1" {{
-               rule r1 priority 1 {{
+       var source = $$"""
+           policy "{{longName}}" syntax "stella-dsl@1" {
+               rule r1 priority 1 {
                when true
                then severity := "low"
                because "test"
-               }}
-           }}
+               }
+           }
            """;

        var result = _compiler.Compile(source);
@@ -544,4 +545,295 @@ public sealed class PolicyDslValidationGoldenTests
    }

    #endregion

    #region Invalid Score DSL Patterns (Sprint 8200.0012.0003)

    /// <summary>
    /// Sprint 8200.0012.0003: Invalid score member access parses successfully.
    /// Semantic validation of member names happens at evaluation time.
    /// </summary>
    [Fact]
    public void InvalidScoreMember_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.invalid_member > 0
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - member validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Score comparison with a string parses successfully.
    /// Type checking happens at evaluation time.
    /// </summary>
    [Fact]
    public void ScoreComparisonWithString_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score >= "high"
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - type checking happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Invalid bucket name parses successfully.
    /// Bucket name validation happens at evaluation time.
    /// </summary>
    [Fact]
    public void InvalidBucketName_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.bucket == "InvalidBucket"
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - bucket name validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: has_flag without an argument parses successfully.
    /// Argument count validation happens at evaluation time.
    /// </summary>
    [Fact]
    public void HasFlagWithoutArgument_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.has_flag()
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - argument validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: between() with a single argument parses successfully.
    /// Argument count validation happens at evaluation time.
    /// </summary>
    [Fact]
    public void ScoreBetweenWithSingleArgument_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.between(50)
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - argument count validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: between() with extra arguments parses successfully.
    /// Semantic validation of argument count happens at evaluation time.
    /// </summary>
    [Fact]
    public void ScoreBetweenWithExtraArguments_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.between(30, 60, 90)
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - semantic validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: between() with string arguments parses successfully.
    /// Type validation happens at evaluation time.
    /// </summary>
    [Fact]
    public void ScoreBetweenWithStringArguments_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.between("low", "high")
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - type validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Score dimension access with an out-of-range comparison should parse but may fail at runtime.
    /// </summary>
    [Fact]
    public void ScoreDimensionOutOfRange_ShouldParse()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.rch > 1.5
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Out-of-range values are syntactically valid (caught at evaluation time)
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Chained score method calls parse successfully.
    /// Semantic validation that dimension values do not support between() happens at evaluation time.
    /// </summary>
    [Fact]
    public void ChainedScoreMethods_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.rch.between(0.5, 1.0)
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - method availability validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: An is_* predicate with an argument parses successfully.
    /// Semantic validation that it is a property, not a method, happens at evaluation time.
    /// </summary>
    [Fact]
    public void BucketPredicateWithArgument_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when score.is_act_now(true)
                then severity := "critical"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - semantic validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Score as an assignment target parses successfully.
    /// Read-only validation happens at evaluation time.
    /// </summary>
    [Fact]
    public void ScoreAsAssignmentTarget_ParsesSuccessfully()
    {
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then score := 100
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        // Parser is lenient - read-only validation happens at evaluation time
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Valid score syntax patterns should succeed.
    /// </summary>
    [Theory]
    [InlineData("score >= 70")]
    [InlineData("score > 80")]
    [InlineData("score <= 50")]
    [InlineData("score < 30")]
    [InlineData("score == 75")]
    [InlineData("score.is_act_now")]
    [InlineData("score.is_schedule_next")]
    [InlineData("score.is_investigate")]
    [InlineData("score.is_watchlist")]
    [InlineData("score.bucket == \"ActNow\"")]
    [InlineData("score.rch > 0.8")]
    [InlineData("score.xpl > 0.7")]
    [InlineData("score.has_flag(\"kev\")")]
    [InlineData("score.between(60, 80)")]
    public void ValidScoreSyntax_ShouldSucceed(string condition)
    {
        var source = $$"""
            policy "test" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when {{condition}}
                then severity := "high"
                because "test"
              }
            }
            """;

        var result = _compiler.Compile(source);

        result.Success.Should().BeTrue($"Condition '{condition}' should be valid. Errors: {string.Join("; ", result.Diagnostics.Select(d => d.Message))}");
    }

    #endregion
}

@@ -65,14 +65,14 @@ public sealed class PolicyDslRoundtripPropertyTests
            PolicyDslArbs.ValidPolicyName(),
            name =>
            {
-               var source = $"""
-                   policy "{name}" syntax "stella-dsl@1" {{
-                       rule test priority 1 {{
+               var source = $$"""
+                   policy "{{name}}" syntax "stella-dsl@1" {
+                       rule test priority 1 {
                        when true
                        then severity := "low"
                        because "test"
-                       }}
-                   }}
+                       }
+                   }
                    """;

                var result1 = _compiler.Compile(source);
@@ -179,6 +179,102 @@ public sealed class PolicyDslRoundtripPropertyTests
            });
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Score-based conditions roundtrip correctly.
    /// </summary>
    [Property(MaxTest = 50)]
    public Property ScoreConditions_RoundtripCorrectly()
    {
        return Prop.ForAll(
            PolicyDslArbs.ValidPolicyWithScoreConditions(),
            source =>
            {
                var result1 = _compiler.Compile(source);
                if (!result1.Success || result1.Document is null)
                {
                    return true.Label("Skip: Score policy doesn't parse");
                }

                var printed = PolicyIrPrinter.Print(result1.Document);
                var result2 = _compiler.Compile(printed);

                if (!result2.Success || result2.Document is null)
                {
                    return false.Label($"Score policy roundtrip failed: {string.Join("; ", result2.Diagnostics.Select(d => d.Message))}");
                }

                return AreDocumentsEquivalent(result1.Document, result2.Document)
                    .Label("Score policy documents should be equivalent after roundtrip");
            });
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Each score condition type parses successfully.
    /// </summary>
    [Property(MaxTest = 50)]
    public Property IndividualScoreConditions_ParseSuccessfully()
    {
        return Prop.ForAll(
            PolicyDslArbs.ScoreCondition(),
            condition =>
            {
                var source = $$"""
                    policy "ScoreTest" syntax "stella-dsl@1" {
                      rule test priority 1 {
                        when {{condition}}
                        then severity := "high"
                        because "Score condition test"
                      }
                    }
                    """;

                var result = _compiler.Compile(source);

                return (result.Success && result.Document is not null)
                    .Label($"Score condition '{condition}' should parse successfully");
            });
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Score expression structure preserved through roundtrip.
    /// </summary>
    [Property(MaxTest = 50)]
    public Property ScoreExpressionStructure_PreservedThroughRoundtrip()
    {
        return Prop.ForAll(
            PolicyDslArbs.ScoreCondition(),
            condition =>
            {
                var source = $$"""
                    policy "ScoreTest" syntax "stella-dsl@1" {
                      rule test priority 1 {
                        when {{condition}}
                        then severity := "high"
                        because "Score test"
                      }
                    }
                    """;

                var result1 = _compiler.Compile(source);
                if (!result1.Success || result1.Document is null)
                {
                    return true.Label($"Skip: Condition '{condition}' doesn't parse");
                }

                var printed = PolicyIrPrinter.Print(result1.Document);
                var result2 = _compiler.Compile(printed);

                if (!result2.Success || result2.Document is null)
                {
                    return false.Label($"Roundtrip failed for '{condition}'");
                }

                // Verify rule count matches
                return (result1.Document.Rules.Length == result2.Document.Rules.Length)
                    .Label($"Rule count preserved for condition '{condition}'");
            });
    }

    /// <summary>
    /// Property: Different policies produce different checksums.
    /// </summary>
@@ -256,6 +352,29 @@ internal static class PolicyDslArbs
        "status == \"blocked\""
    ];

    // Sprint 8200.0012.0003: Score-based conditions for EWS integration
    private static readonly string[] ScoreConditions =
    [
        "score >= 70",
        "score > 80",
        "score <= 50",
        "score < 40",
        "score == 75",
        "score.is_act_now",
        "score.is_schedule_next",
        "score.is_investigate",
        "score.is_watchlist",
        "score.bucket == \"ActNow\"",
        "score.rch > 0.8",
        "score.xpl > 0.7",
        "score.has_flag(\"kev\")",
        "score.has_flag(\"live-signal\")",
        "score.between(60, 80)",
        "score >= 70 and score.is_schedule_next",
        "score > 80 or score.has_flag(\"kev\")",
        "score.rch > 0.8 and score.xpl > 0.7"
    ];

    private static readonly string[] ValidActions =
    [
        "severity := \"info\"",
@@ -296,6 +415,22 @@ internal static class PolicyDslArbs
        from rules in Gen.ArrayOf(1, GenRule())
        select BuildPolicyWithMetadata(name, hasVersion, hasAuthor, rules));

    /// <summary>
    /// Sprint 8200.0012.0003: Generates policies with score-based conditions.
    /// </summary>
    public static Arbitrary<string> ValidPolicyWithScoreConditions() =>
        Arb.From(
            from name in Gen.Elements(ValidIdentifiers)
            from ruleCount in Gen.Choose(1, 3)
            from rules in Gen.ArrayOf(ruleCount, GenScoreRule())
            select BuildPolicy(name, rules));

    /// <summary>
    /// Sprint 8200.0012.0003: Generates a specific score condition for targeted testing.
    /// </summary>
    public static Arbitrary<string> ScoreCondition() =>
        Arb.From(Gen.Elements(ScoreConditions));

    private static Gen<string> GenRule()
    {
        return from nameIndex in Gen.Choose(0, ValidIdentifiers.Length - 1)
@@ -306,22 +441,44 @@ internal static class PolicyDslArbs
               let priority = ValidPriorities[priorityIndex]
               let condition = ValidConditions[conditionIndex]
               let action = ValidActions[actionIndex]
-              select $"""
-                  rule {name} priority {priority} {{
-                  when {condition}
-                  then {action}
+              select $$"""
+                  rule {{name}} priority {{priority}} {
+                  when {{condition}}
+                  then {{action}}
                   because "Generated test rule"
-                  }}
+                  }
                   """;
    }

    /// <summary>
    /// Sprint 8200.0012.0003: Generates rules with score-based conditions.
    /// </summary>
    private static Gen<string> GenScoreRule()
    {
        return from nameIndex in Gen.Choose(0, ValidIdentifiers.Length - 1)
               from priorityIndex in Gen.Choose(0, ValidPriorities.Length - 1)
               from conditionIndex in Gen.Choose(0, ScoreConditions.Length - 1)
               from actionIndex in Gen.Choose(0, ValidActions.Length - 1)
               let name = ValidIdentifiers[nameIndex]
               let priority = ValidPriorities[priorityIndex]
               let condition = ScoreConditions[conditionIndex]
               let action = ValidActions[actionIndex]
               select $$"""
                   rule {{name}} priority {{priority}} {
                   when {{condition}}
                   then {{action}}
                   because "Score-based rule"
                   }
                   """;
    }

    private static string BuildPolicy(string name, string[] rules)
    {
        var rulesText = string.Join("\n", rules);
-       return $"""
-           policy "{name}" syntax "stella-dsl@1" {{
-           {rulesText}
-           }}
+       return $$"""
+           policy "{{name}}" syntax "stella-dsl@1" {
+           {{rulesText}}
+           }
            """;
    }

@@ -332,20 +489,20 @@ internal static class PolicyDslArbs
        if (hasAuthor) metadataLines.Add(" author = \"test\"");

        var metadata = metadataLines.Count > 0
-           ? $"""
-             metadata {{
-             {string.Join("\n", metadataLines)}
-             }}
+           ? $$"""
+             metadata {
+             {{string.Join("\n", metadataLines)}}
+             }
              """
            : "";

        var rulesText = string.Join("\n", rules);

-       return $"""
-           policy "{name}" syntax "stella-dsl@1" {{
-           {metadata}
-           {rulesText}
-           }}
+       return $$"""
+           policy "{{name}}" syntax "stella-dsl@1" {
+           {{metadata}}
+           {{rulesText}}
+           }
            """;
    }
}

@@ -189,3 +189,189 @@ internal static class InvalidationTypeExtensions
    /// </summary>
    public const string VeriKey = "VeriKey";
}

/// <summary>
/// Response model for GET /v1/proofs/{proofRoot}.
/// </summary>
public sealed class ProofEvidenceResponse
{
    /// <summary>
    /// The proof root (Merkle root).
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Total number of chunks available.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Total size of all evidence in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// The chunks in this page.
    /// </summary>
    public required IReadOnlyList<ProofChunkResponse> Chunks { get; init; }

    /// <summary>
    /// Pagination cursor for next page (null if last page).
    /// </summary>
    public string? NextCursor { get; init; }

    /// <summary>
    /// Whether there are more chunks available.
    /// </summary>
    public bool HasMore { get; init; }
}

/// <summary>
/// Response model for a single proof chunk.
/// </summary>
public sealed class ProofChunkResponse
{
    /// <summary>
    /// Unique chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Zero-based chunk index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash for verification.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// Content type.
    /// </summary>
    public required string ContentType { get; init; }

    /// <summary>
    /// Base64-encoded chunk data (included only when includeData=true).
    /// </summary>
    public string? Data { get; init; }
}

/// <summary>
/// Response model for GET /v1/proofs/{proofRoot}/manifest.
/// </summary>
public sealed class ProofManifestResponse
{
    /// <summary>
    /// The proof root (Merkle root).
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Total number of chunks.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Total size of all evidence in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// Ordered list of chunk metadata (without data).
    /// </summary>
    public required IReadOnlyList<ChunkMetadataResponse> Chunks { get; init; }

    /// <summary>
    /// When the manifest was generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}

/// <summary>
/// Response model for chunk metadata (without data).
/// </summary>
public sealed class ChunkMetadataResponse
{
    /// <summary>
    /// Chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Zero-based index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash for verification.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// Content type.
    /// </summary>
    public required string ContentType { get; init; }
}

/// <summary>
/// Response model for POST /v1/proofs/{proofRoot}/verify.
/// </summary>
public sealed class ProofVerificationResponse
{
    /// <summary>
    /// The proof root that was verified.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Whether the Merkle tree is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Details about each chunk's verification.
    /// </summary>
    public IReadOnlyList<ChunkVerificationResult>? ChunkResults { get; init; }

    /// <summary>
    /// Error message if verification failed.
    /// </summary>
    public string? Error { get; init; }
}

/// <summary>
/// Result of verifying a single chunk.
/// </summary>
public sealed class ChunkVerificationResult
{
    /// <summary>
    /// Chunk index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// Whether the chunk hash is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Expected hash from manifest.
    /// </summary>
    public required string ExpectedHash { get; init; }

    /// <summary>
    /// Computed hash from chunk data.
    /// </summary>
    public string? ComputedHash { get; init; }
}
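
A small consumer sketch for the verification models above (hypothetical helper, not part of this commit): ComputedHash is populated only for failing chunks, so a report can key off IsValid per entry.

    // Hypothetical helper illustrating intended use of ProofVerificationResponse.
    static void ReportFailures(ProofVerificationResponse response)
    {
        if (response.IsValid)
        {
            return;
        }

        Console.WriteLine(response.Error ?? "One or more chunks failed verification.");
        foreach (var chunk in response.ChunkResults ?? [])
        {
            if (!chunk.IsValid)
            {
                Console.WriteLine($"  chunk {chunk.Index}: expected {chunk.ExpectedHash}, computed {chunk.ComputedHash}");
            }
        }
    }
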
@@ -1,5 +1,6 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
@@ -14,7 +15,7 @@ public sealed class ProvcacheApiEndpoints;
/// <summary>
/// Extension methods for mapping Provcache API endpoints.
/// </summary>
-public static class ProvcacheEndpointExtensions
+public static partial class ProvcacheEndpointExtensions
{
    /// <summary>
    /// Maps Provcache API endpoints to the specified route builder.
@@ -69,6 +70,47 @@ public static class ProvcacheEndpointExtensions
            .Produces<ProvcacheMetricsResponse>(StatusCodes.Status200OK)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // Map evidence paging endpoints under /proofs
        var proofsGroup = endpoints.MapGroup($"{prefix}/proofs")
            .WithTags("Provcache Evidence")
            .WithOpenApi();

        // GET /v1/provcache/proofs/{proofRoot}
        proofsGroup.MapGet("/{proofRoot}", GetEvidenceChunks)
            .WithName("GetProofEvidence")
            .WithSummary("Get evidence chunks by proof root")
            .WithDescription("Retrieves evidence chunks for a proof root with pagination support. Use cursor parameter for subsequent pages.")
            .Produces<ProofEvidenceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // GET /v1/provcache/proofs/{proofRoot}/manifest
        proofsGroup.MapGet("/{proofRoot}/manifest", GetProofManifest)
            .WithName("GetProofManifest")
            .WithSummary("Get chunk manifest (metadata without data)")
            .WithDescription("Retrieves the chunk manifest for lazy evidence fetching. Contains hashes and sizes but no blob data.")
            .Produces<ProofManifestResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // GET /v1/provcache/proofs/{proofRoot}/chunks/{chunkIndex}
        proofsGroup.MapGet("/{proofRoot}/chunks/{chunkIndex:int}", GetSingleChunk)
            .WithName("GetProofChunk")
            .WithSummary("Get a single chunk by index")
            .WithDescription("Retrieves a specific chunk by its index within the proof.")
            .Produces<ProofChunkResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // POST /v1/provcache/proofs/{proofRoot}/verify
        proofsGroup.MapPost("/{proofRoot}/verify", VerifyProof)
            .WithName("VerifyProof")
            .WithSummary("Verify Merkle tree integrity")
            .WithDescription("Verifies all chunk hashes and the Merkle tree for the proof root.")
            .Produces<ProofVerificationResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        return group;
    }

@@ -278,3 +320,234 @@ internal sealed class ProblemDetails
    public string? Detail { get; set; }
    public string? Instance { get; set; }
}

/// <summary>
/// Marker class for logging in Proofs API endpoints.
/// </summary>
public sealed class ProofsApiEndpoints;

partial class ProvcacheEndpointExtensions
{
    private const int DefaultPageSize = 10;
    private const int MaxPageSize = 100;

    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}
    /// </summary>
    private static async Task<IResult> GetEvidenceChunks(
        string proofRoot,
        int? offset,
        int? limit,
        bool? includeData,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot} offset={Offset} limit={Limit}", proofRoot, offset, limit);

        try
        {
            var startIndex = offset ?? 0;
            var pageSize = Math.Min(limit ?? DefaultPageSize, MaxPageSize);

            // Get manifest for total count
            var manifest = await chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
            if (manifest is null)
            {
                return Results.NotFound();
            }

            // Get chunk range
            var chunks = await chunkRepository.GetChunkRangeAsync(proofRoot, startIndex, pageSize, cancellationToken);

            var chunkResponses = chunks.Select(c => new ProofChunkResponse
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType,
                Data = includeData == true ? Convert.ToBase64String(c.Blob) : null
            }).ToList();

            var hasMore = startIndex + chunks.Count < manifest.TotalChunks;
            var nextCursor = hasMore ? (startIndex + pageSize).ToString() : null;

            return Results.Ok(new ProofEvidenceResponse
            {
                ProofRoot = proofRoot,
                TotalChunks = manifest.TotalChunks,
                TotalSize = manifest.TotalSize,
                Chunks = chunkResponses,
                NextCursor = nextCursor,
                HasMore = hasMore
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting evidence chunks for proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Evidence retrieval failed");
        }
    }

    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}/manifest
    /// </summary>
    private static async Task<IResult> GetProofManifest(
        string proofRoot,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot}/manifest", proofRoot);

        try
        {
            var manifest = await chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
            if (manifest is null)
            {
                return Results.NotFound();
            }

            var chunkMetadata = manifest.Chunks.Select(c => new ChunkMetadataResponse
            {
                ChunkId = c.ChunkId,
                Index = c.Index,
                Hash = c.Hash,
                Size = c.Size,
                ContentType = c.ContentType
            }).ToList();

            return Results.Ok(new ProofManifestResponse
            {
                ProofRoot = proofRoot,
                TotalChunks = manifest.TotalChunks,
                TotalSize = manifest.TotalSize,
                Chunks = chunkMetadata,
                GeneratedAt = manifest.GeneratedAt
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting manifest for proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Manifest retrieval failed");
        }
    }

    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}/chunks/{chunkIndex}
    /// </summary>
    private static async Task<IResult> GetSingleChunk(
        string proofRoot,
        int chunkIndex,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot}/chunks/{ChunkIndex}", proofRoot, chunkIndex);

        try
        {
            var chunk = await chunkRepository.GetChunkAsync(proofRoot, chunkIndex, cancellationToken);
            if (chunk is null)
            {
                return Results.NotFound();
            }

            return Results.Ok(new ProofChunkResponse
            {
                ChunkId = chunk.ChunkId,
                Index = chunk.ChunkIndex,
                Hash = chunk.ChunkHash,
                Size = chunk.BlobSize,
                ContentType = chunk.ContentType,
                Data = Convert.ToBase64String(chunk.Blob)
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting chunk {ChunkIndex} for proof root {ProofRoot}", chunkIndex, proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Chunk retrieval failed");
        }
    }

    /// <summary>
    /// POST /v1/provcache/proofs/{proofRoot}/verify
    /// </summary>
    private static async Task<IResult> VerifyProof(
        string proofRoot,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        [FromServices] IEvidenceChunker chunker,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("POST /v1/provcache/proofs/{ProofRoot}/verify", proofRoot);

        try
        {
            var chunks = await chunkRepository.GetChunksAsync(proofRoot, cancellationToken);
            if (chunks.Count == 0)
            {
                return Results.NotFound();
            }

            var chunkResults = new List<ChunkVerificationResult>();
            var allValid = true;

            foreach (var chunk in chunks)
            {
                var isValid = chunker.VerifyChunk(chunk);
                var computedHash = isValid ? chunk.ChunkHash : ComputeChunkHash(chunk.Blob);

                chunkResults.Add(new ChunkVerificationResult
                {
                    Index = chunk.ChunkIndex,
                    IsValid = isValid,
                    ExpectedHash = chunk.ChunkHash,
                    ComputedHash = isValid ? null : computedHash
                });

                if (!isValid)
                {
                    allValid = false;
                }
            }

            // Verify Merkle root
            var chunkHashes = chunks.Select(c => c.ChunkHash).ToList();
            var computedRoot = chunker.ComputeMerkleRoot(chunkHashes);
            var rootMatches = string.Equals(computedRoot, proofRoot, StringComparison.OrdinalIgnoreCase);

            return Results.Ok(new ProofVerificationResponse
            {
                ProofRoot = proofRoot,
                IsValid = allValid && rootMatches,
                ChunkResults = chunkResults,
                Error = !rootMatches ? $"Merkle root mismatch. Expected: {proofRoot}, Computed: {computedRoot}" : null
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error verifying proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Proof verification failed");
        }
    }

    private static string ComputeChunkHash(byte[] data)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
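
Usage sketch for the paging endpoint above (assumes System.Net.Http.Json and an HttpClient bound to the service base address; illustrative, not part of this commit). The handler treats offset as the cursor, so a client pages by feeding NextCursor back into offset until HasMore is false:

    static async Task<List<ProofChunkResponse>> FetchAllChunksAsync(HttpClient http, string proofRoot)
    {
        var all = new List<ProofChunkResponse>();
        string? cursor = "0";

        while (cursor is not null)
        {
            // GetFromJsonAsync deserializes the ProofEvidenceResponse shape defined above.
            var page = await http.GetFromJsonAsync<ProofEvidenceResponse>(
                $"/v1/provcache/proofs/{proofRoot}?offset={cursor}&limit=100&includeData=true");

            if (page is null)
            {
                break;
            }

            all.AddRange(page.Chunks);
            cursor = page.HasMore ? page.NextCursor : null;
        }

        return all;
    }
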
@@ -0,0 +1,257 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using StellaOps.Provcache.Entities;

namespace StellaOps.Provcache.Postgres;

/// <summary>
/// PostgreSQL implementation of <see cref="IEvidenceChunkRepository"/>.
/// </summary>
public sealed class PostgresEvidenceChunkRepository : IEvidenceChunkRepository
{
    private readonly ProvcacheDbContext _context;
    private readonly ILogger<PostgresEvidenceChunkRepository> _logger;

    public PostgresEvidenceChunkRepository(
        ProvcacheDbContext context,
        ILogger<PostgresEvidenceChunkRepository> logger)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<EvidenceChunk>> GetChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var entities = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .OrderBy(e => e.ChunkIndex)
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Retrieved {Count} chunks for proof root {ProofRoot}", entities.Count, proofRoot);
        return entities.Select(MapToModel).ToList();
    }

    /// <inheritdoc />
    public async Task<EvidenceChunk?> GetChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var entity = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot && e.ChunkIndex == chunkIndex)
            .AsNoTracking()
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return entity is null ? null : MapToModel(entity);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<EvidenceChunk>> GetChunkRangeAsync(
        string proofRoot,
        int startIndex,
        int count,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        if (startIndex < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(startIndex), "Start index must be non-negative.");
        }

        if (count <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(count), "Count must be positive.");
        }

        var entities = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot && e.ChunkIndex >= startIndex)
            .OrderBy(e => e.ChunkIndex)
            .Take(count)
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return entities.Select(MapToModel).ToList();
    }

    /// <inheritdoc />
    public async Task<ChunkManifest?> GetManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        // Get metadata without loading blobs
        var chunks = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .OrderBy(e => e.ChunkIndex)
            .Select(e => new
            {
                e.ChunkId,
                e.ChunkIndex,
                e.ChunkHash,
                e.BlobSize,
                e.ContentType,
                e.CreatedAt
            })
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        if (chunks.Count == 0)
        {
            return null;
        }

        var metadata = chunks
            .Select(c => new ChunkMetadata
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType
            })
            .ToList();

        return new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = chunks.Count,
            TotalSize = chunks.Sum(c => (long)c.BlobSize),
            Chunks = metadata,
            GeneratedAt = DateTimeOffset.UtcNow
        };
    }

    /// <inheritdoc />
    public async Task StoreChunksAsync(
        string proofRoot,
        IEnumerable<EvidenceChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunks);

        var chunkList = chunks.ToList();

        if (chunkList.Count == 0)
        {
            _logger.LogDebug("No chunks to store for proof root {ProofRoot}", proofRoot);
            return;
        }

        // Update proof root in chunks if not set
        var entities = chunkList.Select(c => MapToEntity(c, proofRoot)).ToList();

        _context.EvidenceChunks.AddRange(entities);
        await _context.SaveChangesAsync(cancellationToken).ConfigureAwait(false);

        _logger.LogDebug("Stored {Count} chunks for proof root {ProofRoot}", chunkList.Count, proofRoot);
    }

    /// <inheritdoc />
    public async Task<int> DeleteChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var deleted = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .ExecuteDeleteAsync(cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Deleted {Count} chunks for proof root {ProofRoot}", deleted, proofRoot);
        return deleted;
    }

    /// <inheritdoc />
    public async Task<int> GetChunkCountAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        return await _context.EvidenceChunks
            .CountAsync(e => e.ProofRoot == proofRoot, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async Task<long> GetTotalSizeAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        return await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .SumAsync(e => (long)e.BlobSize, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Gets total storage across all proof roots.
    /// </summary>
    public async Task<long> GetTotalStorageAsync(CancellationToken cancellationToken = default)
    {
        return await _context.EvidenceChunks
            .SumAsync(e => (long)e.BlobSize, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Prunes chunks older than the specified date.
    /// </summary>
    public async Task<int> PruneOldChunksAsync(
        DateTimeOffset olderThan,
        CancellationToken cancellationToken = default)
    {
        return await _context.EvidenceChunks
            .Where(e => e.CreatedAt < olderThan)
            .ExecuteDeleteAsync(cancellationToken)
            .ConfigureAwait(false);
    }

    private static EvidenceChunk MapToModel(ProvcacheEvidenceChunkEntity entity)
    {
        return new EvidenceChunk
        {
            ChunkId = entity.ChunkId,
            ProofRoot = entity.ProofRoot,
            ChunkIndex = entity.ChunkIndex,
            ChunkHash = entity.ChunkHash,
            Blob = entity.Blob,
            BlobSize = entity.BlobSize,
            ContentType = entity.ContentType,
            CreatedAt = entity.CreatedAt
        };
    }

    private static ProvcacheEvidenceChunkEntity MapToEntity(EvidenceChunk chunk, string proofRoot)
    {
        return new ProvcacheEvidenceChunkEntity
        {
            ChunkId = chunk.ChunkId == Guid.Empty ? Guid.NewGuid() : chunk.ChunkId,
            ProofRoot = proofRoot,
            ChunkIndex = chunk.ChunkIndex,
            ChunkHash = chunk.ChunkHash,
            Blob = chunk.Blob,
            BlobSize = chunk.BlobSize,
            ContentType = chunk.ContentType,
            CreatedAt = chunk.CreatedAt
        };
    }
}
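
Wiring sketch (assumed registration; the composition root is not shown in this diff, so the names below are illustrative):

    // Hypothetical DI setup for the Postgres-backed chunk store.
    services.AddDbContext<ProvcacheDbContext>(o => o.UseNpgsql(connectionString));
    services.AddScoped<IEvidenceChunkRepository, PostgresEvidenceChunkRepository>();
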
318
src/__Libraries/StellaOps.Provcache/Chunking/EvidenceChunker.cs
Normal file
@@ -0,0 +1,318 @@
using System.Security.Cryptography;

namespace StellaOps.Provcache;

/// <summary>
/// Interface for splitting large evidence into fixed-size chunks
/// and reassembling them with Merkle verification.
/// </summary>
public interface IEvidenceChunker
{
    /// <summary>
    /// Splits evidence into chunks.
    /// </summary>
    /// <param name="evidence">The evidence bytes to split.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The chunking result with chunks and proof root.</returns>
    Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Splits evidence from a stream.
    /// </summary>
    /// <param name="evidenceStream">Stream containing evidence.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Async enumerable of chunks as they are created.</returns>
    IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Reassembles chunks into the original evidence.
    /// </summary>
    /// <param name="chunks">The chunks to reassemble (must be in order).</param>
    /// <param name="expectedProofRoot">Expected Merkle root for verification.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The reassembled evidence bytes.</returns>
    Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a single chunk against its hash.
    /// </summary>
    /// <param name="chunk">The chunk to verify.</param>
    /// <returns>True if the chunk is valid.</returns>
    bool VerifyChunk(EvidenceChunk chunk);

    /// <summary>
    /// Computes the Merkle root from chunk hashes.
    /// </summary>
    /// <param name="chunkHashes">Ordered list of chunk hashes.</param>
    /// <returns>The Merkle root.</returns>
    string ComputeMerkleRoot(IEnumerable<string> chunkHashes);
}

/// <summary>
/// Result of chunking evidence.
/// </summary>
public sealed record ChunkingResult
{
    /// <summary>
    /// The computed Merkle root of all chunks.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// The generated chunks.
    /// </summary>
    public required IReadOnlyList<EvidenceChunk> Chunks { get; init; }

    /// <summary>
    /// Total size of the original evidence.
    /// </summary>
    public required long TotalSize { get; init; }
}

/// <summary>
/// Default implementation of <see cref="IEvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunker : IEvidenceChunker
{
    private readonly ProvcacheOptions _options;
    private readonly TimeProvider _timeProvider;

    public EvidenceChunker(ProvcacheOptions options, TimeProvider? timeProvider = null)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(contentType);

        var chunks = new List<EvidenceChunk>();
        var chunkHashes = new List<string>();
        var chunkSize = _options.ChunkSize;
        var now = _timeProvider.GetUtcNow();

        var span = evidence.Span;
        var totalSize = span.Length;
        var chunkIndex = 0;

        for (var offset = 0; offset < totalSize; offset += chunkSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var remainingBytes = totalSize - offset;
            var currentChunkSize = Math.Min(chunkSize, remainingBytes);
            var chunkData = span.Slice(offset, currentChunkSize).ToArray();
            var chunkHash = ComputeHash(chunkData);

            chunks.Add(new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = string.Empty, // Will be set after computing Merkle root
                ChunkIndex = chunkIndex,
                ChunkHash = chunkHash,
                Blob = chunkData,
                BlobSize = currentChunkSize,
                ContentType = contentType,
                CreatedAt = now
            });

            chunkHashes.Add(chunkHash);
            chunkIndex++;
        }

        var proofRoot = ComputeMerkleRoot(chunkHashes);

        // Update proof root in all chunks
        var finalChunks = chunks.Select(c => c with { ProofRoot = proofRoot }).ToList();

        return Task.FromResult(new ChunkingResult
        {
            ProofRoot = proofRoot,
            Chunks = finalChunks,
            TotalSize = totalSize
        });
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evidenceStream);
        ArgumentNullException.ThrowIfNull(contentType);

        var chunkSize = _options.ChunkSize;
        var buffer = new byte[chunkSize];
        var chunkIndex = 0;
        var now = _timeProvider.GetUtcNow();

        int bytesRead;
        while ((bytesRead = await evidenceStream.ReadAsync(buffer, cancellationToken)) > 0)
        {
            var chunkData = bytesRead == chunkSize ? buffer : buffer[..bytesRead];
            var chunkHash = ComputeHash(chunkData);

            yield return new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = string.Empty, // Caller must compute after all chunks
                ChunkIndex = chunkIndex,
                ChunkHash = chunkHash,
                Blob = chunkData.ToArray(),
                BlobSize = bytesRead,
                ContentType = contentType,
                CreatedAt = now
            };

            chunkIndex++;
            buffer = new byte[chunkSize]; // New buffer for next chunk
        }
    }

    /// <inheritdoc />
    public Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(chunks);
        ArgumentException.ThrowIfNullOrWhiteSpace(expectedProofRoot);

        var orderedChunks = chunks.OrderBy(c => c.ChunkIndex).ToList();

        if (orderedChunks.Count == 0)
        {
            throw new ArgumentException("No chunks provided.", nameof(chunks));
        }

        // Verify Merkle root
        var chunkHashes = orderedChunks.Select(c => c.ChunkHash).ToList();
        var computedRoot = ComputeMerkleRoot(chunkHashes);

        if (!string.Equals(computedRoot, expectedProofRoot, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException(
                $"Merkle root mismatch. Expected: {expectedProofRoot}, Computed: {computedRoot}");
        }

        // Verify each chunk
        foreach (var chunk in orderedChunks)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (!VerifyChunk(chunk))
            {
                throw new InvalidOperationException(
                    $"Chunk {chunk.ChunkIndex} verification failed. Expected hash: {chunk.ChunkHash}");
            }
        }

        // Reassemble
        var totalSize = orderedChunks.Sum(c => c.BlobSize);
        var result = new byte[totalSize];
        var offset = 0;

        foreach (var chunk in orderedChunks)
        {
            chunk.Blob.CopyTo(result, offset);
            offset += chunk.BlobSize;
        }

        return Task.FromResult(result);
    }

    /// <inheritdoc />
    public bool VerifyChunk(EvidenceChunk chunk)
    {
        ArgumentNullException.ThrowIfNull(chunk);

        var computedHash = ComputeHash(chunk.Blob);
        return string.Equals(computedHash, chunk.ChunkHash, StringComparison.OrdinalIgnoreCase);
    }

    /// <inheritdoc />
    public string ComputeMerkleRoot(IEnumerable<string> chunkHashes)
    {
        ArgumentNullException.ThrowIfNull(chunkHashes);

        var hashes = chunkHashes.ToList();

        if (hashes.Count == 0)
        {
            // Empty Merkle tree
            return ComputeHash([]);
        }

        if (hashes.Count == 1)
        {
            return hashes[0];
        }

        // Build Merkle tree bottom-up
        var currentLevel = hashes.Select(h => HexToBytes(h)).ToList();

        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<byte[]>();

            for (var i = 0; i < currentLevel.Count; i += 2)
            {
                byte[] combined;

                if (i + 1 < currentLevel.Count)
                {
                    // Pair exists - concatenate and hash
                    combined = new byte[currentLevel[i].Length + currentLevel[i + 1].Length];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i + 1].CopyTo(combined, currentLevel[i].Length);
                }
                else
                {
                    // Odd node - duplicate itself
                    combined = new byte[currentLevel[i].Length * 2];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i].CopyTo(combined, currentLevel[i].Length);
                }

                nextLevel.Add(SHA256.HashData(combined));
            }

            currentLevel = nextLevel;
        }

        return $"sha256:{Convert.ToHexStringLower(currentLevel[0])}";
    }
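
    // Worked example (illustrative note, not from the original source): with three
    // chunk hashes h0, h1, h2 this routine computes p01 = SHA256(h0 || h1) and
    // p22 = SHA256(h2 || h2) - the odd leaf is paired with a copy of itself -
    // then root = SHA256(p01 || p22). The same duplication rule applies at every
    // level, so any chunk count reduces deterministically to a single root.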

    private static string ComputeHash(ReadOnlySpan<byte> data)
    {
        var hash = SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    private static byte[] HexToBytes(string hash)
    {
        // Strip sha256: prefix if present
        var hex = hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? hash[7..]
            : hash;

        return Convert.FromHexString(hex);
    }
}
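
A minimal end-to-end sketch of the chunking contract above (illustrative, not part of the diff; assumes ProvcacheOptions exposes a settable ChunkSize):

    var chunker = new EvidenceChunker(new ProvcacheOptions { ChunkSize = 64 * 1024 });
    var payload = new byte[200_000];
    Random.Shared.NextBytes(payload);

    // ChunkAsync stamps every chunk with the Merkle root; ReassembleAsync
    // re-verifies each chunk hash and the root before copying bytes back.
    var result = await chunker.ChunkAsync(payload, "application/octet-stream");
    var roundtripped = await chunker.ReassembleAsync(result.Chunks, result.ProofRoot);
    Console.WriteLine(payload.AsSpan().SequenceEqual(roundtripped)); // True
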
Some files were not shown because too many files have changed in this diff.