Implement VEX document verification system with issuer management and signature verification

- Added IIssuerDirectory interface for managing VEX document issuers, including methods for registration, revocation, and trust validation.
- Created InMemoryIssuerDirectory class as an in-memory implementation of IIssuerDirectory for testing and single-instance deployments.
- Introduced ISignatureVerifier interface for verifying signatures on VEX documents, with support for multiple signature formats.
- Developed SignatureVerifier class as the default implementation of ISignatureVerifier, allowing extensibility for different signature formats.
- Implemented handlers for DSSE and JWS signature formats, including methods for verification and signature extraction.
- Defined various records and enums for issuer and signature metadata, enhancing the structure and clarity of the verification process.
This commit is contained in:
StellaOps Bot
2025-12-06 13:41:22 +02:00
parent 2141196496
commit 5e514532df
112 changed files with 24861 additions and 211 deletions

View File

@@ -0,0 +1,264 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Storage;
namespace StellaOps.VexLens.Api;
/// <summary>
/// Request to compute consensus for a vulnerability-product pair.
/// </summary>
/// <param name="VulnerabilityId">Identifier of the vulnerability to evaluate.</param>
/// <param name="ProductKey">Key of the product the consensus applies to.</param>
/// <param name="TenantId">Optional tenant scope for the computation.</param>
/// <param name="Mode">Optional consensus mode; when null the service chooses its default.</param>
/// <param name="MinimumWeightThreshold">Optional minimum weight a statement must carry to be considered.</param>
/// <param name="StoreResult">Optionally request that the computed projection be persisted.</param>
/// <param name="EmitEvent">Optionally request that a consensus event be emitted.</param>
public sealed record ComputeConsensusRequest(
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    ConsensusMode? Mode,
    double? MinimumWeightThreshold,
    bool? StoreResult,
    bool? EmitEvent);
/// <summary>
/// Request to compute consensus for multiple pairs in batch.
/// </summary>
/// <param name="Targets">Vulnerability/product pairs to evaluate.</param>
/// <param name="TenantId">Optional tenant scope applied to all targets.</param>
/// <param name="Mode">Optional consensus mode; when null the service chooses its default.</param>
/// <param name="StoreResults">Optionally request that computed projections be persisted.</param>
/// <param name="EmitEvents">Optionally request that consensus events be emitted.</param>
public sealed record ComputeConsensusBatchRequest(
    IReadOnlyList<ConsensusTarget> Targets,
    string? TenantId,
    ConsensusMode? Mode,
    bool? StoreResults,
    bool? EmitEvents);
/// <summary>
/// Target for consensus computation.
/// </summary>
/// <param name="VulnerabilityId">Identifier of the vulnerability to evaluate.</param>
/// <param name="ProductKey">Key of the product the consensus applies to.</param>
public sealed record ConsensusTarget(
    string VulnerabilityId,
    string ProductKey);
/// <summary>
/// Response from consensus computation.
/// </summary>
/// <param name="VulnerabilityId">Identifier of the evaluated vulnerability.</param>
/// <param name="ProductKey">Key of the evaluated product.</param>
/// <param name="Status">Final consensus VEX status.</param>
/// <param name="Justification">Final justification, when one applies.</param>
/// <param name="ConfidenceScore">Overall confidence in the consensus.</param>
/// <param name="Outcome">Consensus outcome classification (string form).</param>
/// <param name="Rationale">Summary of how the consensus was reached.</param>
/// <param name="Contributions">Per-statement contributions to the consensus.</param>
/// <param name="Conflicts">Detected statement conflicts, if any.</param>
/// <param name="ProjectionId">Identifier of the stored projection, when persisted.</param>
/// <param name="ComputedAt">When the consensus was computed.</param>
public sealed record ComputeConsensusResponse(
    string VulnerabilityId,
    string ProductKey,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    string Outcome,
    ConsensusRationaleResponse Rationale,
    IReadOnlyList<ContributionResponse> Contributions,
    IReadOnlyList<ConflictResponse>? Conflicts,
    string? ProjectionId,
    DateTimeOffset ComputedAt);
/// <summary>
/// Rationale response in API format.
/// </summary>
/// <param name="Summary">Human-readable summary of the decision.</param>
/// <param name="Factors">Factors that influenced the decision.</param>
/// <param name="StatusWeights">Aggregate weight observed per status (keyed by status name).</param>
public sealed record ConsensusRationaleResponse(
    string Summary,
    IReadOnlyList<string> Factors,
    IReadOnlyDictionary<string, double> StatusWeights);
/// <summary>
/// Statement contribution response.
/// </summary>
/// <param name="StatementId">Identifier of the contributing VEX statement.</param>
/// <param name="IssuerId">Identifier of the issuing party, when known.</param>
/// <param name="Status">Status asserted by the statement.</param>
/// <param name="Justification">Justification provided by the statement, if any.</param>
/// <param name="Weight">Trust weight assigned to the statement.</param>
/// <param name="Contribution">Weighted contribution toward the consensus.</param>
/// <param name="IsWinner">Whether this statement supported the winning status.</param>
public sealed record ContributionResponse(
    string StatementId,
    string? IssuerId,
    VexStatus Status,
    VexJustification? Justification,
    double Weight,
    double Contribution,
    bool IsWinner);
/// <summary>
/// Conflict response.
/// </summary>
/// <param name="Statement1Id">Identifier of the first conflicting statement.</param>
/// <param name="Statement2Id">Identifier of the second conflicting statement.</param>
/// <param name="Status1">Status asserted by the first statement.</param>
/// <param name="Status2">Status asserted by the second statement.</param>
/// <param name="Severity">Severity of the conflict (string form).</param>
/// <param name="Resolution">How the conflict was resolved.</param>
public sealed record ConflictResponse(
    string Statement1Id,
    string Statement2Id,
    VexStatus Status1,
    VexStatus Status2,
    string Severity,
    string Resolution);
/// <summary>
/// Response from batch consensus computation.
/// </summary>
/// <param name="Results">Successful per-pair consensus results.</param>
/// <param name="TotalCount">Total number of targets in the batch.</param>
/// <param name="SuccessCount">Number of targets that computed successfully.</param>
/// <param name="FailureCount">Number of targets that failed.</param>
/// <param name="CompletedAt">When the batch finished processing.</param>
public sealed record ComputeConsensusBatchResponse(
    IReadOnlyList<ComputeConsensusResponse> Results,
    int TotalCount,
    int SuccessCount,
    int FailureCount,
    DateTimeOffset CompletedAt);
/// <summary>
/// Request to query consensus projections. All filters are optional and combined.
/// </summary>
/// <param name="VulnerabilityId">Filter by vulnerability identifier.</param>
/// <param name="ProductKey">Filter by product key.</param>
/// <param name="Status">Filter by consensus status.</param>
/// <param name="Outcome">Filter by outcome classification.</param>
/// <param name="MinimumConfidence">Only return projections at or above this confidence.</param>
/// <param name="ComputedAfter">Only return projections computed after this instant.</param>
/// <param name="ComputedBefore">Only return projections computed before this instant.</param>
/// <param name="StatusChanged">Filter by whether the status changed from the previous projection.</param>
/// <param name="Limit">Maximum number of results per page.</param>
/// <param name="Offset">Number of results to skip (paging).</param>
/// <param name="SortBy">Field to sort by; service default when null.</param>
/// <param name="SortDescending">Whether to sort in descending order.</param>
public sealed record QueryProjectionsRequest(
    string? VulnerabilityId,
    string? ProductKey,
    VexStatus? Status,
    string? Outcome,
    double? MinimumConfidence,
    DateTimeOffset? ComputedAfter,
    DateTimeOffset? ComputedBefore,
    bool? StatusChanged,
    int? Limit,
    int? Offset,
    string? SortBy,
    bool? SortDescending);
/// <summary>
/// Response from projection query.
/// </summary>
/// <param name="Projections">Matching projections for the requested page.</param>
/// <param name="TotalCount">Total number of matches across all pages.</param>
/// <param name="Offset">Offset that produced this page.</param>
/// <param name="Limit">Page-size limit that was applied.</param>
public sealed record QueryProjectionsResponse(
    IReadOnlyList<ProjectionSummary> Projections,
    int TotalCount,
    int Offset,
    int Limit);
/// <summary>
/// Summary of a projection for list responses.
/// </summary>
/// <param name="ProjectionId">Identifier of the stored projection.</param>
/// <param name="VulnerabilityId">Identifier of the vulnerability.</param>
/// <param name="ProductKey">Key of the product.</param>
/// <param name="Status">Consensus status at the time of computation.</param>
/// <param name="Justification">Consensus justification, if any.</param>
/// <param name="ConfidenceScore">Confidence of the consensus.</param>
/// <param name="Outcome">Outcome classification (string form).</param>
/// <param name="StatementCount">Number of statements that contributed.</param>
/// <param name="ConflictCount">Number of conflicts detected.</param>
/// <param name="ComputedAt">When the projection was computed.</param>
/// <param name="StatusChanged">Whether the status changed from the previous projection.</param>
public sealed record ProjectionSummary(
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    string Outcome,
    int StatementCount,
    int ConflictCount,
    DateTimeOffset ComputedAt,
    bool StatusChanged);
/// <summary>
/// Detailed projection response.
/// </summary>
/// <param name="ProjectionId">Identifier of the stored projection.</param>
/// <param name="VulnerabilityId">Identifier of the vulnerability.</param>
/// <param name="ProductKey">Key of the product.</param>
/// <param name="TenantId">Tenant scope, when applicable.</param>
/// <param name="Status">Consensus status.</param>
/// <param name="Justification">Consensus justification, if any.</param>
/// <param name="ConfidenceScore">Confidence of the consensus.</param>
/// <param name="Outcome">Outcome classification (string form).</param>
/// <param name="StatementCount">Number of statements that contributed.</param>
/// <param name="ConflictCount">Number of conflicts detected.</param>
/// <param name="RationaleSummary">Human-readable summary of the rationale.</param>
/// <param name="ComputedAt">When the projection was computed.</param>
/// <param name="StoredAt">When the projection was persisted.</param>
/// <param name="PreviousProjectionId">Identifier of the prior projection for the same pair, if any.</param>
/// <param name="StatusChanged">Whether the status changed from the previous projection.</param>
public sealed record ProjectionDetailResponse(
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    string Outcome,
    int StatementCount,
    int ConflictCount,
    string RationaleSummary,
    DateTimeOffset ComputedAt,
    DateTimeOffset StoredAt,
    string? PreviousProjectionId,
    bool StatusChanged);
/// <summary>
/// Response from projection history query.
/// </summary>
/// <param name="VulnerabilityId">Identifier of the vulnerability the history belongs to.</param>
/// <param name="ProductKey">Key of the product the history belongs to.</param>
/// <param name="History">Past projections for the pair.</param>
/// <param name="TotalCount">Total number of historical projections.</param>
public sealed record ProjectionHistoryResponse(
    string VulnerabilityId,
    string ProductKey,
    IReadOnlyList<ProjectionSummary> History,
    int TotalCount);
/// <summary>
/// Response from issuer directory query.
/// </summary>
/// <param name="Issuers">Matching issuer summaries.</param>
/// <param name="TotalCount">Total number of matching issuers.</param>
public sealed record IssuerListResponse(
    IReadOnlyList<IssuerSummary> Issuers,
    int TotalCount);
/// <summary>
/// Summary of an issuer.
/// </summary>
/// <param name="IssuerId">Unique identifier of the issuer.</param>
/// <param name="Name">Display name of the issuer.</param>
/// <param name="Category">Issuer category (string form, e.g. vendor or aggregator).</param>
/// <param name="TrustTier">Trust tier assigned to the issuer.</param>
/// <param name="Status">Current lifecycle status of the issuer.</param>
/// <param name="KeyCount">Number of registered key fingerprints.</param>
/// <param name="RegisteredAt">When the issuer was registered.</param>
public sealed record IssuerSummary(
    string IssuerId,
    string Name,
    string Category,
    string TrustTier,
    string Status,
    int KeyCount,
    DateTimeOffset RegisteredAt);
/// <summary>
/// Detailed issuer response.
/// </summary>
/// <param name="IssuerId">Unique identifier of the issuer.</param>
/// <param name="Name">Display name of the issuer.</param>
/// <param name="Category">Issuer category (string form).</param>
/// <param name="TrustTier">Trust tier assigned to the issuer.</param>
/// <param name="Status">Current lifecycle status of the issuer.</param>
/// <param name="KeyFingerprints">Registered signing-key fingerprints.</param>
/// <param name="Metadata">Optional descriptive metadata.</param>
/// <param name="RegisteredAt">When the issuer was registered.</param>
/// <param name="LastUpdatedAt">When the issuer record was last modified, if ever.</param>
/// <param name="RevokedAt">When the issuer was revoked, if it has been.</param>
/// <param name="RevocationReason">Reason supplied at revocation, if revoked.</param>
public sealed record IssuerDetailResponse(
    string IssuerId,
    string Name,
    string Category,
    string TrustTier,
    string Status,
    IReadOnlyList<KeyFingerprintResponse> KeyFingerprints,
    IssuerMetadataResponse? Metadata,
    DateTimeOffset RegisteredAt,
    DateTimeOffset? LastUpdatedAt,
    DateTimeOffset? RevokedAt,
    string? RevocationReason);
/// <summary>
/// Key fingerprint response.
/// </summary>
/// <param name="Fingerprint">Fingerprint of the signing key.</param>
/// <param name="KeyType">Type of the key.</param>
/// <param name="Algorithm">Signature algorithm, when specified.</param>
/// <param name="Status">Current status of the key.</param>
/// <param name="RegisteredAt">When the key was registered.</param>
/// <param name="ExpiresAt">Key expiry, when one is set.</param>
public sealed record KeyFingerprintResponse(
    string Fingerprint,
    string KeyType,
    string? Algorithm,
    string Status,
    DateTimeOffset RegisteredAt,
    DateTimeOffset? ExpiresAt);
/// <summary>
/// Issuer metadata response.
/// </summary>
/// <param name="Description">Free-form description of the issuer.</param>
/// <param name="Uri">Issuer web address, if provided.</param>
/// <param name="Email">Contact email, if provided.</param>
/// <param name="Tags">Arbitrary classification tags, if any.</param>
public sealed record IssuerMetadataResponse(
    string? Description,
    string? Uri,
    string? Email,
    IReadOnlyList<string>? Tags);
/// <summary>
/// Request to register an issuer.
/// </summary>
/// <param name="IssuerId">Unique identifier to assign to the issuer.</param>
/// <param name="Name">Display name of the issuer.</param>
/// <param name="Category">Issuer category (string form).</param>
/// <param name="TrustTier">Trust tier to assign.</param>
/// <param name="InitialKeys">Signing keys to register alongside the issuer, if any.</param>
/// <param name="Metadata">Optional descriptive metadata.</param>
public sealed record RegisterIssuerRequest(
    string IssuerId,
    string Name,
    string Category,
    string TrustTier,
    IReadOnlyList<RegisterKeyRequest>? InitialKeys,
    IssuerMetadataRequest? Metadata);
/// <summary>
/// Request to register a key.
/// </summary>
/// <param name="Fingerprint">Fingerprint of the signing key.</param>
/// <param name="KeyType">Type of the key.</param>
/// <param name="Algorithm">Signature algorithm, when known.</param>
/// <param name="ExpiresAt">Optional key expiry.</param>
public sealed record RegisterKeyRequest(
    string Fingerprint,
    string KeyType,
    string? Algorithm,
    DateTimeOffset? ExpiresAt);
/// <summary>
/// Issuer metadata request. Mirrors <see cref="IssuerMetadataResponse"/> for round-tripping.
/// </summary>
/// <param name="Description">Free-form description of the issuer.</param>
/// <param name="Uri">Issuer web address, if provided.</param>
/// <param name="Email">Contact email, if provided.</param>
/// <param name="Tags">Arbitrary classification tags, if any.</param>
public sealed record IssuerMetadataRequest(
    string? Description,
    string? Uri,
    string? Email,
    IReadOnlyList<string>? Tags);
/// <summary>
/// Request to revoke an issuer or key.
/// </summary>
/// <param name="Reason">Human-readable reason recorded with the revocation.</param>
public sealed record RevokeRequest(
    string Reason);
/// <summary>
/// Statistics about consensus projections.
/// </summary>
/// <param name="TotalProjections">Total number of stored projections.</param>
/// <param name="ByStatus">Projection counts keyed by status name.</param>
/// <param name="ByOutcome">Projection counts keyed by outcome name.</param>
/// <param name="AverageConfidence">Mean confidence score across projections.</param>
/// <param name="ProjectionsWithConflicts">Number of projections that recorded at least one conflict.</param>
/// <param name="StatusChangesLast24h">Number of status changes in the last 24 hours.</param>
/// <param name="ComputedAt">When these statistics were computed.</param>
public sealed record ConsensusStatisticsResponse(
    int TotalProjections,
    IReadOnlyDictionary<string, int> ByStatus,
    IReadOnlyDictionary<string, int> ByOutcome,
    double AverageConfidence,
    int ProjectionsWithConflicts,
    int StatusChangesLast24h,
    DateTimeOffset ComputedAt);

View File

@@ -0,0 +1,477 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Api;
/// <summary>
/// Detailed consensus rationale for AI/ML consumption, produced by
/// <see cref="IConsensusRationaleService"/>.
/// Note: Named with "Detailed" suffix to avoid conflict with Consensus.ConsensusRationale.
/// </summary>
public sealed record DetailedConsensusRationale(
    /// <summary>
    /// Unique identifier for this rationale.
    /// </summary>
    string RationaleId,
    /// <summary>
    /// Vulnerability ID.
    /// </summary>
    string VulnerabilityId,
    /// <summary>
    /// Product key.
    /// </summary>
    string ProductKey,
    /// <summary>
    /// Final consensus status.
    /// </summary>
    VexStatus ConsensusStatus,
    /// <summary>
    /// Final justification if applicable.
    /// </summary>
    VexJustification? ConsensusJustification,
    /// <summary>
    /// Overall confidence score (0.0-1.0).
    /// </summary>
    double ConfidenceScore,
    /// <summary>
    /// Consensus outcome classification; see <see cref="ConsensusOutcome"/>.
    /// </summary>
    ConsensusOutcome Outcome,
    /// <summary>
    /// Mode used for consensus computation.
    /// </summary>
    ConsensusMode Mode,
    /// <summary>
    /// Human-readable one-line summary of the consensus decision.
    /// </summary>
    string Summary,
    /// <summary>
    /// Detailed explanation of why this consensus was reached; format depends on
    /// the requested explanation format ("human", "ai" or "structured").
    /// </summary>
    string Explanation,
    /// <summary>
    /// Individual contributions from each statement.
    /// </summary>
    IReadOnlyList<RationaleContribution> Contributions,
    /// <summary>
    /// Detected conflicts between statements.
    /// </summary>
    IReadOnlyList<RationaleConflict> Conflicts,
    /// <summary>
    /// Factors that influenced the final decision.
    /// </summary>
    IReadOnlyList<RationaleFactor> DecisionFactors,
    /// <summary>
    /// Alternative outcomes that were considered but not chosen.
    /// </summary>
    IReadOnlyList<AlternativeOutcome> Alternatives,
    /// <summary>
    /// Metadata for audit and reproducibility (hashes, algorithm version, tracing ids).
    /// </summary>
    RationaleMetadata Metadata);
/// <summary>
/// Contribution from a single statement to the consensus.
/// </summary>
public sealed record RationaleContribution(
    /// <summary>
    /// Statement identifier.
    /// </summary>
    string StatementId,
    /// <summary>
    /// Issuer that made this statement ("unknown" when the source had no issuer).
    /// </summary>
    string IssuerId,
    /// <summary>
    /// Issuer name for display; null when not resolvable from the source data.
    /// </summary>
    string? IssuerName,
    /// <summary>
    /// Issuer category (Vendor, Aggregator, etc.).
    /// </summary>
    string IssuerCategory,
    /// <summary>
    /// Issuer trust tier.
    /// </summary>
    string TrustTier,
    /// <summary>
    /// Status asserted by this statement.
    /// </summary>
    VexStatus Status,
    /// <summary>
    /// Justification if provided.
    /// </summary>
    VexJustification? Justification,
    /// <summary>
    /// Raw trust weight from the issuer profile, before adjustments.
    /// </summary>
    double RawWeight,
    /// <summary>
    /// Final computed weight after all adjustments.
    /// </summary>
    double FinalWeight,
    /// <summary>
    /// Weight adjustment factors applied (empty when none were tracked).
    /// </summary>
    IReadOnlyList<WeightAdjustment> Adjustments,
    /// <summary>
    /// Whether this contribution won the consensus.
    /// </summary>
    bool IsWinner,
    /// <summary>
    /// Relative influence on the final decision: this statement's share of the
    /// total contribution, in the range 0.0-1.0.
    /// </summary>
    double Influence,
    /// <summary>
    /// When this statement was issued, if known.
    /// </summary>
    DateTimeOffset? IssuedAt);
/// <summary>
/// A single multiplicative adjustment applied to a statement's trust weight.
/// </summary>
public sealed record WeightAdjustment(
    /// <summary>
    /// Factor name (e.g. "freshness", "signature", "justification").
    /// </summary>
    string Factor,
    /// <summary>
    /// Multiplier applied (e.g. 1.2 for a 20% boost, 0.8 for a 20% penalty).
    /// </summary>
    double Multiplier,
    /// <summary>
    /// Weight before this adjustment.
    /// </summary>
    double WeightBefore,
    /// <summary>
    /// Weight after this adjustment.
    /// </summary>
    double WeightAfter,
    /// <summary>
    /// Human-readable reason for the adjustment.
    /// </summary>
    string Reason);
/// <summary>
/// Conflict between two statements observed during consensus.
/// </summary>
public sealed record RationaleConflict(
    /// <summary>
    /// Conflict identifier, unique within one rationale.
    /// </summary>
    string ConflictId,
    /// <summary>
    /// Type of conflict (e.g. "StatusDisagreement").
    /// </summary>
    string ConflictType,
    /// <summary>
    /// Severity of the conflict (string form).
    /// </summary>
    string Severity,
    /// <summary>
    /// Identifier of the first conflicting statement.
    /// </summary>
    string StatementA,
    /// <summary>
    /// Identifier of the second conflicting statement.
    /// </summary>
    string StatementB,
    /// <summary>
    /// Status asserted by the first statement.
    /// </summary>
    VexStatus StatusA,
    /// <summary>
    /// Status asserted by the second statement.
    /// </summary>
    VexStatus StatusB,
    /// <summary>
    /// Weight difference between the conflicting statements
    /// (0.0 when the source did not track it).
    /// </summary>
    double WeightDelta,
    /// <summary>
    /// How the conflict was resolved.
    /// </summary>
    string Resolution,
    /// <summary>
    /// Human-readable description of the conflict.
    /// </summary>
    string Description);
/// <summary>
/// Factor that influenced the consensus decision.
/// </summary>
public sealed record RationaleFactor(
    /// <summary>
    /// Factor name.
    /// </summary>
    string Name,
    /// <summary>
    /// Factor category (trust, freshness, coverage, quality, certainty, etc.).
    /// </summary>
    string Category,
    /// <summary>
    /// Numeric impact on the decision (-1.0 to 1.0); negative values weakened
    /// confidence in the outcome.
    /// </summary>
    double Impact,
    /// <summary>
    /// Human-readable description of the factor's influence.
    /// </summary>
    string Description,
    /// <summary>
    /// Supporting evidence for this factor (e.g. statement ids), when available.
    /// </summary>
    IReadOnlyList<string>? Evidence);
/// <summary>
/// Alternative outcome that was considered but not chosen.
/// </summary>
public sealed record AlternativeOutcome(
    /// <summary>
    /// Alternative status.
    /// </summary>
    VexStatus Status,
    /// <summary>
    /// Confidence this alternative would have had, relative to the winner.
    /// </summary>
    double Confidence,
    /// <summary>
    /// Total weight supporting this alternative.
    /// </summary>
    double TotalWeight,
    /// <summary>
    /// Number of statements supporting this alternative.
    /// </summary>
    int SupportingStatements,
    /// <summary>
    /// Why this alternative was not chosen.
    /// </summary>
    string RejectionReason);
/// <summary>
/// Metadata for audit and reproducibility of a rationale.
/// </summary>
public sealed record RationaleMetadata(
    /// <summary>
    /// When the consensus was computed.
    /// </summary>
    DateTimeOffset ComputedAt,
    /// <summary>
    /// Algorithm version used.
    /// </summary>
    string AlgorithmVersion,
    /// <summary>
    /// Hash of all inputs for reproducibility. Producers in this module emit a
    /// truncated lowercase SHA-256 hex digest.
    /// </summary>
    string InputHash,
    /// <summary>
    /// Hash of the output for verification (same digest format as <see cref="InputHash"/>).
    /// </summary>
    string OutputHash,
    /// <summary>
    /// Tenant context if applicable.
    /// </summary>
    string? TenantId,
    /// <summary>
    /// Policy ID if a specific policy was applied.
    /// </summary>
    string? PolicyId,
    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    string? CorrelationId);
/// <summary>
/// Request for generating a consensus rationale.
/// </summary>
public sealed record GenerateRationaleRequest(
    /// <summary>
    /// Vulnerability ID.
    /// </summary>
    string VulnerabilityId,
    /// <summary>
    /// Product key.
    /// </summary>
    string ProductKey,
    /// <summary>
    /// Tenant ID if applicable.
    /// </summary>
    string? TenantId,
    /// <summary>
    /// Include full contribution details.
    /// </summary>
    bool IncludeContributions,
    /// <summary>
    /// Include alternative outcomes analysis.
    /// </summary>
    bool IncludeAlternatives,
    /// <summary>
    /// Include weight adjustment breakdown.
    /// </summary>
    bool IncludeAdjustments,
    /// <summary>
    /// Verbosity level: "minimal", "standard", "detailed".
    /// </summary>
    string Verbosity,
    /// <summary>
    /// Format hint for explanations: "human", "ai", "structured".
    /// Unrecognized values fall back to the human-readable format.
    /// </summary>
    string ExplanationFormat);
/// <summary>
/// Response containing the consensus rationale.
/// </summary>
public sealed record GenerateRationaleResponse(
    /// <summary>
    /// The generated rationale.
    /// </summary>
    DetailedConsensusRationale Rationale,
    /// <summary>
    /// Statistics about how the rationale was generated.
    /// </summary>
    RationaleGenerationStats Stats);
/// <summary>
/// Statistics about rationale generation.
/// </summary>
public sealed record RationaleGenerationStats(
    /// <summary>
    /// Number of statements analyzed.
    /// </summary>
    int StatementsAnalyzed,
    /// <summary>
    /// Number of distinct issuers involved.
    /// </summary>
    int IssuersInvolved,
    /// <summary>
    /// Number of conflicts detected.
    /// </summary>
    int ConflictsDetected,
    /// <summary>
    /// Number of decision factors identified.
    /// </summary>
    int FactorsIdentified,
    /// <summary>
    /// Wall-clock time taken to generate the rationale, in milliseconds.
    /// </summary>
    double GenerationTimeMs);
/// <summary>
/// Batch rationale request.
/// </summary>
public sealed record BatchRationaleRequest(
    /// <summary>
    /// Individual rationale requests.
    /// </summary>
    IReadOnlyList<GenerateRationaleRequest> Requests,
    /// <summary>
    /// Maximum parallel computations; the service defaults to 4 when null.
    /// </summary>
    int? MaxParallel);
/// <summary>
/// Batch rationale response.
/// </summary>
public sealed record BatchRationaleResponse(
    /// <summary>
    /// Successfully generated rationales.
    /// </summary>
    IReadOnlyList<GenerateRationaleResponse> Responses,
    /// <summary>
    /// Errors for the requests that failed.
    /// </summary>
    IReadOnlyList<RationaleError> Errors,
    /// <summary>
    /// Total wall-clock time for batch processing, in milliseconds.
    /// </summary>
    double TotalTimeMs);
/// <summary>
/// Error from rationale generation.
/// </summary>
public sealed record RationaleError(
    /// <summary>
    /// Vulnerability ID from the failed request.
    /// </summary>
    string VulnerabilityId,
    /// <summary>
    /// Product key from the failed request.
    /// </summary>
    string ProductKey,
    /// <summary>
    /// Machine-readable error code (e.g. "GENERATION_FAILED").
    /// </summary>
    string Code,
    /// <summary>
    /// Human-readable error message.
    /// </summary>
    string Message);

View File

@@ -0,0 +1,560 @@
using System.Security.Cryptography;
using System.Text;
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Storage;
using StellaOps.VexLens.Trust;
namespace StellaOps.VexLens.Api;
/// <summary>
/// Service for generating detailed consensus rationales for AI/ML consumption.
/// </summary>
public interface IConsensusRationaleService
{
    /// <summary>
    /// Generates a detailed rationale for a consensus computation.
    /// </summary>
    /// <param name="request">Identifies the vulnerability/product pair and output options.</param>
    /// <param name="cancellationToken">Token used to cancel the operation.</param>
    /// <returns>The generated rationale together with generation statistics.</returns>
    Task<GenerateRationaleResponse> GenerateRationaleAsync(
        GenerateRationaleRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Generates rationales for multiple consensus computations in batch.
    /// </summary>
    /// <param name="request">The individual requests plus an optional parallelism cap.</param>
    /// <param name="cancellationToken">Token used to cancel the operation.</param>
    /// <returns>Successful rationales and per-request errors for the failures.</returns>
    Task<BatchRationaleResponse> GenerateBatchRationaleAsync(
        BatchRationaleRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Generates a rationale from an existing consensus result, without consulting storage.
    /// </summary>
    /// <param name="result">The consensus result to explain.</param>
    /// <param name="explanationFormat">"human", "ai" or "structured"; other values fall back to human.</param>
    /// <param name="cancellationToken">Token used to cancel the operation.</param>
    /// <returns>The generated rationale.</returns>
    Task<DetailedConsensusRationale> GenerateFromResultAsync(
        VexConsensusResult result,
        string explanationFormat = "human",
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of <see cref="IConsensusRationaleService"/>.
/// Builds rationales either from stored consensus projections (lossy: projections
/// keep only aggregate counts) or directly from an in-memory consensus result.
/// </summary>
public sealed class ConsensusRationaleService : IConsensusRationaleService
{
    private readonly IConsensusProjectionStore _projectionStore;
    private readonly IVexConsensusEngine _consensusEngine;
    private readonly ITrustWeightEngine _trustWeightEngine;

    // Stamped into RationaleMetadata so consumers can trace which algorithm
    // revision produced a rationale.
    private const string AlgorithmVersion = "1.0.0";

    public ConsensusRationaleService(
        IConsensusProjectionStore projectionStore,
        IVexConsensusEngine consensusEngine,
        ITrustWeightEngine trustWeightEngine)
    {
        _projectionStore = projectionStore ?? throw new ArgumentNullException(nameof(projectionStore));
        _consensusEngine = consensusEngine ?? throw new ArgumentNullException(nameof(consensusEngine));
        _trustWeightEngine = trustWeightEngine ?? throw new ArgumentNullException(nameof(trustWeightEngine));
    }

    /// <inheritdoc />
    /// <exception cref="InvalidOperationException">
    /// Thrown when no consensus projection exists for the requested pair.
    /// </exception>
    public async Task<GenerateRationaleResponse> GenerateRationaleAsync(
        GenerateRationaleRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        // Stopwatch is monotonic; DateTime subtraction can jump with clock changes.
        var stopwatch = System.Diagnostics.Stopwatch.StartNew();

        // Rationales generated through this entry point are derived from the
        // most recently stored projection for the pair.
        var projection = await _projectionStore.GetLatestAsync(
            request.VulnerabilityId,
            request.ProductKey,
            request.TenantId,
            cancellationToken);
        if (projection is null)
        {
            throw new InvalidOperationException(
                $"No consensus projection found for {request.VulnerabilityId}/{request.ProductKey}");
        }

        var rationale = BuildRationale(projection, request);

        return new GenerateRationaleResponse(
            Rationale: rationale,
            Stats: new RationaleGenerationStats(
                StatementsAnalyzed: projection.StatementCount,
                IssuersInvolved: 1, // Simplified: projections do not retain per-issuer detail.
                ConflictsDetected: projection.ConflictCount,
                FactorsIdentified: rationale.DecisionFactors.Count,
                GenerationTimeMs: stopwatch.Elapsed.TotalMilliseconds));
    }

    /// <inheritdoc />
    public async Task<BatchRationaleResponse> GenerateBatchRationaleAsync(
        BatchRationaleRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        var stopwatch = System.Diagnostics.Stopwatch.StartNew();
        var responses = new List<GenerateRationaleResponse>();
        var errors = new List<RationaleError>();
        var gate = new object(); // guards both result lists across parallel tasks

        var maxParallel = request.MaxParallel ?? 4;
        // Fix: the semaphore was previously never disposed.
        using var semaphore = new SemaphoreSlim(maxParallel);

        // Materialize so each request is started exactly once.
        var tasks = request.Requests.Select(async req =>
        {
            await semaphore.WaitAsync(cancellationToken);
            try
            {
                var response = await GenerateRationaleAsync(req, cancellationToken);
                lock (gate)
                {
                    responses.Add(response);
                }
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // Fix: propagate caller cancellation instead of recording it
                // as a per-request generation error.
                throw;
            }
            catch (Exception ex)
            {
                lock (gate)
                {
                    errors.Add(new RationaleError(
                        VulnerabilityId: req.VulnerabilityId,
                        ProductKey: req.ProductKey,
                        Code: "GENERATION_FAILED",
                        Message: ex.Message));
                }
            }
            finally
            {
                semaphore.Release();
            }
        }).ToList();

        await Task.WhenAll(tasks);

        return new BatchRationaleResponse(
            Responses: responses,
            Errors: errors,
            TotalTimeMs: stopwatch.Elapsed.TotalMilliseconds);
    }

    /// <inheritdoc />
    public Task<DetailedConsensusRationale> GenerateFromResultAsync(
        VexConsensusResult result,
        string explanationFormat = "human",
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);

        var contributions = result.Contributions.Select(c => new RationaleContribution(
            StatementId: c.StatementId,
            IssuerId: c.IssuerId ?? "unknown",
            IssuerName: null, // Not available in StatementContribution.
            IssuerCategory: "Unknown",
            TrustTier: "Unknown",
            Status: c.Status,
            Justification: c.Justification,
            RawWeight: c.Weight, // Use Weight as RawWeight since no separate field exists.
            FinalWeight: c.Weight,
            Adjustments: [],
            IsWinner: c.IsWinner,
            Influence: CalculateInfluence(c.Contribution, result.Contributions),
            IssuedAt: null)).ToList();

        var conflicts = (result.Conflicts ?? []).Select((cf, i) => new RationaleConflict(
            ConflictId: $"conflict-{i + 1}",
            ConflictType: "StatusDisagreement",
            Severity: cf.Severity.ToString(),
            StatementA: cf.Statement1Id,
            StatementB: cf.Statement2Id,
            StatusA: cf.Status1,
            StatusB: cf.Status2,
            WeightDelta: 0.0, // Not tracked in ConsensusConflict.
            Resolution: cf.Resolution,
            Description: BuildConflictDescription(cf))).ToList();

        var factors = BuildDecisionFactors(result);
        var alternatives = BuildAlternatives(result);
        var (summary, explanation) = GenerateExplanation(result, explanationFormat);
        var inputHash = ComputeInputHash(result);
        var outputHash = ComputeOutputHash(result, contributions, conflicts);

        var rationale = new DetailedConsensusRationale(
            RationaleId: $"rat-{Guid.NewGuid():N}",
            VulnerabilityId: result.VulnerabilityId,
            ProductKey: result.ProductKey,
            ConsensusStatus: result.ConsensusStatus,
            ConsensusJustification: result.ConsensusJustification,
            ConfidenceScore: result.ConfidenceScore,
            Outcome: result.Outcome,
            Mode: ConsensusMode.WeightedVote, // Default; mode is not carried on the result.
            Summary: summary,
            Explanation: explanation,
            Contributions: contributions,
            Conflicts: conflicts,
            DecisionFactors: factors,
            Alternatives: alternatives,
            Metadata: new RationaleMetadata(
                ComputedAt: result.ComputedAt,
                AlgorithmVersion: AlgorithmVersion,
                InputHash: inputHash,
                OutputHash: outputHash,
                TenantId: null,
                PolicyId: null,
                CorrelationId: null));
        return Task.FromResult(rationale);
    }

    /// <summary>
    /// Builds a simplified rationale from a stored projection, which only retains
    /// aggregate counts (no per-statement detail).
    /// </summary>
    private DetailedConsensusRationale BuildRationale(
        ConsensusProjection projection,
        GenerateRationaleRequest request)
    {
        var contributions = new List<RationaleContribution>();
        var conflicts = new List<RationaleConflict>();

        // Projections carry no per-statement data, so synthesize one aggregated contribution.
        if (projection.StatementCount > 0)
        {
            contributions.Add(new RationaleContribution(
                StatementId: "aggregated",
                IssuerId: "multiple",
                IssuerName: null,
                IssuerCategory: "Mixed",
                TrustTier: "Mixed",
                Status: projection.Status,
                Justification: projection.Justification,
                RawWeight: projection.ConfidenceScore,
                FinalWeight: projection.ConfidenceScore,
                Adjustments: [],
                IsWinner: true,
                Influence: 1.0,
                IssuedAt: projection.ComputedAt));
        }

        // Synthesize placeholder conflict entries matching the recorded count.
        for (var i = 0; i < projection.ConflictCount; i++)
        {
            conflicts.Add(new RationaleConflict(
                ConflictId: $"conflict-{i + 1}",
                ConflictType: "StatusDisagreement",
                Severity: "Medium",
                StatementA: $"statement-{i * 2 + 1}",
                StatementB: $"statement-{i * 2 + 2}",
                StatusA: projection.Status,
                StatusB: VexStatus.UnderInvestigation,
                WeightDelta: 0.0,
                Resolution: "weight_based",
                Description: $"Conflict {i + 1} resolved by weight comparison"));
        }

        var factors = new List<RationaleFactor>
        {
            new("Statement Count", "coverage",
                Math.Min(projection.StatementCount / 10.0, 1.0),
                $"{projection.StatementCount} statement(s) contributed to this consensus",
                null),
            new("Conflict Rate", "quality",
                -Math.Min(projection.ConflictCount / (double)Math.Max(projection.StatementCount, 1), 1.0),
                projection.ConflictCount > 0
                    ? $"{projection.ConflictCount} conflict(s) detected and resolved"
                    : "No conflicts detected",
                null),
            new("Confidence Score", "trust",
                projection.ConfidenceScore,
                $"Overall confidence: {projection.ConfidenceScore:P0}",
                null)
        };

        // Alternatives cannot be reconstructed from a projection; approximate with
        // two non-winning statuses at reduced confidence.
        var alternatives = new List<AlternativeOutcome>();
        var otherStatuses = Enum.GetValues<VexStatus>()
            .Where(s => s != projection.Status)
            .Take(2);
        foreach (var status in otherStatuses)
        {
            alternatives.Add(new AlternativeOutcome(
                Status: status,
                Confidence: projection.ConfidenceScore * 0.3,
                TotalWeight: 0.0,
                SupportingStatements: 0,
                RejectionReason: $"Insufficient support compared to {projection.Status}"));
        }

        var (summary, explanation) = GenerateExplanationFromProjection(projection, request.ExplanationFormat);
        var inputHash = ComputeProjectionInputHash(projection);
        var outputHash = ComputeProjectionOutputHash(projection);

        return new DetailedConsensusRationale(
            RationaleId: $"rat-{projection.ProjectionId}",
            VulnerabilityId: projection.VulnerabilityId,
            ProductKey: projection.ProductKey,
            ConsensusStatus: projection.Status,
            ConsensusJustification: projection.Justification,
            ConfidenceScore: projection.ConfidenceScore,
            Outcome: projection.Outcome,
            Mode: ConsensusMode.WeightedVote, // Default assumption; mode is not stored on projections.
            Summary: summary,
            Explanation: explanation,
            Contributions: contributions,
            Conflicts: conflicts,
            DecisionFactors: factors,
            Alternatives: alternatives,
            Metadata: new RationaleMetadata(
                ComputedAt: projection.ComputedAt,
                AlgorithmVersion: AlgorithmVersion,
                InputHash: inputHash,
                OutputHash: outputHash,
                TenantId: request.TenantId,
                PolicyId: null,
                CorrelationId: null));
    }

    /// <summary>Share of the total contribution carried by one statement (0 when the total is 0).</summary>
    private static double CalculateInfluence(
        double contribution,
        IReadOnlyList<StatementContribution> allContributions)
    {
        var totalContribution = allContributions.Sum(c => c.Contribution);
        return totalContribution > 0 ? contribution / totalContribution : 0;
    }

    /// <summary>Human-readable one-liner describing a status conflict and its resolution.</summary>
    private static string BuildConflictDescription(ConsensusConflict conflict)
    {
        return $"Statement '{conflict.Statement1Id}' asserts {conflict.Status1} " +
            $"while statement '{conflict.Statement2Id}' asserts {conflict.Status2}. " +
            $"Severity: {conflict.Severity}. " +
            $"Resolution: {conflict.Resolution}.";
    }

    /// <summary>Derives the decision factors (coverage, conflicts, dominance, justification) from a result.</summary>
    private static IReadOnlyList<RationaleFactor> BuildDecisionFactors(VexConsensusResult result)
    {
        var factors = new List<RationaleFactor>
        {
            // Coverage factor: more statements => higher (capped) impact.
            new RationaleFactor(
                Name: "Statement Coverage",
                Category: "coverage",
                Impact: Math.Min(result.Contributions.Count / 5.0, 1.0),
                Description: $"{result.Contributions.Count} statement(s) analyzed from various sources",
                Evidence: result.Contributions.Select(c => c.StatementId).ToList())
        };

        // Conflict factor: each conflict (up to 5) reduces confidence slightly.
        var conflictCount = result.Conflicts?.Count ?? 0;
        if (conflictCount > 0)
        {
            factors.Add(new RationaleFactor(
                Name: "Conflict Resolution",
                Category: "quality",
                Impact: -0.1 * Math.Min(conflictCount, 5),
                Description: $"{conflictCount} conflict(s) required resolution",
                Evidence: null));
        }

        // Winner dominance: fraction of total contribution held by the winning side.
        var winners = result.Contributions.Where(c => c.IsWinner).ToList();
        if (winners.Count > 0)
        {
            var winnerContribution = winners.Sum(w => w.Contribution);
            var totalContribution = result.Contributions.Sum(c => c.Contribution);
            var dominance = totalContribution > 0 ? winnerContribution / totalContribution : 0;
            factors.Add(new RationaleFactor(
                Name: "Winner Dominance",
                Category: "certainty",
                Impact: dominance,
                Description: $"Winning position represents {dominance:P0} of total contribution",
                Evidence: null));
        }

        // Justification factor: a stated justification raises quality.
        if (result.ConsensusJustification.HasValue)
        {
            factors.Add(new RationaleFactor(
                Name: "Justification Provided",
                Category: "quality",
                Impact: 0.2,
                Description: $"Consensus includes justification: {result.ConsensusJustification}",
                Evidence: null));
        }

        return factors;
    }

    /// <summary>Builds up to three alternative outcomes from the non-winning status groups.</summary>
    private static IReadOnlyList<AlternativeOutcome> BuildAlternatives(VexConsensusResult result)
    {
        var alternatives = new List<AlternativeOutcome>();

        var statusGroups = result.Contributions
            .GroupBy(c => c.Status)
            .Where(g => g.Key != result.ConsensusStatus)
            .OrderByDescending(g => g.Sum(c => c.Contribution));

        foreach (var group in statusGroups.Take(3))
        {
            var totalContribution = group.Sum(c => c.Contribution);
            var winningContribution = result.Contributions
                .Where(c => c.Status == result.ConsensusStatus)
                .Sum(c => c.Contribution);
            alternatives.Add(new AlternativeOutcome(
                Status: group.Key,
                Confidence: totalContribution / Math.Max(winningContribution + totalContribution, 1),
                TotalWeight: group.Sum(c => c.Weight),
                SupportingStatements: group.Count(),
                RejectionReason: winningContribution > totalContribution
                    ? $"Outweighed by {result.ConsensusStatus} statements"
                    : $"Fewer supporting statements than {result.ConsensusStatus}"));
        }

        return alternatives;
    }

    /// <summary>Produces the (summary, explanation) pair for a result; unknown formats fall back to human.</summary>
    private static (string Summary, string Explanation) GenerateExplanation(
        VexConsensusResult result,
        string format)
    {
        var summary = $"Consensus: {result.ConsensusStatus} with {result.ConfidenceScore:P0} confidence";
        var explanation = format switch
        {
            "ai" => GenerateAiExplanation(result),
            "structured" => GenerateStructuredExplanation(result),
            _ => GenerateHumanExplanation(result)
        };
        return (summary, explanation);
    }

    /// <summary>Produces the (summary, explanation) pair for a projection; unknown formats fall back to human.</summary>
    private static (string Summary, string Explanation) GenerateExplanationFromProjection(
        ConsensusProjection projection,
        string format)
    {
        var summary = $"Consensus: {projection.Status} with {projection.ConfidenceScore:P0} confidence";
        var explanation = format switch
        {
            "ai" => $"STATUS={projection.Status}|JUSTIFICATION={projection.Justification?.ToString() ?? "NONE"}|" +
                $"CONFIDENCE={projection.ConfidenceScore:F4}|OUTCOME={projection.Outcome}|" +
                $"STATEMENTS={projection.StatementCount}|CONFLICTS={projection.ConflictCount}",
            "structured" => $"{{\"status\":\"{projection.Status}\",\"justification\":\"{projection.Justification?.ToString() ?? "null"}\"," +
                $"\"confidence\":{projection.ConfidenceScore:F4},\"outcome\":\"{projection.Outcome}\"," +
                $"\"statements\":{projection.StatementCount},\"conflicts\":{projection.ConflictCount}}}",
            _ => $"The vulnerability {projection.VulnerabilityId} affecting product {projection.ProductKey} " +
                $"has been determined to be {projection.Status} based on analysis of {projection.StatementCount} VEX statement(s). " +
                (projection.ConflictCount > 0
                    ? $"{projection.ConflictCount} conflict(s) were detected and resolved. "
                    : "") +
                (projection.Justification.HasValue
                    ? $"Justification: {projection.Justification}. "
                    : "") +
                $"Confidence level: {projection.ConfidenceScore:P0}."
        };
        return (summary, explanation);
    }

    /// <summary>Prose explanation for human readers.</summary>
    private static string GenerateHumanExplanation(VexConsensusResult result)
    {
        var sb = new StringBuilder();
        sb.Append($"The vulnerability {result.VulnerabilityId} affecting product {result.ProductKey} ");
        sb.Append($"has been determined to be {result.ConsensusStatus}. ");
        if (result.Contributions.Count > 0)
        {
            sb.Append($"This determination is based on {result.Contributions.Count} VEX statement(s) ");
            sb.Append($"from {result.Contributions.Select(c => c.IssuerId).Distinct().Count()} issuer(s). ");
        }
        if (result.ConsensusJustification.HasValue)
        {
            sb.Append($"Justification: {result.ConsensusJustification}. ");
        }
        var conflictCount = result.Conflicts?.Count ?? 0;
        if (conflictCount > 0)
        {
            sb.Append($"{conflictCount} conflicting statement(s) were resolved. ");
        }
        sb.Append($"Confidence level: {result.ConfidenceScore:P0}.");
        return sb.ToString();
    }

    /// <summary>Pipe-delimited key=value format optimized for AI/ML consumption.</summary>
    private static string GenerateAiExplanation(VexConsensusResult result)
    {
        var parts = new List<string>
        {
            $"STATUS={result.ConsensusStatus}",
            $"JUSTIFICATION={result.ConsensusJustification?.ToString() ?? "NONE"}",
            $"CONFIDENCE={result.ConfidenceScore:F4}",
            $"OUTCOME={result.Outcome}",
            $"STATEMENTS={result.Contributions.Count}",
            $"CONFLICTS={result.Conflicts?.Count ?? 0}"
        };
        // Cap at the first five contributions to bound the output size.
        foreach (var contrib in result.Contributions.Take(5))
        {
            parts.Add($"CONTRIB[{contrib.StatementId}]={{status={contrib.Status},weight={contrib.Weight:F4},winner={contrib.IsWinner}}}");
        }
        return string.Join("|", parts);
    }

    /// <summary>JSON-serialized structured format.</summary>
    private static string GenerateStructuredExplanation(VexConsensusResult result)
    {
        return System.Text.Json.JsonSerializer.Serialize(new
        {
            status = result.ConsensusStatus.ToString(),
            justification = result.ConsensusJustification?.ToString(),
            confidence = result.ConfidenceScore,
            outcome = result.Outcome.ToString(),
            statements = result.Contributions.Count,
            conflicts = result.Conflicts?.Count ?? 0,
            topContributors = result.Contributions
                .OrderByDescending(c => c.Weight)
                .Take(3)
                .Select(c => new { c.StatementId, c.Status, c.Weight })
        });
    }

    /// <summary>Truncated (16 hex chars) lowercase SHA-256 digest used for the reproducibility hashes.</summary>
    private static string ShortHash(string data)
        => Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(data))).ToLowerInvariant()[..16];

    private static string ComputeInputHash(VexConsensusResult result)
    {
        // Statement ids are sorted so the hash is order-independent.
        var data = $"{result.VulnerabilityId}|{result.ProductKey}|" +
            string.Join(",", result.Contributions.Select(c => c.StatementId).OrderBy(x => x));
        return ShortHash(data);
    }

    private static string ComputeOutputHash(
        VexConsensusResult result,
        IReadOnlyList<RationaleContribution> contributions,
        IReadOnlyList<RationaleConflict> conflicts)
    {
        var data = $"{result.ConsensusStatus}|{result.ConfidenceScore:F4}|{contributions.Count}|{conflicts.Count}";
        return ShortHash(data);
    }

    private static string ComputeProjectionInputHash(ConsensusProjection projection)
    {
        var data = $"{projection.VulnerabilityId}|{projection.ProductKey}|{projection.StatementCount}";
        return ShortHash(data);
    }

    private static string ComputeProjectionOutputHash(ConsensusProjection projection)
    {
        var data = $"{projection.Status}|{projection.ConfidenceScore:F4}|{projection.Outcome}";
        return ShortHash(data);
    }
}

View File

@@ -0,0 +1,619 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Storage;
using StellaOps.VexLens.Trust;
using StellaOps.VexLens.Verification;
namespace StellaOps.VexLens.Api;
/// <summary>
/// API service for VexLens operations.
/// Encapsulates the workflow of normalization, trust weighting, and consensus,
/// plus read access to stored projections and issuer-directory administration.
/// </summary>
public interface IVexLensApiService
{
    /// <summary>
    /// Computes consensus for a vulnerability-product pair.
    /// </summary>
    /// <returns>The consensus outcome, including rationale and contributions.</returns>
    Task<ComputeConsensusResponse> ComputeConsensusAsync(
        ComputeConsensusRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Computes consensus for multiple pairs in batch.
    /// </summary>
    /// <returns>Per-target results plus success/failure counts.</returns>
    Task<ComputeConsensusBatchResponse> ComputeConsensusBatchAsync(
        ComputeConsensusBatchRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets a consensus projection by ID.
    /// </summary>
    /// <returns>The projection, or null when no projection has that ID.</returns>
    Task<ProjectionDetailResponse?> GetProjectionAsync(
        string projectionId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets the latest projection for a vulnerability-product pair.
    /// </summary>
    /// <returns>The most recent projection, or null when none exists.</returns>
    Task<ProjectionDetailResponse?> GetLatestProjectionAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Queries consensus projections.
    /// </summary>
    /// <returns>A page of matching projections with paging metadata.</returns>
    Task<QueryProjectionsResponse> QueryProjectionsAsync(
        QueryProjectionsRequest request,
        string? tenantId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets projection history for a vulnerability-product pair.
    /// </summary>
    /// <param name="limit">Optional cap on the number of history entries returned.</param>
    Task<ProjectionHistoryResponse> GetProjectionHistoryAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId,
        int? limit,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets consensus statistics.
    /// </summary>
    Task<ConsensusStatisticsResponse> GetStatisticsAsync(
        string? tenantId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Lists registered issuers. All filters are optional; string filters are
    /// parsed into their enum counterparts by the implementation.
    /// </summary>
    Task<IssuerListResponse> ListIssuersAsync(
        string? category,
        string? minimumTrustTier,
        string? status,
        string? searchTerm,
        int? limit,
        int? offset,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets issuer details.
    /// </summary>
    /// <returns>The issuer, or null when the ID is unknown.</returns>
    Task<IssuerDetailResponse?> GetIssuerAsync(
        string issuerId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Registers a new issuer.
    /// </summary>
    /// <returns>The issuer record as stored, including any initial keys.</returns>
    Task<IssuerDetailResponse> RegisterIssuerAsync(
        RegisterIssuerRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Revokes an issuer.
    /// </summary>
    /// <returns>True when the issuer was found and revoked.</returns>
    Task<bool> RevokeIssuerAsync(
        string issuerId,
        RevokeRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Adds a key to an issuer.
    /// </summary>
    /// <returns>The updated issuer record.</returns>
    Task<IssuerDetailResponse> AddIssuerKeyAsync(
        string issuerId,
        RegisterKeyRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Revokes an issuer key.
    /// </summary>
    /// <returns>True when the key was found and revoked.</returns>
    Task<bool> RevokeIssuerKeyAsync(
        string issuerId,
        string fingerprint,
        RevokeRequest request,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of <see cref="IVexLensApiService"/>.
/// Orchestrates statement retrieval, trust weighting, consensus computation,
/// projection storage, and issuer-directory administration.
/// </summary>
public sealed class VexLensApiService : IVexLensApiService
{
    private readonly IVexConsensusEngine _consensusEngine;
    private readonly ITrustWeightEngine _trustWeightEngine;
    private readonly IConsensusProjectionStore _projectionStore;
    private readonly IIssuerDirectory _issuerDirectory;
    private readonly IVexStatementProvider _statementProvider;
    /// <summary>
    /// Creates the service from its collaborating engines and stores.
    /// </summary>
    public VexLensApiService(
        IVexConsensusEngine consensusEngine,
        ITrustWeightEngine trustWeightEngine,
        IConsensusProjectionStore projectionStore,
        IIssuerDirectory issuerDirectory,
        IVexStatementProvider statementProvider)
    {
        _consensusEngine = consensusEngine;
        _trustWeightEngine = trustWeightEngine;
        _projectionStore = projectionStore;
        _issuerDirectory = issuerDirectory;
        _statementProvider = statementProvider;
    }
    /// <inheritdoc />
    public async Task<ComputeConsensusResponse> ComputeConsensusAsync(
        ComputeConsensusRequest request,
        CancellationToken cancellationToken = default)
    {
        // Get statements for the vulnerability-product pair
        var statements = await _statementProvider.GetStatementsAsync(
            request.VulnerabilityId,
            request.ProductKey,
            request.TenantId,
            cancellationToken);
        // Compute trust weights (sequentially, one engine call per statement)
        var now = DateTimeOffset.UtcNow;
        var weightedStatements = new List<WeightedStatement>();
        foreach (var stmt in statements)
        {
            var weightRequest = new TrustWeightRequest(
                Statement: stmt.Statement,
                Issuer: stmt.Issuer,
                SignatureVerification: stmt.SignatureVerification,
                DocumentIssuedAt: stmt.DocumentIssuedAt,
                Context: new TrustWeightContext(
                    TenantId: request.TenantId,
                    EvaluationTime: now,
                    CustomFactors: null));
            var weight = await _trustWeightEngine.ComputeWeightAsync(weightRequest, cancellationToken);
            weightedStatements.Add(new WeightedStatement(
                Statement: stmt.Statement,
                Weight: weight,
                Issuer: stmt.Issuer,
                SourceDocumentId: stmt.SourceDocumentId));
        }
        // Compute consensus. Defaults when not supplied by the request:
        // WeightedVote mode, 0.1 minimum weight; conflict threshold fixed at 0.3.
        var policy = new ConsensusPolicy(
            Mode: request.Mode ?? ConsensusMode.WeightedVote,
            MinimumWeightThreshold: request.MinimumWeightThreshold ?? 0.1,
            ConflictThreshold: 0.3,
            RequireJustificationForNotAffected: false,
            PreferredIssuers: null);
        var consensusRequest = new VexConsensusRequest(
            VulnerabilityId: request.VulnerabilityId,
            ProductKey: request.ProductKey,
            Statements: weightedStatements,
            Context: new ConsensusContext(
                TenantId: request.TenantId,
                EvaluationTime: now,
                Policy: policy));
        var result = await _consensusEngine.ComputeConsensusAsync(consensusRequest, cancellationToken);
        // Store result if requested (StoreResult must be explicitly true)
        string? projectionId = null;
        if (request.StoreResult == true)
        {
            var projection = await _projectionStore.StoreAsync(
                result,
                new StoreProjectionOptions(
                    TenantId: request.TenantId,
                    TrackHistory: true,
                    EmitEvent: request.EmitEvent ?? true),
                cancellationToken);
            projectionId = projection.ProjectionId;
        }
        return MapToResponse(result, projectionId);
    }
    /// <inheritdoc />
    public async Task<ComputeConsensusBatchResponse> ComputeConsensusBatchAsync(
        ComputeConsensusBatchRequest request,
        CancellationToken cancellationToken = default)
    {
        var results = new List<ComputeConsensusResponse>();
        var failures = 0;
        foreach (var target in request.Targets)
        {
            try
            {
                var singleRequest = new ComputeConsensusRequest(
                    VulnerabilityId: target.VulnerabilityId,
                    ProductKey: target.ProductKey,
                    TenantId: request.TenantId,
                    Mode: request.Mode,
                    MinimumWeightThreshold: null,
                    StoreResult: request.StoreResults,
                    EmitEvent: request.EmitEvents);
                var result = await ComputeConsensusAsync(singleRequest, cancellationToken);
                results.Add(result);
            }
            catch
            {
                // NOTE(review): all exceptions — including OperationCanceledException —
                // are swallowed and only counted; failed targets are not identified in
                // the response. Confirm this best-effort behavior is intended.
                failures++;
            }
        }
        return new ComputeConsensusBatchResponse(
            Results: results,
            TotalCount: request.Targets.Count,
            SuccessCount: results.Count,
            FailureCount: failures,
            CompletedAt: DateTimeOffset.UtcNow);
    }
    /// <inheritdoc />
    public async Task<ProjectionDetailResponse?> GetProjectionAsync(
        string projectionId,
        CancellationToken cancellationToken = default)
    {
        var projection = await _projectionStore.GetAsync(projectionId, cancellationToken);
        return projection != null ? MapToDetailResponse(projection) : null;
    }
    /// <inheritdoc />
    public async Task<ProjectionDetailResponse?> GetLatestProjectionAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId,
        CancellationToken cancellationToken = default)
    {
        var projection = await _projectionStore.GetLatestAsync(
            vulnerabilityId, productKey, tenantId, cancellationToken);
        return projection != null ? MapToDetailResponse(projection) : null;
    }
    /// <inheritdoc />
    public async Task<QueryProjectionsResponse> QueryProjectionsAsync(
        QueryProjectionsRequest request,
        string? tenantId,
        CancellationToken cancellationToken = default)
    {
        // Defaults: page size 50, offset 0, sorted by ComputedAt descending.
        var query = new ProjectionQuery(
            TenantId: tenantId,
            VulnerabilityId: request.VulnerabilityId,
            ProductKey: request.ProductKey,
            Status: request.Status,
            Outcome: ParseOutcome(request.Outcome),
            MinimumConfidence: request.MinimumConfidence,
            ComputedAfter: request.ComputedAfter,
            ComputedBefore: request.ComputedBefore,
            StatusChanged: request.StatusChanged,
            Limit: request.Limit ?? 50,
            Offset: request.Offset ?? 0,
            SortBy: ParseSortField(request.SortBy),
            SortDescending: request.SortDescending ?? true);
        var result = await _projectionStore.ListAsync(query, cancellationToken);
        return new QueryProjectionsResponse(
            Projections: result.Projections.Select(MapToSummary).ToList(),
            TotalCount: result.TotalCount,
            Offset: result.Offset,
            Limit: result.Limit);
    }
    /// <inheritdoc />
    public async Task<ProjectionHistoryResponse> GetProjectionHistoryAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId,
        int? limit,
        CancellationToken cancellationToken = default)
    {
        var history = await _projectionStore.GetHistoryAsync(
            vulnerabilityId, productKey, tenantId, limit, cancellationToken);
        return new ProjectionHistoryResponse(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            History: history.Select(MapToSummary).ToList(),
            TotalCount: history.Count);
    }
    /// <inheritdoc />
    public async Task<ConsensusStatisticsResponse> GetStatisticsAsync(
        string? tenantId,
        CancellationToken cancellationToken = default)
    {
        // Statistics are aggregated in memory over a single large page;
        // projections beyond the first 10000 are not counted in the breakdowns.
        var allQuery = new ProjectionQuery(
            TenantId: tenantId,
            VulnerabilityId: null,
            ProductKey: null,
            Status: null,
            Outcome: null,
            MinimumConfidence: null,
            ComputedAfter: null,
            ComputedBefore: null,
            StatusChanged: null,
            Limit: 10000,
            Offset: 0,
            SortBy: ProjectionSortField.ComputedAt,
            SortDescending: true);
        var result = await _projectionStore.ListAsync(allQuery, cancellationToken);
        var projections = result.Projections;
        var byStatus = projections
            .GroupBy(p => p.Status.ToString())
            .ToDictionary(g => g.Key, g => g.Count());
        var byOutcome = projections
            .GroupBy(p => p.Outcome.ToString())
            .ToDictionary(g => g.Key, g => g.Count());
        var avgConfidence = projections.Count > 0
            ? projections.Average(p => p.ConfidenceScore)
            : 0;
        var withConflicts = projections.Count(p => p.ConflictCount > 0);
        var last24h = DateTimeOffset.UtcNow.AddDays(-1);
        var changesLast24h = projections.Count(p => p.StatusChanged && p.ComputedAt >= last24h);
        return new ConsensusStatisticsResponse(
            TotalProjections: result.TotalCount,
            ByStatus: byStatus,
            ByOutcome: byOutcome,
            AverageConfidence: avgConfidence,
            ProjectionsWithConflicts: withConflicts,
            StatusChangesLast24h: changesLast24h,
            ComputedAt: DateTimeOffset.UtcNow);
    }
    /// <inheritdoc />
    public async Task<IssuerListResponse> ListIssuersAsync(
        string? category,
        string? minimumTrustTier,
        string? status,
        string? searchTerm,
        int? limit,
        int? offset,
        CancellationToken cancellationToken = default)
    {
        var options = new IssuerListOptions(
            Category: ParseCategory(category),
            MinimumTrustTier: ParseTrustTier(minimumTrustTier),
            Status: ParseIssuerStatus(status),
            SearchTerm: searchTerm,
            Limit: limit,
            Offset: offset);
        var issuers = await _issuerDirectory.ListIssuersAsync(options, cancellationToken);
        // NOTE(review): TotalCount is the size of the returned page, not the
        // total number of matching issuers — verify against the directory's
        // paging contract.
        return new IssuerListResponse(
            Issuers: issuers.Select(MapToIssuerSummary).ToList(),
            TotalCount: issuers.Count);
    }
    /// <inheritdoc />
    public async Task<IssuerDetailResponse?> GetIssuerAsync(
        string issuerId,
        CancellationToken cancellationToken = default)
    {
        var issuer = await _issuerDirectory.GetIssuerAsync(issuerId, cancellationToken);
        return issuer != null ? MapToIssuerDetailResponse(issuer) : null;
    }
    /// <inheritdoc />
    public async Task<IssuerDetailResponse> RegisterIssuerAsync(
        RegisterIssuerRequest request,
        CancellationToken cancellationToken = default)
    {
        // Category/TrustTier use the throwing parse (invalid input raises);
        // key types fall back to Pgp via ParseKeyType.
        var registration = new IssuerRegistration(
            IssuerId: request.IssuerId,
            Name: request.Name,
            Category: ParseCategoryRequired(request.Category),
            TrustTier: ParseTrustTierRequired(request.TrustTier),
            InitialKeys: request.InitialKeys?.Select(k => new KeyFingerprintRegistration(
                Fingerprint: k.Fingerprint,
                KeyType: ParseKeyType(k.KeyType),
                Algorithm: k.Algorithm,
                ExpiresAt: k.ExpiresAt,
                PublicKey: null)).ToList(),
            Metadata: request.Metadata != null ? new IssuerMetadata(
                Description: request.Metadata.Description,
                Uri: request.Metadata.Uri,
                Email: request.Metadata.Email,
                LogoUri: null,
                Tags: request.Metadata.Tags,
                Custom: null) : null);
        var issuer = await _issuerDirectory.RegisterIssuerAsync(registration, cancellationToken);
        return MapToIssuerDetailResponse(issuer);
    }
    /// <inheritdoc />
    public async Task<bool> RevokeIssuerAsync(
        string issuerId,
        RevokeRequest request,
        CancellationToken cancellationToken = default)
    {
        return await _issuerDirectory.RevokeIssuerAsync(issuerId, request.Reason, cancellationToken);
    }
    /// <inheritdoc />
    public async Task<IssuerDetailResponse> AddIssuerKeyAsync(
        string issuerId,
        RegisterKeyRequest request,
        CancellationToken cancellationToken = default)
    {
        var keyReg = new KeyFingerprintRegistration(
            Fingerprint: request.Fingerprint,
            KeyType: ParseKeyType(request.KeyType),
            Algorithm: request.Algorithm,
            ExpiresAt: request.ExpiresAt,
            PublicKey: null);
        var issuer = await _issuerDirectory.AddKeyFingerprintAsync(issuerId, keyReg, cancellationToken);
        return MapToIssuerDetailResponse(issuer);
    }
    /// <inheritdoc />
    public async Task<bool> RevokeIssuerKeyAsync(
        string issuerId,
        string fingerprint,
        RevokeRequest request,
        CancellationToken cancellationToken = default)
    {
        return await _issuerDirectory.RevokeKeyFingerprintAsync(
            issuerId, fingerprint, request.Reason, cancellationToken);
    }
    /// <summary>
    /// Maps an engine consensus result (plus optional stored projection id)
    /// to the API response shape.
    /// </summary>
    private static ComputeConsensusResponse MapToResponse(VexConsensusResult result, string? projectionId)
    {
        return new ComputeConsensusResponse(
            VulnerabilityId: result.VulnerabilityId,
            ProductKey: result.ProductKey,
            Status: result.ConsensusStatus,
            Justification: result.ConsensusJustification,
            ConfidenceScore: result.ConfidenceScore,
            Outcome: result.Outcome.ToString(),
            Rationale: new ConsensusRationaleResponse(
                Summary: result.Rationale.Summary,
                Factors: result.Rationale.Factors.ToList(),
                StatusWeights: result.Rationale.StatusWeights
                    .ToDictionary(kv => kv.Key.ToString(), kv => kv.Value)),
            Contributions: result.Contributions.Select(c => new ContributionResponse(
                StatementId: c.StatementId,
                IssuerId: c.IssuerId,
                Status: c.Status,
                Justification: c.Justification,
                Weight: c.Weight,
                Contribution: c.Contribution,
                IsWinner: c.IsWinner)).ToList(),
            Conflicts: result.Conflicts?.Select(c => new ConflictResponse(
                Statement1Id: c.Statement1Id,
                Statement2Id: c.Statement2Id,
                Status1: c.Status1,
                Status2: c.Status2,
                Severity: c.Severity.ToString(),
                Resolution: c.Resolution)).ToList(),
            ProjectionId: projectionId,
            ComputedAt: result.ComputedAt);
    }
    /// <summary>Maps a stored projection to the detail response shape.</summary>
    private static ProjectionDetailResponse MapToDetailResponse(ConsensusProjection projection)
    {
        return new ProjectionDetailResponse(
            ProjectionId: projection.ProjectionId,
            VulnerabilityId: projection.VulnerabilityId,
            ProductKey: projection.ProductKey,
            TenantId: projection.TenantId,
            Status: projection.Status,
            Justification: projection.Justification,
            ConfidenceScore: projection.ConfidenceScore,
            Outcome: projection.Outcome.ToString(),
            StatementCount: projection.StatementCount,
            ConflictCount: projection.ConflictCount,
            RationaleSummary: projection.RationaleSummary,
            ComputedAt: projection.ComputedAt,
            StoredAt: projection.StoredAt,
            PreviousProjectionId: projection.PreviousProjectionId,
            StatusChanged: projection.StatusChanged);
    }
    /// <summary>Maps a stored projection to the list/summary shape.</summary>
    private static ProjectionSummary MapToSummary(ConsensusProjection projection)
    {
        return new ProjectionSummary(
            ProjectionId: projection.ProjectionId,
            VulnerabilityId: projection.VulnerabilityId,
            ProductKey: projection.ProductKey,
            Status: projection.Status,
            Justification: projection.Justification,
            ConfidenceScore: projection.ConfidenceScore,
            Outcome: projection.Outcome.ToString(),
            StatementCount: projection.StatementCount,
            ConflictCount: projection.ConflictCount,
            ComputedAt: projection.ComputedAt,
            StatusChanged: projection.StatusChanged);
    }
    /// <summary>Maps an issuer record to the list/summary shape.</summary>
    private static IssuerSummary MapToIssuerSummary(IssuerRecord issuer)
    {
        return new IssuerSummary(
            IssuerId: issuer.IssuerId,
            Name: issuer.Name,
            Category: issuer.Category.ToString(),
            TrustTier: issuer.TrustTier.ToString(),
            Status: issuer.Status.ToString(),
            KeyCount: issuer.KeyFingerprints.Count,
            RegisteredAt: issuer.RegisteredAt);
    }
    /// <summary>Maps an issuer record, including keys and metadata, to the detail shape.</summary>
    private static IssuerDetailResponse MapToIssuerDetailResponse(IssuerRecord issuer)
    {
        return new IssuerDetailResponse(
            IssuerId: issuer.IssuerId,
            Name: issuer.Name,
            Category: issuer.Category.ToString(),
            TrustTier: issuer.TrustTier.ToString(),
            Status: issuer.Status.ToString(),
            KeyFingerprints: issuer.KeyFingerprints.Select(k => new KeyFingerprintResponse(
                Fingerprint: k.Fingerprint,
                KeyType: k.KeyType.ToString(),
                Algorithm: k.Algorithm,
                Status: k.Status.ToString(),
                RegisteredAt: k.RegisteredAt,
                ExpiresAt: k.ExpiresAt)).ToList(),
            Metadata: issuer.Metadata != null ? new IssuerMetadataResponse(
                Description: issuer.Metadata.Description,
                Uri: issuer.Metadata.Uri,
                Email: issuer.Metadata.Email,
                Tags: issuer.Metadata.Tags?.ToList()) : null,
            RegisteredAt: issuer.RegisteredAt,
            LastUpdatedAt: issuer.LastUpdatedAt,
            RevokedAt: issuer.RevokedAt,
            RevocationReason: issuer.RevocationReason);
    }
    // Lenient enum parsers: case-insensitive, returning null (or a default)
    // when the input does not match.
    private static ConsensusOutcome? ParseOutcome(string? outcome) =>
        Enum.TryParse<ConsensusOutcome>(outcome, true, out var result) ? result : null;
    private static ProjectionSortField ParseSortField(string? sortBy) =>
        Enum.TryParse<ProjectionSortField>(sortBy, true, out var result) ? result : ProjectionSortField.ComputedAt;
    private static IssuerCategory? ParseCategory(string? category) =>
        Enum.TryParse<IssuerCategory>(category, true, out var result) ? result : null;
    private static TrustTier? ParseTrustTier(string? tier) =>
        Enum.TryParse<TrustTier>(tier, true, out var result) ? result : null;
    private static IssuerStatus? ParseIssuerStatus(string? status) =>
        Enum.TryParse<IssuerStatus>(status, true, out var result) ? result : null;
    // Strict parsers for registration inputs: throw ArgumentException on
    // unrecognized values.
    private static IssuerCategory ParseCategoryRequired(string category) =>
        Enum.Parse<IssuerCategory>(category, true);
    private static TrustTier ParseTrustTierRequired(string tier) =>
        Enum.Parse<TrustTier>(tier, true);
    // Unrecognized key types silently default to Pgp.
    private static KeyType ParseKeyType(string keyType) =>
        Enum.TryParse<KeyType>(keyType, true, out var result) ? result : KeyType.Pgp;
}
/// <summary>
/// Interface for providing VEX statements for consensus computation.
/// </summary>
public interface IVexStatementProvider
{
    /// <summary>
    /// Gets all VEX statements for a vulnerability-product pair.
    /// </summary>
    /// <param name="vulnerabilityId">Vulnerability identifier to look up.</param>
    /// <param name="productKey">Product key the statements must apply to.</param>
    /// <param name="tenantId">Optional tenant scope; null for untenanted lookups.</param>
    /// <returns>The matching statements with their verification context.</returns>
    Task<IReadOnlyList<VexStatementWithContext>> GetStatementsAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// VEX statement with context for consensus computation.
/// </summary>
/// <param name="Statement">The normalized VEX statement.</param>
/// <param name="Issuer">Issuer of the source document, when known.</param>
/// <param name="SignatureVerification">Result of verifying the source document's signature, when available.</param>
/// <param name="DocumentIssuedAt">Timestamp the source document was issued, when known.</param>
/// <param name="SourceDocumentId">Identifier of the source document, when known.</param>
public sealed record VexStatementWithContext(
    NormalizedStatement Statement,
    VexIssuer? Issuer,
    SignatureVerificationResult? SignatureVerification,
    DateTimeOffset? DocumentIssuedAt,
    string? SourceDocumentId);

View File

@@ -0,0 +1,231 @@
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Trust;
namespace StellaOps.VexLens.Consensus;
/// <summary>
/// Interface for computing VEX consensus from multiple sources.
/// </summary>
public interface IVexConsensusEngine
{
    /// <summary>
    /// Computes consensus for a vulnerability-product pair from multiple statements.
    /// </summary>
    /// <returns>The consensus status with rationale, contributions, and any conflicts.</returns>
    Task<VexConsensusResult> ComputeConsensusAsync(
        VexConsensusRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Computes consensus for multiple vulnerability-product pairs in batch.
    /// </summary>
    /// <returns>One result per request.</returns>
    Task<IReadOnlyList<VexConsensusResult>> ComputeConsensusBatchAsync(
        IEnumerable<VexConsensusRequest> requests,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets the consensus algorithm configuration.
    /// </summary>
    ConsensusConfiguration GetConfiguration();
    /// <summary>
    /// Updates the consensus algorithm configuration.
    /// </summary>
    void UpdateConfiguration(ConsensusConfiguration configuration);
}
/// <summary>
/// Request for consensus computation.
/// </summary>
/// <param name="VulnerabilityId">Vulnerability identifier the consensus is for.</param>
/// <param name="ProductKey">Product the statements apply to.</param>
/// <param name="Statements">Candidate statements with their trust weights.</param>
/// <param name="Context">Tenant, evaluation time, and optional policy.</param>
public sealed record VexConsensusRequest(
    string VulnerabilityId,
    string ProductKey,
    IReadOnlyList<WeightedStatement> Statements,
    ConsensusContext Context);
/// <summary>
/// A VEX statement with its computed trust weight.
/// </summary>
/// <param name="Statement">The normalized statement.</param>
/// <param name="Weight">Trust weight computed for the statement.</param>
/// <param name="Issuer">Issuer of the source document, when known.</param>
/// <param name="SourceDocumentId">Identifier of the source document, when known.</param>
public sealed record WeightedStatement(
    NormalizedStatement Statement,
    TrustWeightResult Weight,
    VexIssuer? Issuer,
    string? SourceDocumentId);
/// <summary>
/// Context for consensus computation.
/// </summary>
/// <param name="TenantId">Optional tenant scope.</param>
/// <param name="EvaluationTime">Timestamp stamped onto the resulting consensus.</param>
/// <param name="Policy">Optional policy; implementations fall back to a default when null.</param>
public sealed record ConsensusContext(
    string? TenantId,
    DateTimeOffset EvaluationTime,
    ConsensusPolicy? Policy);
/// <summary>
/// Policy for consensus computation.
/// </summary>
/// <param name="Mode">Algorithm used to combine statements.</param>
/// <param name="MinimumWeightThreshold">Statements below this weight are excluded.</param>
/// <param name="ConflictThreshold">Threshold used during conflict detection.</param>
/// <param name="RequireJustificationForNotAffected">Whether not_affected statements must carry a justification.</param>
/// <param name="PreferredIssuers">Optional issuer ids given preference, when the mode supports it.</param>
public sealed record ConsensusPolicy(
    ConsensusMode Mode,
    double MinimumWeightThreshold,
    double ConflictThreshold,
    bool RequireJustificationForNotAffected,
    IReadOnlyList<string>? PreferredIssuers);
/// <summary>
/// Mode for consensus computation.
/// </summary>
public enum ConsensusMode
{
    /// <summary>
    /// Use the statement with highest trust weight.
    /// </summary>
    HighestWeight,
    /// <summary>
    /// Weighted voting among all statements.
    /// </summary>
    WeightedVote,
    /// <summary>
    /// Lattice-based consensus (most conservative status wins ties).
    /// </summary>
    Lattice,
    /// <summary>
    /// Prefer vendor/authoritative sources over others.
    /// </summary>
    AuthoritativeFirst
}
/// <summary>
/// Result of consensus computation.
/// </summary>
/// <param name="VulnerabilityId">Vulnerability the consensus is for.</param>
/// <param name="ProductKey">Product the consensus applies to.</param>
/// <param name="ConsensusStatus">The agreed VEX status.</param>
/// <param name="ConsensusJustification">Justification carried by the winning statement, when present.</param>
/// <param name="ConfidenceScore">Confidence in the consensus, 0..1.</param>
/// <param name="Outcome">How the consensus was reached.</param>
/// <param name="Rationale">Human-readable explanation of the decision.</param>
/// <param name="Contributions">Per-statement contributions to the decision.</param>
/// <param name="Conflicts">Detected conflicts, or null when none.</param>
/// <param name="ComputedAt">Evaluation timestamp.</param>
public sealed record VexConsensusResult(
    string VulnerabilityId,
    string ProductKey,
    VexStatus ConsensusStatus,
    VexJustification? ConsensusJustification,
    double ConfidenceScore,
    ConsensusOutcome Outcome,
    ConsensusRationale Rationale,
    IReadOnlyList<StatementContribution> Contributions,
    IReadOnlyList<ConsensusConflict>? Conflicts,
    DateTimeOffset ComputedAt);
/// <summary>
/// Outcome of consensus computation.
/// </summary>
public enum ConsensusOutcome
{
    /// <summary>
    /// All statements agree on status.
    /// </summary>
    Unanimous,
    /// <summary>
    /// Majority of weight supports the consensus.
    /// </summary>
    Majority,
    /// <summary>
    /// Plurality of weight supports the consensus.
    /// </summary>
    Plurality,
    /// <summary>
    /// Conflict detected but resolved by policy.
    /// </summary>
    ConflictResolved,
    /// <summary>
    /// No statements available.
    /// </summary>
    NoData,
    /// <summary>
    /// Consensus could not be determined.
    /// </summary>
    Indeterminate
}
/// <summary>
/// Rationale explaining the consensus decision.
/// </summary>
/// <param name="Summary">One-line description of the decision.</param>
/// <param name="Factors">Individual factors that contributed to the decision.</param>
/// <param name="StatusWeights">Total weight accumulated per VEX status.</param>
public sealed record ConsensusRationale(
    string Summary,
    IReadOnlyList<string> Factors,
    IReadOnlyDictionary<VexStatus, double> StatusWeights);
/// <summary>
/// Contribution of a single statement to the consensus.
/// </summary>
/// <param name="StatementId">Identifier of the contributing statement.</param>
/// <param name="IssuerId">Issuer of the statement, when known.</param>
/// <param name="Status">Status asserted by the statement.</param>
/// <param name="Justification">Justification asserted by the statement, when present.</param>
/// <param name="Weight">Trust weight of the statement.</param>
/// <param name="Contribution">Share of the decision attributed to the statement.</param>
/// <param name="IsWinner">True for the statement whose status became the consensus.</param>
public sealed record StatementContribution(
    string StatementId,
    string? IssuerId,
    VexStatus Status,
    VexJustification? Justification,
    double Weight,
    double Contribution,
    bool IsWinner);
/// <summary>
/// Conflict between statements.
/// </summary>
/// <param name="Statement1Id">First statement in the conflicting pair.</param>
/// <param name="Statement2Id">Second statement in the conflicting pair.</param>
/// <param name="Status1">Status asserted by the first statement.</param>
/// <param name="Status2">Status asserted by the second statement.</param>
/// <param name="Severity">How severe the disagreement is.</param>
/// <param name="Resolution">Description of how the conflict was resolved.</param>
public sealed record ConsensusConflict(
    string Statement1Id,
    string Statement2Id,
    VexStatus Status1,
    VexStatus Status2,
    ConflictSeverity Severity,
    string Resolution);
/// <summary>
/// Severity of a conflict.
/// </summary>
public enum ConflictSeverity
{
    /// <summary>
    /// Minor disagreement (e.g., different justifications for same status).
    /// </summary>
    Low,
    /// <summary>
    /// Moderate disagreement (e.g., fixed vs not_affected).
    /// </summary>
    Medium,
    /// <summary>
    /// Major disagreement (e.g., affected vs not_affected).
    /// </summary>
    High,
    /// <summary>
    /// Critical disagreement requiring manual review.
    /// </summary>
    Critical
}
/// <summary>
/// Configuration for consensus algorithm.
/// </summary>
/// <param name="DefaultMode">Mode used when a request supplies no policy.</param>
/// <param name="DefaultMinimumWeightThreshold">Default minimum statement weight.</param>
/// <param name="DefaultConflictThreshold">Default conflict-detection threshold.</param>
/// <param name="StatusLattice">Status ordering used by lattice consensus.</param>
/// <param name="ConflictRules">Rules applied when resolving conflicts.</param>
public sealed record ConsensusConfiguration(
    ConsensusMode DefaultMode,
    double DefaultMinimumWeightThreshold,
    double DefaultConflictThreshold,
    StatusLattice StatusLattice,
    ConflictResolutionRules ConflictRules);
/// <summary>
/// Lattice ordering of VEX statuses for conservative consensus.
/// Lower order values are treated as more conservative.
/// </summary>
/// <param name="StatusOrder">Rank of each status; lower ranks win lattice consensus.</param>
/// <param name="BottomStatus">Most conservative status in the lattice.</param>
/// <param name="TopStatus">Least conservative status in the lattice.</param>
public sealed record StatusLattice(
    IReadOnlyDictionary<VexStatus, int> StatusOrder,
    VexStatus BottomStatus,
    VexStatus TopStatus);
/// <summary>
/// Rules for resolving conflicts.
/// </summary>
/// <param name="WeightRatioForOverride">Weight ratio at which one statement may override another.</param>
/// <param name="PreferMostRecent">Whether newer statements win ties.</param>
/// <param name="PreferMostSpecific">Whether more specific statements win ties.</param>
/// <param name="StatusPriority">Optional explicit status priority list.</param>
public sealed record ConflictResolutionRules(
    double WeightRatioForOverride,
    bool PreferMostRecent,
    bool PreferMostSpecific,
    IReadOnlyList<VexStatus>? StatusPriority);

View File

@@ -0,0 +1,505 @@
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Consensus;
/// <summary>
/// Default implementation of <see cref="IVexConsensusEngine"/>.
/// Computes VEX consensus using configurable algorithms.
/// </summary>
public sealed class VexConsensusEngine : IVexConsensusEngine
{
private ConsensusConfiguration _configuration;
    /// <summary>
    /// Creates an engine using the supplied configuration, or the built-in
    /// default configuration when <paramref name="configuration"/> is null.
    /// </summary>
    public VexConsensusEngine(ConsensusConfiguration? configuration = null)
    {
        _configuration = configuration ?? CreateDefaultConfiguration();
    }
public Task<VexConsensusResult> ComputeConsensusAsync(
VexConsensusRequest request,
CancellationToken cancellationToken = default)
{
if (request.Statements.Count == 0)
{
return Task.FromResult(CreateNoDataResult(request));
}
var policy = request.Context.Policy ?? CreateDefaultPolicy();
var mode = policy.Mode;
// Filter statements by minimum weight threshold
var qualifiedStatements = request.Statements
.Where(s => s.Weight.Weight >= policy.MinimumWeightThreshold)
.ToList();
if (qualifiedStatements.Count == 0)
{
return Task.FromResult(CreateNoDataResult(request,
"All statements below minimum weight threshold"));
}
// Compute consensus based on mode
var result = mode switch
{
ConsensusMode.HighestWeight => ComputeHighestWeightConsensus(request, qualifiedStatements, policy),
ConsensusMode.WeightedVote => ComputeWeightedVoteConsensus(request, qualifiedStatements, policy),
ConsensusMode.Lattice => ComputeLatticeConsensus(request, qualifiedStatements, policy),
ConsensusMode.AuthoritativeFirst => ComputeAuthoritativeFirstConsensus(request, qualifiedStatements, policy),
_ => ComputeHighestWeightConsensus(request, qualifiedStatements, policy)
};
return Task.FromResult(result);
}
public async Task<IReadOnlyList<VexConsensusResult>> ComputeConsensusBatchAsync(
IEnumerable<VexConsensusRequest> requests,
CancellationToken cancellationToken = default)
{
var results = new List<VexConsensusResult>();
foreach (var request in requests)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await ComputeConsensusAsync(request, cancellationToken);
results.Add(result);
}
return results;
}
    /// <summary>Returns the currently active consensus configuration.</summary>
    public ConsensusConfiguration GetConfiguration() => _configuration;
    /// <summary>
    /// Replaces the active configuration. No synchronization is performed here;
    /// NOTE(review): a concurrent update races with in-flight computations —
    /// confirm callers serialize configuration changes.
    /// </summary>
    public void UpdateConfiguration(ConsensusConfiguration configuration)
    {
        _configuration = configuration;
    }
    /// <summary>
    /// HighestWeight mode: the single statement with the largest trust weight
    /// dictates status and justification; remaining statements only shape the
    /// confidence, outcome, and conflict report.
    /// </summary>
    private VexConsensusResult ComputeHighestWeightConsensus(
        VexConsensusRequest request,
        List<WeightedStatement> statements,
        ConsensusPolicy policy)
    {
        // Winner is simply the heaviest statement after descending sort.
        var ordered = statements.OrderByDescending(s => s.Weight.Weight).ToList();
        var winner = ordered[0];
        var conflicts = DetectConflicts(ordered, policy);
        var contributions = CreateContributions(ordered, winner.Statement.StatementId);
        var statusWeights = ComputeStatusWeights(ordered);
        var outcome = DetermineOutcome(ordered, winner, conflicts);
        var confidence = ComputeConfidence(ordered, winner, conflicts);
        var factors = new List<string>
        {
            $"Selected statement with highest weight: {winner.Weight.Weight:F4}",
            $"Issuer: {winner.Issuer?.Name ?? winner.Statement.StatementId}"
        };
        if (conflicts.Count > 0)
        {
            factors.Add($"Resolved {conflicts.Count} conflict(s) by weight");
        }
        return new VexConsensusResult(
            VulnerabilityId: request.VulnerabilityId,
            ProductKey: request.ProductKey,
            ConsensusStatus: winner.Statement.Status,
            ConsensusJustification: winner.Statement.Justification,
            ConfidenceScore: confidence,
            Outcome: outcome,
            Rationale: new ConsensusRationale(
                Summary: $"Highest weight consensus: {winner.Statement.Status}",
                Factors: factors,
                StatusWeights: statusWeights),
            Contributions: contributions,
            Conflicts: conflicts.Count > 0 ? conflicts : null,
            ComputedAt: request.Context.EvaluationTime);
    }
    /// <summary>
    /// WeightedVote mode: each status accumulates the weight of the statements
    /// asserting it; the status with the most total weight wins. The heaviest
    /// statement within the winning status supplies the justification.
    /// </summary>
    private VexConsensusResult ComputeWeightedVoteConsensus(
        VexConsensusRequest request,
        List<WeightedStatement> statements,
        ConsensusPolicy policy)
    {
        var statusWeights = ComputeStatusWeights(statements);
        var totalWeight = statusWeights.Values.Sum();
        // Find the status with highest total weight
        var winningStatus = statusWeights
            .OrderByDescending(kv => kv.Value)
            .First();
        var winningStatements = statements
            .Where(s => s.Statement.Status == winningStatus.Key)
            .OrderByDescending(s => s.Weight.Weight)
            .ToList();
        var primaryWinner = winningStatements[0];
        var conflicts = DetectConflicts(statements, policy);
        var contributions = CreateContributions(statements, primaryWinner.Statement.StatementId);
        var voteFraction = totalWeight > 0 ? winningStatus.Value / totalWeight : 0;
        // Majority when the winning status holds at least half the weight,
        // Plurality otherwise; Unanimous overrides both when every statement agrees.
        var outcome = voteFraction >= 0.5
            ? ConsensusOutcome.Majority
            : ConsensusOutcome.Plurality;
        if (statements.All(s => s.Statement.Status == winningStatus.Key))
        {
            outcome = ConsensusOutcome.Unanimous;
        }
        // Confidence scales the vote share by the weight-spread factor.
        var confidence = voteFraction * ComputeWeightSpreadFactor(statements);
        var factors = new List<string>
        {
            $"Weighted vote: {winningStatus.Key} received {voteFraction:P1} of total weight",
            $"{winningStatements.Count} statement(s) support this status"
        };
        return new VexConsensusResult(
            VulnerabilityId: request.VulnerabilityId,
            ProductKey: request.ProductKey,
            ConsensusStatus: winningStatus.Key,
            ConsensusJustification: primaryWinner.Statement.Justification,
            ConfidenceScore: confidence,
            Outcome: outcome,
            Rationale: new ConsensusRationale(
                Summary: $"Weighted vote consensus: {winningStatus.Key} ({voteFraction:P1})",
                Factors: factors,
                StatusWeights: statusWeights),
            Contributions: contributions,
            Conflicts: conflicts.Count > 0 ? conflicts : null,
            ComputedAt: request.Context.EvaluationTime);
    }
    /// <summary>
    /// Lattice mode: the most conservative status present (lowest rank in the
    /// configured lattice) wins regardless of weight; weight only determines
    /// which supporting statement supplies the justification, and confidence
    /// is the supporters' share of total weight.
    /// </summary>
    private VexConsensusResult ComputeLatticeConsensus(
        VexConsensusRequest request,
        List<WeightedStatement> statements,
        ConsensusPolicy policy)
    {
        var lattice = _configuration.StatusLattice;
        var statusWeights = ComputeStatusWeights(statements);
        // Find the lowest status in the lattice (most conservative);
        // statuses missing from the lattice sort last (int.MaxValue rank).
        var lowestStatus = statements
            .Select(s => s.Statement.Status)
            .OrderBy(s => lattice.StatusOrder.GetValueOrDefault(s, int.MaxValue))
            .First();
        var lowestStatements = statements
            .Where(s => s.Statement.Status == lowestStatus)
            .OrderByDescending(s => s.Weight.Weight)
            .ToList();
        var primaryWinner = lowestStatements[0];
        var conflicts = DetectConflicts(statements, policy);
        var contributions = CreateContributions(statements, primaryWinner.Statement.StatementId);
        // Any disagreement counts as a conflict resolved by the lattice rule.
        var outcome = statements.All(s => s.Statement.Status == lowestStatus)
            ? ConsensusOutcome.Unanimous
            : ConsensusOutcome.ConflictResolved;
        // Confidence based on weight of supporting statements
        var supportWeight = lowestStatements.Sum(s => s.Weight.Weight);
        var totalWeight = statements.Sum(s => s.Weight.Weight);
        var confidence = totalWeight > 0 ? supportWeight / totalWeight : 0;
        var factors = new List<string>
        {
            $"Lattice consensus: selected most conservative status",
            $"Status order: {string.Join(" < ", lattice.StatusOrder.OrderBy(kv => kv.Value).Select(kv => kv.Key))}"
        };
        return new VexConsensusResult(
            VulnerabilityId: request.VulnerabilityId,
            ProductKey: request.ProductKey,
            ConsensusStatus: lowestStatus,
            ConsensusJustification: primaryWinner.Statement.Justification,
            ConfidenceScore: confidence,
            Outcome: outcome,
            Rationale: new ConsensusRationale(
                Summary: $"Lattice consensus: {lowestStatus} (most conservative)",
                Factors: factors,
                StatusWeights: statusWeights),
            Contributions: contributions,
            Conflicts: conflicts.Count > 0 ? conflicts : null,
            ComputedAt: request.Context.EvaluationTime);
    }
/// <summary>
/// Authoritative-first consensus: statements from authoritative issuers
/// (see <see cref="IsAuthoritative"/>) win over all others; among equals,
/// the highest trust weight wins.
/// </summary>
/// <param name="request">Identifies the vulnerability/product pair and evaluation context.</param>
/// <param name="statements">Weighted statements to aggregate; assumed non-empty — TODO confirm caller guards this.</param>
/// <param name="policy">Policy used for conflict detection.</param>
private VexConsensusResult ComputeAuthoritativeFirstConsensus(
    VexConsensusRequest request,
    List<WeightedStatement> statements,
    ConsensusPolicy policy)
{
    // Prefer authoritative sources (vendors) over others.
    // OrderByDescending on a bool sorts true before false; weight breaks ties.
    var ordered = statements
        .OrderByDescending(s => IsAuthoritative(s.Issuer))
        .ThenByDescending(s => s.Weight.Weight)
        .ToList();
    var winner = ordered[0];
    var conflicts = DetectConflicts(ordered, policy);
    var contributions = CreateContributions(ordered, winner.Statement.StatementId);
    var statusWeights = ComputeStatusWeights(ordered);
    var isAuthoritative = IsAuthoritative(winner.Issuer);
    // NOTE(review): when an authoritative source wins, the outcome is reported
    // as Unanimous and confidence pinned at 0.95 even if other statements
    // disagree — confirm this reporting semantics is intended.
    var outcome = isAuthoritative
        ? ConsensusOutcome.Unanimous // Authoritative source takes precedence
        : DetermineOutcome(ordered, winner, conflicts);
    var confidence = isAuthoritative
        ? 0.95
        : ComputeConfidence(ordered, winner, conflicts);
    var factors = new List<string>
    {
        isAuthoritative
            ? $"Authoritative source: {winner.Issuer?.Name ?? "unknown"}"
            : $"No authoritative source; using highest weight",
        $"Weight: {winner.Weight.Weight:F4}"
    };
    return new VexConsensusResult(
        VulnerabilityId: request.VulnerabilityId,
        ProductKey: request.ProductKey,
        ConsensusStatus: winner.Statement.Status,
        ConsensusJustification: winner.Statement.Justification,
        ConfidenceScore: confidence,
        Outcome: outcome,
        Rationale: new ConsensusRationale(
            Summary: $"Authoritative-first consensus: {winner.Statement.Status}",
            Factors: factors,
            StatusWeights: statusWeights),
        Contributions: contributions,
        Conflicts: conflicts.Count > 0 ? conflicts : null,
        ComputedAt: request.Context.EvaluationTime);
}
/// <summary>
/// True when the issuer should be treated as authoritative: vendors, or
/// issuers explicitly placed in the authoritative trust tier. Null issuers
/// are never authoritative.
/// </summary>
private static bool IsAuthoritative(VexIssuer? issuer) =>
    issuer is { } known &&
    (known.Category == IssuerCategory.Vendor || known.TrustTier == TrustTier.Authoritative);
/// <summary>
/// Compares every unordered pair of statements and records a conflict for
/// each pair whose statuses disagree, with a severity grade and a short
/// resolution explanation.
/// </summary>
private List<ConsensusConflict> DetectConflicts(
    List<WeightedStatement> statements,
    ConsensusPolicy policy)
{
    var found = new List<ConsensusConflict>();
    for (var first = 0; first < statements.Count; first++)
    {
        var a = statements[first];
        for (var second = first + 1; second < statements.Count; second++)
        {
            var b = statements[second];
            // Agreement is never a conflict, regardless of weight.
            if (a.Statement.Status == b.Statement.Status)
            {
                continue;
            }
            found.Add(new ConsensusConflict(
                Statement1Id: a.Statement.StatementId,
                Statement2Id: b.Statement.StatementId,
                Status1: a.Statement.Status,
                Status2: b.Statement.Status,
                Severity: DetermineConflictSeverity(a.Statement.Status, b.Statement.Status),
                Resolution: DetermineResolution(a, b)));
        }
    }
    return found;
}
/// <summary>
/// Grades how serious a status disagreement is, symmetric in its arguments:
/// affected/not_affected is Critical, affected/fixed is High,
/// fixed/not_affected is Medium, anything involving under_investigation is
/// Low, and everything else defaults to Medium.
/// </summary>
private static ConflictSeverity DetermineConflictSeverity(VexStatus status1, VexStatus status2) =>
    (status1, status2) switch
    {
        // Direct contradiction: affected vs not_affected is the most severe.
        (VexStatus.Affected, VexStatus.NotAffected) or (VexStatus.NotAffected, VexStatus.Affected)
            => ConflictSeverity.Critical,
        (VexStatus.Affected, VexStatus.Fixed) or (VexStatus.Fixed, VexStatus.Affected)
            => ConflictSeverity.High,
        (VexStatus.Fixed, VexStatus.NotAffected) or (VexStatus.NotAffected, VexStatus.Fixed)
            => ConflictSeverity.Medium,
        // An open investigation disagreeing with anything is only mildly concerning.
        (VexStatus.UnderInvestigation, _) or (_, VexStatus.UnderInvestigation)
            => ConflictSeverity.Low,
        _ => ConflictSeverity.Medium
    };
/// <summary>
/// Explains how a pairwise conflict was resolved. The pair order coming from
/// <see cref="DetectConflicts"/> is arbitrary (nested-loop pairing), so both
/// rules are applied symmetrically: a 2x weight dominance in either direction,
/// then an authoritative issuer on exactly one side; otherwise the default.
/// </summary>
/// <param name="s1">First statement of the conflicting pair.</param>
/// <param name="s2">Second statement of the conflicting pair.</param>
/// <returns>A human-readable resolution explanation.</returns>
private static string DetermineResolution(WeightedStatement s1, WeightedStatement s2)
{
    // Clamp the denominator to avoid division by (near) zero weights.
    var weightRatio = s1.Weight.Weight / Math.Max(s2.Weight.Weight, 0.001);
    // Fix: the original only recognized s1 dominating s2; when s2 was the
    // dominant side the explanation wrongly fell through to the default.
    if (weightRatio > 2.0 || weightRatio < 0.5)
    {
        return $"Resolved by weight ({s1.Weight.Weight:F2} vs {s2.Weight.Weight:F2})";
    }
    // Exactly one authoritative side resolves the pair, whichever side it is.
    if (IsAuthoritative(s1.Issuer) != IsAuthoritative(s2.Issuer))
    {
        return "Resolved by authoritative source preference";
    }
    return "Resolved by algorithm default";
}
/// <summary>
/// Totals the trust weight carried by each distinct status across the
/// supplied statements.
/// </summary>
private static Dictionary<VexStatus, double> ComputeStatusWeights(List<WeightedStatement> statements)
{
    // Accumulate per-status totals directly instead of grouping first.
    var totals = new Dictionary<VexStatus, double>();
    foreach (var entry in statements)
    {
        var status = entry.Statement.Status;
        totals[status] = totals.GetValueOrDefault(status) + entry.Weight.Weight;
    }
    return totals;
}
/// <summary>
/// Maps each weighted statement to a contribution record: its absolute
/// weight, its share of the total weight (0 when the pool is empty/zero),
/// and whether it is the winning statement.
/// </summary>
private static List<StatementContribution> CreateContributions(
    List<WeightedStatement> statements,
    string winnerId)
{
    var pool = statements.Sum(s => s.Weight.Weight);
    var contributions = new List<StatementContribution>(statements.Count);
    foreach (var ws in statements)
    {
        contributions.Add(new StatementContribution(
            StatementId: ws.Statement.StatementId,
            IssuerId: ws.Issuer?.Id,
            Status: ws.Statement.Status,
            Justification: ws.Statement.Justification,
            Weight: ws.Weight.Weight,
            Contribution: pool > 0 ? ws.Weight.Weight / pool : 0,
            IsWinner: ws.Statement.StatementId == winnerId));
    }
    return contributions;
}
/// <summary>
/// Classifies the consensus outcome by counting supporters of the winner's
/// status: all agree → Unanimous; any detected conflict → ConflictResolved;
/// otherwise strict majority → Majority, else Plurality.
/// </summary>
private static ConsensusOutcome DetermineOutcome(
    List<WeightedStatement> statements,
    WeightedStatement winner,
    List<ConsensusConflict> conflicts)
{
    var winningStatus = winner.Statement.Status;
    var supporters = statements.Count(s => s.Statement.Status == winningStatus);
    if (supporters == statements.Count)
    {
        return ConsensusOutcome.Unanimous;
    }
    if (conflicts.Count > 0)
    {
        return ConsensusOutcome.ConflictResolved;
    }
    // Strict majority; integer division intentionally floors the threshold.
    return supporters > statements.Count / 2
        ? ConsensusOutcome.Majority
        : ConsensusOutcome.Plurality;
}
/// <summary>
/// Confidence = the winner's share of total statement weight, minus a
/// per-conflict penalty scaled by severity, floored at zero.
/// </summary>
private static double ComputeConfidence(
    List<WeightedStatement> statements,
    WeightedStatement winner,
    List<ConsensusConflict> conflicts)
{
    // Base confidence: winner's fraction of the weight pool (0 if pool is empty).
    var pool = statements.Sum(s => s.Weight.Weight);
    var share = pool > 0 ? winner.Weight.Weight / pool : 0;
    // Every conflict erodes confidence in proportion to its severity.
    var penalty = 0.0;
    foreach (var conflict in conflicts)
    {
        penalty += conflict.Severity switch
        {
            ConflictSeverity.Critical => 0.3,
            ConflictSeverity.High => 0.2,
            ConflictSeverity.Medium => 0.1,
            ConflictSeverity.Low => 0.05,
            _ => 0
        };
    }
    return Math.Max(0, share - penalty);
}
/// <summary>
/// Scales confidence by how evenly trust weight is spread across statements.
/// A single statement, or uniform weights, yields 1.0; the factor drops
/// linearly with relative spread to a floor of 0.5.
/// </summary>
/// <param name="statements">Weighted statements under consideration.</param>
/// <returns>A factor in [0.5, 1.0].</returns>
private static double ComputeWeightSpreadFactor(List<WeightedStatement> statements)
{
    if (statements.Count <= 1) return 1.0;
    var weights = statements.Select(s => s.Weight.Weight).ToList();
    var max = weights.Max();
    var min = weights.Min();
    // Fix: removed the unused `avg` local present in the original.
    // Relative spread in [0, 1]; an all-zero pool counts as no spread.
    var spread = max > 0 ? (max - min) / max : 0;
    // Higher spread means less confidence, capped at a 50% reduction.
    return 1.0 - (spread * 0.5);
}
/// <summary>
/// Builds the fallback result used when no qualifying statements exist:
/// status under_investigation, zero confidence, NoData outcome. The optional
/// reason becomes the rationale summary and its single factor.
/// </summary>
private static VexConsensusResult CreateNoDataResult(
    VexConsensusRequest request,
    string? reason = null)
{
    var rationale = new ConsensusRationale(
        Summary: reason ?? "No VEX statements available",
        Factors: [reason ?? "No qualifying statements found"],
        StatusWeights: new Dictionary<VexStatus, double>());
    return new VexConsensusResult(
        VulnerabilityId: request.VulnerabilityId,
        ProductKey: request.ProductKey,
        ConsensusStatus: VexStatus.UnderInvestigation,
        ConsensusJustification: null,
        ConfidenceScore: 0,
        Outcome: ConsensusOutcome.NoData,
        Rationale: rationale,
        Contributions: [],
        Conflicts: null,
        ComputedAt: request.Context.EvaluationTime);
}
/// <summary>
/// Built-in policy: weighted vote, 10% minimum-weight floor, 30% conflict
/// threshold, no justification requirement, no preferred issuers.
/// </summary>
private static ConsensusPolicy CreateDefaultPolicy()
{
    ConsensusPolicy defaults = new(
        Mode: ConsensusMode.WeightedVote,
        MinimumWeightThreshold: 0.1,
        ConflictThreshold: 0.3,
        RequireJustificationForNotAffected: false,
        PreferredIssuers: null);
    return defaults;
}
/// <summary>
/// Builds the built-in consensus configuration: weighted-vote default mode,
/// a status lattice ordered from most conservative (Affected = 0) to least
/// (NotAffected = 3), and conflict rules with a 2x weight-ratio override.
/// </summary>
public static ConsensusConfiguration CreateDefaultConfiguration()
{
    return new ConsensusConfiguration(
        DefaultMode: ConsensusMode.WeightedVote,
        DefaultMinimumWeightThreshold: 0.1,
        DefaultConflictThreshold: 0.3,
        // Lower number = more conservative; lattice consensus picks the minimum.
        StatusLattice: new StatusLattice(
            StatusOrder: new Dictionary<VexStatus, int>
            {
                [VexStatus.Affected] = 0,
                [VexStatus.UnderInvestigation] = 1,
                [VexStatus.Fixed] = 2,
                [VexStatus.NotAffected] = 3
            },
            BottomStatus: VexStatus.Affected,
            TopStatus: VexStatus.NotAffected),
        ConflictRules: new ConflictResolutionRules(
            WeightRatioForOverride: 2.0,
            PreferMostRecent: true,
            PreferMostSpecific: true,
            StatusPriority: null));
}
}

View File

@@ -0,0 +1,171 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.VexLens.Api;
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Integration;
using StellaOps.VexLens.Mapping;
using StellaOps.VexLens.Normalization;
using StellaOps.VexLens.Observability;
using StellaOps.VexLens.Options;
using StellaOps.VexLens.Storage;
using StellaOps.VexLens.Trust;
using StellaOps.VexLens.Verification;
namespace StellaOps.VexLens.Extensions;
/// <summary>
/// Extension methods for registering VexLens services. All registrations use
/// TryAdd*, so callers may pre-register replacements (e.g. a distributed
/// issuer directory) before calling these methods.
/// </summary>
public static class VexLensServiceCollectionExtensions
{
    /// <summary>
    /// Adds VexLens consensus engine services, binding options from the
    /// <see cref="VexLensOptions.SectionName"/> configuration section.
    /// </summary>
    /// <param name="services">Target service collection.</param>
    /// <param name="configuration">Application configuration root.</param>
    /// <returns>The same <paramref name="services"/> for chaining.</returns>
    public static IServiceCollection AddVexLens(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        var section = configuration.GetSection(VexLensOptions.SectionName);
        services.Configure<VexLensOptions>(section);
        // Bind an eager snapshot so registration-time decisions
        // (storage driver, normalizer set, metrics) can be made now.
        var options = section.Get<VexLensOptions>() ?? new VexLensOptions();
        return services.AddVexLensCore(options);
    }

    /// <summary>
    /// Adds VexLens consensus engine services with explicitly configured options.
    /// </summary>
    /// <param name="services">Target service collection.</param>
    /// <param name="configure">Delegate that mutates a fresh <see cref="VexLensOptions"/>.</param>
    public static IServiceCollection AddVexLens(
        this IServiceCollection services,
        Action<VexLensOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);
        var options = new VexLensOptions();
        configure(options);
        // Also register the delegate so IOptions<VexLensOptions> reflects it.
        services.Configure(configure);
        return services.AddVexLensCore(options);
    }

    /// <summary>
    /// Adds VexLens services for testing: in-memory storage, telemetry disabled.
    /// </summary>
    public static IServiceCollection AddVexLensForTesting(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var options = new VexLensOptions
        {
            Storage = { Driver = "memory" },
            Telemetry = { MetricsEnabled = false, TracingEnabled = false }
        };
        return services.AddVexLensCore(options);
    }

    /// <summary>
    /// Core registration shared by all public entry points.
    /// </summary>
    private static IServiceCollection AddVexLensCore(
        this IServiceCollection services,
        VexLensOptions options)
    {
        // Normalization: registry populated from the enabled-formats option.
        services.TryAddSingleton<IVexNormalizerRegistry>(sp =>
        {
            var registry = new VexNormalizerRegistry();
            RegisterNormalizers(registry, options.Normalization);
            return registry;
        });
        // Product mapping
        services.TryAddSingleton<IProductMapper, ProductMapper>();
        // Verification
        services.TryAddSingleton<ISignatureVerifier, SignatureVerifier>();
        // Issuer directory - in-memory by default; replace for multi-instance deployments.
        services.TryAddSingleton<IIssuerDirectory, InMemoryIssuerDirectory>();
        // Trust engine
        services.TryAddSingleton<ITrustWeightEngine, TrustWeightEngine>();
        // Consensus engine
        services.TryAddSingleton<IVexConsensusEngine, VexConsensusEngine>();
        // Storage (driver selected from options; throws on unknown driver)
        RegisterStorage(services, options.Storage);
        // Event emitter - in-memory for now
        services.TryAddSingleton<IConsensusEventEmitter, InMemoryConsensusEventEmitter>();
        // API service
        services.TryAddScoped<IVexLensApiService, VexLensApiService>();
        // Rationale service for AI/ML consumption
        services.TryAddScoped<IConsensusRationaleService, ConsensusRationaleService>();
        // Integration services
        services.TryAddScoped<IPolicyEngineIntegration, PolicyEngineIntegration>();
        services.TryAddScoped<IVulnExplorerIntegration, VulnExplorerIntegration>();
        // Metrics (opt-in via telemetry options)
        if (options.Telemetry.MetricsEnabled)
        {
            services.TryAddSingleton<VexLensMetrics>();
        }
        return services;
    }

    /// <summary>
    /// Registers the normalizers named in <paramref name="options"/>
    /// (case-insensitive). Unknown format names are ignored.
    /// </summary>
    private static void RegisterNormalizers(
        VexNormalizerRegistry registry,
        VexLensNormalizationOptions options)
    {
        var enabledFormats = new HashSet<string>(
            options.EnabledFormats,
            StringComparer.OrdinalIgnoreCase);
        if (enabledFormats.Contains("OpenVEX"))
        {
            registry.Register(new OpenVexNormalizer());
        }
        if (enabledFormats.Contains("CSAF"))
        {
            registry.Register(new CsafVexNormalizer());
        }
        if (enabledFormats.Contains("CycloneDX"))
        {
            registry.Register(new CycloneDxVexNormalizer());
        }
    }

    /// <summary>
    /// Registers the consensus projection store for the configured driver.
    /// </summary>
    /// <exception cref="InvalidOperationException">Unknown or missing driver name.</exception>
    private static void RegisterStorage(
        IServiceCollection services,
        VexLensStorageOptions options)
    {
        // Null-safe driver match: the original dereferenced Driver directly and
        // would throw NullReferenceException on a missing driver; a null driver
        // now falls through to the explicit InvalidOperationException below.
        switch (options.Driver?.ToLowerInvariant())
        {
            case "memory":
            // MongoDB storage is not implemented yet; "mongo" deliberately falls
            // back to the in-memory store so such configurations still boot.
            // TODO: register the real Mongo-backed store when available.
            case "mongo":
                services.TryAddSingleton<IConsensusProjectionStore>(sp =>
                {
                    var emitter = sp.GetRequiredService<IConsensusEventEmitter>();
                    return new InMemoryConsensusProjectionStore(emitter);
                });
                break;
            default:
                throw new InvalidOperationException(
                    $"Unknown VexLens storage driver: {options.Driver}");
        }
    }
}

View File

@@ -0,0 +1,291 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Integration;
/// <summary>
/// Integration interface for Policy Engine consumption of VEX consensus.
/// Implementations read previously computed consensus projections; they do
/// not trigger new consensus computation.
/// </summary>
public interface IPolicyEngineIntegration
{
    /// <summary>
    /// Gets the VEX consensus status for a vulnerability-product pair for policy evaluation.
    /// </summary>
    /// <param name="vulnerabilityId">Vulnerability identifier — format not enforced here; presumably a CVE/advisory id, confirm against callers.</param>
    /// <param name="productKey">Canonical product key the consensus was computed against.</param>
    /// <param name="context">Tenant scoping and the confidence threshold to apply.</param>
    /// <param name="cancellationToken">Cancels the lookup.</param>
    Task<PolicyVexStatusResult> GetVexStatusForPolicyAsync(
        string vulnerabilityId,
        string productKey,
        PolicyVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets VEX status for multiple vulnerability-product pairs in batch.
    /// </summary>
    /// <returns>One result per query, in input order.</returns>
    Task<IReadOnlyList<PolicyVexStatusResult>> GetVexStatusBatchAsync(
        IEnumerable<PolicyVexQuery> queries,
        PolicyVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Checks if a vulnerability is suppressed by VEX for a product, i.e.
    /// consensus says not_affected or fixed with sufficient confidence.
    /// </summary>
    Task<VexSuppressionResult> CheckVexSuppressionAsync(
        string vulnerabilityId,
        string productKey,
        PolicyVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets VEX-adjusted severity for policy scoring.
    /// </summary>
    /// <param name="baseSeverity">Severity before VEX adjustment — scale is caller-defined (CVSS?); TODO confirm.</param>
    Task<VexAdjustedSeverityResult> GetVexAdjustedSeverityAsync(
        string vulnerabilityId,
        string productKey,
        double baseSeverity,
        PolicyVexContext context,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for policy VEX queries.
/// </summary>
/// <param name="TenantId">Optional tenant scope; null means no tenant filter.</param>
/// <param name="PolicyId">Optional policy identifier — informational; not read by the default implementation.</param>
/// <param name="MinimumConfidenceThreshold">Consensus confidence required before VEX data is honored (inclusive comparison).</param>
/// <param name="RequireSignedVex">Whether only signed VEX may be used — not enforced by the default implementation; TODO confirm enforcement point.</param>
/// <param name="EvaluationTime">Timestamp of the policy evaluation.</param>
public sealed record PolicyVexContext(
    string? TenantId,
    string? PolicyId,
    double MinimumConfidenceThreshold,
    bool RequireSignedVex,
    DateTimeOffset EvaluationTime);
/// <summary>
/// Query for policy VEX status: one vulnerability-product pair.
/// </summary>
public sealed record PolicyVexQuery(
    string VulnerabilityId,
    string ProductKey);
/// <summary>
/// Result of VEX status for policy evaluation.
/// </summary>
/// <param name="HasVexData">False when no consensus projection exists; the nullable fields are then null.</param>
/// <param name="MeetsConfidenceThreshold">True when the confidence score is at or above the context threshold.</param>
public sealed record PolicyVexStatusResult(
    string VulnerabilityId,
    string ProductKey,
    bool HasVexData,
    VexStatus? Status,
    VexJustification? Justification,
    double? ConfidenceScore,
    bool MeetsConfidenceThreshold,
    string? ProjectionId,
    PolicyVexEvidenceSummary? Evidence);
/// <summary>
/// Summary of VEX evidence for policy.
/// </summary>
public sealed record PolicyVexEvidenceSummary(
    int StatementCount,
    int IssuerCount,
    int ConflictCount,
    string? PrimaryIssuer,
    DateTimeOffset? MostRecentStatement,
    IReadOnlyList<string> IssuerNames);
/// <summary>
/// Result of VEX suppression check.
/// </summary>
/// <param name="IsSuppressed">True when consensus status is not_affected or fixed and confidence meets the threshold.</param>
public sealed record VexSuppressionResult(
    string VulnerabilityId,
    string ProductKey,
    bool IsSuppressed,
    VexSuppressionReason? Reason,
    VexStatus? Status,
    VexJustification? Justification,
    double? ConfidenceScore,
    string? SuppressedBy,
    DateTimeOffset? SuppressedAt);
/// <summary>
/// Reason for VEX suppression.
/// </summary>
public enum VexSuppressionReason
{
    /// <summary>
    /// VEX indicates not_affected (without a recorded justification).
    /// </summary>
    NotAffected,
    /// <summary>
    /// VEX indicates fixed.
    /// </summary>
    Fixed,
    /// <summary>
    /// VEX indicates not_affected with an explicit justification.
    /// </summary>
    JustifiedNotAffected
}
/// <summary>
/// Result of VEX-adjusted severity calculation.
/// </summary>
/// <param name="AdjustmentFactor">Multiplier applied to <see cref="BaseSeverity"/>; 1.0 means unchanged.</param>
public sealed record VexAdjustedSeverityResult(
    string VulnerabilityId,
    string ProductKey,
    double BaseSeverity,
    double AdjustedSeverity,
    double AdjustmentFactor,
    VexStatus? VexStatus,
    string? AdjustmentReason);
/// <summary>
/// Integration interface for Vuln Explorer consumption of VEX consensus.
/// Provides read-only views over stored consensus projections.
/// </summary>
public interface IVulnExplorerIntegration
{
    /// <summary>
    /// Enriches a vulnerability with VEX consensus data.
    /// </summary>
    /// <param name="productKey">Optional product filter; null aggregates across all products.</param>
    Task<VulnVexEnrichment> EnrichVulnerabilityAsync(
        string vulnerabilityId,
        string? productKey,
        VulnVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets VEX timeline for a vulnerability-product pair.
    /// </summary>
    Task<VexTimelineResult> GetVexTimelineAsync(
        string vulnerabilityId,
        string productKey,
        VulnVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets VEX summary statistics for a vulnerability.
    /// </summary>
    Task<VulnVexSummary> GetVexSummaryAsync(
        string vulnerabilityId,
        VulnVexContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Searches VEX data for vulnerabilities matching criteria.
    /// </summary>
    Task<VexSearchResult> SearchVexAsync(
        VexSearchQuery query,
        VulnVexContext context,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for Vuln Explorer VEX queries.
/// </summary>
/// <param name="IncludeRawStatements">Request raw statements in responses — not honored by the default implementation; TODO confirm.</param>
/// <param name="IncludeHistory">Request history in responses — not read by the default implementation; TODO confirm.</param>
/// <param name="HistoryLimit">Maximum history entries to fetch; null leaves the limit to the store.</param>
public sealed record VulnVexContext(
    string? TenantId,
    bool IncludeRawStatements,
    bool IncludeHistory,
    int? HistoryLimit);
/// <summary>
/// VEX enrichment data for a vulnerability.
/// </summary>
/// <param name="ConsensusStatus">Most common per-product status across the products found.</param>
public sealed record VulnVexEnrichment(
    string VulnerabilityId,
    bool HasVexData,
    VexStatus? ConsensusStatus,
    VexJustification? Justification,
    double? ConfidenceScore,
    int ProductCount,
    IReadOnlyList<ProductVexStatus> ProductStatuses,
    IReadOnlyList<VexIssuerSummary> Issuers,
    DateTimeOffset? LastVexUpdate);
/// <summary>
/// VEX status for a specific product.
/// </summary>
public sealed record ProductVexStatus(
    string ProductKey,
    string? ProductName,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    string? PrimaryIssuer,
    DateTimeOffset? ComputedAt);
/// <summary>
/// Summary of a VEX issuer.
/// </summary>
public sealed record VexIssuerSummary(
    string IssuerId,
    string Name,
    string Category,
    int StatementCount,
    VexStatus? MostCommonStatus);
/// <summary>
/// VEX timeline for a vulnerability-product pair.
/// </summary>
public sealed record VexTimelineResult(
    string VulnerabilityId,
    string ProductKey,
    IReadOnlyList<VexTimelineEntry> Entries,
    VexStatus? CurrentStatus,
    int StatusChangeCount);
/// <summary>
/// Entry in VEX timeline.
/// </summary>
/// <param name="EventType">One of "initial", "status_change", or "update" as produced by the default implementation.</param>
public sealed record VexTimelineEntry(
    DateTimeOffset Timestamp,
    VexStatus Status,
    VexJustification? Justification,
    string? IssuerId,
    string? IssuerName,
    string EventType,
    string? Notes);
/// <summary>
/// Summary of VEX data for a vulnerability.
/// </summary>
public sealed record VulnVexSummary(
    string VulnerabilityId,
    int TotalStatements,
    int TotalProducts,
    int TotalIssuers,
    IReadOnlyDictionary<VexStatus, int> StatusCounts,
    IReadOnlyDictionary<VexJustification, int> JustificationCounts,
    double AverageConfidence,
    DateTimeOffset? FirstVexStatement,
    DateTimeOffset? LatestVexStatement);
/// <summary>
/// Query for searching VEX data. Pattern fields are passed through to the
/// projection store; wildcard/matching semantics depend on the store — TODO confirm.
/// NOTE(review): <c>Justification</c> and <c>IssuerId</c> are not mapped to the
/// store query by the default implementation, so they are currently ignored.
/// </summary>
public sealed record VexSearchQuery(
    string? VulnerabilityIdPattern,
    string? ProductKeyPattern,
    VexStatus? Status,
    VexJustification? Justification,
    string? IssuerId,
    double? MinimumConfidence,
    DateTimeOffset? UpdatedAfter,
    int Limit,
    int Offset);
/// <summary>
/// Result of VEX search (one page, with paging echo).
/// </summary>
public sealed record VexSearchResult(
    IReadOnlyList<VexSearchHit> Hits,
    int TotalCount,
    int Offset,
    int Limit);
/// <summary>
/// Search hit for VEX data.
/// </summary>
public sealed record VexSearchHit(
    string VulnerabilityId,
    string ProductKey,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    string? PrimaryIssuer,
    DateTimeOffset ComputedAt);

View File

@@ -0,0 +1,427 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Storage;
namespace StellaOps.VexLens.Integration;
/// <summary>
/// Default implementation of <see cref="IPolicyEngineIntegration"/>.
/// Reads consensus projections from <see cref="IConsensusProjectionStore"/>;
/// it never computes consensus itself.
/// </summary>
public sealed class PolicyEngineIntegration : IPolicyEngineIntegration
{
    // Read-only source of previously computed consensus projections.
    private readonly IConsensusProjectionStore _projectionStore;
    /// <summary>Creates the integration over the given projection store.</summary>
    public PolicyEngineIntegration(IConsensusProjectionStore projectionStore)
    {
        _projectionStore = projectionStore;
    }
    /// <summary>
    /// Looks up the latest consensus projection for the pair and maps it to a
    /// policy-facing result; returns a HasVexData=false result when none exists.
    /// </summary>
    public async Task<PolicyVexStatusResult> GetVexStatusForPolicyAsync(
        string vulnerabilityId,
        string productKey,
        PolicyVexContext context,
        CancellationToken cancellationToken = default)
    {
        var projection = await _projectionStore.GetLatestAsync(
            vulnerabilityId,
            productKey,
            context.TenantId,
            cancellationToken);
        if (projection == null)
        {
            // No VEX data at all: every optional field stays null.
            return new PolicyVexStatusResult(
                VulnerabilityId: vulnerabilityId,
                ProductKey: productKey,
                HasVexData: false,
                Status: null,
                Justification: null,
                ConfidenceScore: null,
                MeetsConfidenceThreshold: false,
                ProjectionId: null,
                Evidence: null);
        }
        // Inclusive comparison: a score exactly at the threshold qualifies.
        var meetsThreshold = projection.ConfidenceScore >= context.MinimumConfidenceThreshold;
        return new PolicyVexStatusResult(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            HasVexData: true,
            Status: projection.Status,
            Justification: projection.Justification,
            ConfidenceScore: projection.ConfidenceScore,
            MeetsConfidenceThreshold: meetsThreshold,
            ProjectionId: projection.ProjectionId,
            Evidence: new PolicyVexEvidenceSummary(
                StatementCount: projection.StatementCount,
                IssuerCount: 1, // Simplified; would need full projection data
                ConflictCount: projection.ConflictCount,
                PrimaryIssuer: null,
                // NOTE(review): this is the consensus computation time, not the
                // newest statement time — confirm that is acceptable for policy.
                MostRecentStatement: projection.ComputedAt,
                IssuerNames: []));
    }
    /// <summary>
    /// Resolves each query sequentially via
    /// <see cref="GetVexStatusForPolicyAsync"/>; results preserve input order.
    /// </summary>
    public async Task<IReadOnlyList<PolicyVexStatusResult>> GetVexStatusBatchAsync(
        IEnumerable<PolicyVexQuery> queries,
        PolicyVexContext context,
        CancellationToken cancellationToken = default)
    {
        var results = new List<PolicyVexStatusResult>();
        foreach (var query in queries)
        {
            // Cooperative cancellation between store round-trips.
            cancellationToken.ThrowIfCancellationRequested();
            var result = await GetVexStatusForPolicyAsync(
                query.VulnerabilityId,
                query.ProductKey,
                context,
                cancellationToken);
            results.Add(result);
        }
        return results;
    }
    /// <summary>
    /// A pair is suppressed when its consensus status is not_affected or fixed
    /// and the confidence threshold is met; otherwise not suppressed.
    /// </summary>
    public async Task<VexSuppressionResult> CheckVexSuppressionAsync(
        string vulnerabilityId,
        string productKey,
        PolicyVexContext context,
        CancellationToken cancellationToken = default)
    {
        var statusResult = await GetVexStatusForPolicyAsync(
            vulnerabilityId,
            productKey,
            context,
            cancellationToken);
        if (!statusResult.HasVexData || !statusResult.MeetsConfidenceThreshold)
        {
            // Low-confidence or absent VEX data never suppresses a finding.
            return new VexSuppressionResult(
                VulnerabilityId: vulnerabilityId,
                ProductKey: productKey,
                IsSuppressed: false,
                Reason: null,
                Status: statusResult.Status,
                Justification: statusResult.Justification,
                ConfidenceScore: statusResult.ConfidenceScore,
                SuppressedBy: null,
                SuppressedAt: null);
        }
        var isSuppressed = statusResult.Status == VexStatus.NotAffected ||
            statusResult.Status == VexStatus.Fixed;
        VexSuppressionReason? reason = null;
        if (isSuppressed)
        {
            // Prefer the justified variant when a justification is recorded.
            reason = statusResult.Status switch
            {
                VexStatus.NotAffected when statusResult.Justification.HasValue =>
                    VexSuppressionReason.JustifiedNotAffected,
                VexStatus.NotAffected => VexSuppressionReason.NotAffected,
                VexStatus.Fixed => VexSuppressionReason.Fixed,
                _ => null
            };
        }
        return new VexSuppressionResult(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            IsSuppressed: isSuppressed,
            Reason: reason,
            Status: statusResult.Status,
            Justification: statusResult.Justification,
            ConfidenceScore: statusResult.ConfidenceScore,
            // Evidence currently carries null PrimaryIssuer (see above),
            // so SuppressedBy is effectively null today.
            SuppressedBy: statusResult.Evidence?.PrimaryIssuer,
            SuppressedAt: statusResult.Evidence?.MostRecentStatement);
    }
    /// <summary>
    /// Scales the base severity by a status-dependent factor (0 for
    /// not_affected/fixed, 0.8 for under_investigation, 1 for affected),
    /// then blends suppressive factors back toward 1.0 when confidence is low.
    /// </summary>
    public async Task<VexAdjustedSeverityResult> GetVexAdjustedSeverityAsync(
        string vulnerabilityId,
        string productKey,
        double baseSeverity,
        PolicyVexContext context,
        CancellationToken cancellationToken = default)
    {
        var statusResult = await GetVexStatusForPolicyAsync(
            vulnerabilityId,
            productKey,
            context,
            cancellationToken);
        if (!statusResult.HasVexData || !statusResult.MeetsConfidenceThreshold)
        {
            // No qualifying VEX data: severity passes through unchanged.
            return new VexAdjustedSeverityResult(
                VulnerabilityId: vulnerabilityId,
                ProductKey: productKey,
                BaseSeverity: baseSeverity,
                AdjustedSeverity: baseSeverity,
                AdjustmentFactor: 1.0,
                VexStatus: statusResult.Status,
                AdjustmentReason: "No qualifying VEX data");
        }
        var (adjustmentFactor, reason) = statusResult.Status switch
        {
            VexStatus.NotAffected => (0.0, "VEX indicates not affected"),
            VexStatus.Fixed => (0.0, "VEX indicates fixed"),
            VexStatus.Affected => (1.0, "VEX confirms affected"),
            VexStatus.UnderInvestigation => (0.8, "VEX indicates under investigation"),
            _ => (1.0, "Unknown VEX status")
        };
        // Apply confidence scaling
        // Defensive default of 0.5, though ConfidenceScore is non-null here
        // because MeetsConfidenceThreshold was true.
        var confidenceScale = statusResult.ConfidenceScore ?? 0.5;
        if (adjustmentFactor < 1.0)
        {
            // For suppression, blend toward base severity based on confidence
            // (full confidence keeps the factor; low confidence pulls it to 1.0).
            adjustmentFactor = adjustmentFactor + (1.0 - adjustmentFactor) * (1.0 - confidenceScale);
        }
        var adjustedSeverity = baseSeverity * adjustmentFactor;
        return new VexAdjustedSeverityResult(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            BaseSeverity: baseSeverity,
            AdjustedSeverity: adjustedSeverity,
            AdjustmentFactor: adjustmentFactor,
            VexStatus: statusResult.Status,
            AdjustmentReason: $"{reason} (confidence: {confidenceScale:P0})");
    }
}
/// <summary>
/// Default implementation of <see cref="IVulnExplorerIntegration"/>.
/// Serves Vuln Explorer read models from stored consensus projections only;
/// issuer-level fields are left null/empty because projections do not carry
/// issuer detail (see inline notes).
/// </summary>
public sealed class VulnExplorerIntegration : IVulnExplorerIntegration
{
    // Read-only source of previously computed consensus projections.
    private readonly IConsensusProjectionStore _projectionStore;
    /// <summary>Creates the integration over the given projection store.</summary>
    public VulnExplorerIntegration(IConsensusProjectionStore projectionStore)
    {
        _projectionStore = projectionStore;
    }
    /// <summary>
    /// Aggregates recent projections for the vulnerability (optionally
    /// narrowed to one product) into an enrichment view; the overall status
    /// is the most common per-product status.
    /// </summary>
    public async Task<VulnVexEnrichment> EnrichVulnerabilityAsync(
        string vulnerabilityId,
        string? productKey,
        VulnVexContext context,
        CancellationToken cancellationToken = default)
    {
        // NOTE(review): page size is hard-coded at 100; products beyond the
        // first page are silently ignored — confirm acceptable.
        var query = new ProjectionQuery(
            TenantId: context.TenantId,
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            Status: null,
            Outcome: null,
            MinimumConfidence: null,
            ComputedAfter: null,
            ComputedBefore: null,
            StatusChanged: null,
            Limit: 100,
            Offset: 0,
            SortBy: ProjectionSortField.ComputedAt,
            SortDescending: true);
        var result = await _projectionStore.ListAsync(query, cancellationToken);
        if (result.Projections.Count == 0)
        {
            return new VulnVexEnrichment(
                VulnerabilityId: vulnerabilityId,
                HasVexData: false,
                ConsensusStatus: null,
                Justification: null,
                ConfidenceScore: null,
                ProductCount: 0,
                ProductStatuses: [],
                Issuers: [],
                LastVexUpdate: null);
        }
        // Newest projection per product, per the query's descending ComputedAt sort.
        var productStatuses = result.Projections
            .GroupBy(p => p.ProductKey)
            .Select(g => g.First())
            .Select(p => new ProductVexStatus(
                ProductKey: p.ProductKey,
                ProductName: null,
                Status: p.Status,
                Justification: p.Justification,
                ConfidenceScore: p.ConfidenceScore,
                PrimaryIssuer: null,
                ComputedAt: p.ComputedAt))
            .ToList();
        // Determine overall consensus (most common status); ties resolve by
        // first-encountered grouping order.
        var statusCounts = productStatuses
            .GroupBy(p => p.Status)
            .ToDictionary(g => g.Key, g => g.Count());
        var consensusStatus = statusCounts
            .OrderByDescending(kv => kv.Value)
            .First().Key;
        var avgConfidence = productStatuses.Average(p => p.ConfidenceScore);
        var lastUpdate = productStatuses.Max(p => p.ComputedAt);
        return new VulnVexEnrichment(
            VulnerabilityId: vulnerabilityId,
            HasVexData: true,
            ConsensusStatus: consensusStatus,
            Justification: null,
            ConfidenceScore: avgConfidence,
            ProductCount: productStatuses.Count,
            ProductStatuses: productStatuses,
            Issuers: [],
            LastVexUpdate: lastUpdate);
    }
    /// <summary>
    /// Builds a chronological timeline from stored projection history,
    /// tagging each entry as "initial", "status_change", or "update".
    /// </summary>
    public async Task<VexTimelineResult> GetVexTimelineAsync(
        string vulnerabilityId,
        string productKey,
        VulnVexContext context,
        CancellationToken cancellationToken = default)
    {
        var history = await _projectionStore.GetHistoryAsync(
            vulnerabilityId,
            productKey,
            context.TenantId,
            context.HistoryLimit,
            cancellationToken);
        var entries = new List<VexTimelineEntry>();
        VexStatus? previousStatus = null;
        // Walk oldest-to-newest so status transitions are detected in order.
        foreach (var projection in history.OrderBy(p => p.ComputedAt))
        {
            var eventType = previousStatus == null
                ? "initial"
                : projection.Status != previousStatus
                    ? "status_change"
                    : "update";
            entries.Add(new VexTimelineEntry(
                Timestamp: projection.ComputedAt,
                Status: projection.Status,
                IssuerId: null,
                Justification: projection.Justification,
                IssuerName: null,
                EventType: eventType,
                Notes: projection.RationaleSummary));
            previousStatus = projection.Status;
        }
        var statusChangeCount = entries.Count(e => e.EventType == "status_change");
        return new VexTimelineResult(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            Entries: entries,
            // NOTE(review): assumes GetHistoryAsync returns newest-first so the
            // first element is current — confirm the store's ordering contract.
            CurrentStatus: history.FirstOrDefault()?.Status,
            StatusChangeCount: statusChangeCount);
    }
    /// <summary>
    /// Aggregates up to 1000 projections for the vulnerability into summary
    /// statistics (status/justification counts, averages, first/last times).
    /// </summary>
    public async Task<VulnVexSummary> GetVexSummaryAsync(
        string vulnerabilityId,
        VulnVexContext context,
        CancellationToken cancellationToken = default)
    {
        var query = new ProjectionQuery(
            TenantId: context.TenantId,
            VulnerabilityId: vulnerabilityId,
            ProductKey: null,
            Status: null,
            Outcome: null,
            MinimumConfidence: null,
            ComputedAfter: null,
            ComputedBefore: null,
            StatusChanged: null,
            Limit: 1000,
            Offset: 0,
            SortBy: ProjectionSortField.ComputedAt,
            SortDescending: true);
        var result = await _projectionStore.ListAsync(query, cancellationToken);
        if (result.Projections.Count == 0)
        {
            // Empty summary with zeroed counters when nothing is stored.
            return new VulnVexSummary(
                VulnerabilityId: vulnerabilityId,
                TotalStatements: 0,
                TotalProducts: 0,
                TotalIssuers: 0,
                StatusCounts: new Dictionary<VexStatus, int>(),
                JustificationCounts: new Dictionary<VexJustification, int>(),
                AverageConfidence: 0,
                FirstVexStatement: null,
                LatestVexStatement: null);
        }
        var statusCounts = result.Projections
            .GroupBy(p => p.Status)
            .ToDictionary(g => g.Key, g => g.Count());
        var justificationCounts = result.Projections
            .Where(p => p.Justification.HasValue)
            .GroupBy(p => p.Justification!.Value)
            .ToDictionary(g => g.Key, g => g.Count());
        var totalStatements = result.Projections.Sum(p => p.StatementCount);
        var products = result.Projections.Select(p => p.ProductKey).Distinct().Count();
        var avgConfidence = result.Projections.Average(p => p.ConfidenceScore);
        // First/latest are projection computation times, not statement times.
        var first = result.Projections.Min(p => p.ComputedAt);
        var latest = result.Projections.Max(p => p.ComputedAt);
        return new VulnVexSummary(
            VulnerabilityId: vulnerabilityId,
            TotalStatements: totalStatements,
            TotalProducts: products,
            TotalIssuers: 0, // Would need to track in projections
            StatusCounts: statusCounts,
            JustificationCounts: justificationCounts,
            AverageConfidence: avgConfidence,
            FirstVexStatement: first,
            LatestVexStatement: latest);
    }
    /// <summary>
    /// Translates the search query into a projection-store query and maps the
    /// page of results to search hits.
    /// NOTE(review): the parameter is named <c>searchQuery</c> while the
    /// interface declares <c>query</c>; also <c>Justification</c>/<c>IssuerId</c>
    /// filters are not mapped and therefore ignored — confirm intended.
    /// </summary>
    public async Task<VexSearchResult> SearchVexAsync(
        VexSearchQuery searchQuery,
        VulnVexContext context,
        CancellationToken cancellationToken = default)
    {
        var query = new ProjectionQuery(
            TenantId: context.TenantId,
            VulnerabilityId: searchQuery.VulnerabilityIdPattern,
            ProductKey: searchQuery.ProductKeyPattern,
            Status: searchQuery.Status,
            Outcome: null,
            MinimumConfidence: searchQuery.MinimumConfidence,
            ComputedAfter: searchQuery.UpdatedAfter,
            ComputedBefore: null,
            StatusChanged: null,
            Limit: searchQuery.Limit,
            Offset: searchQuery.Offset,
            SortBy: ProjectionSortField.ComputedAt,
            SortDescending: true);
        var result = await _projectionStore.ListAsync(query, cancellationToken);
        var hits = result.Projections.Select(p => new VexSearchHit(
            VulnerabilityId: p.VulnerabilityId,
            ProductKey: p.ProductKey,
            Status: p.Status,
            Justification: p.Justification,
            ConfidenceScore: p.ConfidenceScore,
            PrimaryIssuer: null,
            ComputedAt: p.ComputedAt)).ToList();
        return new VexSearchResult(
            Hits: hits,
            TotalCount: result.TotalCount,
            Offset: result.Offset,
            Limit: result.Limit);
    }
}

View File

@@ -0,0 +1,331 @@
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.VexLens.Mapping;
/// <summary>
/// Parser for Common Platform Enumeration (CPE) identifiers.
/// Supports both CPE 2.2 (URI binding) and CPE 2.3 (formatted string binding).
/// Components are normalized to lower case with <c>*</c> meaning ANY and
/// <c>-</c> meaning N/A.
/// </summary>
public static partial class CpeParser
{
    // CPE 2.3 formatted string: cpe:2.3:part:vendor:product:version:update:edition:language:sw_edition:target_sw:target_hw:other
    [GeneratedRegex(
        @"^cpe:2\.3:([aho\*\-]):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*):([^:]*)$",
        RegexOptions.Compiled)]
    private static partial Regex Cpe23Regex();

    // CPE 2.2 URI: cpe:/part:vendor:product[:version[:update[:edition[:language]]]]
    // Trailing components are optional. The previous pattern required a colon after
    // the product component, so legal truncated URIs such as "cpe:/a:vendor:product"
    // failed to parse.
    [GeneratedRegex(
        @"^cpe:/([aho]):([^:]*):([^:]*)(?::([^:]*))?(?::([^:]*))?(?::([^:]*))?(?::([^:]*))?$",
        RegexOptions.Compiled)]
    private static partial Regex Cpe22Regex();

    private const string Wildcard = "*";
    private const string Na = "-";

    /// <summary>
    /// Parses a CPE string (2.2 or 2.3 format) into its components.
    /// </summary>
    /// <param name="cpe">Candidate CPE string; leading/trailing whitespace is ignored.</param>
    /// <returns>A successful result carrying the parsed components, or a failure with an error message.</returns>
    public static CpeParseResult Parse(string? cpe)
    {
        if (string.IsNullOrWhiteSpace(cpe))
        {
            return CpeParseResult.Failed("CPE cannot be null or empty");
        }
        cpe = cpe.Trim();
        // Try CPE 2.3 first (the "cpe:2.3:" prefix is unambiguous).
        var match23 = Cpe23Regex().Match(cpe);
        if (match23.Success)
        {
            return ParseCpe23(match23, cpe);
        }
        // Fall back to the CPE 2.2 URI binding ("cpe:/...").
        var match22 = Cpe22Regex().Match(cpe);
        if (match22.Success)
        {
            return ParseCpe22(match22, cpe);
        }
        return CpeParseResult.Failed("Invalid CPE format");
    }

    /// <summary>
    /// Validates if a string is a valid CPE (either binding).
    /// </summary>
    public static bool IsValid(string? cpe)
    {
        if (string.IsNullOrWhiteSpace(cpe))
        {
            return false;
        }
        cpe = cpe.Trim();
        return Cpe23Regex().IsMatch(cpe) || Cpe22Regex().IsMatch(cpe);
    }

    /// <summary>
    /// Converts a CPE (2.2 or 2.3) to the 2.3 formatted-string binding,
    /// or returns null when the input is not a parseable CPE.
    /// </summary>
    public static string? ToCpe23(string? cpe)
    {
        var result = Parse(cpe);
        if (!result.Success || result.Cpe == null)
        {
            return null;
        }
        return BuildCpe23(result.Cpe);
    }

    /// <summary>
    /// Converts a CPE (2.2 or 2.3) to the 2.2 URI binding,
    /// or returns null when the input is not a parseable CPE.
    /// </summary>
    public static string? ToCpe22(string? cpe)
    {
        var result = Parse(cpe);
        if (!result.Success || result.Cpe == null)
        {
            return null;
        }
        return BuildCpe22(result.Cpe);
    }

    /// <summary>
    /// Checks if two CPEs match, with wildcard (<c>*</c>) and N/A (<c>-</c>) semantics
    /// applied component-by-component. Returns false when either input fails to parse.
    /// </summary>
    public static bool Matches(string? cpe1, string? cpe2)
    {
        var result1 = Parse(cpe1);
        var result2 = Parse(cpe2);
        if (!result1.Success || !result2.Success)
        {
            return false;
        }
        var c1 = result1.Cpe!;
        var c2 = result2.Cpe!;
        return MatchComponent(c1.Part, c2.Part) &&
               MatchComponent(c1.Vendor, c2.Vendor) &&
               MatchComponent(c1.Product, c2.Product) &&
               MatchComponent(c1.Version, c2.Version) &&
               MatchComponent(c1.Update, c2.Update) &&
               MatchComponent(c1.Edition, c2.Edition) &&
               MatchComponent(c1.Language, c2.Language) &&
               MatchComponent(c1.SwEdition, c2.SwEdition) &&
               MatchComponent(c1.TargetSw, c2.TargetSw) &&
               MatchComponent(c1.TargetHw, c2.TargetHw) &&
               MatchComponent(c1.Other, c2.Other);
    }

    /// <summary>
    /// Checks if two CPEs refer to the same product, comparing only
    /// part/vendor/product and ignoring version and all later components.
    /// </summary>
    public static bool IsSameProduct(string? cpe1, string? cpe2)
    {
        var result1 = Parse(cpe1);
        var result2 = Parse(cpe2);
        if (!result1.Success || !result2.Success)
        {
            return false;
        }
        var c1 = result1.Cpe!;
        var c2 = result2.Cpe!;
        return string.Equals(c1.Part, c2.Part, StringComparison.OrdinalIgnoreCase) &&
               string.Equals(c1.Vendor, c2.Vendor, StringComparison.OrdinalIgnoreCase) &&
               string.Equals(c1.Product, c2.Product, StringComparison.OrdinalIgnoreCase);
    }

    // Maps a successful 2.3 regex match into the component record.
    private static CpeParseResult ParseCpe23(Match match, string raw)
    {
        var cpe = new CommonPlatformEnumeration(
            CpeVersion: "2.3",
            Part: NormalizeComponent(match.Groups[1].Value),
            Vendor: NormalizeComponent(match.Groups[2].Value),
            Product: NormalizeComponent(match.Groups[3].Value),
            Version: NormalizeComponent(match.Groups[4].Value),
            Update: NormalizeComponent(match.Groups[5].Value),
            Edition: NormalizeComponent(match.Groups[6].Value),
            Language: NormalizeComponent(match.Groups[7].Value),
            SwEdition: NormalizeComponent(match.Groups[8].Value),
            TargetSw: NormalizeComponent(match.Groups[9].Value),
            TargetHw: NormalizeComponent(match.Groups[10].Value),
            Other: NormalizeComponent(match.Groups[11].Value),
            Raw: raw);
        return CpeParseResult.Successful(cpe);
    }

    // Maps a successful 2.2 regex match into the component record. Components
    // absent from the URI (and the extended 2.3-only fields) default to ANY.
    private static CpeParseResult ParseCpe22(Match match, string raw)
    {
        var cpe = new CommonPlatformEnumeration(
            CpeVersion: "2.2",
            Part: NormalizeComponent(match.Groups[1].Value),
            Vendor: NormalizeComponent(match.Groups[2].Value),
            Product: NormalizeComponent(match.Groups[3].Value),
            Version: NormalizeComponent(match.Groups[4].Success ? match.Groups[4].Value : Wildcard),
            Update: NormalizeComponent(match.Groups[5].Success ? match.Groups[5].Value : Wildcard),
            Edition: NormalizeComponent(match.Groups[6].Success ? match.Groups[6].Value : Wildcard),
            Language: NormalizeComponent(match.Groups[7].Success ? match.Groups[7].Value : Wildcard),
            SwEdition: Wildcard,
            TargetSw: Wildcard,
            TargetHw: Wildcard,
            Other: Wildcard,
            Raw: raw);
        return CpeParseResult.Successful(cpe);
    }

    // Percent-decodes, unescapes the common backslash escapes, and lower-cases a
    // component. Empty components normalize to ANY ("*").
    private static string NormalizeComponent(string component)
    {
        if (string.IsNullOrEmpty(component))
        {
            return Wildcard;
        }
        // Decode percent-encoded characters
        var decoded = Uri.UnescapeDataString(component);
        // Replace escaped characters
        decoded = decoded
            .Replace("\\:", ":")
            .Replace("\\;", ";")
            .Replace("\\@", "@");
        return decoded.ToLowerInvariant();
    }

    // Component-level match: "*" matches anything; "-" matches only "-";
    // otherwise case-insensitive equality.
    private static bool MatchComponent(string c1, string c2)
    {
        // Wildcard matches everything
        if (c1 == Wildcard || c2 == Wildcard)
        {
            return true;
        }
        // NA only matches NA
        if (c1 == Na || c2 == Na)
        {
            return c1 == Na && c2 == Na;
        }
        return string.Equals(c1, c2, StringComparison.OrdinalIgnoreCase);
    }

    // Emits the full 11-component 2.3 formatted string.
    private static string BuildCpe23(CommonPlatformEnumeration cpe)
    {
        var sb = new StringBuilder();
        sb.Append("cpe:2.3:");
        sb.Append(EscapeComponent(cpe.Part));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Vendor));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Product));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Version));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Update));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Edition));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Language));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.SwEdition));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.TargetSw));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.TargetHw));
        sb.Append(':');
        sb.Append(EscapeComponent(cpe.Other));
        return sb.ToString();
    }

    // Emits the 2.2 URI. Components are positional, so interior wildcards are
    // emitted as empty fields (meaning ANY) and only trailing wildcards are
    // dropped. The previous version skipped wildcard components entirely, which
    // shifted later values into earlier slots (e.g. the update value landed in
    // the version position when version was ANY).
    private static string BuildCpe22(CommonPlatformEnumeration cpe)
    {
        var sb = new StringBuilder();
        sb.Append("cpe:/");
        sb.Append(cpe.Part);
        sb.Append(':');
        sb.Append(EscapeComponent22(cpe.Vendor));
        sb.Append(':');
        sb.Append(EscapeComponent22(cpe.Product));
        var tail = new[] { cpe.Version, cpe.Update, cpe.Edition, cpe.Language };
        var last = Array.FindLastIndex(tail, c => c != Wildcard);
        for (var i = 0; i <= last; i++)
        {
            sb.Append(':');
            sb.Append(EscapeComponent22(tail[i]));
        }
        return sb.ToString();
    }

    // Backslash-escapes the characters NormalizeComponent unescaped; "*" and "-"
    // are emitted verbatim (they carry ANY / N/A semantics in 2.3).
    private static string EscapeComponent(string component)
    {
        if (component == Wildcard || component == Na)
        {
            return component;
        }
        return component
            .Replace(":", "\\:")
            .Replace(";", "\\;")
            .Replace("@", "\\@");
    }

    // 2.2 URI encoding: ANY becomes an empty field, N/A stays "-", everything
    // else is percent-encoded.
    private static string EscapeComponent22(string component)
    {
        if (component == Wildcard)
        {
            return "";
        }
        if (component == Na)
        {
            return "-";
        }
        return Uri.EscapeDataString(component);
    }
}
/// <summary>
/// Result of CPE parsing.
/// </summary>
/// <param name="Success">True when the input was recognized as CPE 2.2 or 2.3.</param>
/// <param name="Cpe">Parsed components; non-null exactly when <paramref name="Success"/> is true.</param>
/// <param name="ErrorMessage">Human-readable parse failure reason; null on success.</param>
public sealed record CpeParseResult(
    bool Success,
    CommonPlatformEnumeration? Cpe,
    string? ErrorMessage)
{
    /// <summary>Creates a successful result wrapping the parsed components.</summary>
    public static CpeParseResult Successful(CommonPlatformEnumeration cpe) =>
        new(true, cpe, null);
    /// <summary>Creates a failed result carrying the given error message.</summary>
    public static CpeParseResult Failed(string error) =>
        new(false, null, error);
}

View File

@@ -0,0 +1,169 @@
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Mapping;
/// <summary>
/// Interface for product identity mapping services.
/// Maps product references from various sources to canonical identifiers.
/// </summary>
public interface IProductMapper
{
    /// <summary>
    /// Maps a normalized product to a canonical identity.
    /// </summary>
    /// <param name="product">The product reference to canonicalize.</param>
    /// <param name="context">Tenant scope and mapping options (alias resolution, identifier validation).</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<ProductMappingResult> MapAsync(
        NormalizedProduct product,
        ProductMappingContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Batch maps multiple products to canonical identities.
    /// One result is returned per input product.
    /// </summary>
    Task<IReadOnlyList<ProductMappingResult>> MapBatchAsync(
        IEnumerable<NormalizedProduct> products,
        ProductMappingContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Resolves product aliases (e.g., maps one PURL to equivalent PURLs).
    /// </summary>
    /// <param name="identifier">The raw identifier to resolve aliases for.</param>
    /// <param name="identifierType">The identifier system the value belongs to.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<ProductAliasResult> ResolveAliasesAsync(
        string identifier,
        ProductIdentifierType identifierType,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for product mapping operations.
/// </summary>
/// <param name="TenantId">Optional tenant scope for the mapping; null for untenanted use.</param>
/// <param name="ResolveAliases">When true, the mapper also resolves equivalent identifiers via the alias resolver.</param>
/// <param name="ValidateIdentifiers">When true, PURL/CPE parse failures are reported as warnings.</param>
/// <param name="Options">Optional free-form implementation-specific options.</param>
public sealed record ProductMappingContext(
    string? TenantId,
    bool ResolveAliases,
    bool ValidateIdentifiers,
    IReadOnlyDictionary<string, object?>? Options);
/// <summary>
/// Result of a product mapping operation.
/// </summary>
/// <param name="OriginalProduct">The input product, unchanged.</param>
/// <param name="CanonicalProduct">The canonicalized identity, when mapping succeeded.</param>
/// <param name="Success">Whether a canonical identity could be produced.</param>
/// <param name="Confidence">How reliable the produced mapping is.</param>
/// <param name="Warnings">Non-fatal issues encountered (e.g. invalid identifiers); null when none.</param>
/// <param name="Errors">Fatal mapping errors; null when none.</param>
public sealed record ProductMappingResult(
    NormalizedProduct OriginalProduct,
    CanonicalProduct? CanonicalProduct,
    bool Success,
    ProductMappingConfidence Confidence,
    IReadOnlyList<string>? Warnings,
    IReadOnlyList<ProductMappingError>? Errors);
/// <summary>
/// A canonicalized product identity with validated identifiers.
/// </summary>
/// <param name="CanonicalKey">Stable key for the product (preferred order: normalized PURL, then CPE 2.3, then the original key).</param>
/// <param name="Name">Display name, when known.</param>
/// <param name="Version">Version string, when known.</param>
/// <param name="Purl">Parsed Package URL, when the product had a valid PURL.</param>
/// <param name="Cpe">Parsed CPE, when the product had a valid CPE.</param>
/// <param name="Aliases">Known equivalent identifiers; null when none were resolved.</param>
/// <param name="Vendor">Vendor info extracted from CPE vendor or PURL namespace; null when unavailable.</param>
/// <param name="Hashes">Content hashes keyed by algorithm, carried over from the input.</param>
public sealed record CanonicalProduct(
    string CanonicalKey,
    string? Name,
    string? Version,
    PackageUrl? Purl,
    CommonPlatformEnumeration? Cpe,
    IReadOnlyList<ProductAlias>? Aliases,
    ProductVendorInfo? Vendor,
    IReadOnlyDictionary<string, string>? Hashes);
/// <summary>
/// Parsed Package URL (PURL) components.
/// Layout: pkg:type/namespace/name@version?qualifiers#subpath
/// </summary>
/// <param name="Type">Package ecosystem (e.g. npm, maven); lower-cased by the parser.</param>
/// <param name="Namespace">Optional type-specific namespace (group, scope, owner).</param>
/// <param name="Name">Package name.</param>
/// <param name="Version">Optional version.</param>
/// <param name="Qualifiers">Optional key/value qualifiers; keys lower-cased by the parser.</param>
/// <param name="Subpath">Optional subpath within the package.</param>
/// <param name="Raw">The original, unmodified PURL string.</param>
public sealed record PackageUrl(
    string Type,
    string? Namespace,
    string Name,
    string? Version,
    IReadOnlyDictionary<string, string>? Qualifiers,
    string? Subpath,
    string Raw);
/// <summary>
/// Parsed Common Platform Enumeration (CPE) components.
/// Components use "*" for ANY and "-" for N/A; the parser lower-cases values.
/// </summary>
/// <param name="CpeVersion">Source binding: "2.2" (URI) or "2.3" (formatted string).</param>
/// <param name="Raw">The original, unmodified CPE string.</param>
public sealed record CommonPlatformEnumeration(
    string CpeVersion,
    string Part,
    string Vendor,
    string Product,
    string Version,
    string Update,
    string Edition,
    string Language,
    string SwEdition,
    string TargetSw,
    string TargetHw,
    string Other,
    string Raw);
/// <summary>
/// Product alias linking different identifier systems.
/// </summary>
/// <param name="Type">Identifier system of the alias value.</param>
/// <param name="Value">The equivalent identifier.</param>
/// <param name="Source">Where this alias mapping came from.</param>
public sealed record ProductAlias(
    ProductIdentifierType Type,
    string Value,
    ProductAliasSource Source);
/// <summary>
/// Source of a product alias mapping.
/// </summary>
public enum ProductAliasSource
{
    VexDocument,      // alias stated in a VEX document
    SbomDocument,     // alias stated in an SBOM
    VendorMapping,    // supplied by the vendor
    CommunityMapping, // supplied by a community mapping set
    NvdMapping,       // derived from NVD data
    Inferred          // heuristically inferred
}
/// <summary>
/// Vendor information for a product.
/// </summary>
/// <param name="VendorId">Raw vendor identifier (e.g. CPE vendor component or PURL namespace).</param>
/// <param name="Name">Human-readable vendor name, when derivable.</param>
/// <param name="Uri">Vendor URI, when known.</param>
public sealed record ProductVendorInfo(
    string VendorId,
    string? Name,
    string? Uri);
/// <summary>
/// Type of product identifier.
/// </summary>
public enum ProductIdentifierType
{
    Purl,            // Package URL (pkg:...)
    Cpe,             // Common Platform Enumeration (cpe:...)
    Swid,            // SWID tag identifier
    BomRef,          // CycloneDX bom-ref
    VendorProductId, // vendor-proprietary product identifier
    Custom           // anything else
}
/// <summary>
/// Confidence level in product mapping, from strongest to weakest.
/// </summary>
public enum ProductMappingConfidence
{
    Exact,
    High,
    Medium,
    Low,
    Unknown
}
/// <summary>
/// Error during product mapping.
/// </summary>
/// <param name="Code">Machine-readable error code.</param>
/// <param name="Message">Human-readable description.</param>
/// <param name="Field">The input field the error relates to, when applicable.</param>
public sealed record ProductMappingError(
    string Code,
    string Message,
    string? Field);
/// <summary>
/// Result of product alias resolution.
/// </summary>
/// <param name="OriginalIdentifier">The identifier aliases were requested for.</param>
/// <param name="OriginalType">Identifier system of the original identifier.</param>
/// <param name="Aliases">Resolved equivalent identifiers; empty when none are known.</param>
/// <param name="Success">Whether resolution completed.</param>
/// <param name="Warnings">Non-fatal issues (e.g. no resolver configured); null when none.</param>
public sealed record ProductAliasResult(
    string OriginalIdentifier,
    ProductIdentifierType OriginalType,
    IReadOnlyList<ProductAlias> Aliases,
    bool Success,
    IReadOnlyList<string>? Warnings);

View File

@@ -0,0 +1,259 @@
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Mapping;
/// <summary>
/// Utility for matching and comparing product identities across different identifier types.
/// </summary>
public static class ProductIdentityMatcher
{
    /// <summary>
    /// Checks if two products are equivalent based on their identifiers.
    /// Collects one piece of evidence per identifier system the products agree on
    /// (PURL, CPE, key, name+version, content hashes); the overall confidence is
    /// the strongest single piece of evidence.
    /// </summary>
    public static ProductMatchResult Match(NormalizedProduct product1, NormalizedProduct product2)
    {
        var matches = new List<ProductMatchEvidence>();

        // PURL match. Each side is parsed exactly once here; the previous version
        // parsed every PURL three times (once inside IsSamePackage, twice more for
        // the version check). Package identity is type + namespace + name; a
        // matching version upgrades confidence to Exact.
        if (!string.IsNullOrEmpty(product1.Purl) && !string.IsNullOrEmpty(product2.Purl))
        {
            var purl1 = PurlParser.Parse(product1.Purl).PackageUrl;
            var purl2 = PurlParser.Parse(product2.Purl).PackageUrl;
            if (purl1 is not null && purl2 is not null &&
                string.Equals(purl1.Type, purl2.Type, StringComparison.OrdinalIgnoreCase) &&
                string.Equals(purl1.Namespace, purl2.Namespace, StringComparison.OrdinalIgnoreCase) &&
                string.Equals(purl1.Name, purl2.Name, StringComparison.OrdinalIgnoreCase))
            {
                var versionMatch = CheckVersionMatch(purl1.Version, purl2.Version);
                matches.Add(new ProductMatchEvidence(
                    MatchType: ProductMatchType.Purl,
                    Confidence: versionMatch ? MatchConfidence.Exact : MatchConfidence.PackageOnly,
                    Evidence: $"PURL match: {product1.Purl} ≈ {product2.Purl}"));
            }
        }

        // CPE match. Each side is parsed once; product identity is part + vendor +
        // product (same comparison CpeParser.IsSameProduct performs). An equal,
        // non-wildcard version upgrades confidence to Exact.
        if (!string.IsNullOrEmpty(product1.Cpe) && !string.IsNullOrEmpty(product2.Cpe))
        {
            var cpe1 = CpeParser.Parse(product1.Cpe).Cpe;
            var cpe2 = CpeParser.Parse(product2.Cpe).Cpe;
            if (cpe1 is not null && cpe2 is not null &&
                string.Equals(cpe1.Part, cpe2.Part, StringComparison.OrdinalIgnoreCase) &&
                string.Equals(cpe1.Vendor, cpe2.Vendor, StringComparison.OrdinalIgnoreCase) &&
                string.Equals(cpe1.Product, cpe2.Product, StringComparison.OrdinalIgnoreCase))
            {
                var versionMatch = cpe1.Version == cpe2.Version && cpe1.Version != "*";
                matches.Add(new ProductMatchEvidence(
                    MatchType: ProductMatchType.Cpe,
                    Confidence: versionMatch ? MatchConfidence.Exact : MatchConfidence.PackageOnly,
                    Evidence: $"CPE match: {product1.Cpe} ≈ {product2.Cpe}"));
            }
        }

        // Opaque key match (case-insensitive) is always Exact evidence.
        if (!string.IsNullOrEmpty(product1.Key) && !string.IsNullOrEmpty(product2.Key))
        {
            if (string.Equals(product1.Key, product2.Key, StringComparison.OrdinalIgnoreCase))
            {
                matches.Add(new ProductMatchEvidence(
                    MatchType: ProductMatchType.Key,
                    Confidence: MatchConfidence.Exact,
                    Evidence: $"Key match: {product1.Key}"));
            }
        }

        // Name match; a matching version upgrades it from PackageOnly to Exact.
        if (!string.IsNullOrEmpty(product1.Name) && !string.IsNullOrEmpty(product2.Name))
        {
            if (string.Equals(product1.Name, product2.Name, StringComparison.OrdinalIgnoreCase))
            {
                var versionMatch = CheckVersionMatch(product1.Version, product2.Version);
                matches.Add(new ProductMatchEvidence(
                    MatchType: ProductMatchType.NameVersion,
                    Confidence: versionMatch ? MatchConfidence.Exact : MatchConfidence.PackageOnly,
                    Evidence: $"Name match: {product1.Name}" + (versionMatch ? $" @ {product1.Version}" : "")));
            }
        }

        // Content-hash match: any algorithm present on both sides with equal
        // digests counts as Exact evidence (one evidence entry per algorithm).
        if (product1.Hashes != null && product2.Hashes != null)
        {
            foreach (var (alg, hash1) in product1.Hashes)
            {
                if (product2.Hashes.TryGetValue(alg, out var hash2))
                {
                    if (string.Equals(hash1, hash2, StringComparison.OrdinalIgnoreCase))
                    {
                        matches.Add(new ProductMatchEvidence(
                            MatchType: ProductMatchType.Hash,
                            Confidence: MatchConfidence.Exact,
                            Evidence: $"Hash match ({alg}): {hash1}"));
                    }
                }
            }
        }

        // Overall confidence is the strongest evidence collected, or None.
        var overallConfidence = matches.Count > 0
            ? matches.Max(m => m.Confidence)
            : MatchConfidence.None;
        return new ProductMatchResult(
            IsMatch: matches.Count > 0,
            OverallConfidence: overallConfidence,
            Evidence: matches);
    }

    /// <summary>
    /// Finds candidates matching the target product, at or above the given
    /// confidence, ordered from most to least confident. Each result carries the
    /// matched candidate in <c>MatchedProduct</c>.
    /// </summary>
    public static IReadOnlyList<ProductMatchResult> FindMatches(
        NormalizedProduct target,
        IEnumerable<NormalizedProduct> candidates,
        MatchConfidence minimumConfidence = MatchConfidence.PackageOnly)
    {
        var results = new List<ProductMatchResult>();
        foreach (var candidate in candidates)
        {
            var matchResult = Match(target, candidate);
            if (matchResult.IsMatch && matchResult.OverallConfidence >= minimumConfidence)
            {
                results.Add(matchResult with { MatchedProduct = candidate });
            }
        }
        return results.OrderByDescending(r => r.OverallConfidence).ToList();
    }

    /// <summary>
    /// Computes a similarity score between two products (0.0 to 1.0), derived
    /// from the overall match confidence.
    /// </summary>
    public static double ComputeSimilarity(NormalizedProduct product1, NormalizedProduct product2)
    {
        var matchResult = Match(product1, product2);
        if (!matchResult.IsMatch)
        {
            return 0.0;
        }
        return matchResult.OverallConfidence switch
        {
            MatchConfidence.Exact => 1.0,
            MatchConfidence.PackageOnly => 0.8,
            MatchConfidence.Fuzzy => 0.5,
            MatchConfidence.Partial => 0.3,
            _ => 0.0
        };
    }

    /// <summary>
    /// Detects the identifier type from a string by its scheme prefix.
    /// Returns null for blank input or for a pkg:/cpe: prefix that fails to
    /// validate as a PURL/CPE.
    /// </summary>
    public static ProductIdentifierType? DetectIdentifierType(string? identifier)
    {
        if (string.IsNullOrWhiteSpace(identifier))
        {
            return null;
        }
        if (identifier.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
        {
            return PurlParser.IsValid(identifier) ? ProductIdentifierType.Purl : null;
        }
        if (identifier.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
        {
            return CpeParser.IsValid(identifier) ? ProductIdentifierType.Cpe : null;
        }
        if (identifier.StartsWith("swid:", StringComparison.OrdinalIgnoreCase))
        {
            return ProductIdentifierType.Swid;
        }
        // Could be a bom-ref or vendor product ID
        return ProductIdentifierType.Custom;
    }

    /// <summary>
    /// Extracts all identifiers from a product: its PURL and CPE (when present)
    /// plus its key, typed by prefix detection. The key is skipped when it merely
    /// duplicates the PURL/CPE slot.
    /// </summary>
    public static IReadOnlyList<(ProductIdentifierType Type, string Value)> ExtractIdentifiers(NormalizedProduct product)
    {
        var identifiers = new List<(ProductIdentifierType, string)>();
        if (!string.IsNullOrWhiteSpace(product.Purl))
        {
            identifiers.Add((ProductIdentifierType.Purl, product.Purl));
        }
        if (!string.IsNullOrWhiteSpace(product.Cpe))
        {
            identifiers.Add((ProductIdentifierType.Cpe, product.Cpe));
        }
        if (!string.IsNullOrWhiteSpace(product.Key))
        {
            var keyType = DetectIdentifierType(product.Key);
            if (keyType.HasValue && keyType.Value != ProductIdentifierType.Purl && keyType.Value != ProductIdentifierType.Cpe)
            {
                identifiers.Add((keyType.Value, product.Key));
            }
            else if (keyType == null)
            {
                // Unrecognized (e.g. invalid pkg:/cpe: prefix) — record as Custom.
                identifiers.Add((ProductIdentifierType.Custom, product.Key));
            }
        }
        return identifiers;
    }

    // Versions match only when both are present and case-insensitively equal;
    // a missing version on either side never counts as a version match.
    private static bool CheckVersionMatch(string? version1, string? version2)
    {
        if (string.IsNullOrEmpty(version1) || string.IsNullOrEmpty(version2))
        {
            return false;
        }
        return string.Equals(version1, version2, StringComparison.OrdinalIgnoreCase);
    }
}
/// <summary>
/// Result of a product match operation.
/// </summary>
/// <param name="IsMatch">True when at least one piece of match evidence was found.</param>
/// <param name="OverallConfidence">The strongest confidence among the collected evidence.</param>
/// <param name="Evidence">All evidence supporting the match; empty when no match.</param>
/// <param name="MatchedProduct">The candidate that matched (set by FindMatches); null otherwise.</param>
public sealed record ProductMatchResult(
    bool IsMatch,
    MatchConfidence OverallConfidence,
    IReadOnlyList<ProductMatchEvidence> Evidence,
    NormalizedProduct? MatchedProduct = null);
/// <summary>
/// Evidence supporting a product match.
/// </summary>
/// <param name="MatchType">Which identifier system produced this evidence.</param>
/// <param name="Confidence">Strength of this particular piece of evidence.</param>
/// <param name="Evidence">Human-readable description of the match.</param>
public sealed record ProductMatchEvidence(
    ProductMatchType MatchType,
    MatchConfidence Confidence,
    string Evidence);
/// <summary>
/// Type of product match.
/// </summary>
public enum ProductMatchType
{
    Purl,        // Package URL agreement
    Cpe,         // CPE agreement
    Key,         // opaque key agreement
    NameVersion, // name (and possibly version) agreement
    Hash         // content hash agreement
}
/// <summary>
/// Confidence level of a match. Numeric values are ordered so that
/// comparisons (e.g. thresholding, Max) rank stronger matches higher.
/// </summary>
public enum MatchConfidence
{
    None = 0,
    Partial = 1,
    Fuzzy = 2,
    PackageOnly = 3,
    Exact = 4
}

View File

@@ -0,0 +1,301 @@
using System.Text;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Mapping;
/// <summary>
/// Default implementation of <see cref="IProductMapper"/>.
/// Maps normalized products to canonical identities using PURL and CPE parsing,
/// optionally resolving aliases through an injected <see cref="IProductAliasResolver"/>.
/// </summary>
public sealed class ProductMapper : IProductMapper
{
    private readonly IProductAliasResolver? _aliasResolver;

    /// <summary>
    /// Creates a mapper. When <paramref name="aliasResolver"/> is null, alias
    /// resolution is skipped even if requested by the mapping context.
    /// </summary>
    public ProductMapper(IProductAliasResolver? aliasResolver = null)
    {
        _aliasResolver = aliasResolver;
    }

    /// <summary>
    /// Maps a normalized product to a canonical identity. Invalid PURL/CPE values
    /// are reported as warnings (when validation is enabled) rather than failures,
    /// so the result's Success is always true; confidence reflects how much
    /// identifying information was available.
    /// </summary>
    public async Task<ProductMappingResult> MapAsync(
        NormalizedProduct product,
        ProductMappingContext context,
        CancellationToken cancellationToken = default)
    {
        var warnings = new List<string>();
        var errors = new List<ProductMappingError>();
        PackageUrl? parsedPurl = null;
        CommonPlatformEnumeration? parsedCpe = null;
        var aliases = new List<ProductAlias>();

        // Parse PURL if present
        if (!string.IsNullOrWhiteSpace(product.Purl))
        {
            var purlResult = PurlParser.Parse(product.Purl);
            if (purlResult.Success)
            {
                parsedPurl = purlResult.PackageUrl;
            }
            else if (context.ValidateIdentifiers)
            {
                warnings.Add($"Invalid PURL format: {purlResult.ErrorMessage}");
            }
        }

        // Parse CPE if present
        if (!string.IsNullOrWhiteSpace(product.Cpe))
        {
            var cpeResult = CpeParser.Parse(product.Cpe);
            if (cpeResult.Success)
            {
                parsedCpe = cpeResult.Cpe;
            }
            else if (context.ValidateIdentifiers)
            {
                warnings.Add($"Invalid CPE format: {cpeResult.ErrorMessage}");
            }
        }

        // Resolve aliases if requested and a resolver is configured. Only
        // identifiers that parsed successfully are resolved.
        if (context.ResolveAliases && _aliasResolver != null)
        {
            if (parsedPurl != null)
            {
                var purlAliases = await _aliasResolver.ResolveAsync(
                    product.Purl!,
                    ProductIdentifierType.Purl,
                    cancellationToken);
                aliases.AddRange(purlAliases);
            }
            if (parsedCpe != null)
            {
                var cpeAliases = await _aliasResolver.ResolveAsync(
                    product.Cpe!,
                    ProductIdentifierType.Cpe,
                    cancellationToken);
                aliases.AddRange(cpeAliases);
            }
        }

        var canonicalKey = DetermineCanonicalKey(product, parsedPurl, parsedCpe);
        var confidence = DetermineConfidence(product, parsedPurl, parsedCpe);
        var vendor = ExtractVendorInfo(product, parsedPurl, parsedCpe);

        var canonicalProduct = new CanonicalProduct(
            CanonicalKey: canonicalKey,
            Name: product.Name ?? parsedPurl?.Name ?? parsedCpe?.Product,
            Version: product.Version ?? parsedPurl?.Version ?? parsedCpe?.Version,
            Purl: parsedPurl,
            Cpe: parsedCpe,
            Aliases: aliases.Count > 0 ? aliases : null,
            Vendor: vendor,
            Hashes: product.Hashes);
        return new ProductMappingResult(
            OriginalProduct: product,
            CanonicalProduct: canonicalProduct,
            Success: true,
            Confidence: confidence,
            Warnings: warnings.Count > 0 ? warnings : null,
            Errors: errors.Count > 0 ? errors : null);
    }

    /// <summary>
    /// Maps products sequentially, honoring cancellation between items.
    /// Returns one result per input, in input order.
    /// </summary>
    public async Task<IReadOnlyList<ProductMappingResult>> MapBatchAsync(
        IEnumerable<NormalizedProduct> products,
        ProductMappingContext context,
        CancellationToken cancellationToken = default)
    {
        var results = new List<ProductMappingResult>();
        foreach (var product in products)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var result = await MapAsync(product, context, cancellationToken);
            results.Add(result);
        }
        return results;
    }

    /// <summary>
    /// Resolves aliases for an identifier. When no resolver is configured, returns
    /// an empty (but successful) result with a warning rather than failing.
    /// </summary>
    public async Task<ProductAliasResult> ResolveAliasesAsync(
        string identifier,
        ProductIdentifierType identifierType,
        CancellationToken cancellationToken = default)
    {
        if (_aliasResolver == null)
        {
            return new ProductAliasResult(
                OriginalIdentifier: identifier,
                OriginalType: identifierType,
                Aliases: [],
                Success: true,
                Warnings: ["No alias resolver configured"]);
        }
        var aliases = await _aliasResolver.ResolveAsync(identifier, identifierType, cancellationToken);
        return new ProductAliasResult(
            OriginalIdentifier: identifier,
            OriginalType: identifierType,
            Aliases: aliases,
            Success: true,
            Warnings: null);
    }

    // Canonical key preference: normalized PURL (most precise), then the CPE in
    // 2.3 form, then the original product key.
    private static string DetermineCanonicalKey(
        NormalizedProduct product,
        PackageUrl? purl,
        CommonPlatformEnumeration? cpe)
    {
        if (purl != null)
        {
            return PurlParser.Build(purl);
        }
        if (cpe != null)
        {
            return CpeParser.ToCpe23(cpe.Raw) ?? cpe.Raw;
        }
        return product.Key;
    }

    // Confidence ladder: PURL+version > CPE+version or versionless PURL (High)
    // > CPE (Medium) > name only (Low) > key only (Unknown).
    private static ProductMappingConfidence DetermineConfidence(
        NormalizedProduct product,
        PackageUrl? purl,
        CommonPlatformEnumeration? cpe)
    {
        if (purl != null && !string.IsNullOrEmpty(purl.Version))
        {
            return ProductMappingConfidence.Exact;
        }
        if (cpe != null && cpe.Version != "*")
        {
            return ProductMappingConfidence.High;
        }
        if (purl != null)
        {
            return ProductMappingConfidence.High;
        }
        if (cpe != null)
        {
            return ProductMappingConfidence.Medium;
        }
        if (!string.IsNullOrEmpty(product.Name))
        {
            return ProductMappingConfidence.Low;
        }
        return ProductMappingConfidence.Unknown;
    }

    // Vendor preference: a concrete CPE vendor component (not ANY/NA), otherwise
    // the PURL namespace; null when neither is available.
    private static ProductVendorInfo? ExtractVendorInfo(
        NormalizedProduct product,
        PackageUrl? purl,
        CommonPlatformEnumeration? cpe)
    {
        if (cpe != null && cpe.Vendor != "*" && cpe.Vendor != "-")
        {
            return new ProductVendorInfo(
                VendorId: cpe.Vendor,
                Name: FormatVendorName(cpe.Vendor),
                Uri: null);
        }
        if (purl != null && !string.IsNullOrEmpty(purl.Namespace))
        {
            return new ProductVendorInfo(
                VendorId: purl.Namespace,
                Name: purl.Namespace,
                Uri: null);
        }
        return null;
    }

    // Converts "vendor_name" / "vendor-name" to "Vendor Name".
    // RemoveEmptyEntries guards against leading/trailing/consecutive separators
    // (e.g. "red__hat"); without it an empty segment makes s[0] throw
    // IndexOutOfRangeException.
    private static string FormatVendorName(string vendorId)
    {
        return string.Join(' ', vendorId
            .Split(['_', '-'], StringSplitOptions.RemoveEmptyEntries)
            .Select(s => char.ToUpperInvariant(s[0]) + s[1..]));
    }
}
/// <summary>
/// Interface for resolving product aliases.
/// </summary>
public interface IProductAliasResolver
{
    /// <summary>
    /// Resolves aliases for a product identifier.
    /// </summary>
    /// <param name="identifier">The identifier to look up.</param>
    /// <param name="identifierType">The identifier system the value belongs to.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Known equivalent identifiers; empty when none are known.</returns>
    Task<IReadOnlyList<ProductAlias>> ResolveAsync(
        string identifier,
        ProductIdentifierType identifierType,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// In-memory product alias resolver for testing and basic usage.
/// Lookups are case-insensitive on the identifier string. Not thread-safe.
/// </summary>
public sealed class InMemoryProductAliasResolver : IProductAliasResolver
{
    private readonly Dictionary<string, List<ProductAlias>> _aliases = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Registers a one-directional alias for <paramref name="identifier"/>.</summary>
    public void AddAlias(string identifier, ProductAlias alias)
    {
        if (_aliases.TryGetValue(identifier, out var existing))
        {
            existing.Add(alias);
        }
        else
        {
            _aliases[identifier] = [alias];
        }
    }

    /// <summary>Registers aliases in both directions between two identifiers.</summary>
    public void AddBidirectionalAlias(
        string identifier1,
        ProductIdentifierType type1,
        string identifier2,
        ProductIdentifierType type2,
        ProductAliasSource source)
    {
        AddAlias(identifier1, new ProductAlias(type2, identifier2, source));
        AddAlias(identifier2, new ProductAlias(type1, identifier1, source));
    }

    /// <summary>Returns the registered aliases for an identifier, or an empty list.</summary>
    public Task<IReadOnlyList<ProductAlias>> ResolveAsync(
        string identifier,
        ProductIdentifierType identifierType,
        CancellationToken cancellationToken = default)
    {
        IReadOnlyList<ProductAlias> resolved = _aliases.TryGetValue(identifier, out var found)
            ? found
            : [];
        return Task.FromResult(resolved);
    }
}

View File

@@ -0,0 +1,253 @@
using System.Text.RegularExpressions;
using System.Web;
namespace StellaOps.VexLens.Mapping;
/// <summary>
/// Parser for Package URL (PURL) identifiers.
/// Implements the PURL specification: https://github.com/package-url/purl-spec
/// </summary>
public static partial class PurlParser
{
    // pkg:type/namespace/name@version?qualifiers#subpath
    // The namespace may contain multiple '/'-separated segments (e.g. golang
    // purls like pkg:golang/github.com/user/repo@v1); the name is the final
    // path segment. The previous pattern limited the namespace to a single
    // segment ([^/]+) and let the name absorb the rest.
    [GeneratedRegex(
        @"^pkg:(?<type>[a-zA-Z][a-zA-Z0-9.+-]*)(?:/(?<namespace>[^@?#]+))?/(?<name>[^/@?#]+)(?:@(?<version>[^?#]+))?(?:\?(?<qualifiers>[^#]+))?(?:#(?<subpath>.+))?$",
        RegexOptions.Compiled)]
    private static partial Regex PurlRegex();

    /// <summary>
    /// Parses a PURL string into its components. Components are percent-decoded
    /// and type-specific normalization is applied to namespace and name.
    /// </summary>
    public static PurlParseResult Parse(string? purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return PurlParseResult.Failed("PURL cannot be null or empty");
        }
        var match = PurlRegex().Match(purl);
        if (!match.Success)
        {
            return PurlParseResult.Failed("Invalid PURL format");
        }
        var type = match.Groups["type"].Value.ToLowerInvariant();
        var namespaceGroup = match.Groups["namespace"];
        var nameGroup = match.Groups["name"];
        var versionGroup = match.Groups["version"];
        var qualifiersGroup = match.Groups["qualifiers"];
        var subpathGroup = match.Groups["subpath"];
        var ns = namespaceGroup.Success ? DecodeComponent(namespaceGroup.Value) : null;
        var name = DecodeComponent(nameGroup.Value);
        var version = versionGroup.Success ? DecodeComponent(versionGroup.Value) : null;
        var qualifiers = qualifiersGroup.Success ? ParseQualifiers(qualifiersGroup.Value) : null;
        var subpath = subpathGroup.Success ? DecodeComponent(subpathGroup.Value) : null;
        // Normalize namespace per type
        ns = NormalizeNamespace(type, ns);
        // Normalize name per type
        name = NormalizeName(type, name);
        var packageUrl = new PackageUrl(
            Type: type,
            Namespace: ns,
            Name: name,
            Version: version,
            Qualifiers: qualifiers,
            Subpath: subpath,
            Raw: purl);
        return PurlParseResult.Successful(packageUrl);
    }

    /// <summary>
    /// Validates if a string is a valid PURL.
    /// </summary>
    public static bool IsValid(string? purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return false;
        }
        return PurlRegex().IsMatch(purl);
    }

    /// <summary>
    /// Normalizes a PURL to canonical form (decoded, type-normalized, qualifiers
    /// sorted), or returns null when the input is not a valid PURL.
    /// </summary>
    public static string? Normalize(string? purl)
    {
        var result = Parse(purl);
        if (!result.Success || result.PackageUrl == null)
        {
            return null;
        }
        return Build(result.PackageUrl);
    }

    /// <summary>
    /// Builds a canonical PURL string from components. Qualifiers are emitted in
    /// ordinal key order; namespace and subpath are percent-encoded per segment,
    /// keeping the '/' separators literal as the spec requires.
    /// </summary>
    public static string Build(PackageUrl purl)
    {
        var sb = new System.Text.StringBuilder();
        sb.Append("pkg:");
        sb.Append(purl.Type);
        if (!string.IsNullOrEmpty(purl.Namespace))
        {
            sb.Append('/');
            // Encode each namespace segment but keep '/' separators literal;
            // Uri.EscapeDataString on the whole value would turn them into %2F.
            sb.Append(EncodeSegmented(purl.Namespace));
        }
        sb.Append('/');
        sb.Append(EncodeComponent(purl.Name));
        if (!string.IsNullOrEmpty(purl.Version))
        {
            sb.Append('@');
            sb.Append(EncodeComponent(purl.Version));
        }
        if (purl.Qualifiers is { Count: > 0 })
        {
            sb.Append('?');
            var first = true;
            foreach (var (key, value) in purl.Qualifiers.OrderBy(kv => kv.Key, StringComparer.Ordinal))
            {
                if (!first)
                {
                    sb.Append('&');
                }
                first = false;
                sb.Append(EncodeComponent(key));
                sb.Append('=');
                sb.Append(EncodeComponent(value));
            }
        }
        if (!string.IsNullOrEmpty(purl.Subpath))
        {
            sb.Append('#');
            sb.Append(EncodeSegmented(purl.Subpath));
        }
        return sb.ToString();
    }

    /// <summary>
    /// Extracts the ecosystem/type from a PURL, or null when invalid.
    /// </summary>
    public static string? GetEcosystem(string? purl)
    {
        var result = Parse(purl);
        return result.Success ? result.PackageUrl?.Type : null;
    }

    /// <summary>
    /// Checks if two PURLs refer to the same package (same type, namespace and
    /// name, ignoring version, qualifiers and subpath).
    /// </summary>
    public static bool IsSamePackage(string? purl1, string? purl2)
    {
        var result1 = Parse(purl1);
        var result2 = Parse(purl2);
        if (!result1.Success || !result2.Success)
        {
            return false;
        }
        var p1 = result1.PackageUrl!;
        var p2 = result2.PackageUrl!;
        return string.Equals(p1.Type, p2.Type, StringComparison.OrdinalIgnoreCase) &&
               string.Equals(p1.Namespace, p2.Namespace, StringComparison.OrdinalIgnoreCase) &&
               string.Equals(p1.Name, p2.Name, StringComparison.OrdinalIgnoreCase);
    }

    // Percent-decodes a component. Uri.UnescapeDataString is used rather than
    // HttpUtility.UrlDecode because the PURL spec mandates percent-encoding only:
    // a literal '+' must remain '+', whereas UrlDecode would turn it into a space.
    private static string DecodeComponent(string component)
    {
        return Uri.UnescapeDataString(component);
    }

    private static string EncodeComponent(string component)
    {
        // Percent-encode per PURL spec
        return Uri.EscapeDataString(component);
    }

    // Encodes a '/'-separated value (namespace, subpath) segment by segment so
    // the separators stay literal.
    private static string EncodeSegmented(string value)
    {
        return string.Join('/', value.Split('/').Select(EncodeComponent));
    }

    // Parses "k1=v1&k2=v2" qualifiers; keys are lower-cased, pairs without '='
    // or with an empty key are skipped. Returns null when nothing was parsed.
    private static IReadOnlyDictionary<string, string>? ParseQualifiers(string qualifiersStr)
    {
        if (string.IsNullOrEmpty(qualifiersStr))
        {
            return null;
        }
        var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        var pairs = qualifiersStr.Split('&');
        foreach (var pair in pairs)
        {
            var idx = pair.IndexOf('=');
            if (idx > 0)
            {
                var key = DecodeComponent(pair[..idx]).ToLowerInvariant();
                var value = DecodeComponent(pair[(idx + 1)..]);
                result[key] = value;
            }
        }
        return result.Count > 0 ? result : null;
    }

    // Type-specific namespace normalization (PURL spec: some ecosystems are
    // case-insensitive; maven group IDs stay case-sensitive).
    private static string? NormalizeNamespace(string type, string? ns)
    {
        if (string.IsNullOrEmpty(ns))
        {
            return ns;
        }
        return type switch
        {
            "npm" => ns.ToLowerInvariant(),
            "nuget" => ns.ToLowerInvariant(),
            "pypi" => ns.ToLowerInvariant().Replace('_', '-'),
            "maven" => ns, // Case-sensitive
            "golang" => ns.ToLowerInvariant(),
            _ => ns
        };
    }

    // Type-specific name normalization (e.g. PEP 503 dash/underscore folding
    // for pypi).
    private static string NormalizeName(string type, string name)
    {
        return type switch
        {
            "npm" => name.ToLowerInvariant(),
            "nuget" => name.ToLowerInvariant(),
            "pypi" => name.ToLowerInvariant().Replace('_', '-'),
            "golang" => name.ToLowerInvariant(),
            _ => name
        };
    }
}
/// <summary>
/// Result of PURL parsing.
/// </summary>
/// <param name="Success">True when the input parsed as a valid PURL.</param>
/// <param name="PackageUrl">Parsed components; non-null exactly when <paramref name="Success"/> is true.</param>
/// <param name="ErrorMessage">Human-readable parse failure reason; null on success.</param>
public sealed record PurlParseResult(
    bool Success,
    PackageUrl? PackageUrl,
    string? ErrorMessage)
{
    /// <summary>Creates a successful result wrapping the parsed PURL.</summary>
    public static PurlParseResult Successful(PackageUrl purl) =>
        new(true, purl, null);
    /// <summary>Creates a failed result carrying the given error message.</summary>
    public static PurlParseResult Failed(string error) =>
        new(false, null, error);
}

View File

@@ -0,0 +1,183 @@
using System.Text.Json.Serialization;
namespace StellaOps.VexLens.Models;
/// <summary>
/// Normalized VEX document per vex-normalization.schema.json.
/// Supports OpenVEX, CSAF VEX, CycloneDX VEX, SPDX VEX, and StellaOps formats.
/// </summary>
/// <param name="SchemaVersion">Normalization schema version; see <see cref="CurrentSchemaVersion"/>.</param>
/// <param name="DocumentId">Stable identifier of the normalized document.</param>
/// <param name="SourceFormat">The original VEX format the document was normalized from.</param>
/// <param name="SourceDigest">Digest of the source document, when known.</param>
/// <param name="SourceUri">Location the source document was fetched from, when known.</param>
/// <param name="Issuer">Issuing authority, when the source declares one.</param>
/// <param name="IssuedAt">When the source document was issued, when known.</param>
/// <param name="LastUpdatedAt">When the source document was last updated, when known.</param>
/// <param name="Statements">The normalized VEX statements extracted from the source.</param>
/// <param name="Provenance">How/when normalization was performed, when recorded.</param>
public sealed record NormalizedVexDocument(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("documentId")] string DocumentId,
    [property: JsonPropertyName("sourceFormat")] VexSourceFormat SourceFormat,
    [property: JsonPropertyName("sourceDigest")] string? SourceDigest,
    [property: JsonPropertyName("sourceUri")] string? SourceUri,
    [property: JsonPropertyName("issuer")] VexIssuer? Issuer,
    [property: JsonPropertyName("issuedAt")] DateTimeOffset? IssuedAt,
    [property: JsonPropertyName("lastUpdatedAt")] DateTimeOffset? LastUpdatedAt,
    [property: JsonPropertyName("statements")] IReadOnlyList<NormalizedStatement> Statements,
    [property: JsonPropertyName("provenance")] NormalizationProvenance? Provenance)
{
    // Bump when the normalized shape changes incompatibly.
    public const int CurrentSchemaVersion = 1;
}
/// <summary>
/// Original VEX document format before normalization.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter does not honor [JsonPropertyName] on enum
/// members, so these may serialize as "OpenVex" etc. rather than "OPENVEX" — confirm
/// against the wire format (.NET 9's [JsonStringEnumMemberName] would express this).
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexSourceFormat>))]
public enum VexSourceFormat
{
    [JsonPropertyName("OPENVEX")]
    OpenVex,
    [JsonPropertyName("CSAF_VEX")]
    CsafVex,
    [JsonPropertyName("CYCLONEDX_VEX")]
    CycloneDxVex,
    [JsonPropertyName("SPDX_VEX")]
    SpdxVex,
    [JsonPropertyName("STELLAOPS")]
    StellaOps
}
/// <summary>
/// Issuing authority for a VEX document.
/// </summary>
public sealed record VexIssuer(
    // Stable issuer identifier (e.g. publisher namespace URI); "unknown" when absent in the source.
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("category")] IssuerCategory? Category,
    [property: JsonPropertyName("trustTier")] TrustTier? TrustTier,
    // Fingerprints of keys this issuer signs with; null when no signing metadata is known.
    [property: JsonPropertyName("keyFingerprints")] IReadOnlyList<string>? KeyFingerprints);
/// <summary>
/// Issuer category for trust weighting.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter ignores [JsonPropertyName] on enum members;
/// values may serialize as "Vendor" rather than "VENDOR" — confirm against consumers.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<IssuerCategory>))]
public enum IssuerCategory
{
    [JsonPropertyName("VENDOR")]
    Vendor,
    [JsonPropertyName("DISTRIBUTOR")]
    Distributor,
    [JsonPropertyName("COMMUNITY")]
    Community,
    [JsonPropertyName("INTERNAL")]
    Internal,
    [JsonPropertyName("AGGREGATOR")]
    Aggregator
}
/// <summary>
/// Trust tier for policy evaluation.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter ignores [JsonPropertyName] on enum members;
/// values may serialize as "Authoritative" rather than "AUTHORITATIVE" — confirm.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<TrustTier>))]
public enum TrustTier
{
    [JsonPropertyName("AUTHORITATIVE")]
    Authoritative,
    [JsonPropertyName("TRUSTED")]
    Trusted,
    [JsonPropertyName("UNTRUSTED")]
    Untrusted,
    [JsonPropertyName("UNKNOWN")]
    Unknown
}
/// <summary>
/// Normalized VEX statement extracted from source.
/// </summary>
public sealed record NormalizedStatement(
    // Identifier unique within the document (normalizers emit "stmt-<index>").
    [property: JsonPropertyName("statementId")] string StatementId,
    [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
    // Alternative identifiers for the same vulnerability; null when none were found.
    [property: JsonPropertyName("vulnerabilityAliases")] IReadOnlyList<string>? VulnerabilityAliases,
    [property: JsonPropertyName("product")] NormalizedProduct Product,
    [property: JsonPropertyName("status")] VexStatus Status,
    [property: JsonPropertyName("statusNotes")] string? StatusNotes,
    // Only meaningful when Status is NotAffected (see VexJustification).
    [property: JsonPropertyName("justification")] VexJustification? Justification,
    [property: JsonPropertyName("impactStatement")] string? ImpactStatement,
    [property: JsonPropertyName("actionStatement")] string? ActionStatement,
    [property: JsonPropertyName("actionStatementTimestamp")] DateTimeOffset? ActionStatementTimestamp,
    [property: JsonPropertyName("versions")] VersionRange? Versions,
    [property: JsonPropertyName("subcomponents")] IReadOnlyList<NormalizedProduct>? Subcomponents,
    [property: JsonPropertyName("firstSeen")] DateTimeOffset? FirstSeen,
    [property: JsonPropertyName("lastSeen")] DateTimeOffset? LastSeen);
/// <summary>
/// Normalized VEX status using OpenVEX terminology.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter ignores [JsonPropertyName] on enum members;
/// values may serialize as "NotAffected" rather than "not_affected" — confirm.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexStatus>))]
public enum VexStatus
{
    [JsonPropertyName("not_affected")]
    NotAffected,
    [JsonPropertyName("affected")]
    Affected,
    [JsonPropertyName("fixed")]
    Fixed,
    [JsonPropertyName("under_investigation")]
    UnderInvestigation
}
/// <summary>
/// Normalized justification when status is not_affected.
/// Values mirror the OpenVEX justification vocabulary.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter ignores [JsonPropertyName] on enum members;
/// values may serialize in PascalCase rather than snake_case — confirm.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexJustification>))]
public enum VexJustification
{
    [JsonPropertyName("component_not_present")]
    ComponentNotPresent,
    [JsonPropertyName("vulnerable_code_not_present")]
    VulnerableCodeNotPresent,
    [JsonPropertyName("vulnerable_code_not_in_execute_path")]
    VulnerableCodeNotInExecutePath,
    [JsonPropertyName("vulnerable_code_cannot_be_controlled_by_adversary")]
    VulnerableCodeCannotBeControlledByAdversary,
    [JsonPropertyName("inline_mitigations_already_exist")]
    InlineMitigationsAlreadyExist
}
/// <summary>
/// Normalized product reference.
/// </summary>
public sealed record NormalizedProduct(
    // Primary lookup key (e.g. CSAF product_id or CycloneDX bom-ref/purl).
    [property: JsonPropertyName("key")] string Key,
    [property: JsonPropertyName("name")] string? Name,
    [property: JsonPropertyName("version")] string? Version,
    [property: JsonPropertyName("purl")] string? Purl,
    [property: JsonPropertyName("cpe")] string? Cpe,
    // Map of hash algorithm name to digest value; null when the source carries none.
    [property: JsonPropertyName("hashes")] IReadOnlyDictionary<string, string>? Hashes);
/// <summary>
/// Version constraints for a statement.
/// Each list holds version expressions; null means the source stated nothing for that bucket.
/// </summary>
public sealed record VersionRange(
    [property: JsonPropertyName("affected")] IReadOnlyList<string>? Affected,
    [property: JsonPropertyName("fixed")] IReadOnlyList<string>? Fixed,
    [property: JsonPropertyName("unaffected")] IReadOnlyList<string>? Unaffected);
/// <summary>
/// Metadata about the normalization process.
/// </summary>
public sealed record NormalizationProvenance(
    [property: JsonPropertyName("normalizedAt")] DateTimeOffset NormalizedAt,
    // Identifier/version of the normalizer component that produced the document.
    [property: JsonPropertyName("normalizer")] string Normalizer,
    [property: JsonPropertyName("sourceRevision")] string? SourceRevision,
    // Names of the transformation rule sets applied (e.g. "csaf-vex-to-normalized-v1").
    [property: JsonPropertyName("transformationRules")] IReadOnlyList<string>? TransformationRules);

View File

@@ -0,0 +1,685 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Normalization;
/// <summary>
/// Normalizer for CSAF VEX format documents.
/// CSAF VEX documents follow the OASIS CSAF 2.0 specification with profile "VEX".
/// </summary>
public sealed class CsafVexNormalizer : IVexNormalizer
{
public VexSourceFormat SourceFormat => VexSourceFormat.CsafVex;
public bool CanNormalize(string content)
{
    if (string.IsNullOrWhiteSpace(content))
    {
        return false;
    }

    try
    {
        using var parsed = JsonDocument.Parse(content);

        // A CSAF VEX document declares itself via document.category == "csaf_vex".
        if (!parsed.RootElement.TryGetProperty("document", out var documentElement) ||
            !documentElement.TryGetProperty("category", out var categoryElement))
        {
            return false;
        }

        return string.Equals(categoryElement.GetString(), "csaf_vex", StringComparison.OrdinalIgnoreCase);
    }
    catch
    {
        // Unparseable or structurally invalid JSON simply means "not ours".
        return false;
    }
}
/// <summary>
/// Normalizes a CSAF VEX JSON document into a <see cref="NormalizedVexDocument"/>.
/// Never throws for bad input: parse/structure failures are reported via
/// NormalizationResult.Failed with codes ERR_CSAF_001 (missing 'document'),
/// ERR_CSAF_002 (invalid JSON), or ERR_CSAF_999 (unexpected error).
/// </summary>
/// <param name="content">Raw CSAF VEX JSON text.</param>
/// <param name="context">Caller-supplied normalization metadata (source URI, normalizer id, timestamp).</param>
/// <param name="cancellationToken">Accepted for interface symmetry; this implementation completes synchronously and does not observe it.</param>
public Task<NormalizationResult> NormalizeAsync(
    string content,
    NormalizationContext context,
    CancellationToken cancellationToken = default)
{
    var stopwatch = Stopwatch.StartNew();
    var warnings = new List<NormalizationWarning>();
    var statementsSkipped = 0;
    try
    {
        using var doc = JsonDocument.Parse(content);
        var root = doc.RootElement;
        // Extract document metadata
        if (!root.TryGetProperty("document", out var documentElement))
        {
            stopwatch.Stop();
            return Task.FromResult(NormalizationResult.Failed(
                [new NormalizationError("ERR_CSAF_001", "Missing 'document' element", "document", null)],
                new NormalizationMetrics(
                    Duration: stopwatch.Elapsed,
                    SourceBytes: Encoding.UTF8.GetByteCount(content),
                    StatementsExtracted: 0,
                    StatementsSkipped: 0,
                    ProductsMapped: 0),
                warnings));
        }
        // Extract document ID
        var documentId = ExtractDocumentId(documentElement);
        if (string.IsNullOrWhiteSpace(documentId))
        {
            // A missing tracking id is tolerated: synthesize one so the document stays addressable.
            documentId = $"csaf:{Guid.NewGuid():N}";
            warnings.Add(new NormalizationWarning(
                "WARN_CSAF_001",
                "Document tracking ID not found; generated a random ID",
                "document.tracking.id"));
        }
        // Extract issuer from publisher
        var issuer = ExtractIssuer(documentElement, warnings);
        // Extract timestamps
        var (issuedAt, lastUpdatedAt) = ExtractTimestamps(documentElement);
        // Extract product tree for product resolution
        var productTree = root.TryGetProperty("product_tree", out var pt) ? pt : default;
        // Extract vulnerabilities and convert to statements
        var statements = ExtractStatements(root, productTree, warnings, ref statementsSkipped);
        // Calculate source digest
        var sourceDigest = ComputeDigest(content);
        // Build provenance
        var provenance = new NormalizationProvenance(
            NormalizedAt: context.NormalizedAt,
            Normalizer: context.Normalizer,
            SourceRevision: null,
            TransformationRules: ["csaf-vex-to-normalized-v1"]);
        var normalizedDoc = new NormalizedVexDocument(
            SchemaVersion: NormalizedVexDocument.CurrentSchemaVersion,
            DocumentId: documentId,
            SourceFormat: VexSourceFormat.CsafVex,
            SourceDigest: sourceDigest,
            SourceUri: context.SourceUri,
            Issuer: issuer,
            IssuedAt: issuedAt,
            LastUpdatedAt: lastUpdatedAt,
            Statements: statements,
            Provenance: provenance);
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Successful(
            normalizedDoc,
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: statements.Count,
                StatementsSkipped: statementsSkipped,
                ProductsMapped: statements.Count),
            warnings));
    }
    catch (JsonException ex)
    {
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Failed(
            [new NormalizationError("ERR_CSAF_002", "Invalid JSON", ex.Path, ex)],
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: 0,
                StatementsSkipped: 0,
                ProductsMapped: 0),
            warnings));
    }
    catch (Exception ex)
    {
        // Catch-all keeps the normalizer contract "never throws"; the cause travels in the error.
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Failed(
            [new NormalizationError("ERR_CSAF_999", "Unexpected error during normalization", null, ex)],
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: 0,
                StatementsSkipped: 0,
                ProductsMapped: 0),
            warnings));
    }
}
/// <summary>Reads the CSAF tracking id (document.tracking.id), or null when absent.</summary>
private static string? ExtractDocumentId(JsonElement document)
{
    return document.TryGetProperty("tracking", out var tracking) &&
           tracking.TryGetProperty("id", out var id)
        ? id.GetString()
        : null;
}
/// <summary>
/// Builds a <see cref="VexIssuer"/> from document.publisher, warning (WARN_CSAF_002)
/// and returning null when no publisher is present.
/// </summary>
private static VexIssuer? ExtractIssuer(JsonElement document, List<NormalizationWarning> warnings)
{
    if (!document.TryGetProperty("publisher", out var publisher))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CSAF_002",
            "No publisher found in document",
            "document.publisher"));
        return null;
    }

    // Issuer id comes from the publisher namespace; name falls back to the id.
    var id = "unknown";
    if (publisher.TryGetProperty("namespace", out var namespaceElement))
    {
        id = namespaceElement.GetString() ?? "unknown";
    }

    var name = id;
    if (publisher.TryGetProperty("name", out var nameElement))
    {
        name = nameElement.GetString() ?? id;
    }

    string? rawCategory = null;
    if (publisher.TryGetProperty("category", out var categoryElement))
    {
        rawCategory = categoryElement.GetString();
    }

    return new VexIssuer(
        Id: id,
        Name: name,
        Category: MapPublisherCategory(rawCategory),
        TrustTier: TrustTier.Unknown,
        KeyFingerprints: null);
}
/// <summary>Maps a CSAF publisher category string to an <see cref="IssuerCategory"/>; null when unrecognized or "other".</summary>
private static IssuerCategory? MapPublisherCategory(string? category)
{
    var normalized = category?.ToLowerInvariant();
    if (normalized == "vendor")
    {
        return IssuerCategory.Vendor;
    }
    if (normalized is "discoverer" or "coordinator")
    {
        return IssuerCategory.Community;
    }
    if (normalized == "user")
    {
        return IssuerCategory.Internal;
    }
    // "other" and anything unknown carry no category.
    return null;
}
/// <summary>
/// Reads issue/update timestamps from document.tracking
/// (initial_release_date / current_release_date); each is null when absent or unparseable.
/// </summary>
private static (DateTimeOffset? IssuedAt, DateTimeOffset? LastUpdatedAt) ExtractTimestamps(JsonElement document)
{
    if (!document.TryGetProperty("tracking", out var tracking))
    {
        return (null, null);
    }

    return (ReadDate(tracking, "initial_release_date"), ReadDate(tracking, "current_release_date"));

    // Local helper: parse a string date property, or null when absent/invalid.
    static DateTimeOffset? ReadDate(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var value) &&
            value.ValueKind == JsonValueKind.String &&
            DateTimeOffset.TryParse(value.GetString(), out var parsed))
        {
            return parsed;
        }
        return null;
    }
}
/// <summary>
/// Converts the CSAF vulnerabilities array into normalized statements,
/// warning (WARN_CSAF_003) and returning empty when the array is missing.
/// </summary>
private static IReadOnlyList<NormalizedStatement> ExtractStatements(
    JsonElement root,
    JsonElement productTree,
    List<NormalizationWarning> warnings,
    ref int skipped)
{
    if (!root.TryGetProperty("vulnerabilities", out var vulnerabilities) ||
        vulnerabilities.ValueKind != JsonValueKind.Array)
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CSAF_003",
            "No vulnerabilities array found",
            "vulnerabilities"));
        return [];
    }

    var result = new List<NormalizedStatement>();
    foreach (var vulnerability in vulnerabilities.EnumerateArray())
    {
        // result.Count doubles as the running start index, keeping statement ids unique
        // across vulnerabilities.
        result.AddRange(ExtractVulnerabilityStatements(
            vulnerability, productTree, result.Count, warnings, ref skipped));
    }
    return result;
}
/// <summary>
/// Expands one CSAF vulnerability into statements: one per (product, status) pair
/// found under product_status. The 'cve' field is the primary id; entries under
/// 'ids' become aliases. Vulnerabilities with no id or no product_status yield no
/// statements (the former also increments <paramref name="skipped"/>).
/// </summary>
private static List<NormalizedStatement> ExtractVulnerabilityStatements(
    JsonElement vuln,
    JsonElement productTree,
    int startIndex,
    List<NormalizationWarning> warnings,
    ref int skipped)
{
    var statements = new List<NormalizedStatement>();
    // Extract vulnerability ID (CVE or other identifier)
    string? vulnerabilityId = null;
    var aliases = new List<string>();
    if (vuln.TryGetProperty("cve", out var cve))
    {
        vulnerabilityId = cve.GetString();
    }
    if (vuln.TryGetProperty("ids", out var ids) && ids.ValueKind == JsonValueKind.Array)
    {
        foreach (var id in ids.EnumerateArray())
        {
            if (id.TryGetProperty("text", out var text))
            {
                var idStr = text.GetString();
                if (!string.IsNullOrWhiteSpace(idStr))
                {
                    // First usable id becomes the primary when no CVE was present;
                    // any further distinct ids become aliases.
                    if (vulnerabilityId == null)
                    {
                        vulnerabilityId = idStr;
                    }
                    else if (idStr != vulnerabilityId)
                    {
                        aliases.Add(idStr);
                    }
                }
            }
        }
    }
    if (string.IsNullOrWhiteSpace(vulnerabilityId))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CSAF_004",
            "Vulnerability missing CVE or ID; skipped",
            "vulnerabilities[].cve"));
        skipped++;
        return statements;
    }
    // Extract product_status for VEX statements
    if (!vuln.TryGetProperty("product_status", out var productStatus))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CSAF_005",
            $"Vulnerability {vulnerabilityId} has no product_status",
            "vulnerabilities[].product_status"));
        return statements;
    }
    // Process each status category
    var localIndex = 0;
    // Known not affected
    if (productStatus.TryGetProperty("known_not_affected", out var knownNotAffected) &&
        knownNotAffected.ValueKind == JsonValueKind.Array)
    {
        foreach (var productRef in knownNotAffected.EnumerateArray())
        {
            var product = ResolveProduct(productRef, productTree);
            if (product != null)
            {
                // Only the not_affected status carries a justification (from 'flags').
                var justification = ExtractJustification(vuln, productRef.GetString());
                statements.Add(CreateStatement(
                    startIndex + localIndex++,
                    vulnerabilityId,
                    aliases,
                    product,
                    VexStatus.NotAffected,
                    justification,
                    vuln));
            }
        }
    }
    // Fixed
    if (productStatus.TryGetProperty("fixed", out var fixedProducts) &&
        fixedProducts.ValueKind == JsonValueKind.Array)
    {
        foreach (var productRef in fixedProducts.EnumerateArray())
        {
            var product = ResolveProduct(productRef, productTree);
            if (product != null)
            {
                statements.Add(CreateStatement(
                    startIndex + localIndex++,
                    vulnerabilityId,
                    aliases,
                    product,
                    VexStatus.Fixed,
                    null,
                    vuln));
            }
        }
    }
    // Known affected
    if (productStatus.TryGetProperty("known_affected", out var knownAffected) &&
        knownAffected.ValueKind == JsonValueKind.Array)
    {
        foreach (var productRef in knownAffected.EnumerateArray())
        {
            var product = ResolveProduct(productRef, productTree);
            if (product != null)
            {
                statements.Add(CreateStatement(
                    startIndex + localIndex++,
                    vulnerabilityId,
                    aliases,
                    product,
                    VexStatus.Affected,
                    null,
                    vuln));
            }
        }
    }
    // Under investigation
    if (productStatus.TryGetProperty("under_investigation", out var underInvestigation) &&
        underInvestigation.ValueKind == JsonValueKind.Array)
    {
        foreach (var productRef in underInvestigation.EnumerateArray())
        {
            var product = ResolveProduct(productRef, productTree);
            if (product != null)
            {
                statements.Add(CreateStatement(
                    startIndex + localIndex++,
                    vulnerabilityId,
                    aliases,
                    product,
                    VexStatus.UnderInvestigation,
                    null,
                    vuln));
            }
        }
    }
    return statements;
}
/// <summary>
/// Resolves a product_id reference against the CSAF product_tree, first via
/// full_product_names, then via recursive branch search. Returns null only when
/// the reference itself is not a usable string; an unresolved but valid id still
/// yields a product with Key set and all detail fields null.
/// </summary>
private static NormalizedProduct? ResolveProduct(JsonElement productRef, JsonElement productTree)
{
    if (productRef.ValueKind != JsonValueKind.String)
    {
        return null;
    }
    var productId = productRef.GetString();
    if (string.IsNullOrWhiteSpace(productId))
    {
        return null;
    }
    // Try to find product details in product_tree
    string? name = null;
    string? version = null;
    string? purl = null;
    string? cpe = null;
    if (productTree.ValueKind == JsonValueKind.Object)
    {
        // Search in full_product_names
        if (productTree.TryGetProperty("full_product_names", out var fullNames) &&
            fullNames.ValueKind == JsonValueKind.Array)
        {
            foreach (var fpn in fullNames.EnumerateArray())
            {
                if (fpn.TryGetProperty("product_id", out var pid) &&
                    pid.GetString() == productId)
                {
                    name = fpn.TryGetProperty("name", out var n) ? n.GetString() : null;
                    if (fpn.TryGetProperty("product_identification_helper", out var pih))
                    {
                        purl = pih.TryGetProperty("purl", out var p) ? p.GetString() : null;
                        cpe = pih.TryGetProperty("cpe", out var c) ? c.GetString() : null;
                    }
                    break;
                }
            }
        }
        // Search in branches recursively (only if full_product_names found nothing)
        if (name == null && productTree.TryGetProperty("branches", out var branches))
        {
            var result = SearchBranches(branches, productId);
            if (result.HasValue)
            {
                name = result.Value.Name;
                version = result.Value.Version;
                purl = result.Value.Purl;
                cpe = result.Value.Cpe;
            }
        }
    }
    return new NormalizedProduct(
        Key: productId,
        Name: name,
        Version: version,
        Purl: purl,
        Cpe: cpe,
        Hashes: null);
}
/// <summary>
/// Depth-first search of a CSAF product_tree branches array for the given
/// product_id. Returns the first match's details, or null when not found.
/// Version is taken from the branch name only when the branch category is
/// "product_version".
/// </summary>
private static (string? Name, string? Version, string? Purl, string? Cpe)? SearchBranches(
    JsonElement branches,
    string productId)
{
    if (branches.ValueKind != JsonValueKind.Array)
    {
        return null;
    }
    foreach (var branch in branches.EnumerateArray())
    {
        // Check product in this branch
        if (branch.TryGetProperty("product", out var product) &&
            product.TryGetProperty("product_id", out var pid) &&
            pid.GetString() == productId)
        {
            var name = product.TryGetProperty("name", out var n) ? n.GetString() : null;
            var version = branch.TryGetProperty("name", out var bn) &&
                          branch.TryGetProperty("category", out var bc) &&
                          bc.GetString() == "product_version"
                ? bn.GetString()
                : null;
            string? purl = null;
            string? cpe = null;
            if (product.TryGetProperty("product_identification_helper", out var pih))
            {
                purl = pih.TryGetProperty("purl", out var p) ? p.GetString() : null;
                cpe = pih.TryGetProperty("cpe", out var c) ? c.GetString() : null;
            }
            return (name, version, purl, cpe);
        }
        // Recurse into sub-branches
        if (branch.TryGetProperty("branches", out var subBranches))
        {
            var result = SearchBranches(subBranches, productId);
            if (result.HasValue)
            {
                return result;
            }
        }
    }
    return null;
}
/// <summary>
/// Finds the not-affected justification for a product from the vulnerability's
/// "flags" array. A flag with a product_ids list is scoped to those products;
/// a flag without one applies to every product. Returns the first mappable label.
/// </summary>
private static VexJustification? ExtractJustification(JsonElement vuln, string? productId)
{
    if (!vuln.TryGetProperty("flags", out var flags) ||
        flags.ValueKind != JsonValueKind.Array)
    {
        return null;
    }

    foreach (var flag in flags.EnumerateArray())
    {
        // Skip flags explicitly scoped to other products.
        if (flag.TryGetProperty("product_ids", out var productIds) &&
            productIds.ValueKind == JsonValueKind.Array &&
            !ContainsProduct(productIds, productId))
        {
            continue;
        }

        if (!flag.TryGetProperty("label", out var label))
        {
            continue;
        }

        var mapped = MapCsafFlagToJustification(label.GetString());
        if (mapped is not null)
        {
            return mapped;
        }
    }

    return null;

    static bool ContainsProduct(JsonElement productIds, string? productId)
    {
        foreach (var candidate in productIds.EnumerateArray())
        {
            if (candidate.GetString() == productId)
            {
                return true;
            }
        }
        return false;
    }
}
/// <summary>
/// Maps a CSAF 2.0 flag label to the matching <see cref="VexJustification"/>;
/// returns null for unrecognized labels. The CSAF label vocabulary mirrors the
/// normalized justification enum one-to-one.
/// </summary>
private static VexJustification? MapCsafFlagToJustification(string? label)
{
    return label?.ToLowerInvariant() switch
    {
        "component_not_present" => VexJustification.ComponentNotPresent,
        "vulnerable_code_not_present" => VexJustification.VulnerableCodeNotPresent,
        "vulnerable_code_not_in_execute_path" => VexJustification.VulnerableCodeNotInExecutePath,
        // Fix: previously this label was collapsed into VulnerableCodeNotInExecutePath,
        // silently discarding the distinct justification the enum already defines.
        "vulnerable_code_cannot_be_controlled_by_adversary" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
        "inline_mitigations_already_exist" => VexJustification.InlineMitigationsAlreadyExist,
        _ => null
    };
}
/// <summary>
/// Builds one normalized statement for a (vulnerability, product, status) triple.
/// Status notes come from the first "description" note; the action statement and
/// its timestamp from the first remediation entry; first/last seen both from the
/// vulnerability's release_date.
/// </summary>
private static NormalizedStatement CreateStatement(
    int index,
    string vulnerabilityId,
    List<string> aliases,
    NormalizedProduct product,
    VexStatus status,
    VexJustification? justification,
    JsonElement vuln)
{
    // Extract notes for status notes
    string? statusNotes = null;
    if (vuln.TryGetProperty("notes", out var notes) && notes.ValueKind == JsonValueKind.Array)
    {
        foreach (var note in notes.EnumerateArray())
        {
            if (note.TryGetProperty("category", out var cat) &&
                cat.GetString() == "description" &&
                note.TryGetProperty("text", out var text))
            {
                statusNotes = text.GetString();
                break;
            }
        }
    }
    // Extract action statement from remediations
    string? actionStatement = null;
    DateTimeOffset? actionTimestamp = null;
    if (vuln.TryGetProperty("remediations", out var remediations) &&
        remediations.ValueKind == JsonValueKind.Array)
    {
        foreach (var rem in remediations.EnumerateArray())
        {
            if (rem.TryGetProperty("details", out var details))
            {
                actionStatement = details.GetString();
            }
            if (rem.TryGetProperty("date", out var date) &&
                date.ValueKind == JsonValueKind.String)
            {
                if (DateTimeOffset.TryParse(date.GetString(), out var parsed))
                {
                    actionTimestamp = parsed;
                }
            }
            break; // Take first remediation
        }
    }
    // Extract release date as timestamp
    DateTimeOffset? timestamp = null;
    if (vuln.TryGetProperty("release_date", out var releaseDate) &&
        releaseDate.ValueKind == JsonValueKind.String)
    {
        if (DateTimeOffset.TryParse(releaseDate.GetString(), out var parsed))
        {
            timestamp = parsed;
        }
    }
    return new NormalizedStatement(
        StatementId: $"stmt-{index}",
        VulnerabilityId: vulnerabilityId,
        VulnerabilityAliases: aliases.Count > 0 ? aliases : null,
        Product: product,
        Status: status,
        StatusNotes: statusNotes,
        Justification: justification,
        ImpactStatement: null,
        ActionStatement: actionStatement,
        ActionStatementTimestamp: actionTimestamp,
        Versions: null,
        Subcomponents: null,
        FirstSeen: timestamp,
        LastSeen: timestamp);
}
/// <summary>SHA-256 of the UTF-8 source bytes, rendered as "sha256:&lt;lowercase hex&gt;".</summary>
private static string ComputeDigest(string content)
{
    var bytes = Encoding.UTF8.GetBytes(content);
    var hex = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
    return $"sha256:{hex}";
}
}

View File

@@ -0,0 +1,632 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Normalization;
/// <summary>
/// Normalizer for CycloneDX VEX format documents.
/// CycloneDX VEX uses the vulnerabilities array in CycloneDX BOM format.
/// </summary>
public sealed class CycloneDxVexNormalizer : IVexNormalizer
{
public VexSourceFormat SourceFormat => VexSourceFormat.CycloneDxVex;
public bool CanNormalize(string content)
{
    if (string.IsNullOrWhiteSpace(content))
    {
        return false;
    }

    try
    {
        using var parsed = JsonDocument.Parse(content);
        var root = parsed.RootElement;

        // Must be a CycloneDX BOM...
        if (!root.TryGetProperty("bomFormat", out var bomFormat) ||
            !string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        // ...and carry at least one vulnerability entry to count as a VEX document.
        return root.TryGetProperty("vulnerabilities", out var vulnerabilities) &&
               vulnerabilities.ValueKind == JsonValueKind.Array &&
               vulnerabilities.GetArrayLength() > 0;
    }
    catch
    {
        // Unparseable input means "not ours".
        return false;
    }
}
/// <summary>
/// Normalizes a CycloneDX VEX BOM into a <see cref="NormalizedVexDocument"/>.
/// Never throws for bad input: failures are returned as NormalizationResult.Failed
/// with codes ERR_CDX_001 (invalid JSON) or ERR_CDX_999 (unexpected error).
/// </summary>
/// <param name="content">Raw CycloneDX JSON text.</param>
/// <param name="context">Caller-supplied normalization metadata (source URI, normalizer id, timestamp).</param>
/// <param name="cancellationToken">Accepted for interface symmetry; this implementation completes synchronously and does not observe it.</param>
public Task<NormalizationResult> NormalizeAsync(
    string content,
    NormalizationContext context,
    CancellationToken cancellationToken = default)
{
    var stopwatch = Stopwatch.StartNew();
    var warnings = new List<NormalizationWarning>();
    var statementsSkipped = 0;
    try
    {
        using var doc = JsonDocument.Parse(content);
        var root = doc.RootElement;
        // Extract document ID from serialNumber or metadata
        var documentId = ExtractDocumentId(root);
        if (string.IsNullOrWhiteSpace(documentId))
        {
            // Tolerated: synthesize an id so the document stays addressable.
            documentId = $"cyclonedx:{Guid.NewGuid():N}";
            warnings.Add(new NormalizationWarning(
                "WARN_CDX_001",
                "Serial number not found; generated a random ID",
                "serialNumber"));
        }
        // Extract issuer from metadata
        var issuer = ExtractIssuer(root, warnings);
        // Extract timestamps
        var (issuedAt, lastUpdatedAt) = ExtractTimestamps(root);
        // Build component lookup for product resolution
        var componentLookup = BuildComponentLookup(root);
        // Extract vulnerabilities and convert to statements
        var statements = ExtractStatements(root, componentLookup, warnings, ref statementsSkipped);
        // Calculate source digest
        var sourceDigest = ComputeDigest(content);
        // Build provenance
        var provenance = new NormalizationProvenance(
            NormalizedAt: context.NormalizedAt,
            Normalizer: context.Normalizer,
            SourceRevision: null,
            TransformationRules: ["cyclonedx-vex-to-normalized-v1"]);
        var normalizedDoc = new NormalizedVexDocument(
            SchemaVersion: NormalizedVexDocument.CurrentSchemaVersion,
            DocumentId: documentId,
            SourceFormat: VexSourceFormat.CycloneDxVex,
            SourceDigest: sourceDigest,
            SourceUri: context.SourceUri,
            Issuer: issuer,
            IssuedAt: issuedAt,
            LastUpdatedAt: lastUpdatedAt,
            Statements: statements,
            Provenance: provenance);
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Successful(
            normalizedDoc,
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: statements.Count,
                StatementsSkipped: statementsSkipped,
                ProductsMapped: statements.Count),
            warnings));
    }
    catch (JsonException ex)
    {
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Failed(
            [new NormalizationError("ERR_CDX_001", "Invalid JSON", ex.Path, ex)],
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: 0,
                StatementsSkipped: 0,
                ProductsMapped: 0),
            warnings));
    }
    catch (Exception ex)
    {
        // Catch-all keeps the "never throws" contract; the cause travels in the error.
        stopwatch.Stop();
        return Task.FromResult(NormalizationResult.Failed(
            [new NormalizationError("ERR_CDX_999", "Unexpected error during normalization", null, ex)],
            new NormalizationMetrics(
                Duration: stopwatch.Elapsed,
                SourceBytes: Encoding.UTF8.GetByteCount(content),
                StatementsExtracted: 0,
                StatementsSkipped: 0,
                ProductsMapped: 0),
            warnings));
    }
}
/// <summary>
/// Picks a document id: BOM serialNumber when present, otherwise the root
/// component's bom-ref (metadata.component.bom-ref), otherwise null.
/// </summary>
private static string? ExtractDocumentId(JsonElement root)
{
    if (root.TryGetProperty("serialNumber", out var serialNumber))
    {
        return serialNumber.GetString();
    }

    return root.TryGetProperty("metadata", out var metadata) &&
           metadata.TryGetProperty("component", out var component) &&
           component.TryGetProperty("bom-ref", out var bomRef)
        ? bomRef.GetString()
        : null;
}
/// <summary>
/// Derives the issuer from CycloneDX metadata, trying in order: the first named
/// author, then supplier, then manufacture. Warns (WARN_CDX_002/003) and returns
/// null when no usable identity is found.
/// </summary>
private static VexIssuer? ExtractIssuer(JsonElement root, List<NormalizationWarning> warnings)
{
    if (!root.TryGetProperty("metadata", out var metadata))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CDX_002",
            "No metadata found in document",
            "metadata"));
        return null;
    }
    // Try to extract from authors or supplier
    string? issuerId = null;
    string? issuerName = null;
    if (metadata.TryGetProperty("authors", out var authors) &&
        authors.ValueKind == JsonValueKind.Array)
    {
        foreach (var author in authors.EnumerateArray())
        {
            issuerName = author.TryGetProperty("name", out var name) ? name.GetString() : null;
            // Email doubles as a stable id when available; otherwise reuse the name.
            issuerId = author.TryGetProperty("email", out var email) ? email.GetString() : issuerName;
            if (!string.IsNullOrWhiteSpace(issuerName))
            {
                break;
            }
        }
    }
    if (string.IsNullOrWhiteSpace(issuerName) &&
        metadata.TryGetProperty("supplier", out var supplier))
    {
        issuerName = supplier.TryGetProperty("name", out var name) ? name.GetString() : null;
        // Supplier "url" may be either a single string or an array of strings.
        issuerId = supplier.TryGetProperty("url", out var url)
            ? url.ValueKind == JsonValueKind.Array
                ? url.EnumerateArray().FirstOrDefault().GetString()
                : url.GetString()
            : issuerName;
    }
    if (string.IsNullOrWhiteSpace(issuerName) &&
        metadata.TryGetProperty("manufacture", out var manufacture))
    {
        issuerName = manufacture.TryGetProperty("name", out var name) ? name.GetString() : null;
        issuerId = issuerName;
    }
    if (string.IsNullOrWhiteSpace(issuerName))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CDX_003",
            "No author/supplier found in metadata",
            "metadata.authors"));
        return null;
    }
    return new VexIssuer(
        Id: issuerId ?? "unknown",
        Name: issuerName ?? "unknown",
        Category: null,
        TrustTier: TrustTier.Unknown,
        KeyFingerprints: null);
}
/// <summary>
/// Reads the creation timestamp from metadata.timestamp. CycloneDX carries no
/// separate last-updated field, so LastUpdatedAt is always null.
/// </summary>
private static (DateTimeOffset? IssuedAt, DateTimeOffset? LastUpdatedAt) ExtractTimestamps(JsonElement root)
{
    if (root.TryGetProperty("metadata", out var metadata) &&
        metadata.TryGetProperty("timestamp", out var timestamp) &&
        timestamp.ValueKind == JsonValueKind.String &&
        DateTimeOffset.TryParse(timestamp.GetString(), out var parsed))
    {
        return (parsed, null);
    }

    return (null, null);
}
/// <summary>
/// Indexes all components by bom-ref (and purl) for resolving vulnerability
/// "affects" references. Keys compare case-insensitively.
/// </summary>
private static Dictionary<string, ComponentInfo> BuildComponentLookup(JsonElement root)
{
    var lookup = new Dictionary<string, ComponentInfo>(StringComparer.OrdinalIgnoreCase);

    // The BOM's own subject component (metadata.component) is addressable too.
    if (root.TryGetProperty("metadata", out var metadata) &&
        metadata.TryGetProperty("component", out var subject))
    {
        AddComponentToLookup(lookup, subject);
    }

    // Then index the full inventory, including nested components.
    if (root.TryGetProperty("components", out var components) &&
        components.ValueKind == JsonValueKind.Array)
    {
        AddComponentsRecursively(lookup, components);
    }

    return lookup;
}
/// <summary>Indexes every component in the array, descending into nested "components" arrays.</summary>
private static void AddComponentsRecursively(Dictionary<string, ComponentInfo> lookup, JsonElement components)
{
    foreach (var component in components.EnumerateArray())
    {
        AddComponentToLookup(lookup, component);

        // CycloneDX allows components to contain sub-components; index those too.
        if (component.TryGetProperty("components", out var children) &&
            children.ValueKind == JsonValueKind.Array)
        {
            AddComponentsRecursively(lookup, children);
        }
    }
}
/// <summary>
/// Extracts a component's identity and hashes and registers it under its bom-ref
/// and purl. The purl key never overwrites an existing entry (first wins);
/// the bom-ref key always wins for that ref.
/// </summary>
private static void AddComponentToLookup(Dictionary<string, ComponentInfo> lookup, JsonElement component)
{
    var bomRef = component.TryGetProperty("bom-ref", out var br) ? br.GetString() : null;
    var name = component.TryGetProperty("name", out var n) ? n.GetString() : null;
    var version = component.TryGetProperty("version", out var v) ? v.GetString() : null;
    var purl = component.TryGetProperty("purl", out var p) ? p.GetString() : null;
    var cpe = component.TryGetProperty("cpe", out var c) ? c.GetString() : null;
    // Extract hashes
    Dictionary<string, string>? hashes = null;
    if (component.TryGetProperty("hashes", out var hashArray) &&
        hashArray.ValueKind == JsonValueKind.Array)
    {
        hashes = [];
        foreach (var hash in hashArray.EnumerateArray())
        {
            var alg = hash.TryGetProperty("alg", out var a) ? a.GetString() : null;
            var content = hash.TryGetProperty("content", out var cont) ? cont.GetString() : null;
            if (!string.IsNullOrWhiteSpace(alg) && !string.IsNullOrWhiteSpace(content))
            {
                hashes[alg] = content;
            }
        }
        // Normalize an empty hash list to null so ComponentInfo stays consistent.
        if (hashes.Count == 0)
        {
            hashes = null;
        }
    }
    var info = new ComponentInfo(name, version, purl, cpe, hashes);
    if (!string.IsNullOrWhiteSpace(bomRef))
    {
        lookup[bomRef] = info;
    }
    if (!string.IsNullOrWhiteSpace(purl) && !lookup.ContainsKey(purl))
    {
        lookup[purl] = info;
    }
}
/// <summary>
/// Converts the CycloneDX vulnerabilities array into normalized statements,
/// warning (WARN_CDX_004) and returning empty when the array is missing.
/// </summary>
private static IReadOnlyList<NormalizedStatement> ExtractStatements(
    JsonElement root,
    Dictionary<string, ComponentInfo> componentLookup,
    List<NormalizationWarning> warnings,
    ref int skipped)
{
    if (!root.TryGetProperty("vulnerabilities", out var vulnerabilities) ||
        vulnerabilities.ValueKind != JsonValueKind.Array)
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CDX_004",
            "No vulnerabilities array found",
            "vulnerabilities"));
        return [];
    }
    var statements = new List<NormalizedStatement>();
    var index = 0;
    foreach (var vuln in vulnerabilities.EnumerateArray())
    {
        var vulnStatements = ExtractVulnerabilityStatements(
            vuln, componentLookup, index, warnings, ref skipped);
        statements.AddRange(vulnStatements);
        // Advance by at least 1 even for empty results, so statement ids still
        // reflect the vulnerability's position — differs from the CSAF variant.
        index += vulnStatements.Count > 0 ? vulnStatements.Count : 1;
    }
    return statements;
}
/// <summary>
/// Extracts normalized statements for a single CycloneDX vulnerability entry:
/// one statement per resolvable "affects" ref, all sharing the vulnerability's
/// status, justification, and timestamps.
/// </summary>
/// <param name="vuln">The vulnerability object from the "vulnerabilities" array.</param>
/// <param name="componentLookup">BOM components keyed by bom-ref, used to resolve "affects" refs.</param>
/// <param name="startIndex">Base index used to produce unique statement IDs.</param>
/// <param name="warnings">Collector for non-fatal normalization warnings.</param>
/// <param name="skipped">Incremented when the vulnerability yields no statements.</param>
private static List<NormalizedStatement> ExtractVulnerabilityStatements(
    JsonElement vuln,
    Dictionary<string, ComponentInfo> componentLookup,
    int startIndex,
    List<NormalizationWarning> warnings,
    ref int skipped)
{
    var statements = new List<NormalizedStatement>();
    // Extract vulnerability ID; without it the entry cannot be normalized.
    var vulnerabilityId = vuln.TryGetProperty("id", out var id) ? id.GetString() : null;
    if (string.IsNullOrWhiteSpace(vulnerabilityId))
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CDX_005",
            "Vulnerability missing ID; skipped",
            "vulnerabilities[].id"));
        skipped++;
        return statements;
    }
    // Collect aliases from any reference entry carrying an id
    // (the primary ID itself is excluded so aliases only hold alternates).
    var aliases = new List<string>();
    if (vuln.TryGetProperty("references", out var refs) &&
        refs.ValueKind == JsonValueKind.Array)
    {
        foreach (var reference in refs.EnumerateArray())
        {
            if (reference.TryGetProperty("id", out var refId))
            {
                var refIdStr = refId.GetString();
                if (!string.IsNullOrWhiteSpace(refIdStr) && refIdStr != vulnerabilityId)
                {
                    aliases.Add(refIdStr);
                }
            }
        }
    }
    // Extract affected components; a vulnerability with no affects array
    // produces no statements and counts as skipped.
    if (!vuln.TryGetProperty("affects", out var affects) ||
        affects.ValueKind != JsonValueKind.Array)
    {
        warnings.Add(new NormalizationWarning(
            "WARN_CDX_006",
            $"Vulnerability {vulnerabilityId} has no affects array",
            "vulnerabilities[].affects"));
        skipped++;
        return statements;
    }
    var localIndex = 0;
    foreach (var affect in affects.EnumerateArray())
    {
        var refStr = affect.TryGetProperty("ref", out var refProp) ? refProp.GetString() : null;
        if (string.IsNullOrWhiteSpace(refStr))
        {
            continue;
        }
        var product = ResolveProduct(refStr, componentLookup);
        if (product == null)
        {
            warnings.Add(new NormalizationWarning(
                "WARN_CDX_007",
                $"Could not resolve component ref '{refStr}'",
                "vulnerabilities[].affects[].ref"));
            continue;
        }
        // Extract analysis/status; defaults to under_investigation when the
        // analysis block is absent or its state is unknown.
        var status = VexStatus.UnderInvestigation;
        VexJustification? justification = null;
        string? statusNotes = null;
        string? actionStatement = null;
        if (vuln.TryGetProperty("analysis", out var analysis))
        {
            var stateStr = analysis.TryGetProperty("state", out var state) ? state.GetString() : null;
            status = MapAnalysisState(stateStr) ?? VexStatus.UnderInvestigation;
            var justificationStr = analysis.TryGetProperty("justification", out var just) ? just.GetString() : null;
            justification = MapJustification(justificationStr);
            statusNotes = analysis.TryGetProperty("detail", out var detail) ? detail.GetString() : null;
            // CycloneDX "response" is an array; join entries into one action statement.
            if (analysis.TryGetProperty("response", out var response) &&
                response.ValueKind == JsonValueKind.Array)
            {
                var responses = new List<string>();
                foreach (var r in response.EnumerateArray())
                {
                    var rStr = r.GetString();
                    if (!string.IsNullOrWhiteSpace(rStr))
                    {
                        responses.Add(rStr);
                    }
                }
                if (responses.Count > 0)
                {
                    actionStatement = string.Join(", ", responses);
                }
            }
        }
        // Extract timestamps: "created" feeds firstSeen; "updated" (falling back
        // to "published") feeds lastSeen.
        // NOTE(review): if "updated" exists but fails to parse, the "published"
        // fallback is never attempted — confirm that is intended.
        DateTimeOffset? firstSeen = null;
        DateTimeOffset? lastSeen = null;
        if (vuln.TryGetProperty("created", out var created) &&
            created.ValueKind == JsonValueKind.String)
        {
            if (DateTimeOffset.TryParse(created.GetString(), out var parsed))
            {
                firstSeen = parsed;
            }
        }
        if (vuln.TryGetProperty("updated", out var updated) &&
            updated.ValueKind == JsonValueKind.String)
        {
            if (DateTimeOffset.TryParse(updated.GetString(), out var parsed))
            {
                lastSeen = parsed;
            }
        }
        else if (vuln.TryGetProperty("published", out var published) &&
            published.ValueKind == JsonValueKind.String)
        {
            if (DateTimeOffset.TryParse(published.GetString(), out var parsed))
            {
                lastSeen = parsed;
            }
        }
        // Extract version ranges if specified on the affects entry.
        // NOTE(review): versions with status "unaffected" are surfaced through
        // the Fixed slot of VersionRange (Unaffected stays null) — verify that
        // downstream consumers expect this mapping.
        VersionRange? versions = null;
        if (affect.TryGetProperty("versions", out var versionsArray) &&
            versionsArray.ValueKind == JsonValueKind.Array)
        {
            var affectedVersions = new List<string>();
            var fixedVersions = new List<string>();
            foreach (var ver in versionsArray.EnumerateArray())
            {
                var verStr = ver.TryGetProperty("version", out var v) ? v.GetString() : null;
                var statusStr = ver.TryGetProperty("status", out var s) ? s.GetString() : null;
                if (!string.IsNullOrWhiteSpace(verStr))
                {
                    if (statusStr?.Equals("affected", StringComparison.OrdinalIgnoreCase) == true)
                    {
                        affectedVersions.Add(verStr);
                    }
                    else if (statusStr?.Equals("unaffected", StringComparison.OrdinalIgnoreCase) == true)
                    {
                        fixedVersions.Add(verStr);
                    }
                }
            }
            if (affectedVersions.Count > 0 || fixedVersions.Count > 0)
            {
                versions = new VersionRange(
                    Affected: affectedVersions.Count > 0 ? affectedVersions : null,
                    Fixed: fixedVersions.Count > 0 ? fixedVersions : null,
                    Unaffected: null);
            }
        }
        statements.Add(new NormalizedStatement(
            StatementId: $"stmt-{startIndex + localIndex}",
            VulnerabilityId: vulnerabilityId,
            VulnerabilityAliases: aliases.Count > 0 ? aliases : null,
            Product: product,
            Status: status,
            StatusNotes: statusNotes,
            Justification: justification,
            ImpactStatement: null,
            ActionStatement: actionStatement,
            ActionStatementTimestamp: null,
            Versions: versions,
            Subcomponents: null,
            FirstSeen: firstSeen,
            LastSeen: lastSeen ?? firstSeen));
        localIndex++;
    }
    // No affects entry resolved to a product: record the vulnerability as skipped.
    if (statements.Count == 0)
    {
        skipped++;
    }
    return statements;
}
/// <summary>
/// Resolves a CycloneDX "affects" ref to a normalized product. Known bom-refs
/// carry over the component's metadata; unknown refs yield a minimal product,
/// with refs shaped like package URLs recorded as purls.
/// </summary>
private static NormalizedProduct? ResolveProduct(string refStr, Dictionary<string, ComponentInfo> componentLookup)
{
    // Prefer the component metadata captured from the BOM when the ref is known.
    if (componentLookup.TryGetValue(refStr, out var info))
    {
        var key = info.Purl ?? refStr;
        return new NormalizedProduct(key, info.Name, info.Version, info.Purl, info.Cpe, info.Hashes);
    }

    // Unknown ref: synthesize a bare product, treating "pkg:" refs as purls.
    var looksLikePurl = refStr.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
    return new NormalizedProduct(
        Key: refStr,
        Name: null,
        Version: null,
        Purl: looksLikePurl ? refStr : null,
        Cpe: null,
        Hashes: null);
}
/// <summary>
/// Maps a CycloneDX impact-analysis state to the normalized VEX status.
/// Returns null for unknown states so the caller can apply its default.
/// </summary>
private static VexStatus? MapAnalysisState(string? state)
{
    return state?.ToLowerInvariant() switch
    {
        "not_affected" => VexStatus.NotAffected,
        "exploitable" => VexStatus.Affected,
        // Per the CycloneDX spec, "in_triage" means the analysis is still in
        // progress — it maps to under_investigation, not affected.
        "in_triage" => VexStatus.UnderInvestigation,
        "resolved" or "resolved_with_pedigree" => VexStatus.Fixed,
        "false_positive" => VexStatus.NotAffected,
        _ => null
    };
}
/// <summary>
/// Maps a CycloneDX analysis justification to the normalized VEX justification.
/// Returns null when the value is missing or unrecognized.
/// </summary>
/// <remarks>
/// NOTE(review): several CycloneDX justifications have no exact normalized
/// counterpart, so this mapping is lossy — e.g. "requires_configuration" and
/// "requires_environment" both collapse to
/// VulnerableCodeCannotBeControlledByAdversary, and "requires_dependency"
/// reuses ComponentNotPresent. Confirm these approximations match what the
/// consensus engine expects.
/// </remarks>
private static VexJustification? MapJustification(string? justification)
{
    return justification?.ToLowerInvariant() switch
    {
        "code_not_present" => VexJustification.ComponentNotPresent,
        "code_not_reachable" => VexJustification.VulnerableCodeNotInExecutePath,
        "requires_configuration" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
        "requires_dependency" => VexJustification.ComponentNotPresent,
        "requires_environment" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
        "protected_by_compiler" or "protected_by_mitigating_control" or "protected_at_runtime" or "protected_at_perimeter" =>
            VexJustification.InlineMitigationsAlreadyExist,
        _ => null
    };
}
/// <summary>
/// Computes the document digest: SHA-256 over the UTF-8 bytes of the content,
/// rendered as "sha256:" followed by lowercase hex.
/// </summary>
private static string ComputeDigest(string content)
{
    var payload = Encoding.UTF8.GetBytes(content);
    var digest = SHA256.HashData(payload);
    var hex = Convert.ToHexString(digest).ToLowerInvariant();
    return $"sha256:{hex}";
}
/// <summary>
/// Component metadata captured from the BOM, keyed elsewhere by bom-ref and
/// used to resolve vulnerability "affects" references into products.
/// All fields are optional because CycloneDX components may omit any of them.
/// </summary>
private sealed record ComponentInfo(
    string? Name,
    string? Version,
    string? Purl,
    string? Cpe,
    IReadOnlyDictionary<string, string>? Hashes);
}

View File

@@ -0,0 +1,164 @@
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Normalization;
/// <summary>
/// Interface for VEX document normalizers.
/// Each normalizer handles a specific source format (OpenVEX, CSAF, CycloneDX, etc.)
/// </summary>
public interface IVexNormalizer
{
    /// <summary>
    /// Gets the source format this normalizer handles.
    /// </summary>
    VexSourceFormat SourceFormat { get; }
    /// <summary>
    /// Checks if this normalizer can handle the given document.
    /// Implementations should be cheap and should not throw on malformed input.
    /// </summary>
    /// <param name="content">Raw document content.</param>
    bool CanNormalize(string content);
    /// <summary>
    /// Normalizes a VEX document to the standard format.
    /// </summary>
    /// <param name="content">Raw document content.</param>
    /// <param name="context">Run metadata recorded in the output provenance.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The normalization outcome, including errors, warnings, and metrics.</returns>
    Task<NormalizationResult> NormalizeAsync(
        string content,
        NormalizationContext context,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for normalization operation.
/// </summary>
/// <param name="SourceUri">URI the document was obtained from, if known.</param>
/// <param name="NormalizedAt">Timestamp recorded in the output provenance.</param>
/// <param name="Normalizer">Identifier of the normalizer component performing the run.</param>
/// <param name="Options">Optional normalizer-specific settings.</param>
public sealed record NormalizationContext(
    string? SourceUri,
    DateTimeOffset NormalizedAt,
    string Normalizer,
    IReadOnlyDictionary<string, object?>? Options);
/// <summary>
/// Result of a normalization operation. Use the <see cref="Successful"/> and
/// <see cref="Failed"/> factories rather than the constructor directly.
/// </summary>
public sealed record NormalizationResult(
    bool Success,
    NormalizedVexDocument? Document,
    IReadOnlyList<NormalizationError> Errors,
    IReadOnlyList<NormalizationWarning> Warnings,
    NormalizationMetrics Metrics)
{
    /// <summary>Builds a successful result with a document and no errors.</summary>
    public static NormalizationResult Successful(
        NormalizedVexDocument document,
        NormalizationMetrics metrics,
        IEnumerable<NormalizationWarning>? warnings = null) =>
        new(
            Success: true,
            Document: document,
            Errors: [],
            Warnings: warnings?.ToList() ?? [],
            Metrics: metrics);

    /// <summary>Builds a failed result carrying the given errors and no document.</summary>
    public static NormalizationResult Failed(
        IEnumerable<NormalizationError> errors,
        NormalizationMetrics metrics,
        IEnumerable<NormalizationWarning>? warnings = null) =>
        new(
            Success: false,
            Document: null,
            Errors: errors.ToList(),
            Warnings: warnings?.ToList() ?? [],
            Metrics: metrics);
}
/// <summary>
/// Error during normalization.
/// </summary>
/// <param name="Code">Stable machine-readable code (e.g. "ERR_OPENVEX_001").</param>
/// <param name="Message">Human-readable description of the failure.</param>
/// <param name="Path">Location of the offending element in the source document, when known.</param>
/// <param name="Exception">Underlying exception, when one was caught.</param>
public sealed record NormalizationError(
    string Code,
    string Message,
    string? Path,
    Exception? Exception);
/// <summary>
/// Warning during normalization. Warnings are non-fatal: the document is still
/// produced, but some data was missing, unresolvable, or approximated.
/// </summary>
/// <param name="Code">Stable machine-readable code (e.g. "WARN_CDX_005").</param>
/// <param name="Message">Human-readable description of the issue.</param>
/// <param name="Path">Location of the affected element in the source document, when known.</param>
public sealed record NormalizationWarning(
    string Code,
    string Message,
    string? Path);
/// <summary>
/// Metrics from normalization operation.
/// </summary>
/// <param name="Duration">Wall-clock time spent normalizing.</param>
/// <param name="SourceBytes">Size of the source document in UTF-8 bytes.</param>
/// <param name="StatementsExtracted">Number of statements successfully extracted.</param>
/// <param name="StatementsSkipped">Number of statements skipped due to missing or invalid data.</param>
/// <param name="ProductsMapped">Number of products mapped from the source document.</param>
public sealed record NormalizationMetrics(
    TimeSpan Duration,
    int SourceBytes,
    int StatementsExtracted,
    int StatementsSkipped,
    int ProductsMapped);
/// <summary>
/// Registry for VEX normalizers.
/// </summary>
public interface IVexNormalizerRegistry
{
    /// <summary>
    /// Gets all registered normalizers.
    /// </summary>
    IReadOnlyList<IVexNormalizer> Normalizers { get; }
    /// <summary>
    /// Gets the normalizer for a specific source format.
    /// </summary>
    /// <returns>The registered normalizer, or null when none handles the format.</returns>
    IVexNormalizer? GetNormalizer(VexSourceFormat format);
    /// <summary>
    /// Detects the format and returns the appropriate normalizer.
    /// </summary>
    /// <returns>The first normalizer whose <see cref="IVexNormalizer.CanNormalize"/> accepts the content, or null.</returns>
    IVexNormalizer? DetectNormalizer(string content);
    /// <summary>
    /// Registers a normalizer.
    /// </summary>
    void Register(IVexNormalizer normalizer);
}
/// <summary>
/// Default implementation of the normalizer registry.
/// Not synchronized; register all normalizers during startup before use.
/// </summary>
public sealed class VexNormalizerRegistry : IVexNormalizerRegistry
{
    private readonly Dictionary<VexSourceFormat, IVexNormalizer> _normalizers = [];
    // Preserves registration order, which drives format-detection priority.
    private readonly List<IVexNormalizer> _orderedNormalizers = [];

    /// <inheritdoc />
    public IReadOnlyList<IVexNormalizer> Normalizers => _orderedNormalizers;

    /// <inheritdoc />
    public IVexNormalizer? GetNormalizer(VexSourceFormat format)
    {
        return _normalizers.GetValueOrDefault(format);
    }

    /// <inheritdoc />
    public IVexNormalizer? DetectNormalizer(string content)
    {
        foreach (var normalizer in _orderedNormalizers)
        {
            if (normalizer.CanNormalize(content))
            {
                return normalizer;
            }
        }
        return null;
    }

    /// <inheritdoc />
    public void Register(IVexNormalizer normalizer)
    {
        ArgumentNullException.ThrowIfNull(normalizer);

        // Re-registering a format replaces the previous normalizer. Remove the
        // stale instance from the ordered list as well; otherwise the dictionary
        // and the list would diverge and DetectNormalizer could keep returning a
        // normalizer that GetNormalizer no longer knows about.
        if (_normalizers.TryGetValue(normalizer.SourceFormat, out var existing))
        {
            _orderedNormalizers.Remove(existing);
        }

        _normalizers[normalizer.SourceFormat] = normalizer;
        _orderedNormalizers.Add(normalizer);
    }
}

View File

@@ -0,0 +1,479 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Normalization;
/// <summary>
/// Normalizer for OpenVEX format documents.
/// </summary>
public sealed class OpenVexNormalizer : IVexNormalizer
{
    public VexSourceFormat SourceFormat => VexSourceFormat.OpenVex;

    /// <summary>
    /// Returns true when the content parses as JSON and its "@context" string
    /// mentions "openvex". Never throws on malformed input.
    /// </summary>
    public bool CanNormalize(string content)
    {
        if (string.IsNullOrWhiteSpace(content))
        {
            return false;
        }
        try
        {
            using var doc = JsonDocument.Parse(content);
            var root = doc.RootElement;
            // OpenVEX documents have @context with openvex
            if (root.TryGetProperty("@context", out var context))
            {
                var contextStr = context.GetString();
                return contextStr?.Contains("openvex", StringComparison.OrdinalIgnoreCase) == true;
            }
            return false;
        }
        catch
        {
            // Not JSON, or @context is not a string: this normalizer cannot handle it.
            return false;
        }
    }

    /// <summary>
    /// Normalizes an OpenVEX document into the standard representation.
    /// Failures are reported through the result rather than by throwing.
    /// </summary>
    public Task<NormalizationResult> NormalizeAsync(
        string content,
        NormalizationContext context,
        CancellationToken cancellationToken = default)
    {
        var stopwatch = Stopwatch.StartNew();
        var warnings = new List<NormalizationWarning>();
        var statementsSkipped = 0;
        try
        {
            using var doc = JsonDocument.Parse(content);
            var root = doc.RootElement;

            // Extract document ID; fall back to a generated one with a warning.
            var documentId = ExtractDocumentId(root);
            if (string.IsNullOrWhiteSpace(documentId))
            {
                documentId = $"openvex:{Guid.NewGuid():N}";
                warnings.Add(new NormalizationWarning(
                    "WARN_OPENVEX_001",
                    "Document ID not found; generated a random ID",
                    "@id"));
            }

            // Extract issuer
            var issuer = ExtractIssuer(root, warnings);

            // Extract timestamps
            var issuedAt = ExtractTimestamp(root, "timestamp");
            var lastUpdatedAt = ExtractTimestamp(root, "last_updated");

            // Extract statements
            var statements = ExtractStatements(root, warnings, ref statementsSkipped);

            // Calculate source digest
            var sourceDigest = ComputeDigest(content);

            // Build provenance
            var provenance = new NormalizationProvenance(
                NormalizedAt: context.NormalizedAt,
                Normalizer: context.Normalizer,
                SourceRevision: null,
                TransformationRules: ["openvex-to-normalized-v1"]);

            var normalizedDoc = new NormalizedVexDocument(
                SchemaVersion: NormalizedVexDocument.CurrentSchemaVersion,
                DocumentId: documentId,
                SourceFormat: VexSourceFormat.OpenVex,
                SourceDigest: sourceDigest,
                SourceUri: context.SourceUri,
                Issuer: issuer,
                IssuedAt: issuedAt,
                LastUpdatedAt: lastUpdatedAt,
                Statements: statements,
                Provenance: provenance);

            stopwatch.Stop();
            return Task.FromResult(NormalizationResult.Successful(
                normalizedDoc,
                new NormalizationMetrics(
                    Duration: stopwatch.Elapsed,
                    SourceBytes: Encoding.UTF8.GetByteCount(content),
                    StatementsExtracted: statements.Count,
                    StatementsSkipped: statementsSkipped,
                    ProductsMapped: statements.Count),
                warnings));
        }
        catch (JsonException ex)
        {
            stopwatch.Stop();
            return Task.FromResult(NormalizationResult.Failed(
                [new NormalizationError("ERR_OPENVEX_001", "Invalid JSON", ex.Path, ex)],
                new NormalizationMetrics(
                    Duration: stopwatch.Elapsed,
                    SourceBytes: Encoding.UTF8.GetByteCount(content),
                    StatementsExtracted: 0,
                    StatementsSkipped: 0,
                    ProductsMapped: 0),
                warnings));
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return Task.FromResult(NormalizationResult.Failed(
                [new NormalizationError("ERR_OPENVEX_999", "Unexpected error during normalization", null, ex)],
                new NormalizationMetrics(
                    Duration: stopwatch.Elapsed,
                    SourceBytes: Encoding.UTF8.GetByteCount(content),
                    StatementsExtracted: 0,
                    StatementsSkipped: 0,
                    ProductsMapped: 0),
                warnings));
        }
    }

    /// <summary>Reads the document "@id", if present.</summary>
    private static string? ExtractDocumentId(JsonElement root)
    {
        if (root.TryGetProperty("@id", out var id))
        {
            return id.GetString();
        }
        return null;
    }

    /// <summary>
    /// Extracts the issuer from the document "author". Handles both the object
    /// form ({ "@id", "name", "role" }) and the plain-string form used by the
    /// OpenVEX spec, where "role" is a sibling field at the document root.
    /// (Previously the string form threw inside TryGetProperty and failed the
    /// whole document with ERR_OPENVEX_999.)
    /// </summary>
    private static VexIssuer? ExtractIssuer(JsonElement root, List<NormalizationWarning> warnings)
    {
        if (!root.TryGetProperty("author", out var author))
        {
            warnings.Add(new NormalizationWarning(
                "WARN_OPENVEX_002",
                "No author/issuer found in document",
                "author"));
            return null;
        }

        string issuerId;
        string issuerName;
        string? role;
        if (author.ValueKind == JsonValueKind.String)
        {
            // Spec-conformant OpenVEX: author is a plain string, role sits at the root.
            issuerName = author.GetString() ?? "unknown";
            issuerId = issuerName;
            role = root.TryGetProperty("role", out var rootRole) ? rootRole.GetString() : null;
        }
        else
        {
            issuerId = author.TryGetProperty("@id", out var idProp)
                ? idProp.GetString() ?? "unknown"
                : "unknown";
            issuerName = author.TryGetProperty("name", out var nameProp)
                ? nameProp.GetString() ?? issuerId
                : issuerId;
            role = author.TryGetProperty("role", out var roleProp)
                ? roleProp.GetString()
                : null;
        }

        var category = MapRoleToCategory(role);
        return new VexIssuer(
            Id: issuerId,
            Name: issuerName,
            Category: category,
            TrustTier: TrustTier.Unknown,
            KeyFingerprints: null);
    }

    /// <summary>Maps an author role string to an issuer category; null when unrecognized.</summary>
    private static IssuerCategory? MapRoleToCategory(string? role)
    {
        return role?.ToLowerInvariant() switch
        {
            "vendor" => IssuerCategory.Vendor,
            "distributor" => IssuerCategory.Distributor,
            "maintainer" or "community" => IssuerCategory.Community,
            "aggregator" => IssuerCategory.Aggregator,
            _ => null
        };
    }

    /// <summary>Parses a string property of <paramref name="root"/> as a timestamp; null when absent or unparseable.</summary>
    private static DateTimeOffset? ExtractTimestamp(JsonElement root, string propertyName)
    {
        if (root.TryGetProperty(propertyName, out var prop) &&
            prop.ValueKind == JsonValueKind.String)
        {
            var str = prop.GetString();
            if (DateTimeOffset.TryParse(str, out var result))
            {
                return result;
            }
        }
        return null;
    }

    /// <summary>
    /// Extracts all statements from the document's "statements" array,
    /// skipping (and counting) entries that cannot be normalized.
    /// </summary>
    private static IReadOnlyList<NormalizedStatement> ExtractStatements(
        JsonElement root,
        List<NormalizationWarning> warnings,
        ref int skipped)
    {
        if (!root.TryGetProperty("statements", out var statementsArray) ||
            statementsArray.ValueKind != JsonValueKind.Array)
        {
            warnings.Add(new NormalizationWarning(
                "WARN_OPENVEX_003",
                "No statements array found",
                "statements"));
            return [];
        }
        var statements = new List<NormalizedStatement>();
        var index = 0;
        foreach (var stmt in statementsArray.EnumerateArray())
        {
            var statement = ExtractStatement(stmt, index, warnings, ref skipped);
            if (statement != null)
            {
                statements.Add(statement);
            }
            index++;
        }
        return statements;
    }

    /// <summary>
    /// Normalizes a single OpenVEX statement. Returns null (and increments
    /// <paramref name="skipped"/>) when the vulnerability ID or products are missing.
    /// </summary>
    private static NormalizedStatement? ExtractStatement(
        JsonElement stmt,
        int index,
        List<NormalizationWarning> warnings,
        ref int skipped)
    {
        // Extract vulnerability: either a bare string or an object with @id/name/aliases.
        string? vulnerabilityId = null;
        var aliases = new List<string>();
        if (stmt.TryGetProperty("vulnerability", out var vuln))
        {
            if (vuln.ValueKind == JsonValueKind.String)
            {
                vulnerabilityId = vuln.GetString();
            }
            else if (vuln.ValueKind == JsonValueKind.Object)
            {
                vulnerabilityId = vuln.TryGetProperty("@id", out var vulnId)
                    ? vulnId.GetString()
                    : vuln.TryGetProperty("name", out var vulnName)
                        ? vulnName.GetString()
                        : null;
                if (vuln.TryGetProperty("aliases", out var aliasArray) &&
                    aliasArray.ValueKind == JsonValueKind.Array)
                {
                    foreach (var alias in aliasArray.EnumerateArray())
                    {
                        if (alias.ValueKind == JsonValueKind.String)
                        {
                            var aliasStr = alias.GetString();
                            if (!string.IsNullOrWhiteSpace(aliasStr))
                            {
                                aliases.Add(aliasStr);
                            }
                        }
                    }
                }
            }
        }
        if (string.IsNullOrWhiteSpace(vulnerabilityId))
        {
            warnings.Add(new NormalizationWarning(
                "WARN_OPENVEX_004",
                "Statement missing vulnerability ID; skipped",
                $"statements[{index}].vulnerability"));
            skipped++;
            return null;
        }

        // Extract products
        var products = new List<NormalizedProduct>();
        if (stmt.TryGetProperty("products", out var productsArray) &&
            productsArray.ValueKind == JsonValueKind.Array)
        {
            foreach (var prod in productsArray.EnumerateArray())
            {
                var product = ExtractProduct(prod);
                if (product != null)
                {
                    products.Add(product);
                }
            }
        }
        if (products.Count == 0)
        {
            warnings.Add(new NormalizationWarning(
                "WARN_OPENVEX_005",
                "Statement has no valid products; skipped",
                $"statements[{index}].products"));
            skipped++;
            return null;
        }

        // Extract status; unknown values default to under_investigation with a warning.
        var statusStr = stmt.TryGetProperty("status", out var statusProp)
            ? statusProp.GetString()
            : null;
        var status = MapStatus(statusStr);
        if (!status.HasValue)
        {
            warnings.Add(new NormalizationWarning(
                "WARN_OPENVEX_006",
                $"Unknown status '{statusStr}'; defaulting to under_investigation",
                $"statements[{index}].status"));
            status = VexStatus.UnderInvestigation;
        }

        // Extract justification
        var justificationStr = stmt.TryGetProperty("justification", out var justProp)
            ? justProp.GetString()
            : null;
        var justification = MapJustification(justificationStr);

        // Extract other fields
        var statusNotes = stmt.TryGetProperty("status_notes", out var notesProp)
            ? notesProp.GetString()
            : null;
        var impactStatement = stmt.TryGetProperty("impact_statement", out var impactProp)
            ? impactProp.GetString()
            : null;
        var actionStatement = stmt.TryGetProperty("action_statement", out var actionProp)
            ? actionProp.GetString()
            : null;
        var actionTimestamp = stmt.TryGetProperty("action_statement_timestamp", out var actionTsProp)
            ? ExtractTimestamp(actionTsProp)
            : null;
        var timestamp = ExtractTimestamp(stmt, "timestamp");

        // One normalized statement per OpenVEX statement: the first product is
        // primary, any remaining products are carried as subcomponents.
        // NOTE(review): OpenVEX applies the status to every listed product
        // equally — confirm the primary/subcomponent split is intended.
        var primaryProduct = products[0];
        var subcomponents = products.Count > 1 ? products.Skip(1).ToList() : null;
        return new NormalizedStatement(
            StatementId: $"stmt-{index}",
            VulnerabilityId: vulnerabilityId,
            VulnerabilityAliases: aliases.Count > 0 ? aliases : null,
            Product: primaryProduct,
            Status: status.Value,
            StatusNotes: statusNotes,
            Justification: justification,
            ImpactStatement: impactStatement,
            ActionStatement: actionStatement,
            ActionStatementTimestamp: actionTimestamp,
            Versions: null,
            Subcomponents: subcomponents,
            FirstSeen: timestamp,
            LastSeen: timestamp);
    }

    /// <summary>
    /// Extracts a product from either a bare identifier string (purl/cpe/other)
    /// or a product object with @id/name/version/identifiers.
    /// Returns null when no usable identifier is present.
    /// </summary>
    private static NormalizedProduct? ExtractProduct(JsonElement prod)
    {
        string? key = null;
        string? name = null;
        string? version = null;
        string? purl = null;
        string? cpe = null;
        if (prod.ValueKind == JsonValueKind.String)
        {
            key = prod.GetString();
            if (key?.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) == true)
            {
                purl = key;
            }
            else if (key?.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase) == true)
            {
                cpe = key;
            }
        }
        else if (prod.ValueKind == JsonValueKind.Object)
        {
            key = prod.TryGetProperty("@id", out var idProp) ? idProp.GetString() : null;
            name = prod.TryGetProperty("name", out var nameProp) ? nameProp.GetString() : null;
            version = prod.TryGetProperty("version", out var versionProp) ? versionProp.GetString() : null;
            if (prod.TryGetProperty("identifiers", out var identifiers) &&
                identifiers.ValueKind == JsonValueKind.Object)
            {
                purl = identifiers.TryGetProperty("purl", out var purlProp) ? purlProp.GetString() : null;
                cpe = identifiers.TryGetProperty("cpe23", out var cpeProp) ? cpeProp.GetString() : null;
            }
            // Some producers put "purl" directly on the product object.
            if (string.IsNullOrWhiteSpace(purl) &&
                prod.TryGetProperty("purl", out var directPurl))
            {
                purl = directPurl.GetString();
            }
        }
        if (string.IsNullOrWhiteSpace(key) && string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }
        return new NormalizedProduct(
            Key: key ?? purl ?? cpe ?? $"unknown-{Guid.NewGuid():N}",
            Name: name,
            Version: version,
            Purl: purl,
            Cpe: cpe,
            Hashes: null);
    }

    /// <summary>Maps an OpenVEX status string to the normalized status; null when unrecognized.</summary>
    private static VexStatus? MapStatus(string? status)
    {
        return status?.ToLowerInvariant() switch
        {
            "not_affected" => VexStatus.NotAffected,
            "affected" => VexStatus.Affected,
            "fixed" => VexStatus.Fixed,
            "under_investigation" => VexStatus.UnderInvestigation,
            _ => null
        };
    }

    /// <summary>Maps an OpenVEX justification string to the normalized justification; null when unrecognized.</summary>
    private static VexJustification? MapJustification(string? justification)
    {
        return justification?.ToLowerInvariant() switch
        {
            "component_not_present" => VexJustification.ComponentNotPresent,
            "vulnerable_code_not_present" => VexJustification.VulnerableCodeNotPresent,
            "vulnerable_code_not_in_execute_path" => VexJustification.VulnerableCodeNotInExecutePath,
            "vulnerable_code_cannot_be_controlled_by_adversary" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
            "inline_mitigations_already_exist" => VexJustification.InlineMitigationsAlreadyExist,
            _ => null
        };
    }

    /// <summary>Parses a JSON string element as a timestamp; null when not a string or unparseable.</summary>
    private static DateTimeOffset? ExtractTimestamp(JsonElement element)
    {
        if (element.ValueKind == JsonValueKind.String)
        {
            var str = element.GetString();
            if (DateTimeOffset.TryParse(str, out var result))
            {
                return result;
            }
        }
        return null;
    }

    /// <summary>SHA-256 of the UTF-8 content, as "sha256:" plus lowercase hex.</summary>
    private static string ComputeDigest(string content)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -0,0 +1,452 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Observability;
/// <summary>
/// Metrics for VexLens operations. Creates (or obtains from the supplied
/// <see cref="IMeterFactory"/>) the "StellaOps.VexLens" meter and exposes
/// typed record methods for each subsystem.
/// </summary>
public sealed class VexLensMetrics : IDisposable
{
    private readonly Meter _meter;
    // True only when this instance created the Meter itself. Meters obtained
    // from IMeterFactory are owned and disposed by the factory; disposing them
    // here could tear down instrumentation shared with other consumers.
    private readonly bool _ownsMeter;
    // Normalization metrics
    private readonly Counter<long> _documentsNormalized;
    private readonly Counter<long> _normalizationErrors;
    private readonly Histogram<double> _normalizationDuration;
    private readonly Counter<long> _statementsExtracted;
    private readonly Counter<long> _statementsSkipped;
    // Product mapping metrics
    private readonly Counter<long> _productsMapped;
    private readonly Counter<long> _productMappingErrors;
    private readonly Histogram<double> _productMappingDuration;
    // Signature verification metrics
    private readonly Counter<long> _signaturesVerified;
    private readonly Counter<long> _signatureVerificationFailures;
    private readonly Histogram<double> _signatureVerificationDuration;
    // Trust weight metrics
    private readonly Counter<long> _trustWeightsComputed;
    private readonly Histogram<double> _trustWeightValue;
    private readonly Histogram<double> _trustWeightComputationDuration;
    // Consensus metrics
    private readonly Counter<long> _consensusComputed;
    private readonly Counter<long> _consensusConflicts;
    private readonly Histogram<double> _consensusConfidence;
    private readonly Histogram<double> _consensusComputationDuration;
    private readonly Counter<long> _statusChanges;
    // Projection metrics
    private readonly Counter<long> _projectionsStored;
    private readonly Counter<long> _projectionsQueried;
    private readonly Histogram<double> _projectionQueryDuration;
    // Issuer directory metrics
    private readonly Counter<long> _issuersRegistered;
    private readonly Counter<long> _issuersRevoked;
    private readonly Counter<long> _keysRegistered;
    private readonly Counter<long> _keysRevoked;
    private readonly Counter<long> _trustValidations;

    public VexLensMetrics(IMeterFactory? meterFactory = null)
    {
        _ownsMeter = meterFactory is null;
        _meter = meterFactory?.Create("StellaOps.VexLens") ?? new Meter("StellaOps.VexLens", "1.0.0");
        // Normalization
        _documentsNormalized = _meter.CreateCounter<long>(
            "vexlens.normalization.documents_total",
            "documents",
            "Total number of VEX documents normalized");
        _normalizationErrors = _meter.CreateCounter<long>(
            "vexlens.normalization.errors_total",
            "errors",
            "Total number of normalization errors");
        _normalizationDuration = _meter.CreateHistogram<double>(
            "vexlens.normalization.duration_seconds",
            "s",
            "Duration of normalization operations");
        _statementsExtracted = _meter.CreateCounter<long>(
            "vexlens.normalization.statements_extracted_total",
            "statements",
            "Total number of statements extracted during normalization");
        _statementsSkipped = _meter.CreateCounter<long>(
            "vexlens.normalization.statements_skipped_total",
            "statements",
            "Total number of statements skipped during normalization");
        // Product mapping
        _productsMapped = _meter.CreateCounter<long>(
            "vexlens.product_mapping.products_total",
            "products",
            "Total number of products mapped");
        _productMappingErrors = _meter.CreateCounter<long>(
            "vexlens.product_mapping.errors_total",
            "errors",
            "Total number of product mapping errors");
        _productMappingDuration = _meter.CreateHistogram<double>(
            "vexlens.product_mapping.duration_seconds",
            "s",
            "Duration of product mapping operations");
        // Signature verification
        _signaturesVerified = _meter.CreateCounter<long>(
            "vexlens.signature.verified_total",
            "signatures",
            "Total number of signatures verified");
        _signatureVerificationFailures = _meter.CreateCounter<long>(
            "vexlens.signature.failures_total",
            "failures",
            "Total number of signature verification failures");
        _signatureVerificationDuration = _meter.CreateHistogram<double>(
            "vexlens.signature.duration_seconds",
            "s",
            "Duration of signature verification operations");
        // Trust weight
        _trustWeightsComputed = _meter.CreateCounter<long>(
            "vexlens.trust.weights_computed_total",
            "computations",
            "Total number of trust weights computed");
        _trustWeightValue = _meter.CreateHistogram<double>(
            "vexlens.trust.weight_value",
            "{weight}",
            "Distribution of computed trust weight values");
        _trustWeightComputationDuration = _meter.CreateHistogram<double>(
            "vexlens.trust.computation_duration_seconds",
            "s",
            "Duration of trust weight computation");
        // Consensus
        _consensusComputed = _meter.CreateCounter<long>(
            "vexlens.consensus.computed_total",
            "computations",
            "Total number of consensus computations");
        _consensusConflicts = _meter.CreateCounter<long>(
            "vexlens.consensus.conflicts_total",
            "conflicts",
            "Total number of conflicts detected during consensus");
        _consensusConfidence = _meter.CreateHistogram<double>(
            "vexlens.consensus.confidence",
            "{confidence}",
            "Distribution of consensus confidence scores");
        _consensusComputationDuration = _meter.CreateHistogram<double>(
            "vexlens.consensus.duration_seconds",
            "s",
            "Duration of consensus computation");
        _statusChanges = _meter.CreateCounter<long>(
            "vexlens.consensus.status_changes_total",
            "changes",
            "Total number of status changes detected");
        // Projections
        _projectionsStored = _meter.CreateCounter<long>(
            "vexlens.projection.stored_total",
            "projections",
            "Total number of projections stored");
        _projectionsQueried = _meter.CreateCounter<long>(
            "vexlens.projection.queries_total",
            "queries",
            "Total number of projection queries");
        _projectionQueryDuration = _meter.CreateHistogram<double>(
            "vexlens.projection.query_duration_seconds",
            "s",
            "Duration of projection queries");
        // Issuer directory
        _issuersRegistered = _meter.CreateCounter<long>(
            "vexlens.issuer.registered_total",
            "issuers",
            "Total number of issuers registered");
        _issuersRevoked = _meter.CreateCounter<long>(
            "vexlens.issuer.revoked_total",
            "issuers",
            "Total number of issuers revoked");
        _keysRegistered = _meter.CreateCounter<long>(
            "vexlens.issuer.keys_registered_total",
            "keys",
            "Total number of keys registered");
        _keysRevoked = _meter.CreateCounter<long>(
            "vexlens.issuer.keys_revoked_total",
            "keys",
            "Total number of keys revoked");
        _trustValidations = _meter.CreateCounter<long>(
            "vexlens.issuer.trust_validations_total",
            "validations",
            "Total number of trust validations");
    }

    /// <summary>Records one normalization run, including extracted/skipped statement counts.</summary>
    public void RecordNormalization(VexSourceFormat format, bool success, TimeSpan duration, int statementsExtracted, int statementsSkipped)
    {
        var tags = new TagList { { "format", format.ToString() }, { "success", success.ToString() } };
        _documentsNormalized.Add(1, tags);
        _normalizationDuration.Record(duration.TotalSeconds, tags);
        _statementsExtracted.Add(statementsExtracted, tags);
        _statementsSkipped.Add(statementsSkipped, tags);
        if (!success)
        {
            _normalizationErrors.Add(1, tags);
        }
    }

    /// <summary>Records one product-mapping attempt, tagged with the ecosystem when known.</summary>
    public void RecordProductMapping(bool success, TimeSpan duration, string? ecosystem = null)
    {
        var tags = new TagList { { "success", success.ToString() } };
        if (ecosystem != null) tags.Add("ecosystem", ecosystem);
        _productsMapped.Add(1, tags);
        _productMappingDuration.Record(duration.TotalSeconds, tags);
        if (!success)
        {
            _productMappingErrors.Add(1, tags);
        }
    }

    /// <summary>Records one signature verification attempt; invalid results also count as failures.</summary>
    public void RecordSignatureVerification(string format, bool valid, TimeSpan duration)
    {
        var tags = new TagList { { "format", format }, { "valid", valid.ToString() } };
        _signaturesVerified.Add(1, tags);
        _signatureVerificationDuration.Record(duration.TotalSeconds, tags);
        if (!valid)
        {
            _signatureVerificationFailures.Add(1, tags);
        }
    }

    /// <summary>Records one trust-weight computation and its resulting weight.</summary>
    public void RecordTrustWeightComputation(double weight, TimeSpan duration, string? issuerCategory = null)
    {
        var tags = new TagList();
        if (issuerCategory != null) tags.Add("issuer_category", issuerCategory);
        _trustWeightsComputed.Add(1, tags);
        _trustWeightValue.Record(weight, tags);
        _trustWeightComputationDuration.Record(duration.TotalSeconds, tags);
    }

    /// <summary>Records one consensus computation, plus any conflicts and status changes it produced.</summary>
    public void RecordConsensusComputation(
        VexStatus status,
        ConsensusOutcome outcome,
        double confidence,
        int conflictCount,
        bool statusChanged,
        TimeSpan duration)
    {
        var tags = new TagList
        {
            { "status", status.ToString() },
            { "outcome", outcome.ToString() }
        };
        _consensusComputed.Add(1, tags);
        _consensusConfidence.Record(confidence, tags);
        _consensusComputationDuration.Record(duration.TotalSeconds, tags);
        if (conflictCount > 0)
        {
            _consensusConflicts.Add(conflictCount, tags);
        }
        if (statusChanged)
        {
            _statusChanges.Add(1, tags);
        }
    }

    /// <summary>Records one stored projection.</summary>
    public void RecordProjectionStored(VexStatus status, bool statusChanged)
    {
        var tags = new TagList { { "status", status.ToString() }, { "status_changed", statusChanged.ToString() } };
        _projectionsStored.Add(1, tags);
    }

    /// <summary>Records one projection query; result sizes are bucketed to bound tag cardinality.</summary>
    public void RecordProjectionQuery(TimeSpan duration, int resultCount)
    {
        var tags = new TagList { { "result_count_bucket", GetCountBucket(resultCount) } };
        _projectionsQueried.Add(1, tags);
        _projectionQueryDuration.Record(duration.TotalSeconds, tags);
    }

    /// <summary>Records one issuer registration.</summary>
    public void RecordIssuerRegistered(string category, string trustTier)
    {
        var tags = new TagList { { "category", category }, { "trust_tier", trustTier } };
        _issuersRegistered.Add(1, tags);
    }

    /// <summary>Records one issuer revocation.</summary>
    public void RecordIssuerRevoked(string category)
    {
        var tags = new TagList { { "category", category } };
        _issuersRevoked.Add(1, tags);
    }

    /// <summary>Records one key registration.</summary>
    public void RecordKeyRegistered(string keyType)
    {
        var tags = new TagList { { "key_type", keyType } };
        _keysRegistered.Add(1, tags);
    }

    /// <summary>Records one key revocation.</summary>
    public void RecordKeyRevoked(string keyType)
    {
        var tags = new TagList { { "key_type", keyType } };
        _keysRevoked.Add(1, tags);
    }

    /// <summary>Records one issuer trust validation.</summary>
    public void RecordTrustValidation(bool trusted, string? issuerStatus = null)
    {
        var tags = new TagList { { "trusted", trusted.ToString() } };
        if (issuerStatus != null) tags.Add("issuer_status", issuerStatus);
        _trustValidations.Add(1, tags);
    }

    /// <summary>Buckets a result count into a low-cardinality tag value.</summary>
    private static string GetCountBucket(int count)
    {
        return count switch
        {
            0 => "0",
            <= 10 => "1-10",
            <= 100 => "11-100",
            <= 1000 => "101-1000",
            _ => "1000+"
        };
    }

    public void Dispose()
    {
        // Only dispose meters we created ourselves; factory-created meters are
        // owned by the IMeterFactory and disposed with it.
        if (_ownsMeter)
        {
            _meter.Dispose();
        }
    }
}
/// <summary>
/// Activity source for VexLens tracing. Each helper starts a span for one
/// pipeline stage; the result is null when no listener is attached.
/// </summary>
public static class VexLensActivitySource
{
    public static readonly ActivitySource Source = new("StellaOps.VexLens", "1.0.0");

    /// <summary>Starts a normalization span tagged with the source format.</summary>
    public static Activity? StartNormalizationActivity(string format) =>
        Source.StartActivity("vexlens.normalize", ActivityKind.Internal)?.SetTag("vex.format", format);

    /// <summary>Starts a product-mapping span.</summary>
    public static Activity? StartProductMappingActivity() =>
        Source.StartActivity("vexlens.map_product", ActivityKind.Internal);

    /// <summary>Starts a signature-verification span tagged with the signature format.</summary>
    public static Activity? StartSignatureVerificationActivity(string format) =>
        Source.StartActivity("vexlens.verify_signature", ActivityKind.Internal)?.SetTag("signature.format", format);

    /// <summary>Starts a trust-weight computation span.</summary>
    public static Activity? StartTrustWeightActivity() =>
        Source.StartActivity("vexlens.compute_trust_weight", ActivityKind.Internal);

    /// <summary>Starts a consensus span tagged with the vulnerability/product pair.</summary>
    public static Activity? StartConsensusActivity(string vulnerabilityId, string productKey) =>
        Source.StartActivity("vexlens.compute_consensus", ActivityKind.Internal)?
            .SetTag("vulnerability.id", vulnerabilityId)
            .SetTag("product.key", productKey);

    /// <summary>Starts a projection-store span.</summary>
    public static Activity? StartProjectionStoreActivity() =>
        Source.StartActivity("vexlens.store_projection", ActivityKind.Internal);

    /// <summary>Starts a projection-query span.</summary>
    public static Activity? StartProjectionQueryActivity() =>
        Source.StartActivity("vexlens.query_projections", ActivityKind.Internal);

    /// <summary>Starts an issuer-directory span named after the given operation.</summary>
    public static Activity? StartIssuerOperationActivity(string operation) =>
        Source.StartActivity($"vexlens.issuer.{operation}", ActivityKind.Internal);
}
/// <summary>
/// Logging event IDs for VexLens, grouped by subsystem:
/// 1xxx normalization, 2xxx product mapping, 3xxx signature verification,
/// 4xxx trust weight, 5xxx consensus, 6xxx projections, 7xxx issuer directory.
/// </summary>
public static class VexLensLogEvents
{
// Normalization (1xxx)
public const int NormalizationStarted = 1001;
public const int NormalizationCompleted = 1002;
public const int NormalizationFailed = 1003;
public const int StatementSkipped = 1004;
// Product mapping (2xxx)
public const int ProductMappingStarted = 2001;
public const int ProductMappingCompleted = 2002;
public const int ProductMappingFailed = 2003;
public const int PurlParseError = 2004;
public const int CpeParseError = 2005;
// Signature verification (3xxx)
public const int SignatureVerificationStarted = 3001;
public const int SignatureVerificationCompleted = 3002;
public const int SignatureVerificationFailed = 3003;
public const int SignatureInvalid = 3004;
public const int CertificateExpired = 3005;
public const int CertificateRevoked = 3006;
// Trust weight (4xxx)
public const int TrustWeightComputed = 4001;
public const int LowTrustWeight = 4002;
// Consensus (5xxx)
public const int ConsensusStarted = 5001;
public const int ConsensusCompleted = 5002;
public const int ConsensusFailed = 5003;
public const int ConflictDetected = 5004;
public const int StatusChanged = 5005;
public const int NoStatementsAvailable = 5006;
// Projections (6xxx)
public const int ProjectionStored = 6001;
public const int ProjectionQueried = 6002;
public const int ProjectionPurged = 6003;
// Issuer directory (7xxx)
public const int IssuerRegistered = 7001;
public const int IssuerRevoked = 7002;
public const int KeyRegistered = 7003;
public const int KeyRevoked = 7004;
public const int TrustValidationFailed = 7005;
}

View File

@@ -0,0 +1,264 @@
namespace StellaOps.VexLens.Options;
/// <summary>
/// Root configuration for the VexLens consensus engine; bound from the
/// "VexLens" configuration section.
/// </summary>
public sealed class VexLensOptions
{
    /// <summary>Configuration section name used for binding.</summary>
    public const string SectionName = "VexLens";

    /// <summary>Projection storage settings.</summary>
    public VexLensStorageOptions Storage { get; set; } = new VexLensStorageOptions();

    /// <summary>Trust engine settings.</summary>
    public VexLensTrustOptions Trust { get; set; } = new VexLensTrustOptions();

    /// <summary>Consensus computation settings.</summary>
    public VexLensConsensusOptions Consensus { get; set; } = new VexLensConsensusOptions();

    /// <summary>Normalization settings.</summary>
    public VexLensNormalizationOptions Normalization { get; set; } = new VexLensNormalizationOptions();

    /// <summary>Air-gap / sealed-mode settings.</summary>
    public VexLensAirGapOptions AirGap { get; set; } = new VexLensAirGapOptions();

    /// <summary>Telemetry settings.</summary>
    public VexLensTelemetryOptions Telemetry { get; set; } = new VexLensTelemetryOptions();
}
/// <summary>
/// Storage configuration for VexLens consensus projections.
/// </summary>
public sealed class VexLensStorageOptions
{
    /// <summary>Backing store driver: "memory" (testing) or "mongo" (production). Defaults to "memory".</summary>
    public string Driver { get; set; } = "memory";

    /// <summary>MongoDB connection string; only meaningful for the mongo driver.</summary>
    public string? ConnectionString { get; set; }

    /// <summary>MongoDB database name.</summary>
    public string? Database { get; set; }

    /// <summary>Collection holding current consensus projections.</summary>
    public string ProjectionsCollection { get; set; } = "vex_consensus";

    /// <summary>Collection holding historical projection snapshots.</summary>
    public string HistoryCollection { get; set; } = "vex_consensus_history";

    /// <summary>Upper bound on retained history entries per projection.</summary>
    public int MaxHistoryEntries { get; set; } = 100;

    /// <summary>Per-command timeout, in seconds.</summary>
    public int CommandTimeoutSeconds { get; set; } = 30;
}
/// <summary>
/// Trust engine configuration: base weights per issuer tier plus multipliers
/// applied for signatures, freshness, and status semantics.
/// </summary>
public sealed class VexLensTrustOptions
{
    /// <summary>Base weight for the Authoritative tier (0.0-1.0).</summary>
    public double AuthoritativeWeight { get; set; } = 1.0;

    /// <summary>Base weight for the Trusted tier (0.0-1.0).</summary>
    public double TrustedWeight { get; set; } = 0.8;

    /// <summary>Base weight for the Known tier (0.0-1.0).</summary>
    public double KnownWeight { get; set; } = 0.5;

    /// <summary>Base weight for the Unknown tier (0.0-1.0).</summary>
    public double UnknownWeight { get; set; } = 0.3;

    /// <summary>Base weight for the Untrusted tier (0.0-1.0).</summary>
    public double UntrustedWeight { get; set; } = 0.1;

    /// <summary>Multiplier applied when a statement carries a valid signature.</summary>
    public double SignedMultiplier { get; set; } = 1.2;

    /// <summary>Age in days after which a statement's freshness weight decays.</summary>
    public int FreshnessDecayDays { get; set; } = 30;

    /// <summary>Floor for the freshness factor (0.0-1.0).</summary>
    public double MinFreshnessFactor { get; set; } = 0.5;

    /// <summary>Boost applied to not_affected statements that carry a justification.</summary>
    public double JustifiedNotAffectedBoost { get; set; } = 1.1;

    /// <summary>Boost applied to fixed-status statements.</summary>
    public double FixedStatusBoost { get; set; } = 1.05;
}
/// <summary>
/// Consensus computation configuration.
/// </summary>
public sealed class VexLensConsensusOptions
{
    /// <summary>Default mode: HighestWeight, WeightedVote, Lattice, or AuthoritativeFirst.</summary>
    public string DefaultMode { get; set; } = "WeightedVote";

    /// <summary>Minimum weight a statement needs to contribute to consensus.</summary>
    public double MinimumWeightThreshold { get; set; } = 0.1;

    /// <summary>Weight-difference threshold at which a conflict is flagged.</summary>
    public double ConflictThreshold { get; set; } = 0.3;

    /// <summary>When true, not_affected statements without a justification are ignored.</summary>
    public bool RequireJustificationForNotAffected { get; set; } = false;

    /// <summary>Cap on statements considered per consensus computation.</summary>
    public int MaxStatementsPerComputation { get; set; } = 100;

    /// <summary>Enables conflict detection and reporting.</summary>
    public bool EnableConflictDetection { get; set; } = true;

    /// <summary>Emits events whenever consensus is computed.</summary>
    public bool EmitEvents { get; set; } = true;
}
/// <summary>
/// Normalization configuration.
/// </summary>
public sealed class VexLensNormalizationOptions
{
    /// <summary>VEX formats for which normalizers are enabled.</summary>
    public string[] EnabledFormats { get; set; } = new[] { "OpenVEX", "CSAF", "CycloneDX" };

    /// <summary>When true, unknown fields fail normalization instead of being ignored.</summary>
    public bool StrictMode { get; set; } = false;

    /// <summary>Upper bound on raw document size in bytes (default 10 MB).</summary>
    public int MaxDocumentSizeBytes { get; set; } = 10 * 1024 * 1024;

    /// <summary>Upper bound on statements accepted per document.</summary>
    public int MaxStatementsPerDocument { get; set; } = 10000;
}
/// <summary>
/// Air-gap mode configuration.
/// </summary>
public sealed class VexLensAirGapOptions
{
    /// <summary>When true, sealed mode blocks all external network access.</summary>
    public bool SealedMode { get; set; } = false;

    /// <summary>Directory from which offline bundles are imported.</summary>
    public string? BundlePath { get; set; }

    /// <summary>When true, bundle signatures are verified on import.</summary>
    public bool VerifyBundleSignatures { get; set; } = true;

    /// <summary>Issuer IDs allowed as bundle sources; empty means none are allow-listed.</summary>
    public string[] AllowedBundleSources { get; set; } = Array.Empty<string>();

    /// <summary>Serialization format used when exporting offline bundles.</summary>
    public string ExportFormat { get; set; } = "jsonl";
}
/// <summary>
/// Telemetry configuration.
/// </summary>
public sealed class VexLensTelemetryOptions
{
    /// <summary>Enables metric collection.</summary>
    public bool MetricsEnabled { get; set; } = true;

    /// <summary>Enables distributed tracing.</summary>
    public bool TracingEnabled { get; set; } = true;

    /// <summary>Name of the meter used for metrics.</summary>
    public string MeterName { get; set; } = "StellaOps.VexLens";

    /// <summary>Name of the activity source used for tracing.</summary>
    public string ActivitySourceName { get; set; } = "StellaOps.VexLens";
}

View File

@@ -0,0 +1,396 @@
using System.Text.Json.Serialization;
namespace StellaOps.VexLens.Core.Models;
/// <summary>
/// Normalized VEX document per vex-normalization.schema.json, giving
/// OpenVEX, CSAF VEX, and CycloneDX VEX a single unified shape.
/// </summary>
public sealed record NormalizedVexDocument
{
    /// <summary>Schema version, for forward compatibility.</summary>
    [JsonPropertyName("schemaVersion")]
    public int SchemaVersion { get; init; } = 1;

    /// <summary>Document identifier derived from the source VEX.</summary>
    [JsonPropertyName("documentId")]
    public required string DocumentId { get; init; }

    /// <summary>Format of the source document before normalization.</summary>
    [JsonPropertyName("sourceFormat")]
    public required VexSourceFormat SourceFormat { get; init; }

    /// <summary>SHA-256 digest of the original source bytes.</summary>
    [JsonPropertyName("sourceDigest")]
    public string? SourceDigest { get; init; }

    /// <summary>URI the source document was obtained from.</summary>
    [JsonPropertyName("sourceUri")]
    public string? SourceUri { get; init; }

    /// <summary>Authority that issued this VEX document.</summary>
    [JsonPropertyName("issuer")]
    public VexIssuer? Issuer { get; init; }

    /// <summary>When the VEX was originally issued (ISO-8601).</summary>
    [JsonPropertyName("issuedAt")]
    public DateTimeOffset? IssuedAt { get; init; }

    /// <summary>When the VEX was last modified (ISO-8601).</summary>
    [JsonPropertyName("lastUpdatedAt")]
    public DateTimeOffset? LastUpdatedAt { get; init; }

    /// <summary>Statements extracted and normalized from the source.</summary>
    [JsonPropertyName("statements")]
    public required IReadOnlyList<NormalizedStatement> Statements { get; init; }

    /// <summary>Metadata describing how normalization was performed.</summary>
    [JsonPropertyName("provenance")]
    public NormalizationProvenance? Provenance { get; init; }
}
/// <summary>
/// Original VEX document format.
/// </summary>
/// <remarks>
/// Wire names use <see cref="JsonStringEnumMemberNameAttribute"/>: the previous
/// <c>[JsonPropertyName]</c> attributes are ignored by
/// <see cref="JsonStringEnumConverter{T}"/> on enum members, so values were
/// silently serialized as C# identifiers (e.g. "OpenVex") instead of the
/// intended uppercase names.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexSourceFormat>))]
public enum VexSourceFormat
{
    [JsonStringEnumMemberName("OPENVEX")]
    OpenVex,
    [JsonStringEnumMemberName("CSAF_VEX")]
    CsafVex,
    [JsonStringEnumMemberName("CYCLONEDX_VEX")]
    CycloneDxVex,
    [JsonStringEnumMemberName("SPDX_VEX")]
    SpdxVex,
    [JsonStringEnumMemberName("STELLAOPS")]
    StellaOps
}
/// <summary>
/// Authority that issued a VEX document.
/// </summary>
public sealed record VexIssuer
{
    /// <summary>Stable issuer identifier (e.g. PURL or domain).</summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>Display name of the issuer.</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Category used for trust weighting.</summary>
    [JsonPropertyName("category")]
    public IssuerCategory? Category { get; init; }

    /// <summary>Tier consulted during policy evaluation.</summary>
    [JsonPropertyName("trustTier")]
    public TrustTier? TrustTier { get; init; }

    /// <summary>Fingerprints of signing keys known for this issuer.</summary>
    [JsonPropertyName("keyFingerprints")]
    public IReadOnlyList<string>? KeyFingerprints { get; init; }
}
/// <summary>
/// Issuer category for trust weighting.
/// </summary>
/// <remarks>
/// Wire names use <see cref="JsonStringEnumMemberNameAttribute"/>; the previous
/// <c>[JsonPropertyName]</c> attributes were ignored by the enum converter.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<IssuerCategory>))]
public enum IssuerCategory
{
    [JsonStringEnumMemberName("VENDOR")]
    Vendor,
    [JsonStringEnumMemberName("DISTRIBUTOR")]
    Distributor,
    [JsonStringEnumMemberName("COMMUNITY")]
    Community,
    [JsonStringEnumMemberName("INTERNAL")]
    Internal,
    [JsonStringEnumMemberName("AGGREGATOR")]
    Aggregator
}
/// <summary>
/// Trust tier for policy evaluation.
/// </summary>
/// <remarks>
/// Wire names use <see cref="JsonStringEnumMemberNameAttribute"/>; the previous
/// <c>[JsonPropertyName]</c> attributes were ignored by the enum converter.
/// NOTE(review): this tier set lacks "Known", which the trust options
/// (KnownWeight) reference — confirm whether a KNOWN member is intended.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<TrustTier>))]
public enum TrustTier
{
    [JsonStringEnumMemberName("AUTHORITATIVE")]
    Authoritative,
    [JsonStringEnumMemberName("TRUSTED")]
    Trusted,
    [JsonStringEnumMemberName("UNTRUSTED")]
    Untrusted,
    [JsonStringEnumMemberName("UNKNOWN")]
    Unknown
}
/// <summary>
/// A single normalized VEX statement: one vulnerability/product pairing
/// with its status and supporting detail.
/// </summary>
public sealed record NormalizedStatement
{
    /// <summary>Statement identifier, unique within its document.</summary>
    [JsonPropertyName("statementId")]
    public required string StatementId { get; init; }

    /// <summary>Vulnerability identifier (CVE, GHSA, ...).</summary>
    [JsonPropertyName("vulnerabilityId")]
    public required string VulnerabilityId { get; init; }

    /// <summary>Known aliases of the vulnerability.</summary>
    [JsonPropertyName("vulnerabilityAliases")]
    public IReadOnlyList<string>? VulnerabilityAliases { get; init; }

    /// <summary>Product this statement applies to.</summary>
    [JsonPropertyName("product")]
    public required NormalizedProduct Product { get; init; }

    /// <summary>VEX status, normalized to OpenVEX terminology.</summary>
    [JsonPropertyName("status")]
    public required VexStatus Status { get; init; }

    /// <summary>Free-form notes on how the status was determined.</summary>
    [JsonPropertyName("statusNotes")]
    public string? StatusNotes { get; init; }

    /// <summary>Justification; meaningful when status is not_affected.</summary>
    [JsonPropertyName("justification")]
    public VexJustificationType? Justification { get; init; }

    /// <summary>Impact description; meaningful when status is affected.</summary>
    [JsonPropertyName("impactStatement")]
    public string? ImpactStatement { get; init; }

    /// <summary>Recommended remediation action.</summary>
    [JsonPropertyName("actionStatement")]
    public string? ActionStatement { get; init; }

    /// <summary>Timestamp associated with the action statement.</summary>
    [JsonPropertyName("actionStatementTimestamp")]
    public DateTimeOffset? ActionStatementTimestamp { get; init; }

    /// <summary>Version constraints this statement applies to.</summary>
    [JsonPropertyName("versions")]
    public VersionRange? Versions { get; init; }

    /// <summary>Affected subcomponents within the product.</summary>
    [JsonPropertyName("subcomponents")]
    public IReadOnlyList<NormalizedProduct>? Subcomponents { get; init; }

    /// <summary>First time this statement was observed.</summary>
    [JsonPropertyName("firstSeen")]
    public DateTimeOffset? FirstSeen { get; init; }

    /// <summary>Most recent time this statement was confirmed.</summary>
    [JsonPropertyName("lastSeen")]
    public DateTimeOffset? LastSeen { get; init; }
}
/// <summary>
/// Normalized VEX status.
/// </summary>
/// <remarks>
/// Wire names use <see cref="JsonStringEnumMemberNameAttribute"/>: the previous
/// <c>[JsonPropertyName]</c> attributes are ignored by the enum converter, so
/// the OpenVEX snake_case values ("not_affected", ...) never reached the wire.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexStatus>))]
public enum VexStatus
{
    [JsonStringEnumMemberName("not_affected")]
    NotAffected,
    [JsonStringEnumMemberName("affected")]
    Affected,
    [JsonStringEnumMemberName("fixed")]
    Fixed,
    [JsonStringEnumMemberName("under_investigation")]
    UnderInvestigation
}
/// <summary>
/// VEX justification types for not_affected statements.
/// </summary>
/// <remarks>
/// Wire names use <see cref="JsonStringEnumMemberNameAttribute"/>; the previous
/// <c>[JsonPropertyName]</c> attributes were ignored by the enum converter.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<VexJustificationType>))]
public enum VexJustificationType
{
    [JsonStringEnumMemberName("component_not_present")]
    ComponentNotPresent,
    [JsonStringEnumMemberName("vulnerable_code_not_present")]
    VulnerableCodeNotPresent,
    [JsonStringEnumMemberName("vulnerable_code_not_in_execute_path")]
    VulnerableCodeNotInExecutePath,
    [JsonStringEnumMemberName("vulnerable_code_cannot_be_controlled_by_adversary")]
    VulnerableCodeCannotBeControlledByAdversary,
    [JsonStringEnumMemberName("inline_mitigations_already_exist")]
    InlineMitigationsAlreadyExist
}
/// <summary>
/// Product reference in normalized form.
/// </summary>
public sealed record NormalizedProduct
{
    /// <summary>Canonical product key; a PURL when one is available.</summary>
    [JsonPropertyName("key")]
    public required string Key { get; init; }

    /// <summary>Display name of the product.</summary>
    [JsonPropertyName("name")]
    public string? Name { get; init; }

    /// <summary>Concrete version, when applicable.</summary>
    [JsonPropertyName("version")]
    public string? Version { get; init; }

    /// <summary>Package URL, when available.</summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }

    /// <summary>CPE identifier, when available.</summary>
    [JsonPropertyName("cpe")]
    public string? Cpe { get; init; }

    /// <summary>Content digests keyed by algorithm name.</summary>
    [JsonPropertyName("hashes")]
    public IReadOnlyDictionary<string, string>? Hashes { get; init; }
}
/// <summary>
/// Version range constraints for a statement.
/// </summary>
public sealed record VersionRange
{
    /// <summary>Version expressions describing affected versions.</summary>
    [JsonPropertyName("affected")]
    public IReadOnlyList<string>? Affected { get; init; }

    /// <summary>Version expressions describing fixed versions.</summary>
    [JsonPropertyName("fixed")]
    public IReadOnlyList<string>? Fixed { get; init; }

    /// <summary>Version expressions describing unaffected versions.</summary>
    [JsonPropertyName("unaffected")]
    public IReadOnlyList<string>? Unaffected { get; init; }
}
/// <summary>
/// Provenance metadata recorded by the normalization process.
/// </summary>
public sealed record NormalizationProvenance
{
    /// <summary>Instant at which normalization ran.</summary>
    [JsonPropertyName("normalizedAt")]
    public required DateTimeOffset NormalizedAt { get; init; }

    /// <summary>Identifier/version of the normalizing service.</summary>
    [JsonPropertyName("normalizer")]
    public required string Normalizer { get; init; }

    /// <summary>Revision of the source document, when tracked.</summary>
    [JsonPropertyName("sourceRevision")]
    public string? SourceRevision { get; init; }

    /// <summary>Transformation rules applied during normalization.</summary>
    [JsonPropertyName("transformationRules")]
    public IReadOnlyList<string>? TransformationRules { get; init; }
}

View File

@@ -0,0 +1,67 @@
using StellaOps.VexLens.Core.Models;
namespace StellaOps.VexLens.Core.Normalization;
/// <summary>
/// VexLens normalizer contract: translates raw VEX documents of any
/// supported format into the normalized schema defined by
/// vex-normalization.schema.json.
/// </summary>
public interface IVexLensNormalizer
{
/// <summary>
/// Normalizes a raw VEX document from any supported format.
/// </summary>
/// <param name="rawDocument">The raw VEX document bytes.</param>
/// <param name="sourceFormat">The source format (OpenVEX, CSAF, CycloneDX, etc.).</param>
/// <param name="sourceUri">URI where the document was obtained, if known.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The normalized VEX document.</returns>
/// <remarks>NOTE(review): implementations appear to reject empty input — confirm the exception contract with implementers.</remarks>
Task<NormalizedVexDocument> NormalizeAsync(
ReadOnlyMemory<byte> rawDocument,
VexSourceFormat sourceFormat,
string? sourceUri = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Detects the source format by inspecting the document content.
/// </summary>
/// <param name="rawDocument">The raw VEX document bytes.</param>
/// <returns>The detected format, or null when the format cannot be determined.</returns>
VexSourceFormat? DetectFormat(ReadOnlyMemory<byte> rawDocument);
/// <summary>
/// Source formats this normalizer can process.
/// </summary>
IReadOnlyList<VexSourceFormat> SupportedFormats { get; }
}
/// <summary>
/// Outcome of a normalization run: the document plus diagnostics.
/// </summary>
public sealed record NormalizationResult
{
    /// <summary>The resulting normalized document.</summary>
    public required NormalizedVexDocument Document { get; init; }

    /// <summary>True when normalization completed successfully.</summary>
    public bool Success { get; init; } = true;

    /// <summary>Non-fatal issues encountered while normalizing.</summary>
    public IReadOnlyList<string> Warnings { get; init; } = Array.Empty<string>();

    /// <summary>Count of statements dropped because of errors.</summary>
    public int SkippedStatements { get; init; }

    /// <summary>Wall-clock processing time in milliseconds.</summary>
    public long ProcessingMs { get; init; }
}

View File

@@ -0,0 +1,514 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.VexLens.Core.Models;
namespace StellaOps.VexLens.Core.Normalization;
/// <summary>
/// VexLens normalizer service that transforms raw VEX documents from
/// OpenVEX, CSAF, and CycloneDX formats into the normalized schema.
/// </summary>
public sealed class VexLensNormalizer : IVexLensNormalizer
{
private const string NormalizerVersion = "stellaops-vexlens/1.0.0";
private readonly VexNormalizerRegistry _excititorRegistry;
private readonly TimeProvider _timeProvider;
private readonly ILogger<VexLensNormalizer> _logger;
private static readonly IReadOnlyList<VexSourceFormat> s_supportedFormats = new[]
{
VexSourceFormat.OpenVex,
VexSourceFormat.CsafVex,
VexSourceFormat.CycloneDxVex
};
public VexLensNormalizer(
VexNormalizerRegistry excititorRegistry,
TimeProvider timeProvider,
ILogger<VexLensNormalizer> logger)
{
_excititorRegistry = excititorRegistry ?? throw new ArgumentNullException(nameof(excititorRegistry));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public IReadOnlyList<VexSourceFormat> SupportedFormats => s_supportedFormats;
public VexSourceFormat? DetectFormat(ReadOnlyMemory<byte> rawDocument)
{
if (rawDocument.IsEmpty)
{
return null;
}
try
{
using var doc = JsonDocument.Parse(rawDocument);
var root = doc.RootElement;
// OpenVEX detection: has "@context" with openvex
if (root.TryGetProperty("@context", out var context))
{
var contextStr = context.ValueKind == JsonValueKind.String
? context.GetString()
: context.ToString();
if (contextStr?.Contains("openvex", StringComparison.OrdinalIgnoreCase) == true)
{
return VexSourceFormat.OpenVex;
}
}
// CSAF detection: has "document" with "csaf_version" or "category" containing "vex"
if (root.TryGetProperty("document", out var document))
{
if (document.TryGetProperty("csaf_version", out _))
{
return VexSourceFormat.CsafVex;
}
if (document.TryGetProperty("category", out var category))
{
var categoryStr = category.GetString();
if (categoryStr?.Contains("vex", StringComparison.OrdinalIgnoreCase) == true)
{
return VexSourceFormat.CsafVex;
}
}
}
// CycloneDX detection: has "bomFormat" = "CycloneDX" and "vulnerabilities"
if (root.TryGetProperty("bomFormat", out var bomFormat) &&
bomFormat.GetString()?.Equals("CycloneDX", StringComparison.OrdinalIgnoreCase) == true)
{
if (root.TryGetProperty("vulnerabilities", out _))
{
return VexSourceFormat.CycloneDxVex;
}
}
// SPDX VEX detection: has "spdxVersion" and vulnerability annotations
if (root.TryGetProperty("spdxVersion", out _))
{
return VexSourceFormat.SpdxVex;
}
}
catch (JsonException)
{
// Not valid JSON, can't detect format
}
return null;
}
public async Task<NormalizedVexDocument> NormalizeAsync(
ReadOnlyMemory<byte> rawDocument,
VexSourceFormat sourceFormat,
string? sourceUri = null,
CancellationToken cancellationToken = default)
{
ArgumentOutOfRangeException.ThrowIfZero(rawDocument.Length, nameof(rawDocument));
var now = _timeProvider.GetUtcNow();
var digest = ComputeDigest(rawDocument.Span);
var documentId = GenerateDocumentId(sourceFormat, digest);
_logger.LogInformation(
"Normalizing {Format} document from {Uri} (size={Size}, digest={Digest})",
sourceFormat, sourceUri ?? "(inline)", rawDocument.Length, digest);
// Convert to Excititor's internal format and normalize
var excititorFormat = MapToExcititorFormat(sourceFormat);
var rawDoc = new VexRawDocument(
rawDocument,
excititorFormat,
sourceUri,
digest,
now);
var normalizer = _excititorRegistry.Resolve(rawDoc);
if (normalizer is null)
{
_logger.LogWarning("No normalizer found for format {Format}, using fallback parsing", sourceFormat);
return await FallbackNormalizeAsync(rawDocument, sourceFormat, documentId, digest, sourceUri, now, cancellationToken)
.ConfigureAwait(false);
}
// Use Excititor's provider abstraction
var provider = new VexProvider(
Id: "vexlens",
Name: "VexLens Normalizer",
Category: VexProviderCategory.Aggregator,
TrustTier: VexProviderTrustTier.Unknown);
var batch = await normalizer.NormalizeAsync(rawDoc, provider, cancellationToken).ConfigureAwait(false);
// Transform Excititor claims to VexLens normalized format
var statements = TransformClaims(batch.Claims);
_logger.LogInformation(
"Normalized {Format} document into {Count} statements",
sourceFormat, statements.Count);
return new NormalizedVexDocument
{
SchemaVersion = 1,
DocumentId = documentId,
SourceFormat = sourceFormat,
SourceDigest = digest,
SourceUri = sourceUri,
Issuer = ExtractIssuer(batch),
IssuedAt = batch.Claims.FirstOrDefault()?.Document.Timestamp,
LastUpdatedAt = batch.Claims.LastOrDefault()?.LastObserved,
Statements = statements,
Provenance = new NormalizationProvenance
{
NormalizedAt = now,
Normalizer = NormalizerVersion,
TransformationRules = new[] { $"excititor:{normalizer.Format}" }
}
};
}
private async Task<NormalizedVexDocument> FallbackNormalizeAsync(
ReadOnlyMemory<byte> rawDocument,
VexSourceFormat sourceFormat,
string documentId,
string digest,
string? sourceUri,
DateTimeOffset now,
CancellationToken cancellationToken)
{
// Fallback parsing for unsupported formats
var statements = new List<NormalizedStatement>();
try
{
using var doc = JsonDocument.Parse(rawDocument);
var root = doc.RootElement;
// Try to extract statements from common patterns
if (TryExtractOpenVexStatements(root, out var openVexStatements))
{
statements.AddRange(openVexStatements);
}
else if (TryExtractCycloneDxStatements(root, out var cdxStatements))
{
statements.AddRange(cdxStatements);
}
}
catch (JsonException ex)
{
_logger.LogError(ex, "Failed to parse document for fallback normalization");
}
return new NormalizedVexDocument
{
SchemaVersion = 1,
DocumentId = documentId,
SourceFormat = sourceFormat,
SourceDigest = digest,
SourceUri = sourceUri,
Statements = statements,
Provenance = new NormalizationProvenance
{
NormalizedAt = now,
Normalizer = NormalizerVersion,
TransformationRules = new[] { "fallback:generic" }
}
};
}
private static bool TryExtractOpenVexStatements(JsonElement root, out List<NormalizedStatement> statements)
{
statements = new List<NormalizedStatement>();
if (!root.TryGetProperty("statements", out var statementsElement) ||
statementsElement.ValueKind != JsonValueKind.Array)
{
return false;
}
var index = 0;
foreach (var stmt in statementsElement.EnumerateArray())
{
if (stmt.ValueKind != JsonValueKind.Object)
{
continue;
}
var vulnId = GetString(stmt, "vulnerability") ?? GetString(stmt, "vuln");
if (string.IsNullOrWhiteSpace(vulnId))
{
continue;
}
var status = MapStatusString(GetString(stmt, "status"));
var justification = MapJustificationString(GetString(stmt, "justification"));
// Extract products
if (!stmt.TryGetProperty("products", out var products) ||
products.ValueKind != JsonValueKind.Array)
{
continue;
}
foreach (var product in products.EnumerateArray())
{
var productKey = product.ValueKind == JsonValueKind.String
? product.GetString()
: GetString(product, "purl") ?? GetString(product, "id");
if (string.IsNullOrWhiteSpace(productKey))
{
continue;
}
statements.Add(new NormalizedStatement
{
StatementId = GetString(stmt, "id") ?? $"stmt-{index++}",
VulnerabilityId = vulnId.Trim(),
Product = new NormalizedProduct
{
Key = productKey.Trim(),
Name = GetString(product, "name"),
Version = GetString(product, "version"),
Purl = productKey.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) ? productKey : null,
Cpe = GetString(product, "cpe")
},
Status = status,
Justification = justification,
StatusNotes = GetString(stmt, "statement") ?? GetString(stmt, "remediation")
});
}
}
return statements.Count > 0;
}
private static bool TryExtractCycloneDxStatements(JsonElement root, out List<NormalizedStatement> statements)
{
statements = new List<NormalizedStatement>();
if (!root.TryGetProperty("vulnerabilities", out var vulns) ||
vulns.ValueKind != JsonValueKind.Array)
{
return false;
}
var index = 0;
foreach (var vuln in vulns.EnumerateArray())
{
if (vuln.ValueKind != JsonValueKind.Object)
{
continue;
}
var vulnId = GetString(vuln, "id");
if (string.IsNullOrWhiteSpace(vulnId))
{
continue;
}
// Extract analysis
VexStatus status = VexStatus.UnderInvestigation;
VexJustificationType? justification = null;
string? statusNotes = null;
if (vuln.TryGetProperty("analysis", out var analysis))
{
status = MapStatusString(GetString(analysis, "state"));
justification = MapJustificationString(GetString(analysis, "justification"));
statusNotes = GetString(analysis, "detail");
}
// Extract affects
if (!vuln.TryGetProperty("affects", out var affects) ||
affects.ValueKind != JsonValueKind.Array)
{
continue;
}
foreach (var affect in affects.EnumerateArray())
{
var refValue = GetString(affect, "ref");
if (string.IsNullOrWhiteSpace(refValue))
{
continue;
}
statements.Add(new NormalizedStatement
{
StatementId = $"cdx-{vulnId}-{index++}",
VulnerabilityId = vulnId.Trim(),
Product = new NormalizedProduct
{
Key = refValue.Trim(),
Purl = refValue.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) ? refValue : null
},
Status = status,
Justification = justification,
StatusNotes = statusNotes
});
}
}
return statements.Count > 0;
}
private static string? GetString(JsonElement element, string propertyName)
{
if (element.ValueKind != JsonValueKind.Object)
{
return null;
}
return element.TryGetProperty(propertyName, out var value) && value.ValueKind == JsonValueKind.String
? value.GetString()
: null;
}
private static VexStatus MapStatusString(string? status)
{
return status?.ToLowerInvariant() switch
{
"not_affected" or "notaffected" => VexStatus.NotAffected,
"affected" => VexStatus.Affected,
"fixed" => VexStatus.Fixed,
"under_investigation" or "in_triage" => VexStatus.UnderInvestigation,
_ => VexStatus.UnderInvestigation
};
}
private static VexJustificationType? MapJustificationString(string? justification)
{
return justification?.ToLowerInvariant().Replace("-", "_") switch
{
"component_not_present" => VexJustificationType.ComponentNotPresent,
"vulnerable_code_not_present" => VexJustificationType.VulnerableCodeNotPresent,
"vulnerable_code_not_in_execute_path" => VexJustificationType.VulnerableCodeNotInExecutePath,
"vulnerable_code_cannot_be_controlled_by_adversary" => VexJustificationType.VulnerableCodeCannotBeControlledByAdversary,
"inline_mitigations_already_exist" => VexJustificationType.InlineMitigationsAlreadyExist,
_ => null
};
}
private IReadOnlyList<NormalizedStatement> TransformClaims(
IReadOnlyList<VexClaim> claims)
{
var statements = new List<NormalizedStatement>(claims.Count);
var index = 0;
foreach (var claim in claims)
{
var status = MapExcititorStatus(claim.Status);
var justification = MapExcititorJustification(claim.Justification);
statements.Add(new NormalizedStatement
{
StatementId = $"claim-{index++}",
VulnerabilityId = claim.VulnerabilityId,
Product = new NormalizedProduct
{
Key = claim.Product.Key,
Name = claim.Product.Name,
Version = claim.Product.Version,
Purl = claim.Product.Purl,
Cpe = claim.Product.Cpe
},
Status = status,
Justification = justification,
StatusNotes = claim.Remarks,
FirstSeen = claim.FirstObserved,
LastSeen = claim.LastObserved
});
}
// Deterministic ordering
return statements
.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal)
.ThenBy(s => s.Product.Key, StringComparer.Ordinal)
.ToList();
}
private static VexStatus MapExcititorStatus(VexClaimStatus status)
{
return status switch
{
VexClaimStatus.NotAffected => VexStatus.NotAffected,
VexClaimStatus.Affected => VexStatus.Affected,
VexClaimStatus.Fixed => VexStatus.Fixed,
VexClaimStatus.UnderInvestigation => VexStatus.UnderInvestigation,
_ => VexStatus.UnderInvestigation
};
}
private static VexJustificationType? MapExcititorJustification(VexJustification? justification)
{
    // Null (and any unrecognized enum value) maps to "no justification".
    if (justification is null)
    {
        return null;
    }
    return justification.Value switch
    {
        VexJustification.ComponentNotPresent => VexJustificationType.ComponentNotPresent,
        VexJustification.VulnerableCodeNotPresent => VexJustificationType.VulnerableCodeNotPresent,
        VexJustification.VulnerableCodeNotInExecutePath => VexJustificationType.VulnerableCodeNotInExecutePath,
        VexJustification.VulnerableCodeCannotBeControlledByAdversary => VexJustificationType.VulnerableCodeCannotBeControlledByAdversary,
        VexJustification.InlineMitigationsAlreadyExist => VexJustificationType.InlineMitigationsAlreadyExist,
        _ => null
    };
}
private static VexIssuer? ExtractIssuer(VexClaimBatch batch)
{
    // Issuer info is optional; both "issuer.id" and "issuer.name" metadata
    // keys must be present for an issuer to be attributed.
    var metadata = batch.Metadata;
    if (!metadata.TryGetValue("issuer.id", out var issuerId) ||
        !metadata.TryGetValue("issuer.name", out var issuerName))
    {
        return null;
    }
    return new VexIssuer
    {
        Id = issuerId,
        Name = issuerName
    };
}
private static VexDocumentFormat MapToExcititorFormat(VexSourceFormat format)
{
    // Formats without an Excititor counterpart map to Unknown.
    switch (format)
    {
        case VexSourceFormat.OpenVex:
            return VexDocumentFormat.OpenVex;
        case VexSourceFormat.CsafVex:
            return VexDocumentFormat.Csaf;
        case VexSourceFormat.CycloneDxVex:
            return VexDocumentFormat.CycloneDx;
        default:
            return VexDocumentFormat.Unknown;
    }
}
private static string ComputeDigest(ReadOnlySpan<byte> data)
{
    // Canonical digest format: "sha256:" + lowercase hex of the SHA-256 hash.
    // Stack-allocate the hash buffer to avoid the intermediate byte[].
    Span<byte> hash = stackalloc byte[SHA256.HashSizeInBytes];
    SHA256.HashData(data, hash);
    return $"sha256:{Convert.ToHexStringLower(hash)}";
}
/// <summary>
/// Builds a short, human-readable document id of the form
/// "{format-prefix}:{first 16 hex chars of digest}".
/// </summary>
/// <param name="format">Source format; selects the id prefix.</param>
/// <param name="digest">Digest string, typically "sha256:&lt;hex&gt;".</param>
private static string GenerateDocumentId(VexSourceFormat format, string digest)
{
    var prefix = format switch
    {
        VexSourceFormat.OpenVex => "openvex",
        VexSourceFormat.CsafVex => "csaf",
        VexSourceFormat.CycloneDxVex => "cdx",
        VexSourceFormat.SpdxVex => "spdx",
        VexSourceFormat.StellaOps => "stellaops",
        _ => "vex"
    };
    // Strip only a *leading* algorithm tag. The previous Replace(...) removed
    // every occurrence of "sha256:" anywhere in the string, and the unguarded
    // [..16] slice threw for digests shorter than 16 chars.
    const string algorithmTag = "sha256:";
    var hex = digest.StartsWith(algorithmTag, StringComparison.OrdinalIgnoreCase)
        ? digest[algorithmTag.Length..]
        : digest;
    // Use at most the first 16 chars of the remaining digest for the id.
    var shortDigest = hex.Length <= 16 ? hex : hex[..16];
    return $"{prefix}:{shortDigest}";
}
}

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.VexLens.Core</RootNamespace>
<AssemblyName>StellaOps.VexLens.Core</AssemblyName>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.24407.12" />
<PackageReference Include="System.Text.Json" Version="10.0.0-preview.7.24407.12" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<RootNamespace>StellaOps.VexLens</RootNamespace>
<AssemblyName>StellaOps.VexLens</AssemblyName>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,210 @@
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Storage;
/// <summary>
/// Interface for persisting and querying consensus projections.
/// </summary>
public interface IConsensusProjectionStore
{
    /// <summary>
    /// Stores a consensus result.
    /// </summary>
    /// <param name="result">Consensus computation to persist as a projection.</param>
    /// <param name="options">Tenant scope plus history-tracking and event-emission flags.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The stored projection, including its generated identifier.</returns>
    Task<ConsensusProjection> StoreAsync(
        VexConsensusResult result,
        StoreProjectionOptions options,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a consensus projection by ID.
    /// </summary>
    /// <returns>The projection, or null when no projection has that ID.</returns>
    Task<ConsensusProjection?> GetAsync(
        string projectionId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the latest consensus projection for a vulnerability-product pair.
    /// </summary>
    /// <returns>The most recent projection, or null when none exists for the pair.</returns>
    Task<ConsensusProjection?> GetLatestAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists consensus projections with filtering and pagination.
    /// </summary>
    Task<ProjectionListResult> ListAsync(
        ProjectionQuery query,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the history of consensus projections for a vulnerability-product pair.
    /// </summary>
    /// <param name="limit">Optional cap on the number of entries returned.</param>
    Task<IReadOnlyList<ConsensusProjection>> GetHistoryAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId = null,
        int? limit = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes projections older than the specified date.
    /// </summary>
    /// <param name="tenantId">When set, restricts the purge to that tenant.</param>
    /// <returns>The number of projections deleted.</returns>
    Task<int> PurgeAsync(
        DateTimeOffset olderThan,
        string? tenantId = null,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// A stored consensus projection.
/// </summary>
/// <param name="ProjectionId">Unique identifier of the stored projection.</param>
/// <param name="VulnerabilityId">Vulnerability the consensus applies to.</param>
/// <param name="ProductKey">Product the consensus applies to.</param>
/// <param name="TenantId">Owning tenant, or null for the default scope.</param>
/// <param name="Status">Consensus VEX status.</param>
/// <param name="Justification">Consensus justification, when applicable.</param>
/// <param name="ConfidenceScore">Confidence score from the consensus computation.</param>
/// <param name="Outcome">Outcome classification of the consensus.</param>
/// <param name="StatementCount">Number of contributing statements.</param>
/// <param name="ConflictCount">Number of conflicts detected among statements.</param>
/// <param name="RationaleSummary">Summary text of the consensus rationale.</param>
/// <param name="ComputedAt">When the consensus was computed.</param>
/// <param name="StoredAt">When the projection was persisted.</param>
/// <param name="PreviousProjectionId">Prior projection for the same pair, when history is tracked.</param>
/// <param name="StatusChanged">True when the status differs from the previous projection.</param>
public sealed record ConsensusProjection(
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    ConsensusOutcome Outcome,
    int StatementCount,
    int ConflictCount,
    string RationaleSummary,
    DateTimeOffset ComputedAt,
    DateTimeOffset StoredAt,
    string? PreviousProjectionId,
    bool StatusChanged);
/// <summary>
/// Options for storing a projection.
/// </summary>
/// <param name="TenantId">Tenant scope, or null for the default scope.</param>
/// <param name="TrackHistory">When true, link to and compare against the previous projection.</param>
/// <param name="EmitEvent">When true, emit events after storing (requires a configured emitter).</param>
public sealed record StoreProjectionOptions(
    string? TenantId,
    bool TrackHistory,
    bool EmitEvent);
/// <summary>
/// Query for listing projections. Null filter fields mean "no filter".
/// </summary>
/// <param name="TenantId">Exact tenant match, or null for all tenants.</param>
/// <param name="VulnerabilityId">Substring match on vulnerability id.</param>
/// <param name="ProductKey">Substring match on product key.</param>
/// <param name="Status">Exact status match.</param>
/// <param name="Outcome">Exact outcome match.</param>
/// <param name="MinimumConfidence">Lower bound (inclusive) on confidence score.</param>
/// <param name="ComputedAfter">Lower bound (inclusive) on computation time.</param>
/// <param name="ComputedBefore">Upper bound (inclusive) on computation time.</param>
/// <param name="StatusChanged">Filter on whether the status changed.</param>
/// <param name="Limit">Page size.</param>
/// <param name="Offset">Number of items to skip.</param>
/// <param name="SortBy">Sort field.</param>
/// <param name="SortDescending">True for descending sort order.</param>
public sealed record ProjectionQuery(
    string? TenantId,
    string? VulnerabilityId,
    string? ProductKey,
    VexStatus? Status,
    ConsensusOutcome? Outcome,
    double? MinimumConfidence,
    DateTimeOffset? ComputedAfter,
    DateTimeOffset? ComputedBefore,
    bool? StatusChanged,
    int Limit,
    int Offset,
    ProjectionSortField SortBy,
    bool SortDescending);
/// <summary>
/// Fields for sorting projections.
/// </summary>
public enum ProjectionSortField
{
    /// <summary>Sort by consensus computation time.</summary>
    ComputedAt,
    /// <summary>Sort by persistence time.</summary>
    StoredAt,
    /// <summary>Sort by vulnerability identifier.</summary>
    VulnerabilityId,
    /// <summary>Sort by product key.</summary>
    ProductKey,
    /// <summary>Sort by confidence score.</summary>
    ConfidenceScore
}
/// <summary>
/// Result of listing projections.
/// </summary>
/// <param name="Projections">The requested page of projections.</param>
/// <param name="TotalCount">Total number of matches before pagination.</param>
/// <param name="Offset">Offset that produced this page.</param>
/// <param name="Limit">Page size that produced this page.</param>
public sealed record ProjectionListResult(
    IReadOnlyList<ConsensusProjection> Projections,
    int TotalCount,
    int Offset,
    int Limit);
/// <summary>
/// Event emitter invoked when consensus is computed, when a status changes,
/// and when conflicts are detected.
/// </summary>
public interface IConsensusEventEmitter
{
    /// <summary>
    /// Emits a consensus computed event.
    /// </summary>
    Task EmitConsensusComputedAsync(
        ConsensusComputedEvent @event,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits a status changed event.
    /// </summary>
    Task EmitStatusChangedAsync(
        ConsensusStatusChangedEvent @event,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits a conflict detected event.
    /// </summary>
    Task EmitConflictDetectedAsync(
        ConsensusConflictDetectedEvent @event,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Event when consensus is computed.
/// </summary>
/// <param name="EventId">Unique identifier of the event.</param>
/// <param name="ProjectionId">Projection produced by the computation.</param>
/// <param name="VulnerabilityId">Vulnerability the consensus applies to.</param>
/// <param name="ProductKey">Product the consensus applies to.</param>
/// <param name="TenantId">Owning tenant, or null for the default scope.</param>
/// <param name="Status">Consensus VEX status.</param>
/// <param name="Justification">Consensus justification, when applicable.</param>
/// <param name="ConfidenceScore">Confidence score of the consensus.</param>
/// <param name="Outcome">Outcome classification of the consensus.</param>
/// <param name="StatementCount">Number of contributing statements.</param>
/// <param name="ComputedAt">When the consensus was computed.</param>
/// <param name="EmittedAt">When the event was emitted.</param>
public sealed record ConsensusComputedEvent(
    string EventId,
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    VexStatus Status,
    VexJustification? Justification,
    double ConfidenceScore,
    ConsensusOutcome Outcome,
    int StatementCount,
    DateTimeOffset ComputedAt,
    DateTimeOffset EmittedAt);
/// <summary>
/// Event when consensus status changes.
/// </summary>
/// <param name="EventId">Unique identifier of the event.</param>
/// <param name="ProjectionId">Projection that recorded the change.</param>
/// <param name="VulnerabilityId">Vulnerability the consensus applies to.</param>
/// <param name="ProductKey">Product the consensus applies to.</param>
/// <param name="TenantId">Owning tenant, or null for the default scope.</param>
/// <param name="PreviousStatus">Status held by the prior projection.</param>
/// <param name="NewStatus">Status held by the new projection.</param>
/// <param name="ChangeReason">Optional free-text reason for the change.</param>
/// <param name="ComputedAt">When the new consensus was computed.</param>
/// <param name="EmittedAt">When the event was emitted.</param>
public sealed record ConsensusStatusChangedEvent(
    string EventId,
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    VexStatus PreviousStatus,
    VexStatus NewStatus,
    string? ChangeReason,
    DateTimeOffset ComputedAt,
    DateTimeOffset EmittedAt);
/// <summary>
/// Event when conflicts are detected during consensus.
/// </summary>
/// <param name="EventId">Unique identifier of the event.</param>
/// <param name="ProjectionId">Projection the conflicts were detected for.</param>
/// <param name="VulnerabilityId">Vulnerability the consensus applies to.</param>
/// <param name="ProductKey">Product the consensus applies to.</param>
/// <param name="TenantId">Owning tenant, or null for the default scope.</param>
/// <param name="ConflictCount">Total number of conflicts detected.</param>
/// <param name="MaxSeverity">Highest severity among the detected conflicts.</param>
/// <param name="Conflicts">Per-conflict summaries.</param>
/// <param name="DetectedAt">When the conflicts were detected.</param>
/// <param name="EmittedAt">When the event was emitted.</param>
public sealed record ConsensusConflictDetectedEvent(
    string EventId,
    string ProjectionId,
    string VulnerabilityId,
    string ProductKey,
    string? TenantId,
    int ConflictCount,
    ConflictSeverity MaxSeverity,
    IReadOnlyList<ConflictSummary> Conflicts,
    DateTimeOffset DetectedAt,
    DateTimeOffset EmittedAt);
/// <summary>
/// Summary of a conflict for events.
/// NOTE(review): the in-memory store populates <c>Issuer1</c>/<c>Issuer2</c>
/// with the conflicting *statement ids* — confirm intended semantics or
/// rename the parameters.
/// </summary>
/// <param name="Issuer1">First party to the conflict.</param>
/// <param name="Issuer2">Second party to the conflict.</param>
/// <param name="Status1">Status asserted by the first party.</param>
/// <param name="Status2">Status asserted by the second party.</param>
/// <param name="Severity">Severity of the disagreement.</param>
public sealed record ConflictSummary(
    string Issuer1,
    string Issuer2,
    VexStatus Status1,
    VexStatus Status2,
    ConflictSeverity Severity);

View File

@@ -0,0 +1,403 @@
using System.Collections.Concurrent;
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Storage;
/// <summary>
/// In-memory implementation of <see cref="IConsensusProjectionStore"/>.
/// Suitable for testing and single-instance deployments; state is lost on restart.
/// </summary>
public sealed class InMemoryConsensusProjectionStore : IConsensusProjectionStore
{
    // Primary index: projection id -> projection.
    private readonly ConcurrentDictionary<string, ConsensusProjection> _projectionsById = new();
    // Secondary index: "{tenant}:{vuln}:{product}" -> history list.
    // Each list instance is locked individually for mutation and enumeration.
    private readonly ConcurrentDictionary<string, List<ConsensusProjection>> _projectionsByKey = new();
    private readonly IConsensusEventEmitter? _eventEmitter;

    /// <summary>
    /// Creates the store. When <paramref name="eventEmitter"/> is provided,
    /// <see cref="StoreAsync"/> emits events if the store options request it.
    /// </summary>
    public InMemoryConsensusProjectionStore(IConsensusEventEmitter? eventEmitter = null)
    {
        _eventEmitter = eventEmitter;
    }

    /// <inheritdoc />
    public async Task<ConsensusProjection> StoreAsync(
        VexConsensusResult result,
        StoreProjectionOptions options,
        CancellationToken cancellationToken = default)
    {
        var key = GetKey(result.VulnerabilityId, result.ProductKey, options.TenantId);
        var now = DateTimeOffset.UtcNow;

        // With history tracking on, link to the previous projection and
        // detect status transitions so a change event can be emitted.
        ConsensusProjection? previous = null;
        bool statusChanged = false;
        if (options.TrackHistory)
        {
            previous = await GetLatestAsync(
                result.VulnerabilityId,
                result.ProductKey,
                options.TenantId,
                cancellationToken);
            if (previous != null)
            {
                statusChanged = previous.Status != result.ConsensusStatus;
            }
        }

        var projection = new ConsensusProjection(
            ProjectionId: $"proj-{Guid.NewGuid():N}",
            VulnerabilityId: result.VulnerabilityId,
            ProductKey: result.ProductKey,
            TenantId: options.TenantId,
            Status: result.ConsensusStatus,
            Justification: result.ConsensusJustification,
            ConfidenceScore: result.ConfidenceScore,
            Outcome: result.Outcome,
            StatementCount: result.Contributions.Count,
            ConflictCount: result.Conflicts?.Count ?? 0,
            RationaleSummary: result.Rationale.Summary,
            ComputedAt: result.ComputedAt,
            StoredAt: now,
            PreviousProjectionId: previous?.ProjectionId,
            StatusChanged: statusChanged);

        _projectionsById[projection.ProjectionId] = projection;

        // GetOrAdd closes the check-then-insert race of the previous
        // TryGetValue + indexer pattern, where two concurrent writers for the
        // same key could each install a fresh list and drop the other's entry.
        var history = _projectionsByKey.GetOrAdd(key, static _ => new List<ConsensusProjection>());
        lock (history)
        {
            history.Add(projection);
        }

        // Emit events only when both requested and an emitter is configured.
        if (options.EmitEvent && _eventEmitter != null)
        {
            await EmitEventsAsync(projection, result, previous, cancellationToken);
        }
        return projection;
    }

    /// <inheritdoc />
    public Task<ConsensusProjection?> GetAsync(
        string projectionId,
        CancellationToken cancellationToken = default)
    {
        _projectionsById.TryGetValue(projectionId, out var projection);
        return Task.FromResult(projection);
    }

    /// <inheritdoc />
    public Task<ConsensusProjection?> GetLatestAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId = null,
        CancellationToken cancellationToken = default)
    {
        var key = GetKey(vulnerabilityId, productKey, tenantId);
        if (_projectionsByKey.TryGetValue(key, out var history))
        {
            lock (history)
            {
                // "Latest" = most recent computation time.
                var latest = history
                    .OrderByDescending(p => p.ComputedAt)
                    .FirstOrDefault();
                return Task.FromResult(latest);
            }
        }
        return Task.FromResult<ConsensusProjection?>(null);
    }

    /// <inheritdoc />
    public Task<ProjectionListResult> ListAsync(
        ProjectionQuery query,
        CancellationToken cancellationToken = default)
    {
        var allProjections = _projectionsById.Values.AsEnumerable();

        // Apply filters; null/empty query fields mean "no filter".
        // Tenant is an exact match; vulnerability id and product key are
        // case-insensitive substring matches.
        if (!string.IsNullOrEmpty(query.TenantId))
        {
            allProjections = allProjections.Where(p => p.TenantId == query.TenantId);
        }
        if (!string.IsNullOrEmpty(query.VulnerabilityId))
        {
            allProjections = allProjections.Where(p =>
                p.VulnerabilityId.Contains(query.VulnerabilityId, StringComparison.OrdinalIgnoreCase));
        }
        if (!string.IsNullOrEmpty(query.ProductKey))
        {
            allProjections = allProjections.Where(p =>
                p.ProductKey.Contains(query.ProductKey, StringComparison.OrdinalIgnoreCase));
        }
        if (query.Status.HasValue)
        {
            allProjections = allProjections.Where(p => p.Status == query.Status.Value);
        }
        if (query.Outcome.HasValue)
        {
            allProjections = allProjections.Where(p => p.Outcome == query.Outcome.Value);
        }
        if (query.MinimumConfidence.HasValue)
        {
            allProjections = allProjections.Where(p => p.ConfidenceScore >= query.MinimumConfidence.Value);
        }
        if (query.ComputedAfter.HasValue)
        {
            allProjections = allProjections.Where(p => p.ComputedAt >= query.ComputedAfter.Value);
        }
        if (query.ComputedBefore.HasValue)
        {
            allProjections = allProjections.Where(p => p.ComputedAt <= query.ComputedBefore.Value);
        }
        if (query.StatusChanged.HasValue)
        {
            allProjections = allProjections.Where(p => p.StatusChanged == query.StatusChanged.Value);
        }

        // Materialize to get the total count before pagination.
        var list = allProjections.ToList();
        var totalCount = list.Count;

        // Apply sorting.
        list = query.SortBy switch
        {
            ProjectionSortField.ComputedAt => query.SortDescending
                ? list.OrderByDescending(p => p.ComputedAt).ToList()
                : list.OrderBy(p => p.ComputedAt).ToList(),
            ProjectionSortField.StoredAt => query.SortDescending
                ? list.OrderByDescending(p => p.StoredAt).ToList()
                : list.OrderBy(p => p.StoredAt).ToList(),
            ProjectionSortField.VulnerabilityId => query.SortDescending
                ? list.OrderByDescending(p => p.VulnerabilityId).ToList()
                : list.OrderBy(p => p.VulnerabilityId).ToList(),
            ProjectionSortField.ProductKey => query.SortDescending
                ? list.OrderByDescending(p => p.ProductKey).ToList()
                : list.OrderBy(p => p.ProductKey).ToList(),
            ProjectionSortField.ConfidenceScore => query.SortDescending
                ? list.OrderByDescending(p => p.ConfidenceScore).ToList()
                : list.OrderBy(p => p.ConfidenceScore).ToList(),
            _ => list
        };

        // Apply pagination.
        var paginated = list
            .Skip(query.Offset)
            .Take(query.Limit)
            .ToList();

        return Task.FromResult(new ProjectionListResult(
            Projections: paginated,
            TotalCount: totalCount,
            Offset: query.Offset,
            Limit: query.Limit));
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ConsensusProjection>> GetHistoryAsync(
        string vulnerabilityId,
        string productKey,
        string? tenantId = null,
        int? limit = null,
        CancellationToken cancellationToken = default)
    {
        var key = GetKey(vulnerabilityId, productKey, tenantId);
        if (_projectionsByKey.TryGetValue(key, out var history))
        {
            lock (history)
            {
                // Newest first; optionally capped at `limit` entries.
                var ordered = history
                    .OrderByDescending(p => p.ComputedAt)
                    .AsEnumerable();
                if (limit.HasValue)
                {
                    ordered = ordered.Take(limit.Value);
                }
                return Task.FromResult<IReadOnlyList<ConsensusProjection>>(ordered.ToList());
            }
        }
        return Task.FromResult<IReadOnlyList<ConsensusProjection>>([]);
    }

    /// <inheritdoc />
    public Task<int> PurgeAsync(
        DateTimeOffset olderThan,
        string? tenantId = null,
        CancellationToken cancellationToken = default)
    {
        // Snapshot the candidates first, then remove from both indexes.
        var toRemove = _projectionsById.Values
            .Where(p => p.ComputedAt < olderThan)
            .Where(p => tenantId == null || p.TenantId == tenantId)
            .ToList();
        foreach (var projection in toRemove)
        {
            _projectionsById.TryRemove(projection.ProjectionId, out _);
            var key = GetKey(projection.VulnerabilityId, projection.ProductKey, projection.TenantId);
            if (_projectionsByKey.TryGetValue(key, out var history))
            {
                lock (history)
                {
                    history.RemoveAll(p => p.ProjectionId == projection.ProjectionId);
                }
            }
        }
        return Task.FromResult(toRemove.Count);
    }

    // Composite index key; "_" stands in for the default (null) tenant.
    private static string GetKey(string vulnerabilityId, string productKey, string? tenantId)
    {
        return $"{tenantId ?? "_"}:{vulnerabilityId}:{productKey}";
    }

    // Emits computed / status-changed / conflict events for a stored projection.
    private async Task EmitEventsAsync(
        ConsensusProjection projection,
        VexConsensusResult result,
        ConsensusProjection? previous,
        CancellationToken cancellationToken)
    {
        if (_eventEmitter == null) return;
        var now = DateTimeOffset.UtcNow;

        // Always emit the computed event.
        await _eventEmitter.EmitConsensusComputedAsync(
            new ConsensusComputedEvent(
                EventId: $"evt-{Guid.NewGuid():N}",
                ProjectionId: projection.ProjectionId,
                VulnerabilityId: projection.VulnerabilityId,
                ProductKey: projection.ProductKey,
                TenantId: projection.TenantId,
                Status: projection.Status,
                Justification: projection.Justification,
                ConfidenceScore: projection.ConfidenceScore,
                Outcome: projection.Outcome,
                StatementCount: projection.StatementCount,
                ComputedAt: projection.ComputedAt,
                EmittedAt: now),
            cancellationToken);

        // Emit status changed when history tracking observed a transition.
        if (projection.StatusChanged && previous != null)
        {
            await _eventEmitter.EmitStatusChangedAsync(
                new ConsensusStatusChangedEvent(
                    EventId: $"evt-{Guid.NewGuid():N}",
                    ProjectionId: projection.ProjectionId,
                    VulnerabilityId: projection.VulnerabilityId,
                    ProductKey: projection.ProductKey,
                    TenantId: projection.TenantId,
                    PreviousStatus: previous.Status,
                    NewStatus: projection.Status,
                    ChangeReason: $"Consensus updated: {result.Rationale.Summary}",
                    ComputedAt: projection.ComputedAt,
                    EmittedAt: now),
                cancellationToken);
        }

        // Emit a conflict event when the result carries any conflicts.
        if (result.Conflicts is { Count: > 0 })
        {
            var maxSeverity = result.Conflicts.Max(c => c.Severity);
            // NOTE(review): ConflictSummary.Issuer1/Issuer2 are filled with
            // statement ids here — confirm intended semantics.
            var summaries = result.Conflicts.Select(c => new ConflictSummary(
                Issuer1: c.Statement1Id,
                Issuer2: c.Statement2Id,
                Status1: c.Status1,
                Status2: c.Status2,
                Severity: c.Severity)).ToList();
            await _eventEmitter.EmitConflictDetectedAsync(
                new ConsensusConflictDetectedEvent(
                    EventId: $"evt-{Guid.NewGuid():N}",
                    ProjectionId: projection.ProjectionId,
                    VulnerabilityId: projection.VulnerabilityId,
                    ProductKey: projection.ProductKey,
                    TenantId: projection.TenantId,
                    ConflictCount: result.Conflicts.Count,
                    MaxSeverity: maxSeverity,
                    Conflicts: summaries,
                    DetectedAt: projection.ComputedAt,
                    EmittedAt: now),
                cancellationToken);
        }
    }
}
/// <summary>
/// In-memory event emitter for testing. Thread-safe: writers lock the backing
/// list, and the read properties copy under the same lock (the original
/// returned/enumerated the live list without synchronization, so a reader
/// could race a concurrent append).
/// </summary>
public sealed class InMemoryConsensusEventEmitter : IConsensusEventEmitter
{
    private readonly List<object> _events = [];

    /// <summary>All emitted events, in emission order (snapshot).</summary>
    public IReadOnlyList<object> Events => Snapshot<object>();

    /// <summary>Emitted consensus-computed events (snapshot).</summary>
    public IReadOnlyList<ConsensusComputedEvent> ComputedEvents => Snapshot<ConsensusComputedEvent>();

    /// <summary>Emitted status-changed events (snapshot).</summary>
    public IReadOnlyList<ConsensusStatusChangedEvent> StatusChangedEvents => Snapshot<ConsensusStatusChangedEvent>();

    /// <summary>Emitted conflict-detected events (snapshot).</summary>
    public IReadOnlyList<ConsensusConflictDetectedEvent> ConflictEvents => Snapshot<ConsensusConflictDetectedEvent>();

    /// <inheritdoc />
    public Task EmitConsensusComputedAsync(
        ConsensusComputedEvent @event,
        CancellationToken cancellationToken = default) => Record(@event);

    /// <inheritdoc />
    public Task EmitStatusChangedAsync(
        ConsensusStatusChangedEvent @event,
        CancellationToken cancellationToken = default) => Record(@event);

    /// <inheritdoc />
    public Task EmitConflictDetectedAsync(
        ConsensusConflictDetectedEvent @event,
        CancellationToken cancellationToken = default) => Record(@event);

    /// <summary>Removes all recorded events.</summary>
    public void Clear()
    {
        lock (_events)
        {
            _events.Clear();
        }
    }

    // Appends under the lock; completed task satisfies the async interface.
    private Task Record(object @event)
    {
        lock (_events)
        {
            _events.Add(@event);
        }
        return Task.CompletedTask;
    }

    // Copies matching events under the lock so callers never enumerate the
    // live list while another thread is appending.
    private IReadOnlyList<T> Snapshot<T>()
    {
        lock (_events)
        {
            return _events.OfType<T>().ToList();
        }
    }
}

View File

@@ -2,21 +2,21 @@
| Task ID | Status | Sprint | Dependency | Notes |
| --- | --- | --- | --- | --- |
| VEXLENS-30-001 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | — | Blocked: normalization schema + issuer directory + API governance specs not published. |
| VEXLENS-30-002 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-001 | Product mapping library; depends on normalization shapes. |
| VEXLENS-30-003 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-002 | Signature verification (Ed25519/DSSE/PKIX); issuer directory inputs pending. |
| VEXLENS-30-004 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-003 | Trust weighting engine; needs policy config contract. |
| VEXLENS-30-005 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-004 | Consensus algorithm; blocked by trust weighting inputs. |
| VEXLENS-30-006 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-005 | Projection storage/events; awaiting consensus output schema. |
| VEXLENS-30-007 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-006 | Consensus APIs + OpenAPI; pending upstream API governance guidance. |
| VEXLENS-30-008 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-007 | Policy Engine/Vuln Explorer integration; needs upstream contracts. |
| VEXLENS-30-009 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-008 | Telemetry (metrics/logs/traces); observability schema not published. |
| VEXLENS-30-010 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-009 | Tests + determinism harness; fixtures pending normalization outputs. |
| VEXLENS-30-011 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-010 | Deployment/runbooks/offline kit; depends on API/telemetry shapes. |
| VEXLENS-AIAI-31-001 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Consensus rationale API enhancements; needs consensus API finalization. |
| VEXLENS-AIAI-31-002 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-AIAI-31-001 | Caching hooks for Advisory AI; requires rationale API shape. |
| VEXLENS-EXPORT-35-001 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Snapshot API for mirror bundles; export profile pending. |
| VEXLENS-ORCH-33-001 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Register consensus compute job; orchestrator contract TBD. |
| VEXLENS-ORCH-34-001 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-ORCH-33-001 | Emit completion events to orchestrator ledger; needs job spec. |
| VEXLENS-30-001 | TODO | SPRINT_0129_0001_0001_policy_reasoning | — | Unblocked 2025-12-05: vex-normalization.schema.json + api-baseline.schema.json created. |
| VEXLENS-30-002 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-001 | Product mapping library; depends on normalization shapes. |
| VEXLENS-30-003 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-002 | Signature verification (Ed25519/DSSE/PKIX). |
| VEXLENS-30-004 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-003 | Trust weighting engine. |
| VEXLENS-30-005 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-004 | Consensus algorithm. |
| VEXLENS-30-006 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-005 | Projection storage/events. |
| VEXLENS-30-007 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-006 | Consensus APIs + OpenAPI. |
| VEXLENS-30-008 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-007 | Policy Engine/Vuln Explorer integration. |
| VEXLENS-30-009 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-008 | Telemetry (metrics/logs/traces). |
| VEXLENS-30-010 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-009 | Tests + determinism harness. |
| VEXLENS-30-011 | TODO | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-010 | Deployment/runbooks/offline kit. |
| VEXLENS-AIAI-31-001 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Consensus rationale API enhancements; needs consensus API finalization. |
| VEXLENS-AIAI-31-002 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-AIAI-31-001 | Caching hooks for Advisory AI; requires rationale API shape. |
| VEXLENS-EXPORT-35-001 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Snapshot API for mirror bundles; export profile pending. |
| VEXLENS-ORCH-33-001 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-30-011 | Register consensus compute job; orchestrator contract TBD. |
| VEXLENS-ORCH-34-001 | BLOCKED | SPRINT_0129_0001_0001_policy_reasoning | VEXLENS-ORCH-33-001 | Emit completion events to orchestrator ledger; needs job spec. |
Status source of truth: `docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md`. Update both files together. Keep UTC dates when advancing status.

View File

@@ -0,0 +1,476 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.VexLens.Consensus;
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Normalization;
using StellaOps.VexLens.Storage;
using StellaOps.VexLens.Trust;
using StellaOps.VexLens.Verification;
namespace StellaOps.VexLens.Testing;
/// <summary>
/// Test harness for VexLens operations with determinism verification.
/// Wires in-memory implementations of the normalizer registry, issuer
/// directory, projection store, trust weighting and consensus engines.
/// </summary>
public sealed class VexLensTestHarness : IDisposable
{
    private readonly VexNormalizerRegistry _normalizerRegistry;
    private readonly InMemoryIssuerDirectory _issuerDirectory;
    private readonly InMemoryConsensusEventEmitter _eventEmitter;
    private readonly InMemoryConsensusProjectionStore _projectionStore;
    private readonly TrustWeightEngine _trustWeightEngine;
    private readonly VexConsensusEngine _consensusEngine;

    public VexLensTestHarness()
    {
        // Register the three built-in normalizers (OpenVEX, CSAF, CycloneDX).
        _normalizerRegistry = new VexNormalizerRegistry();
        _normalizerRegistry.Register(new OpenVexNormalizer());
        _normalizerRegistry.Register(new CsafVexNormalizer());
        _normalizerRegistry.Register(new CycloneDxVexNormalizer());
        _issuerDirectory = new InMemoryIssuerDirectory();
        _eventEmitter = new InMemoryConsensusEventEmitter();
        // Projection store forwards events to the in-memory emitter.
        _projectionStore = new InMemoryConsensusProjectionStore(_eventEmitter);
        _trustWeightEngine = new TrustWeightEngine();
        _consensusEngine = new VexConsensusEngine();
    }

    public IVexNormalizerRegistry NormalizerRegistry => _normalizerRegistry;
    public IIssuerDirectory IssuerDirectory => _issuerDirectory;
    public IConsensusEventEmitter EventEmitter => _eventEmitter;
    /// <summary>Concrete emitter exposing captured events for assertions.</summary>
    public InMemoryConsensusEventEmitter TestEventEmitter => _eventEmitter;
    public IConsensusProjectionStore ProjectionStore => _projectionStore;
    public ITrustWeightEngine TrustWeightEngine => _trustWeightEngine;
    public IVexConsensusEngine ConsensusEngine => _consensusEngine;

    /// <summary>
    /// Normalizes VEX content and returns the result.
    /// </summary>
    /// <param name="content">Raw VEX document content.</param>
    /// <param name="sourceUri">Optional source URI recorded in the context.</param>
    /// <exception cref="InvalidOperationException">No registered normalizer recognizes the content.</exception>
    public async Task<NormalizationResult> NormalizeAsync(
        string content,
        string? sourceUri = null,
        CancellationToken cancellationToken = default)
    {
        var normalizer = _normalizerRegistry.DetectNormalizer(content);
        if (normalizer == null)
        {
            throw new InvalidOperationException("No normalizer found for content");
        }
        var context = new NormalizationContext(
            SourceUri: sourceUri,
            NormalizedAt: DateTimeOffset.UtcNow,
            Normalizer: "VexLensTestHarness",
            Options: null);
        return await normalizer.NormalizeAsync(content, context, cancellationToken);
    }

    /// <summary>
    /// Computes trust weight for a statement.
    /// </summary>
    /// <param name="statement">Statement to weigh.</param>
    /// <param name="issuer">Optional issuer attributed to the statement.</param>
    /// <param name="documentIssuedAt">Optional issuance time of the source document.</param>
    public async Task<TrustWeightResult> ComputeTrustWeightAsync(
        NormalizedStatement statement,
        VexIssuer? issuer = null,
        DateTimeOffset? documentIssuedAt = null,
        CancellationToken cancellationToken = default)
    {
        var request = new TrustWeightRequest(
            Statement: statement,
            Issuer: issuer,
            SignatureVerification: null,
            DocumentIssuedAt: documentIssuedAt,
            Context: new TrustWeightContext(
                TenantId: null,
                EvaluationTime: DateTimeOffset.UtcNow,
                CustomFactors: null));
        return await _trustWeightEngine.ComputeWeightAsync(request, cancellationToken);
    }

    /// <summary>
    /// Computes consensus from weighted statements.
    /// </summary>
    /// <param name="vulnerabilityId">Vulnerability under consensus.</param>
    /// <param name="productKey">Product under consensus.</param>
    /// <param name="statements">Weighted statements to combine.</param>
    /// <param name="mode">Consensus mode; defaults to weighted voting.</param>
    public async Task<VexConsensusResult> ComputeConsensusAsync(
        string vulnerabilityId,
        string productKey,
        IEnumerable<WeightedStatement> statements,
        ConsensusMode mode = ConsensusMode.WeightedVote,
        CancellationToken cancellationToken = default)
    {
        var request = new VexConsensusRequest(
            VulnerabilityId: vulnerabilityId,
            ProductKey: productKey,
            Statements: statements.ToList(),
            Context: new ConsensusContext(
                TenantId: null,
                EvaluationTime: DateTimeOffset.UtcNow,
                // Fixed test policy: low weight threshold, moderate conflict
                // threshold, no justification requirement, no preferred issuers.
                Policy: new ConsensusPolicy(
                    Mode: mode,
                    MinimumWeightThreshold: 0.1,
                    ConflictThreshold: 0.3,
                    RequireJustificationForNotAffected: false,
                    PreferredIssuers: null)));
        return await _consensusEngine.ComputeConsensusAsync(request, cancellationToken);
    }

    /// <summary>
    /// Registers a test issuer.
    /// </summary>
    public async Task<IssuerRecord> RegisterTestIssuerAsync(
        string issuerId,
        string name,
        IssuerCategory category = IssuerCategory.Vendor,
        TrustTier trustTier = TrustTier.Trusted,
        CancellationToken cancellationToken = default)
    {
        var registration = new IssuerRegistration(
            IssuerId: issuerId,
            Name: name,
            Category: category,
            TrustTier: trustTier,
            InitialKeys: null,
            Metadata: null);
        return await _issuerDirectory.RegisterIssuerAsync(registration, cancellationToken);
    }

    /// <summary>
    /// Creates a test statement.
    /// </summary>
    public static NormalizedStatement CreateTestStatement(
        string vulnerabilityId,
        string productKey,
        VexStatus status,
        VexJustification? justification = null,
        string? statementId = null)
    {
        // Capture a single timestamp so FirstSeen == LastSeen for a freshly
        // created fixture (two separate UtcNow reads could differ by ticks).
        var now = DateTimeOffset.UtcNow;
        return new NormalizedStatement(
            StatementId: statementId ?? $"stmt-{Guid.NewGuid():N}",
            VulnerabilityId: vulnerabilityId,
            VulnerabilityAliases: null,
            Product: new NormalizedProduct(
                Key: productKey,
                Name: null,
                Version: null,
                // Treat "pkg:" keys as package URLs.
                Purl: productKey.StartsWith("pkg:") ? productKey : null,
                Cpe: null,
                Hashes: null),
            Status: status,
            StatusNotes: null,
            Justification: justification,
            ImpactStatement: null,
            ActionStatement: null,
            ActionStatementTimestamp: null,
            Versions: null,
            Subcomponents: null,
            FirstSeen: now,
            LastSeen: now);
    }

    /// <summary>
    /// Creates a test issuer.
    /// </summary>
    public static VexIssuer CreateTestIssuer(
        string id,
        string name,
        IssuerCategory category = IssuerCategory.Vendor,
        TrustTier trustTier = TrustTier.Trusted)
    {
        return new VexIssuer(
            Id: id,
            Name: name,
            Category: category,
            TrustTier: trustTier,
            KeyFingerprints: null);
    }

    /// <summary>
    /// Clears all test data.
    /// </summary>
    public void Reset()
    {
        _eventEmitter.Clear();
    }

    public void Dispose()
    {
        // No unmanaged resources; everything is in-memory.
    }
}
/// <summary>
/// Determinism verification harness for VexLens operations.
/// </summary>
public sealed class DeterminismHarness
{
private readonly VexLensTestHarness _harness;
// Each harness instance owns its own in-memory VexLens stack.
public DeterminismHarness()
{
    _harness = new VexLensTestHarness();
}
/// <summary>
/// Verifies that normalization produces deterministic results.
/// </summary>
/// <param name="content">Raw VEX document content to normalize repeatedly.</param>
/// <param name="iterations">Number of repetitions (default 3).</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
public async Task<DeterminismResult> VerifyNormalizationDeterminismAsync(
    string content,
    int iterations = 3,
    CancellationToken cancellationToken = default)
{
    var results = new List<string>();
    for (var i = 0; i < iterations; i++)
    {
        var result = await _harness.NormalizeAsync(content, cancellationToken: cancellationToken);
        if (result.Success && result.Document != null)
        {
            // Hash the normalized document; identical hashes across runs
            // indicate deterministic output.
            var hash = ComputeDocumentHash(result.Document);
            results.Add(hash);
        }
        else
        {
            // Failures participate too: the same error code should recur
            // on every run for the outcome to count as deterministic.
            results.Add($"error:{result.Errors.FirstOrDefault()?.Code}");
        }
    }
    // Deterministic iff all runs produced the same hash/error string.
    var isEqual = results.Distinct().Count() == 1;
    return new DeterminismResult(
        Operation: "normalization",
        IsDeterministic: isEqual,
        Iterations: iterations,
        DistinctResults: results.Distinct().Count(),
        FirstResult: results.FirstOrDefault(),
        Discrepancies: isEqual ? null : results);
}
/// <summary>
/// Verifies that consensus produces deterministic results.
/// </summary>
/// <param name="vulnerabilityId">Vulnerability under consensus.</param>
/// <param name="productKey">Product under consensus.</param>
/// <param name="statements">Statements (with optional issuers) to weigh and combine.</param>
/// <param name="iterations">Number of repetitions (default 3).</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
public async Task<DeterminismResult> VerifyConsensusDeterminismAsync(
    string vulnerabilityId,
    string productKey,
    IEnumerable<(NormalizedStatement Statement, VexIssuer? Issuer)> statements,
    int iterations = 3,
    CancellationToken cancellationToken = default)
{
    var results = new List<string>();
    // Materialize once so every iteration sees the same statement sequence.
    var stmtList = statements.ToList();
    for (var i = 0; i < iterations; i++)
    {
        // Recompute trust weights from scratch each iteration: weight
        // computation is itself part of the determinism surface under test.
        var weighted = new List<WeightedStatement>();
        foreach (var (stmt, issuer) in stmtList)
        {
            var weight = await _harness.ComputeTrustWeightAsync(stmt, issuer, cancellationToken: cancellationToken);
            weighted.Add(new WeightedStatement(stmt, weight, issuer, null));
        }
        var result = await _harness.ComputeConsensusAsync(
            vulnerabilityId,
            productKey,
            weighted,
            cancellationToken: cancellationToken);
        var hash = ComputeConsensusHash(result);
        results.Add(hash);
    }
    // Deterministic iff all runs hashed identically.
    var isEqual = results.Distinct().Count() == 1;
    return new DeterminismResult(
        Operation: "consensus",
        IsDeterministic: isEqual,
        Iterations: iterations,
        DistinctResults: results.Distinct().Count(),
        FirstResult: results.FirstOrDefault(),
        Discrepancies: isEqual ? null : results);
}
/// <summary>
/// Verifies that trust weight computation produces deterministic results.
/// </summary>
/// <param name="statement">Statement whose weight is recomputed.</param>
/// <param name="issuer">Optional issuer attributed to the statement.</param>
/// <param name="iterations">Number of repetitions (default 3).</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
public async Task<DeterminismResult> VerifyTrustWeightDeterminismAsync(
    NormalizedStatement statement,
    VexIssuer? issuer,
    int iterations = 3,
    CancellationToken cancellationToken = default)
{
    var results = new List<string>();
    for (var i = 0; i < iterations; i++)
    {
        var result = await _harness.ComputeTrustWeightAsync(statement, issuer, cancellationToken: cancellationToken);
        // Format with the invariant culture: the previous culture-sensitive
        // $"{value:F10}" produced locale-dependent strings (e.g. ',' vs '.'
        // decimal separator) for identical weights (CA1305).
        var hash = result.Weight.ToString("F10", CultureInfo.InvariantCulture);
        results.Add(hash);
    }
    // Deterministic iff every run produced the same formatted weight.
    var isEqual = results.Distinct().Count() == 1;
    return new DeterminismResult(
        Operation: "trust_weight",
        IsDeterministic: isEqual,
        Iterations: iterations,
        DistinctResults: results.Distinct().Count(),
        FirstResult: results.FirstOrDefault(),
        Discrepancies: isEqual ? null : results);
}
/// <summary>
/// Runs all determinism checks: normalization first, and — only when that is
/// stable and yields at least one statement — trust weight and consensus.
/// </summary>
public async Task<DeterminismReport> RunFullDeterminismCheckAsync(
    string vexContent,
    CancellationToken cancellationToken = default)
{
    var checks = new List<DeterminismResult>
    {
        await VerifyNormalizationDeterminismAsync(vexContent, cancellationToken: cancellationToken)
    };
    // Downstream checks only make sense if normalization itself is stable.
    if (checks[0].IsDeterministic)
    {
        var normalized = await _harness.NormalizeAsync(vexContent, cancellationToken: cancellationToken);
        if (normalized.Success && normalized.Document is { Statements.Count: > 0 } document)
        {
            var firstStatement = document.Statements[0];
            var issuer = document.Issuer;
            checks.Add(await VerifyTrustWeightDeterminismAsync(firstStatement, issuer, cancellationToken: cancellationToken));
            checks.Add(await VerifyConsensusDeterminismAsync(
                firstStatement.VulnerabilityId,
                firstStatement.Product.Key,
                [(firstStatement, issuer)],
                cancellationToken: cancellationToken));
        }
    }
    return new DeterminismReport(
        Results: checks,
        AllDeterministic: checks.All(c => c.IsDeterministic),
        GeneratedAt: DateTimeOffset.UtcNow);
}
/// <summary>
/// Builds a stable SHA-256 fingerprint of the fields of a normalized document
/// that the determinism checks compare.
/// </summary>
private static string ComputeDocumentHash(NormalizedVexDocument doc)
{
    // Delimit every field with '\n' so adjacent values cannot collide by
    // concatenation (e.g. "ab"+"c" vs "a"+"bc"), and order statements with an
    // ordinal comparer so the hash does not depend on the current culture.
    var sb = new StringBuilder();
    sb.Append(doc.DocumentId).Append('\n');
    sb.Append(doc.SourceFormat).Append('\n');
    sb.Append(doc.Issuer?.Id ?? "null").Append('\n');
    foreach (var stmt in doc.Statements.OrderBy(s => s.StatementId, StringComparer.Ordinal))
    {
        sb.Append(stmt.VulnerabilityId).Append('\n');
        sb.Append(stmt.Product.Key).Append('\n');
        sb.Append(stmt.Status).Append('\n');
        sb.Append(stmt.Justification?.ToString() ?? "null").Append('\n');
    }
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
    return Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Builds a stable SHA-256 fingerprint of a consensus result for equality
/// comparison across iterations.
/// </summary>
private static string ComputeConsensusHash(VexConsensusResult result)
{
    // '\n' delimiters prevent concatenation collisions between adjacent fields;
    // ordinal ordering keeps contribution order culture-independent.
    var sb = new StringBuilder();
    sb.Append(result.ConsensusStatus).Append('\n');
    sb.Append(result.ConsensusJustification?.ToString() ?? "null").Append('\n');
    sb.Append($"{result.ConfidenceScore:F10}").Append('\n');
    sb.Append(result.Outcome).Append('\n');
    foreach (var contrib in result.Contributions.OrderBy(c => c.StatementId, StringComparer.Ordinal))
    {
        sb.Append(contrib.StatementId).Append('\n');
        sb.Append($"{contrib.Weight:F10}").Append('\n');
        sb.Append(contrib.IsWinner).Append('\n');
    }
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
    return Convert.ToHexString(hash).ToLowerInvariant();
}
}
/// <summary>
/// Result of a determinism check.
/// </summary>
/// <param name="Operation">Operation checked: "normalization", "consensus", or "trust_weight".</param>
/// <param name="IsDeterministic">True when every iteration produced an identical result hash.</param>
/// <param name="Iterations">Number of repetitions executed.</param>
/// <param name="DistinctResults">Count of distinct result hashes observed (1 when deterministic).</param>
/// <param name="FirstResult">Result hash from the first iteration, if any.</param>
/// <param name="Discrepancies">All per-iteration hashes when non-deterministic; null otherwise.</param>
public sealed record DeterminismResult(
    string Operation,
    bool IsDeterministic,
    int Iterations,
    int DistinctResults,
    string? FirstResult,
    IReadOnlyList<string>? Discrepancies);
/// <summary>
/// Report of determinism checks.
/// </summary>
/// <param name="Results">Individual check results, in execution order.</param>
/// <param name="AllDeterministic">True only when every contained result is deterministic.</param>
/// <param name="GeneratedAt">UTC time the report was produced.</param>
public sealed record DeterminismReport(
    IReadOnlyList<DeterminismResult> Results,
    bool AllDeterministic,
    DateTimeOffset GeneratedAt);
/// <summary>
/// Test data generators for VexLens.
/// </summary>
public static class VexLensTestData
{
    // Reused across calls; constructing JsonSerializerOptions per call is costly (CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new() { WriteIndented = true };

    /// <summary>
    /// Generates a sample OpenVEX document containing a single statement.
    /// </summary>
    /// <param name="vulnerabilityId">Vulnerability identifier for the statement.</param>
    /// <param name="productPurl">Product purl listed in the statement.</param>
    /// <param name="status">VEX status to emit.</param>
    /// <param name="justification">Optional justification (relevant for not_affected).</param>
    /// <returns>An indented OpenVEX JSON document.</returns>
    public static string GenerateOpenVexDocument(
        string vulnerabilityId,
        string productPurl,
        VexStatus status,
        VexJustification? justification = null)
    {
        var doc = new
        {
            @context = "https://openvex.dev/ns/v0.2.0",
            @id = $"urn:uuid:{Guid.NewGuid()}",
            author = new { @id = "test-vendor", name = "Test Vendor" },
            timestamp = DateTimeOffset.UtcNow.ToString("O"),
            statements = new[]
            {
                new
                {
                    vulnerability = vulnerabilityId,
                    products = new[] { productPurl },
                    status = MapStatus(status),
                    // NOTE(review): justification is emitted as the lowercased enum name
                    // (e.g. "componentnotpresent", not OpenVEX's "component_not_present");
                    // confirm the normalizer accepts this form.
                    justification = justification?.ToString().ToLowerInvariant()
                }
            }
        };
        return JsonSerializer.Serialize(doc, SerializerOptions);
    }

    /// <summary>
    /// Maps a <see cref="VexStatus"/> to its OpenVEX wire value.
    /// Falls back to the lowercased enum name for statuses without a snake_case form.
    /// </summary>
    private static string MapStatus(VexStatus status) => status switch
    {
        VexStatus.NotAffected => "not_affected",
        VexStatus.UnderInvestigation => "under_investigation",
        _ => status.ToString().ToLowerInvariant()
    };

    /// <summary>
    /// Generates sample statements for consensus testing: three issuers of
    /// differing trust that disagree about the same vulnerability/product pair.
    /// </summary>
    public static IEnumerable<(NormalizedStatement Statement, VexIssuer Issuer)> GenerateConflictingStatements(
        string vulnerabilityId,
        string productKey)
    {
        // Authoritative vendor asserts not_affected with a justification.
        yield return (
            VexLensTestHarness.CreateTestStatement(vulnerabilityId, productKey, VexStatus.NotAffected, VexJustification.ComponentNotPresent, "stmt-1"),
            VexLensTestHarness.CreateTestIssuer("vendor-1", "Vendor A", IssuerCategory.Vendor, TrustTier.Authoritative));
        // Trusted community researcher disagrees: affected, no justification.
        yield return (
            VexLensTestHarness.CreateTestStatement(vulnerabilityId, productKey, VexStatus.Affected, null, "stmt-2"),
            VexLensTestHarness.CreateTestIssuer("researcher-1", "Security Researcher", IssuerCategory.Community, TrustTier.Trusted));
        // Aggregator of unknown trust is still investigating.
        yield return (
            VexLensTestHarness.CreateTestStatement(vulnerabilityId, productKey, VexStatus.UnderInvestigation, null, "stmt-3"),
            VexLensTestHarness.CreateTestIssuer("aggregator-1", "VEX Aggregator", IssuerCategory.Aggregator, TrustTier.Unknown));
    }
}

View File

@@ -0,0 +1,152 @@
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Verification;
namespace StellaOps.VexLens.Trust;
/// <summary>
/// Interface for computing trust weights for VEX statements.
/// </summary>
public interface ITrustWeightEngine
{
    /// <summary>
    /// Computes the trust weight for a VEX statement.
    /// </summary>
    /// <param name="request">Statement plus issuer/signature/freshness context to weigh.</param>
    /// <returns>The weight, its per-component breakdown, contributing factors, and warnings.</returns>
    Task<TrustWeightResult> ComputeWeightAsync(
        TrustWeightRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Computes trust weights for multiple statements in batch.
    /// </summary>
    /// <remarks>Results are returned in the same order as <paramref name="requests"/>.</remarks>
    Task<IReadOnlyList<TrustWeightResult>> ComputeWeightsBatchAsync(
        IEnumerable<TrustWeightRequest> requests,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets the current trust weight configuration.
    /// </summary>
    TrustWeightConfiguration GetConfiguration();
    /// <summary>
    /// Updates the trust weight configuration.
    /// </summary>
    /// <remarks>
    /// NOTE(review): atomicity with respect to in-flight weight computations is
    /// implementation-defined — confirm before relying on it under concurrency.
    /// </remarks>
    void UpdateConfiguration(TrustWeightConfiguration configuration);
}
/// <summary>
/// Request for trust weight computation.
/// </summary>
/// <param name="Statement">The normalized statement being weighted.</param>
/// <param name="Issuer">Issuer of the statement, when known; null lowers the issuer weight.</param>
/// <param name="SignatureVerification">Outcome of signature verification, when performed.</param>
/// <param name="DocumentIssuedAt">Issue time of the source document, used for freshness scoring.</param>
/// <param name="Context">Evaluation-time context (tenant, clock, custom factors).</param>
public sealed record TrustWeightRequest(
    NormalizedStatement Statement,
    VexIssuer? Issuer,
    SignatureVerificationResult? SignatureVerification,
    DateTimeOffset? DocumentIssuedAt,
    TrustWeightContext Context);
/// <summary>
/// Context for trust weight computation.
/// </summary>
/// <param name="EvaluationTime">Clock reference for freshness; pass an injected time, not ad-hoc "now", for reproducibility.</param>
/// <param name="CustomFactors">Optional additive factors; only <c>double</c> values are consumed by the default engine.</param>
public sealed record TrustWeightContext(
    string? TenantId,
    DateTimeOffset EvaluationTime,
    IReadOnlyDictionary<string, object?>? CustomFactors);
/// <summary>
/// Result of trust weight computation.
/// </summary>
/// <param name="Weight">Final weight, clamped to the configured minimum/maximum range.</param>
/// <param name="Breakdown">Per-component weights that produced <paramref name="Weight"/>.</param>
/// <param name="Factors">Individual factors, in the order they were evaluated.</param>
/// <param name="Warnings">Non-fatal notes (e.g. clamping) produced during computation.</param>
public sealed record TrustWeightResult(
    NormalizedStatement Statement,
    double Weight,
    TrustWeightBreakdown Breakdown,
    IReadOnlyList<TrustWeightFactor> Factors,
    IReadOnlyList<string> Warnings);
/// <summary>
/// Breakdown of trust weight by component.
/// </summary>
public sealed record TrustWeightBreakdown(
    double IssuerWeight,
    double SignatureWeight,
    double FreshnessWeight,
    double SourceFormatWeight,
    double StatusSpecificityWeight,
    double CustomWeight);
/// <summary>
/// Individual factor contributing to trust weight.
/// </summary>
/// <param name="FactorId">Stable machine-readable identifier (e.g. "issuer_category").</param>
/// <param name="Reason">Human-readable explanation of the factor's contribution.</param>
public sealed record TrustWeightFactor(
    string FactorId,
    string Name,
    double RawValue,
    double WeightedValue,
    double Multiplier,
    string? Reason);
/// <summary>
/// Configuration for trust weight computation.
/// </summary>
/// <param name="MinimumWeight">Lower clamp applied to the combined weight.</param>
/// <param name="MaximumWeight">Upper clamp applied to the combined weight.</param>
public sealed record TrustWeightConfiguration(
    IssuerTrustWeights IssuerWeights,
    SignatureTrustWeights SignatureWeights,
    FreshnessTrustWeights FreshnessWeights,
    SourceFormatWeights SourceFormatWeights,
    StatusSpecificityWeights StatusSpecificityWeights,
    double MinimumWeight,
    double MaximumWeight);
/// <summary>
/// Trust weights based on issuer category and tier. Multipliers set the base
/// weight; tier bonus/penalty values are added to that base.
/// </summary>
public sealed record IssuerTrustWeights(
    double VendorMultiplier,
    double DistributorMultiplier,
    double CommunityMultiplier,
    double InternalMultiplier,
    double AggregatorMultiplier,
    double UnknownIssuerMultiplier,
    double AuthoritativeTierBonus,
    double TrustedTierBonus,
    double UntrustedTierPenalty);
/// <summary>
/// Trust weights based on signature verification.
/// </summary>
public sealed record SignatureTrustWeights(
    double ValidSignatureMultiplier,
    double InvalidSignaturePenalty,
    double NoSignaturePenalty,
    double ExpiredCertificatePenalty,
    double RevokedCertificatePenalty,
    double TimestampedBonus);
/// <summary>
/// Trust weights based on document freshness.
/// </summary>
/// <remarks>
/// NOTE(review): the default engine in this file categorizes age using only
/// FreshThreshold and StaleThreshold; ExpiredThreshold is declared but not
/// consulted — confirm the intended semantics.
/// </remarks>
public sealed record FreshnessTrustWeights(
    TimeSpan FreshThreshold,
    TimeSpan StaleThreshold,
    TimeSpan ExpiredThreshold,
    double FreshMultiplier,
    double StaleMultiplier,
    double ExpiredMultiplier);
/// <summary>
/// Trust weights based on source format.
/// </summary>
public sealed record SourceFormatWeights(
    double OpenVexMultiplier,
    double CsafVexMultiplier,
    double CycloneDxVexMultiplier,
    double SpdxVexMultiplier,
    double StellaOpsMultiplier);
/// <summary>
/// Trust weights based on status specificity. Values are additive adjustments,
/// not multipliers.
/// </summary>
public sealed record StatusSpecificityWeights(
    double NotAffectedBonus,
    double FixedBonus,
    double AffectedNeutral,
    double UnderInvestigationPenalty,
    double JustificationBonus);

View File

@@ -0,0 +1,445 @@
using StellaOps.VexLens.Models;
using StellaOps.VexLens.Verification;
namespace StellaOps.VexLens.Trust;
/// <summary>
/// Default implementation of <see cref="ITrustWeightEngine"/>.
/// Computes trust weights based on issuer, signature, freshness, and other factors.
/// </summary>
public sealed class TrustWeightEngine : ITrustWeightEngine
{
    // Replaced wholesale by UpdateConfiguration; records are immutable so a
    // single read per factor sees a consistent snapshot.
    private TrustWeightConfiguration _configuration;

    public TrustWeightEngine(TrustWeightConfiguration? configuration = null)
    {
        _configuration = configuration ?? CreateDefaultConfiguration();
    }

    /// <inheritdoc />
    public Task<TrustWeightResult> ComputeWeightAsync(
        TrustWeightRequest request,
        CancellationToken cancellationToken = default)
    {
        var factors = new List<TrustWeightFactor>();
        var warnings = new List<string>();
        // Compute issuer weight
        var issuerWeight = ComputeIssuerWeight(request.Issuer, factors);
        // Compute signature weight
        var signatureWeight = ComputeSignatureWeight(request.SignatureVerification, factors);
        // Compute freshness weight
        var freshnessWeight = ComputeFreshnessWeight(
            request.DocumentIssuedAt,
            request.Statement.FirstSeen,
            request.Context.EvaluationTime,
            factors);
        // Compute source format weight
        var sourceFormatWeight = ComputeSourceFormatWeight(request.Statement, factors);
        // Compute status specificity weight
        var statusWeight = ComputeStatusSpecificityWeight(request.Statement, factors);
        // Compute custom weight
        var customWeight = ComputeCustomWeight(request.Context.CustomFactors, factors);
        // Combine weights
        var breakdown = new TrustWeightBreakdown(
            IssuerWeight: issuerWeight,
            SignatureWeight: signatureWeight,
            FreshnessWeight: freshnessWeight,
            SourceFormatWeight: sourceFormatWeight,
            StatusSpecificityWeight: statusWeight,
            CustomWeight: customWeight);
        var combinedWeight = CombineWeights(breakdown);
        // Clamp to configured range. Math.Clamp returns the input unchanged when
        // in range, so exact inequality reliably detects clamping here.
        var finalWeight = Math.Clamp(combinedWeight, _configuration.MinimumWeight, _configuration.MaximumWeight);
        if (finalWeight != combinedWeight)
        {
            warnings.Add($"Weight clamped from {combinedWeight:F4} to {finalWeight:F4}");
        }
        return Task.FromResult(new TrustWeightResult(
            Statement: request.Statement,
            Weight: finalWeight,
            Breakdown: breakdown,
            Factors: factors,
            Warnings: warnings));
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<TrustWeightResult>> ComputeWeightsBatchAsync(
        IEnumerable<TrustWeightRequest> requests,
        CancellationToken cancellationToken = default)
    {
        var results = new List<TrustWeightResult>();
        foreach (var request in requests)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var result = await ComputeWeightAsync(request, cancellationToken);
            results.Add(result);
        }
        return results;
    }

    /// <inheritdoc />
    public TrustWeightConfiguration GetConfiguration() => _configuration;

    /// <inheritdoc />
    public void UpdateConfiguration(TrustWeightConfiguration configuration)
    {
        _configuration = configuration;
    }

    /// <summary>
    /// Base weight from issuer category plus an additive trust-tier adjustment.
    /// A missing issuer yields the unknown-issuer multiplier.
    /// </summary>
    private double ComputeIssuerWeight(VexIssuer? issuer, List<TrustWeightFactor> factors)
    {
        var config = _configuration.IssuerWeights;
        if (issuer == null)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "issuer_unknown",
                Name: "Unknown Issuer",
                RawValue: 0.0,
                WeightedValue: config.UnknownIssuerMultiplier,
                Multiplier: config.UnknownIssuerMultiplier,
                Reason: "No issuer information available"));
            return config.UnknownIssuerMultiplier;
        }
        // Base weight from category
        var categoryMultiplier = issuer.Category switch
        {
            IssuerCategory.Vendor => config.VendorMultiplier,
            IssuerCategory.Distributor => config.DistributorMultiplier,
            IssuerCategory.Community => config.CommunityMultiplier,
            IssuerCategory.Internal => config.InternalMultiplier,
            IssuerCategory.Aggregator => config.AggregatorMultiplier,
            _ => config.UnknownIssuerMultiplier
        };
        factors.Add(new TrustWeightFactor(
            FactorId: "issuer_category",
            Name: $"Issuer Category: {issuer.Category}",
            RawValue: 1.0,
            WeightedValue: categoryMultiplier,
            Multiplier: categoryMultiplier,
            Reason: $"Category '{issuer.Category}' has multiplier {categoryMultiplier:F2}"));
        // Trust tier adjustment
        var tierAdjustment = issuer.TrustTier switch
        {
            TrustTier.Authoritative => config.AuthoritativeTierBonus,
            TrustTier.Trusted => config.TrustedTierBonus,
            TrustTier.Untrusted => config.UntrustedTierPenalty,
            _ => 0.0
        };
        // Only record a factor when the adjustment is materially non-zero.
        if (Math.Abs(tierAdjustment) > 0.001)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "issuer_tier",
                Name: $"Trust Tier: {issuer.TrustTier}",
                RawValue: tierAdjustment,
                WeightedValue: tierAdjustment,
                Multiplier: 1.0,
                Reason: $"Trust tier '{issuer.TrustTier}' adjustment: {tierAdjustment:+0.00;-0.00}"));
        }
        return categoryMultiplier + tierAdjustment;
    }

    /// <summary>
    /// Weight from signature verification status; valid timestamped signatures
    /// receive an additional additive bonus.
    /// </summary>
    private double ComputeSignatureWeight(SignatureVerificationResult? verification, List<TrustWeightFactor> factors)
    {
        var config = _configuration.SignatureWeights;
        if (verification == null)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "signature_none",
                Name: "No Signature",
                RawValue: 0.0,
                WeightedValue: config.NoSignaturePenalty,
                Multiplier: config.NoSignaturePenalty,
                Reason: "Document has no signature or signature not verified"));
            return config.NoSignaturePenalty;
        }
        double weight;
        string reason;
        switch (verification.Status)
        {
            case SignatureVerificationStatus.Valid:
                weight = config.ValidSignatureMultiplier;
                reason = "Signature is valid and verified";
                break;
            case SignatureVerificationStatus.InvalidSignature:
                weight = config.InvalidSignaturePenalty;
                reason = "Signature verification failed";
                break;
            case SignatureVerificationStatus.ExpiredCertificate:
                weight = config.ExpiredCertificatePenalty;
                reason = "Certificate has expired";
                break;
            case SignatureVerificationStatus.RevokedCertificate:
                weight = config.RevokedCertificatePenalty;
                reason = "Certificate has been revoked";
                break;
            case SignatureVerificationStatus.UntrustedIssuer:
                weight = config.NoSignaturePenalty;
                reason = "Signature from untrusted issuer";
                break;
            default:
                weight = config.NoSignaturePenalty;
                reason = $"Signature status: {verification.Status}";
                break;
        }
        factors.Add(new TrustWeightFactor(
            FactorId: "signature_status",
            Name: $"Signature: {verification.Status}",
            RawValue: verification.IsValid ? 1.0 : 0.0,
            WeightedValue: weight,
            Multiplier: weight,
            Reason: reason));
        // Timestamp bonus
        if (verification.IsValid && verification.Timestamp?.IsValid == true)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "signature_timestamped",
                Name: "Timestamped Signature",
                RawValue: 1.0,
                WeightedValue: config.TimestampedBonus,
                Multiplier: 1.0,
                Reason: $"Signature has valid timestamp from {verification.Timestamp.TimestampAuthority}"));
            weight += config.TimestampedBonus;
        }
        return weight;
    }

    /// <summary>
    /// Weight from document age. Uses the document issue time when available,
    /// else the statement's first-seen time; no timestamp at all is treated as stale.
    /// </summary>
    private double ComputeFreshnessWeight(
        DateTimeOffset? documentIssuedAt,
        DateTimeOffset? statementFirstSeen,
        DateTimeOffset evaluationTime,
        List<TrustWeightFactor> factors)
    {
        var config = _configuration.FreshnessWeights;
        var referenceTime = documentIssuedAt ?? statementFirstSeen;
        if (!referenceTime.HasValue)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "freshness_unknown",
                Name: "Unknown Age",
                RawValue: 0.0,
                WeightedValue: config.StaleMultiplier,
                Multiplier: config.StaleMultiplier,
                Reason: "No timestamp available to determine freshness"));
            return config.StaleMultiplier;
        }
        var age = evaluationTime - referenceTime.Value;
        // NOTE(review): config.ExpiredThreshold is never consulted — "Expired"
        // begins at StaleThreshold. Confirm whether a fourth band (or a shifted
        // expired boundary) was intended before changing behavior.
        double weight;
        string category;
        if (age < config.FreshThreshold)
        {
            weight = config.FreshMultiplier;
            category = "Fresh";
        }
        else if (age < config.StaleThreshold)
        {
            weight = config.StaleMultiplier;
            category = "Stale";
        }
        else
        {
            weight = config.ExpiredMultiplier;
            category = "Expired";
        }
        factors.Add(new TrustWeightFactor(
            FactorId: "freshness",
            Name: $"Freshness: {category}",
            RawValue: age.TotalDays,
            WeightedValue: weight,
            Multiplier: weight,
            Reason: $"Document age: {FormatAge(age)} ({category})"));
        return weight;
    }

    /// <summary>
    /// Placeholder source-format weight. The statement does not carry its source
    /// format, so a neutral 1.0 is recorded; the real weight is applied at
    /// document level.
    /// </summary>
    private double ComputeSourceFormatWeight(NormalizedStatement statement, List<TrustWeightFactor> factors)
    {
        factors.Add(new TrustWeightFactor(
            FactorId: "source_format",
            Name: "Source Format",
            RawValue: 1.0,
            WeightedValue: 1.0,
            Multiplier: 1.0,
            Reason: "Source format weight applied at document level"));
        return 1.0;
    }

    /// <summary>
    /// Additive adjustment from the statement's status; not_affected statements
    /// with a justification earn an extra bonus.
    /// </summary>
    private double ComputeStatusSpecificityWeight(NormalizedStatement statement, List<TrustWeightFactor> factors)
    {
        var config = _configuration.StatusSpecificityWeights;
        var statusWeight = statement.Status switch
        {
            VexStatus.NotAffected => config.NotAffectedBonus,
            VexStatus.Fixed => config.FixedBonus,
            VexStatus.Affected => config.AffectedNeutral,
            VexStatus.UnderInvestigation => config.UnderInvestigationPenalty,
            _ => 0.0
        };
        factors.Add(new TrustWeightFactor(
            FactorId: "status",
            Name: $"Status: {statement.Status}",
            RawValue: 1.0,
            WeightedValue: statusWeight,
            Multiplier: 1.0,
            Reason: $"Status '{statement.Status}' weight adjustment"));
        // Justification bonus for not_affected
        if (statement.Status == VexStatus.NotAffected && statement.Justification.HasValue)
        {
            factors.Add(new TrustWeightFactor(
                FactorId: "justification",
                Name: $"Justification: {statement.Justification}",
                RawValue: 1.0,
                WeightedValue: config.JustificationBonus,
                Multiplier: 1.0,
                Reason: $"Has justification: {statement.Justification}"));
            statusWeight += config.JustificationBonus;
        }
        return statusWeight;
    }

    /// <summary>
    /// Sums caller-supplied custom factors; only <see cref="double"/> values are
    /// honored, everything else is silently ignored.
    /// </summary>
    private double ComputeCustomWeight(
        IReadOnlyDictionary<string, object?>? customFactors,
        List<TrustWeightFactor> factors)
    {
        if (customFactors == null || customFactors.Count == 0)
        {
            return 0.0;
        }
        double totalCustomWeight = 0.0;
        foreach (var (key, value) in customFactors)
        {
            if (value is double d)
            {
                factors.Add(new TrustWeightFactor(
                    FactorId: $"custom_{key}",
                    Name: $"Custom: {key}",
                    RawValue: d,
                    WeightedValue: d,
                    Multiplier: 1.0,
                    Reason: $"Custom factor '{key}'"));
                totalCustomWeight += d;
            }
        }
        return totalCustomWeight;
    }

    /// <summary>
    /// Combines the breakdown: issuer × signature × freshness as the multiplicative
    /// base, plus additive status and custom adjustments. SourceFormatWeight is
    /// intentionally excluded (always 1.0 here, see ComputeSourceFormatWeight).
    /// </summary>
    private double CombineWeights(TrustWeightBreakdown breakdown)
    {
        // Multiplicative combination with additive adjustments
        var baseWeight = breakdown.IssuerWeight * breakdown.SignatureWeight * breakdown.FreshnessWeight;
        var adjustments = breakdown.StatusSpecificityWeight + breakdown.CustomWeight;
        return baseWeight + adjustments;
    }

    /// <summary>
    /// Renders an age as a human-readable string (years/months/days/hours).
    /// </summary>
    private static string FormatAge(TimeSpan age)
    {
        if (age.TotalDays >= 365)
        {
            return $"{age.TotalDays / 365:F1} years";
        }
        if (age.TotalDays >= 30)
        {
            return $"{age.TotalDays / 30:F1} months";
        }
        if (age.TotalDays >= 1)
        {
            return $"{age.TotalDays:F1} days";
        }
        return $"{age.TotalHours:F1} hours";
    }

    /// <summary>
    /// Builds the default configuration used when none is supplied.
    /// </summary>
    public static TrustWeightConfiguration CreateDefaultConfiguration()
    {
        return new TrustWeightConfiguration(
            IssuerWeights: new IssuerTrustWeights(
                VendorMultiplier: 1.0,
                DistributorMultiplier: 0.9,
                CommunityMultiplier: 0.7,
                InternalMultiplier: 0.8,
                AggregatorMultiplier: 0.6,
                UnknownIssuerMultiplier: 0.3,
                AuthoritativeTierBonus: 0.2,
                TrustedTierBonus: 0.1,
                UntrustedTierPenalty: -0.3),
            SignatureWeights: new SignatureTrustWeights(
                ValidSignatureMultiplier: 1.0,
                InvalidSignaturePenalty: 0.1,
                NoSignaturePenalty: 0.5,
                ExpiredCertificatePenalty: 0.3,
                RevokedCertificatePenalty: 0.1,
                TimestampedBonus: 0.1),
            FreshnessWeights: new FreshnessTrustWeights(
                FreshThreshold: TimeSpan.FromDays(7),
                StaleThreshold: TimeSpan.FromDays(90),
                ExpiredThreshold: TimeSpan.FromDays(365),
                FreshMultiplier: 1.0,
                StaleMultiplier: 0.8,
                ExpiredMultiplier: 0.5),
            SourceFormatWeights: new SourceFormatWeights(
                OpenVexMultiplier: 1.0,
                CsafVexMultiplier: 1.0,
                CycloneDxVexMultiplier: 0.95,
                SpdxVexMultiplier: 0.9,
                StellaOpsMultiplier: 1.0),
            StatusSpecificityWeights: new StatusSpecificityWeights(
                NotAffectedBonus: 0.1,
                FixedBonus: 0.05,
                AffectedNeutral: 0.0,
                UnderInvestigationPenalty: -0.1,
                JustificationBonus: 0.1),
            MinimumWeight: 0.0,
            MaximumWeight: 1.5);
    }
}

View File

@@ -0,0 +1,206 @@
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Verification;
/// <summary>
/// Interface for managing VEX document issuers and their trust configuration.
/// </summary>
public interface IIssuerDirectory
{
    /// <summary>
    /// Gets an issuer by ID. Returns null when the issuer is not registered.
    /// </summary>
    Task<IssuerRecord?> GetIssuerAsync(
        string issuerId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets an issuer by key fingerprint. Returns null when no active mapping exists.
    /// </summary>
    Task<IssuerRecord?> GetIssuerByKeyFingerprintAsync(
        string fingerprint,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Lists all registered issuers.
    /// </summary>
    Task<IReadOnlyList<IssuerRecord>> ListIssuersAsync(
        IssuerListOptions? options = null,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Registers or updates an issuer.
    /// </summary>
    Task<IssuerRecord> RegisterIssuerAsync(
        IssuerRegistration registration,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Revokes an issuer's trust.
    /// </summary>
    /// <returns>False when the issuer is not registered.</returns>
    Task<bool> RevokeIssuerAsync(
        string issuerId,
        string reason,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Adds a key fingerprint to an issuer.
    /// </summary>
    /// <remarks>
    /// NOTE(review): behavior for an unknown issuer is implementation-defined
    /// (the in-memory implementation throws) — confirm the intended contract.
    /// </remarks>
    Task<IssuerRecord> AddKeyFingerprintAsync(
        string issuerId,
        KeyFingerprintRegistration keyRegistration,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Revokes a key fingerprint.
    /// </summary>
    /// <returns>False when the issuer or fingerprint is not registered.</returns>
    Task<bool> RevokeKeyFingerprintAsync(
        string issuerId,
        string fingerprint,
        string reason,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Validates an issuer's trust status.
    /// </summary>
    Task<IssuerTrustValidation> ValidateTrustAsync(
        string issuerId,
        string? keyFingerprint,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Record for a registered issuer.
/// </summary>
/// <param name="RevokedAt">Set when <paramref name="Status"/> is Revoked; null otherwise.</param>
public sealed record IssuerRecord(
    string IssuerId,
    string Name,
    IssuerCategory Category,
    TrustTier TrustTier,
    IssuerStatus Status,
    IReadOnlyList<KeyFingerprintRecord> KeyFingerprints,
    IssuerMetadata? Metadata,
    DateTimeOffset RegisteredAt,
    DateTimeOffset? LastUpdatedAt,
    DateTimeOffset? RevokedAt,
    string? RevocationReason);
/// <summary>
/// Status of an issuer.
/// </summary>
public enum IssuerStatus
{
    /// <summary>Issuer is registered and usable.</summary>
    Active,
    /// <summary>Issuer is temporarily not trusted.</summary>
    Suspended,
    /// <summary>Issuer trust has been permanently withdrawn.</summary>
    Revoked
}
/// <summary>
/// Record for a key fingerprint.
/// </summary>
public sealed record KeyFingerprintRecord(
    string Fingerprint,
    KeyType KeyType,
    string? Algorithm,
    KeyFingerprintStatus Status,
    DateTimeOffset RegisteredAt,
    DateTimeOffset? ExpiresAt,
    DateTimeOffset? RevokedAt,
    string? RevocationReason);
/// <summary>
/// Type of cryptographic key.
/// </summary>
public enum KeyType
{
    Pgp,
    X509,
    Jwk,
    Ssh,
    Sigstore
}
/// <summary>
/// Status of a key fingerprint.
/// </summary>
public enum KeyFingerprintStatus
{
    Active,
    Expired,
    Revoked
}
/// <summary>
/// Metadata for an issuer.
/// </summary>
public sealed record IssuerMetadata(
    string? Description,
    string? Uri,
    string? Email,
    string? LogoUri,
    IReadOnlyList<string>? Tags,
    IReadOnlyDictionary<string, string>? Custom);
/// <summary>
/// Options for listing issuers. All filters are optional and combined with AND;
/// Offset/Limit page the filtered result.
/// </summary>
public sealed record IssuerListOptions(
    IssuerCategory? Category,
    TrustTier? MinimumTrustTier,
    IssuerStatus? Status,
    string? SearchTerm,
    int? Limit,
    int? Offset);
/// <summary>
/// Registration for a new issuer.
/// </summary>
public sealed record IssuerRegistration(
    string IssuerId,
    string Name,
    IssuerCategory Category,
    TrustTier TrustTier,
    IReadOnlyList<KeyFingerprintRegistration>? InitialKeys,
    IssuerMetadata? Metadata);
/// <summary>
/// Registration for a key fingerprint.
/// </summary>
/// <param name="Fingerprint">Key fingerprint; expected format (hash, casing) is not enforced here — callers should normalize.</param>
public sealed record KeyFingerprintRegistration(
    string Fingerprint,
    KeyType KeyType,
    string? Algorithm,
    DateTimeOffset? ExpiresAt,
    byte[]? PublicKey);
/// <summary>
/// Result of trust validation.
/// </summary>
/// <param name="KeyStatus">Null when no key fingerprint was supplied for validation.</param>
public sealed record IssuerTrustValidation(
    bool IsTrusted,
    TrustTier EffectiveTrustTier,
    IssuerTrustStatus IssuerStatus,
    KeyTrustStatus? KeyStatus,
    IReadOnlyList<string> Warnings);
/// <summary>
/// Trust status of an issuer.
/// </summary>
public enum IssuerTrustStatus
{
    Trusted,
    NotRegistered,
    Suspended,
    Revoked
}
/// <summary>
/// Trust status of a key.
/// </summary>
public enum KeyTrustStatus
{
    Valid,
    NotRegistered,
    Expired,
    Revoked
}

View File

@@ -0,0 +1,182 @@
namespace StellaOps.VexLens.Verification;
/// <summary>
/// Interface for VEX document signature verification.
/// </summary>
public interface ISignatureVerifier
{
    /// <summary>
    /// Gets the signature formats this verifier supports.
    /// </summary>
    IReadOnlyList<SignatureFormat> SupportedFormats { get; }
    /// <summary>
    /// Verifies the signature on a VEX document.
    /// </summary>
    /// <param name="request">Content, optional detached signature, format, and verification options.</param>
    /// <returns>A result carrying validity, signer/chain details, and any errors or warnings.</returns>
    Task<SignatureVerificationResult> VerifyAsync(
        SignatureVerificationRequest request,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Extracts signature information without full verification.
    /// Extracted data is untrusted until <see cref="VerifyAsync"/> succeeds.
    /// </summary>
    Task<SignatureExtractionResult> ExtractSignatureInfoAsync(
        byte[] signedData,
        SignatureFormat format,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Request for signature verification.
/// </summary>
/// <param name="DetachedSignature">Signature bytes when the format is detached; null for enveloped formats.</param>
public sealed record SignatureVerificationRequest(
    byte[] Content,
    byte[]? DetachedSignature,
    SignatureFormat Format,
    SignatureVerificationOptions Options);
/// <summary>
/// Options for signature verification.
/// </summary>
/// <param name="VerificationTime">Point in time to validate against; null means "now" (implementation-defined clock).</param>
public sealed record SignatureVerificationOptions(
    bool RequireTimestamp,
    bool AllowExpiredCertificates,
    bool CheckRevocation,
    IReadOnlyList<string>? TrustedIssuers,
    IReadOnlyList<string>? TrustedKeyFingerprints,
    DateTimeOffset? VerificationTime);
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record SignatureVerificationResult(
    bool IsValid,
    SignatureVerificationStatus Status,
    SignerInfo? Signer,
    IReadOnlyList<CertificateInfo>? CertificateChain,
    TimestampInfo? Timestamp,
    IReadOnlyList<SignatureVerificationError> Errors,
    IReadOnlyList<SignatureVerificationWarning> Warnings);
/// <summary>
/// Status of signature verification.
/// </summary>
public enum SignatureVerificationStatus
{
    /// <summary>Signature verified successfully.</summary>
    Valid,
    /// <summary>Cryptographic verification failed.</summary>
    InvalidSignature,
    /// <summary>Signing certificate was outside its validity period.</summary>
    ExpiredCertificate,
    /// <summary>Signing certificate has been revoked.</summary>
    RevokedCertificate,
    /// <summary>Signer is not in the trusted issuer/key set.</summary>
    UntrustedIssuer,
    /// <summary>No signature was found on the content.</summary>
    MissingSignature,
    /// <summary>The signature format is not supported by this verifier.</summary>
    UnsupportedFormat,
    /// <summary>The certificate chain could not be built or validated.</summary>
    CertificateChainError,
    /// <summary>The timestamp was missing or invalid when required.</summary>
    TimestampError,
    /// <summary>Verification failed for an unclassified reason.</summary>
    UnknownError
}
/// <summary>
/// Information about the signer.
/// </summary>
public sealed record SignerInfo(
    string IssuerId,
    string? Name,
    string? Email,
    string? Organization,
    string KeyFingerprint,
    string Algorithm,
    DateTimeOffset? SignedAt);
/// <summary>
/// Information about a certificate in the chain.
/// </summary>
public sealed record CertificateInfo(
    string Subject,
    string Issuer,
    string SerialNumber,
    string Fingerprint,
    DateTimeOffset NotBefore,
    DateTimeOffset NotAfter,
    IReadOnlyList<string> KeyUsages,
    bool IsSelfSigned,
    bool IsCA);
/// <summary>
/// Information about a timestamp.
/// </summary>
public sealed record TimestampInfo(
    DateTimeOffset Timestamp,
    string? TimestampAuthority,
    string? TimestampAuthorityUri,
    bool IsValid);
/// <summary>
/// Error during signature verification.
/// </summary>
public sealed record SignatureVerificationError(
    string Code,
    string Message,
    string? Detail);
/// <summary>
/// Warning during signature verification.
/// </summary>
public sealed record SignatureVerificationWarning(
    string Code,
    string Message);
/// <summary>
/// Result of signature extraction. Extracted signer/certificate data is
/// unverified and must not be trusted on its own.
/// </summary>
public sealed record SignatureExtractionResult(
    bool Success,
    SignatureFormat? DetectedFormat,
    SignerInfo? Signer,
    IReadOnlyList<CertificateInfo>? Certificates,
    string? ErrorMessage);
/// <summary>
/// Supported signature formats.
/// </summary>
public enum SignatureFormat
{
    /// <summary>
    /// Detached PGP/GPG signature (.sig, .asc).
    /// </summary>
    PgpDetached,
    /// <summary>
    /// Inline PGP/GPG signature (cleartext signed).
    /// </summary>
    PgpInline,
    /// <summary>
    /// PKCS#7/CMS detached signature (.p7s).
    /// </summary>
    Pkcs7Detached,
    /// <summary>
    /// PKCS#7/CMS enveloped signature.
    /// </summary>
    Pkcs7Enveloped,
    /// <summary>
    /// JSON Web Signature (JWS).
    /// </summary>
    Jws,
    /// <summary>
    /// DSSE envelope (Dead Simple Signing Envelope).
    /// </summary>
    Dsse,
    /// <summary>
    /// Sigstore bundle format.
    /// </summary>
    SigstoreBundle,
    /// <summary>
    /// in-toto attestation envelope.
    /// </summary>
    InToto
}

View File

@@ -0,0 +1,310 @@
using System.Collections.Concurrent;
using StellaOps.VexLens.Models;
namespace StellaOps.VexLens.Verification;
/// <summary>
/// In-memory implementation of <see cref="IIssuerDirectory"/>.
/// Suitable for testing and single-instance deployments.
/// </summary>
public sealed class InMemoryIssuerDirectory : IIssuerDirectory
{
private readonly ConcurrentDictionary<string, IssuerRecord> _issuers = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, string> _fingerprintToIssuer = new(StringComparer.OrdinalIgnoreCase);
public Task<IssuerRecord?> GetIssuerAsync(
    string issuerId,
    CancellationToken cancellationToken = default)
{
    // Case-insensitive lookup (the dictionary uses OrdinalIgnoreCase); null when absent.
    return Task.FromResult(_issuers.GetValueOrDefault(issuerId));
}
public Task<IssuerRecord?> GetIssuerByKeyFingerprintAsync(
    string fingerprint,
    CancellationToken cancellationToken = default)
{
    // Two-step lookup: fingerprint -> issuer id -> issuer record. Either step
    // may miss (unknown fingerprint, or a mapping whose issuer entry is gone).
    IssuerRecord? issuer = null;
    if (_fingerprintToIssuer.TryGetValue(fingerprint, out var ownerId))
    {
        _issuers.TryGetValue(ownerId, out issuer);
    }
    return Task.FromResult(issuer);
}
public Task<IReadOnlyList<IssuerRecord>> ListIssuersAsync(
    IssuerListOptions? options = null,
    CancellationToken cancellationToken = default)
{
    var query = _issuers.Values.AsEnumerable();
    if (options != null)
    {
        if (options.Category.HasValue)
        {
            query = query.Where(i => i.Category == options.Category.Value);
        }
        if (options.MinimumTrustTier.HasValue)
        {
            query = query.Where(i => i.TrustTier >= options.MinimumTrustTier.Value);
        }
        if (options.Status.HasValue)
        {
            query = query.Where(i => i.Status == options.Status.Value);
        }
        if (!string.IsNullOrWhiteSpace(options.SearchTerm))
        {
            var term = options.SearchTerm;
            query = query.Where(i =>
                i.Name.Contains(term, StringComparison.OrdinalIgnoreCase) ||
                i.IssuerId.Contains(term, StringComparison.OrdinalIgnoreCase));
        }
    }
    // Sort BEFORE paging: the previous implementation applied Skip/Take to the
    // unordered ConcurrentDictionary snapshot and sorted afterwards, so a page
    // was an arbitrary (and potentially changing) subset. The IssuerId tie-break
    // makes paging deterministic for duplicate names.
    query = query
        .OrderBy(i => i.Name, StringComparer.OrdinalIgnoreCase)
        .ThenBy(i => i.IssuerId, StringComparer.OrdinalIgnoreCase);
    if (options?.Offset is int offset)
    {
        query = query.Skip(offset);
    }
    if (options?.Limit is int limit)
    {
        query = query.Take(limit);
    }
    var result = query.ToList();
    return Task.FromResult<IReadOnlyList<IssuerRecord>>(result);
}
public Task<IssuerRecord> RegisterIssuerAsync(
    IssuerRegistration registration,
    CancellationToken cancellationToken = default)
{
    var now = DateTimeOffset.UtcNow;
    // This method "registers or updates". On update, drop the fingerprint
    // mappings of the previous registration so stale keys no longer resolve
    // to this issuer (previously they were left behind).
    if (_issuers.TryGetValue(registration.IssuerId, out var existing))
    {
        foreach (var oldKey in existing.KeyFingerprints)
        {
            _fingerprintToIssuer.TryRemove(oldKey.Fingerprint, out _);
        }
    }
    var keyRecords = new List<KeyFingerprintRecord>();
    if (registration.InitialKeys != null)
    {
        foreach (var key in registration.InitialKeys)
        {
            keyRecords.Add(new KeyFingerprintRecord(
                Fingerprint: key.Fingerprint,
                KeyType: key.KeyType,
                Algorithm: key.Algorithm,
                Status: KeyFingerprintStatus.Active,
                RegisteredAt: now,
                ExpiresAt: key.ExpiresAt,
                RevokedAt: null,
                RevocationReason: null));
            _fingerprintToIssuer[key.Fingerprint] = registration.IssuerId;
        }
    }
    var record = new IssuerRecord(
        IssuerId: registration.IssuerId,
        Name: registration.Name,
        Category: registration.Category,
        TrustTier: registration.TrustTier,
        Status: IssuerStatus.Active,
        KeyFingerprints: keyRecords,
        Metadata: registration.Metadata,
        // Preserve the original registration time on update; stamp LastUpdatedAt
        // only when this overwrites an existing record.
        RegisteredAt: existing?.RegisteredAt ?? now,
        LastUpdatedAt: existing != null ? now : null,
        RevokedAt: null,
        RevocationReason: null);
    _issuers[registration.IssuerId] = record;
    return Task.FromResult(record);
}
/// <summary>
/// Revokes an issuer: marks the issuer and all of its keys revoked and removes
/// its fingerprints from the reverse-lookup index. Returns false when unknown.
/// </summary>
public Task<bool> RevokeIssuerAsync(
    string issuerId,
    string reason,
    CancellationToken cancellationToken = default)
{
    if (!_issuers.TryGetValue(issuerId, out var current))
    {
        return Task.FromResult(false);
    }
    var now = DateTimeOffset.UtcNow;

    // BUG FIX: also mark each key record as Revoked. Previously only the
    // fingerprint index was cleared, leaving keys "Active" inside a revoked
    // issuer record — inconsistent with RevokeKeyFingerprintAsync.
    var revokedKeys = current.KeyFingerprints
        .Select(k => k.Status == KeyFingerprintStatus.Revoked
            ? k
            : k with
            {
                Status = KeyFingerprintStatus.Revoked,
                RevokedAt = now,
                RevocationReason = reason
            })
        .ToList();

    var updated = current with
    {
        Status = IssuerStatus.Revoked,
        KeyFingerprints = revokedKeys,
        RevokedAt = now,
        RevocationReason = reason,
        LastUpdatedAt = now
    };
    _issuers[issuerId] = updated;

    // Remove the fingerprints from the reverse-lookup index as well.
    foreach (var key in current.KeyFingerprints)
    {
        _fingerprintToIssuer.TryRemove(key.Fingerprint, out _);
    }
    return Task.FromResult(true);
}
/// <summary>
/// Adds a new active key to an existing issuer and indexes its fingerprint.
/// </summary>
/// <exception cref="InvalidOperationException">The issuer is not registered.</exception>
public Task<IssuerRecord> AddKeyFingerprintAsync(
    string issuerId,
    KeyFingerprintRegistration keyRegistration,
    CancellationToken cancellationToken = default)
{
    if (!_issuers.TryGetValue(issuerId, out var existing))
    {
        throw new InvalidOperationException($"Issuer '{issuerId}' not found");
    }

    var timestamp = DateTimeOffset.UtcNow;
    var addedKey = new KeyFingerprintRecord(
        Fingerprint: keyRegistration.Fingerprint,
        KeyType: keyRegistration.KeyType,
        Algorithm: keyRegistration.Algorithm,
        Status: KeyFingerprintStatus.Active,
        RegisteredAt: timestamp,
        ExpiresAt: keyRegistration.ExpiresAt,
        RevokedAt: null,
        RevocationReason: null);

    // Copy-on-write: build a new key list and a new record via `with`.
    var keys = new List<KeyFingerprintRecord>(existing.KeyFingerprints) { addedKey };
    var updated = existing with
    {
        KeyFingerprints = keys,
        LastUpdatedAt = timestamp
    };

    _issuers[issuerId] = updated;
    _fingerprintToIssuer[keyRegistration.Fingerprint] = issuerId;
    return Task.FromResult(updated);
}
/// <summary>
/// Revokes a single key of an issuer and removes it from the reverse-lookup
/// index. Returns false when the issuer or key is not found.
/// </summary>
public Task<bool> RevokeKeyFingerprintAsync(
    string issuerId,
    string fingerprint,
    string reason,
    CancellationToken cancellationToken = default)
{
    if (!_issuers.TryGetValue(issuerId, out var current))
    {
        return Task.FromResult(false);
    }

    var updatedKeys = current.KeyFingerprints.ToList();
    // BUG FIX: match case-insensitively, consistent with ValidateTrustAsync.
    // Previously this used case-sensitive `==`, so a fingerprint that validated
    // could still fail to revoke.
    var index = updatedKeys.FindIndex(k =>
        k.Fingerprint.Equals(fingerprint, StringComparison.OrdinalIgnoreCase));
    if (index < 0)
    {
        return Task.FromResult(false);
    }

    var now = DateTimeOffset.UtcNow;
    // Remove using the fingerprint as stored at registration time, so the
    // index entry is found even when the caller used different casing.
    var storedFingerprint = updatedKeys[index].Fingerprint;
    updatedKeys[index] = updatedKeys[index] with
    {
        Status = KeyFingerprintStatus.Revoked,
        RevokedAt = now,
        RevocationReason = reason
    };

    var updated = current with
    {
        KeyFingerprints = updatedKeys,
        LastUpdatedAt = now
    };
    _issuers[issuerId] = updated;
    _fingerprintToIssuer.TryRemove(storedFingerprint, out _);
    return Task.FromResult(true);
}
/// <summary>
/// Validates whether an issuer (and optionally one of its keys) is trusted.
/// An unknown issuer yields NotRegistered/Unknown tier; a non-active issuer
/// yields Untrusted; otherwise the key (when given) must exist, be unrevoked
/// and unexpired for the issuer's own trust tier to apply.
/// </summary>
public Task<IssuerTrustValidation> ValidateTrustAsync(
    string issuerId,
    string? keyFingerprint,
    CancellationToken cancellationToken = default)
{
    if (!_issuers.TryGetValue(issuerId, out var issuer))
    {
        return Task.FromResult(new IssuerTrustValidation(
            IsTrusted: false,
            EffectiveTrustTier: TrustTier.Unknown,
            IssuerStatus: IssuerTrustStatus.NotRegistered,
            KeyStatus: null,
            Warnings: ["Issuer is not registered in the directory"]));
    }

    // Map directory status onto the trust-status vocabulary.
    var mappedStatus = issuer.Status switch
    {
        IssuerStatus.Active => IssuerTrustStatus.Trusted,
        IssuerStatus.Suspended => IssuerTrustStatus.Suspended,
        IssuerStatus.Revoked => IssuerTrustStatus.Revoked,
        _ => IssuerTrustStatus.NotRegistered
    };

    if (mappedStatus != IssuerTrustStatus.Trusted)
    {
        return Task.FromResult(new IssuerTrustValidation(
            IsTrusted: false,
            EffectiveTrustTier: TrustTier.Untrusted,
            IssuerStatus: mappedStatus,
            KeyStatus: null,
            Warnings: [$"Issuer status is {issuer.Status}"]));
    }

    var warnings = new List<string>();
    KeyTrustStatus? keyStatus = null;
    if (!string.IsNullOrWhiteSpace(keyFingerprint))
    {
        // Fingerprints are compared case-insensitively.
        var match = issuer.KeyFingerprints.FirstOrDefault(k =>
            string.Equals(k.Fingerprint, keyFingerprint, StringComparison.OrdinalIgnoreCase));
        if (match is null)
        {
            keyStatus = KeyTrustStatus.NotRegistered;
            warnings.Add("Key fingerprint is not registered for this issuer");
        }
        else if (match.Status == KeyFingerprintStatus.Revoked)
        {
            keyStatus = KeyTrustStatus.Revoked;
            warnings.Add($"Key was revoked: {match.RevocationReason}");
        }
        else if (match.ExpiresAt is { } expiry && expiry < DateTimeOffset.UtcNow)
        {
            keyStatus = KeyTrustStatus.Expired;
            warnings.Add($"Key expired on {expiry:O}");
        }
        else
        {
            keyStatus = KeyTrustStatus.Valid;
        }
    }

    // The issuer is active here; trust hinges only on the key check (if any).
    var trusted = keyStatus is null or KeyTrustStatus.Valid;
    return Task.FromResult(new IssuerTrustValidation(
        IsTrusted: trusted,
        EffectiveTrustTier: trusted ? issuer.TrustTier : TrustTier.Untrusted,
        IssuerStatus: mappedStatus,
        KeyStatus: keyStatus,
        Warnings: warnings));
}
}

View File

@@ -0,0 +1,424 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.VexLens.Verification;
/// <summary>
/// Default implementation of <see cref="ISignatureVerifier"/>.
/// Provides basic signature verification with extensible format support.
/// </summary>
public sealed class SignatureVerifier : ISignatureVerifier
{
    private readonly IIssuerDirectory? _issuerDirectory;
    private readonly Dictionary<SignatureFormat, ISignatureFormatHandler> _handlers = [];

    /// <summary>
    /// Creates a verifier with the default DSSE and JWS handlers registered.
    /// </summary>
    /// <param name="issuerDirectory">
    /// Optional directory used to validate signer trust after a structurally
    /// valid signature is found; when null, trust validation is skipped.
    /// </param>
    public SignatureVerifier(IIssuerDirectory? issuerDirectory = null)
    {
        _issuerDirectory = issuerDirectory;
        // Register default handlers; callers may add or replace via RegisterHandler.
        RegisterHandler(new DsseSignatureHandler());
        RegisterHandler(new JwsSignatureHandler());
    }

    /// <summary>Formats that currently have a registered handler.</summary>
    public IReadOnlyList<SignatureFormat> SupportedFormats =>
        _handlers.Keys.ToList();

    /// <summary>Registers (or replaces) the handler for the handler's format.</summary>
    public void RegisterHandler(ISignatureFormatHandler handler)
    {
        _handlers[handler.Format] = handler;
    }

    /// <summary>
    /// Verifies the request with the handler for its format, then — when an
    /// issuer directory is configured and a signer was identified — checks the
    /// signer's trust. An untrusted signer downgrades the result to invalid.
    /// </summary>
    public async Task<SignatureVerificationResult> VerifyAsync(
        SignatureVerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        if (!_handlers.TryGetValue(request.Format, out var handler))
        {
            return new SignatureVerificationResult(
                IsValid: false,
                Status: SignatureVerificationStatus.UnsupportedFormat,
                Signer: null,
                CertificateChain: null,
                Timestamp: null,
                Errors: [new SignatureVerificationError(
                    "ERR_SIG_001",
                    $"Unsupported signature format: {request.Format}",
                    null)],
                Warnings: []);
        }

        var result = await handler.VerifyAsync(request, cancellationToken);

        // Validate against issuer directory if available.
        if (result.IsValid && _issuerDirectory != null && result.Signer != null)
        {
            var trustValidation = await _issuerDirectory.ValidateTrustAsync(
                result.Signer.IssuerId,
                result.Signer.KeyFingerprint,
                cancellationToken);
            if (!trustValidation.IsTrusted)
            {
                var warnings = result.Warnings.ToList();
                warnings.AddRange(trustValidation.Warnings.Select(w =>
                    new SignatureVerificationWarning("WARN_TRUST", w)));
                return result with
                {
                    // BUG FIX: previously IsValid stayed true even though the
                    // status was downgraded to UntrustedIssuer/RevokedCertificate,
                    // so callers gating on IsValid accepted untrusted signatures.
                    IsValid = false,
                    Status = trustValidation.IssuerStatus switch
                    {
                        IssuerTrustStatus.NotRegistered => SignatureVerificationStatus.UntrustedIssuer,
                        IssuerTrustStatus.Revoked => SignatureVerificationStatus.RevokedCertificate,
                        _ => SignatureVerificationStatus.UntrustedIssuer
                    },
                    Warnings = warnings
                };
            }
        }

        return result;
    }

    /// <summary>
    /// Extracts signer metadata from signed data without verifying it.
    /// </summary>
    public async Task<SignatureExtractionResult> ExtractSignatureInfoAsync(
        byte[] signedData,
        SignatureFormat format,
        CancellationToken cancellationToken = default)
    {
        if (!_handlers.TryGetValue(format, out var handler))
        {
            return new SignatureExtractionResult(
                Success: false,
                DetectedFormat: null,
                Signer: null,
                Certificates: null,
                ErrorMessage: $"Unsupported signature format: {format}");
        }

        return await handler.ExtractInfoAsync(signedData, cancellationToken);
    }
}
/// <summary>
/// Interface for signature format-specific handlers.
/// Implementations are registered with <see cref="SignatureVerifier.RegisterHandler"/>
/// and selected by the <see cref="Format"/> they declare.
/// </summary>
public interface ISignatureFormatHandler
{
    /// <summary>The signature format this handler understands.</summary>
    SignatureFormat Format { get; }

    /// <summary>Verifies the signature carried in the request's content.</summary>
    Task<SignatureVerificationResult> VerifyAsync(
        SignatureVerificationRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>Extracts signer metadata from signed data without verifying it.</summary>
    Task<SignatureExtractionResult> ExtractInfoAsync(
        byte[] signedData,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Handler for DSSE (Dead Simple Signing Envelope) signatures.
/// Validates envelope structure only; no cryptographic verification is
/// performed (public keys are not available here), and a warning records that.
/// </summary>
public sealed class DsseSignatureHandler : ISignatureFormatHandler
{
    // CA1869: reuse one options instance instead of allocating per call.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNameCaseInsensitive = true
    };

    public SignatureFormat Format => SignatureFormat.Dsse;

    /// <summary>
    /// Parses the envelope and checks it has at least one signature.
    /// Returns a structurally-valid result with a "no crypto check" warning.
    /// </summary>
    public Task<SignatureVerificationResult> VerifyAsync(
        SignatureVerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var envelope = ParseDsseEnvelope(request.Content);
            if (envelope == null)
            {
                return Task.FromResult(CreateError("ERR_DSSE_001", "Invalid DSSE envelope format"));
            }
            if (envelope.Signatures == null || envelope.Signatures.Count == 0)
            {
                return Task.FromResult(CreateError("ERR_DSSE_002", "DSSE envelope has no signatures"));
            }

            // Signer identity is taken from the first signature only.
            var signer = ExtractSignerFromDsse(envelope.Signatures[0]);

            var warnings = new List<SignatureVerificationWarning>
            {
                new("WARN_DSSE_001", "Cryptographic verification not performed; structure validated only")
            };
            return Task.FromResult(new SignatureVerificationResult(
                IsValid: true,
                Status: SignatureVerificationStatus.Valid,
                Signer: signer,
                CertificateChain: null,
                Timestamp: null,
                Errors: [],
                Warnings: warnings));
        }
        catch (Exception ex)
        {
            return Task.FromResult(CreateError("ERR_DSSE_999", $"DSSE parsing error: {ex.Message}"));
        }
    }

    /// <summary>Extracts signer metadata from an envelope without verifying it.</summary>
    public Task<SignatureExtractionResult> ExtractInfoAsync(
        byte[] signedData,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var envelope = ParseDsseEnvelope(signedData);
            if (envelope == null)
            {
                return Task.FromResult(new SignatureExtractionResult(
                    Success: false,
                    DetectedFormat: SignatureFormat.Dsse,
                    Signer: null,
                    Certificates: null,
                    ErrorMessage: "Invalid DSSE envelope format"));
            }
            var signer = envelope.Signatures?.Count > 0
                ? ExtractSignerFromDsse(envelope.Signatures[0])
                : null;
            return Task.FromResult(new SignatureExtractionResult(
                Success: true,
                DetectedFormat: SignatureFormat.Dsse,
                Signer: signer,
                Certificates: null,
                ErrorMessage: null));
        }
        catch (Exception ex)
        {
            return Task.FromResult(new SignatureExtractionResult(
                Success: false,
                DetectedFormat: SignatureFormat.Dsse,
                Signer: null,
                Certificates: null,
                ErrorMessage: ex.Message));
        }
    }

    // Deserializes the UTF-8 JSON envelope; returns null on any parse failure.
    private static DsseEnvelope? ParseDsseEnvelope(byte[] data)
    {
        try
        {
            var json = Encoding.UTF8.GetString(data);
            return JsonSerializer.Deserialize<DsseEnvelope>(json, SerializerOptions);
        }
        catch
        {
            return null;
        }
    }

    // Builds SignerInfo from a signature's keyid; returns null when absent.
    private static SignerInfo? ExtractSignerFromDsse(DsseSignature sig)
    {
        if (string.IsNullOrEmpty(sig.KeyId))
        {
            return null;
        }

        // Strip a conventional "SHA256:" prefix to obtain the bare fingerprint.
        // FIX (CA1310): use an ordinal comparison; the parameterless StartsWith
        // is culture-sensitive and can misbehave under some cultures.
        var fingerprint = sig.KeyId;
        if (fingerprint.StartsWith("SHA256:", StringComparison.Ordinal))
        {
            fingerprint = fingerprint[7..];
        }
        return new SignerInfo(
            IssuerId: sig.KeyId,
            Name: null,
            Email: null,
            Organization: null,
            KeyFingerprint: fingerprint,
            Algorithm: "unknown",
            SignedAt: null);
    }

    // Shared shape for all structural-failure results.
    private static SignatureVerificationResult CreateError(string code, string message)
    {
        return new SignatureVerificationResult(
            IsValid: false,
            Status: SignatureVerificationStatus.InvalidSignature,
            Signer: null,
            CertificateChain: null,
            Timestamp: null,
            Errors: [new SignatureVerificationError(code, message, null)],
            Warnings: []);
    }

    // Minimal DSSE envelope shape (payloadType/payload/signatures).
    private sealed class DsseEnvelope
    {
        public string? PayloadType { get; set; }
        public string? Payload { get; set; }
        public List<DsseSignature>? Signatures { get; set; }
    }

    private sealed class DsseSignature
    {
        public string? KeyId { get; set; }
        public string? Sig { get; set; }
    }
}
/// <summary>
/// Handler for JWS (JSON Web Signature) signatures in compact serialization
/// (header.payload.signature). Validates structure only; no cryptographic
/// verification is performed, and a warning records that.
/// </summary>
public sealed class JwsSignatureHandler : ISignatureFormatHandler
{
    // CA1869: reuse one options instance instead of allocating per call.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNameCaseInsensitive = true
    };

    public SignatureFormat Format => SignatureFormat.Jws;

    /// <summary>
    /// Splits the compact JWS, parses the protected header, and returns a
    /// structurally-valid result with a "no crypto check" warning.
    /// </summary>
    public Task<SignatureVerificationResult> VerifyAsync(
        SignatureVerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var parts = Encoding.UTF8.GetString(request.Content).Split('.');
            if (parts.Length != 3)
            {
                return Task.FromResult(CreateError("ERR_JWS_001", "Invalid JWS format: expected 3 parts"));
            }

            var header = ParseHeader(parts[0]);
            if (header == null)
            {
                return Task.FromResult(CreateError("ERR_JWS_002", "Invalid JWS header"));
            }

            var signer = BuildSigner(header, parts[0]);
            var warnings = new List<SignatureVerificationWarning>
            {
                new("WARN_JWS_001", "Cryptographic verification not performed; structure validated only")
            };
            return Task.FromResult(new SignatureVerificationResult(
                IsValid: true,
                Status: SignatureVerificationStatus.Valid,
                Signer: signer,
                CertificateChain: null,
                Timestamp: null,
                Errors: [],
                Warnings: warnings));
        }
        catch (Exception ex)
        {
            return Task.FromResult(CreateError("ERR_JWS_999", $"JWS parsing error: {ex.Message}"));
        }
    }

    /// <summary>
    /// Extracts signer metadata from a compact JWS without verifying it.
    /// Unlike <see cref="VerifyAsync"/>, a null (JSON "null") header is
    /// tolerated here and falls back to placeholder signer fields.
    /// </summary>
    public Task<SignatureExtractionResult> ExtractInfoAsync(
        byte[] signedData,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var parts = Encoding.UTF8.GetString(signedData).Split('.');
            if (parts.Length != 3)
            {
                return Task.FromResult(new SignatureExtractionResult(
                    Success: false,
                    DetectedFormat: SignatureFormat.Jws,
                    Signer: null,
                    Certificates: null,
                    ErrorMessage: "Invalid JWS format"));
            }

            var header = ParseHeader(parts[0]);
            var signer = BuildSigner(header, parts[0]);
            return Task.FromResult(new SignatureExtractionResult(
                Success: true,
                DetectedFormat: SignatureFormat.Jws,
                Signer: signer,
                Certificates: null,
                ErrorMessage: null));
        }
        catch (Exception ex)
        {
            return Task.FromResult(new SignatureExtractionResult(
                Success: false,
                DetectedFormat: SignatureFormat.Jws,
                Signer: null,
                Certificates: null,
                ErrorMessage: ex.Message));
        }
    }

    // Decodes and deserializes the base64url-encoded JOSE header.
    // Throws on invalid base64url or malformed JSON (callers catch and map).
    private static JwsHeader? ParseHeader(string headerSegment)
    {
        var headerJson = Base64UrlDecode(headerSegment);
        return JsonSerializer.Deserialize<JwsHeader>(headerJson, SerializerOptions);
    }

    // Builds signer info from the header, falling back to "unknown" / a hash
    // of the raw header segment when "kid" is absent.
    private static SignerInfo BuildSigner(JwsHeader? header, string headerSegment)
    {
        return new SignerInfo(
            IssuerId: header?.Kid ?? "unknown",
            Name: null,
            Email: null,
            Organization: null,
            KeyFingerprint: header?.Kid ?? ComputeFingerprint(headerSegment),
            Algorithm: header?.Alg ?? "unknown",
            SignedAt: null);
    }

    // Converts base64url to standard base64 (RFC 7515 appendix C) and decodes
    // to a UTF-8 string.
    private static string Base64UrlDecode(string input)
    {
        var output = input.Replace('-', '+').Replace('_', '/');
        switch (output.Length % 4)
        {
            case 2: output += "=="; break;
            case 3: output += "="; break;
            // A remainder of 1 is never valid; Convert.FromBase64String throws.
        }
        var bytes = Convert.FromBase64String(output);
        return Encoding.UTF8.GetString(bytes);
    }

    // NOTE(review): this hashes the base64url TEXT of the header, not decoded
    // bytes — stable as a placeholder id, but not a standard JWK thumbprint.
    private static string ComputeFingerprint(string headerBase64)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(headerBase64));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Shared shape for all structural-failure results.
    private static SignatureVerificationResult CreateError(string code, string message)
    {
        return new SignatureVerificationResult(
            IsValid: false,
            Status: SignatureVerificationStatus.InvalidSignature,
            Signer: null,
            CertificateChain: null,
            Timestamp: null,
            Errors: [new SignatureVerificationError(code, message, null)],
            Warnings: []);
    }

    // Minimal JOSE header shape: algorithm, key id, type.
    private sealed class JwsHeader
    {
        public string? Alg { get; set; }
        public string? Kid { get; set; }
        public string? Typ { get; set; }
    }
}