sprints work

This commit is contained in:
master
2026-01-11 11:19:40 +02:00
parent f6ef1ef337
commit 582a41d7a9
72 changed files with 2680 additions and 390 deletions

View File

@@ -31,15 +31,18 @@ public sealed class DeltaAttestationService : IDeltaAttestationService
private readonly IAttestationSigningService _signingService;
private readonly ILogger<DeltaAttestationService> _logger;
private readonly DeltaAttestationOptions _options;
private readonly TimeProvider _timeProvider;
public DeltaAttestationService(
IAttestationSigningService signingService,
IOptions<DeltaAttestationOptions> options,
ILogger<DeltaAttestationService> logger)
ILogger<DeltaAttestationService> logger,
TimeProvider? timeProvider = null)
{
_signingService = signingService ?? throw new ArgumentNullException(nameof(signingService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new DeltaAttestationOptions();
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -196,7 +199,7 @@ public sealed class DeltaAttestationService : IDeltaAttestationService
EnvelopeBase64 = envelopeBase64,
TransparencyLogIndex = logIndex,
PredicateType = predicateType,
CreatedAt = DateTimeOffset.UtcNow
CreatedAt = _timeProvider.GetUtcNow()
};
}
catch (Exception ex)
@@ -209,7 +212,7 @@ public sealed class DeltaAttestationService : IDeltaAttestationService
Success = false,
Error = ex.Message,
PredicateType = predicateType,
CreatedAt = DateTimeOffset.UtcNow
CreatedAt = _timeProvider.GetUtcNow()
};
}
}

View File

@@ -172,10 +172,12 @@ public interface ITimeSkewValidator
public sealed class TimeSkewValidator : ITimeSkewValidator
{
private readonly TimeSkewOptions _options;
private readonly TimeProvider _timeProvider;
public TimeSkewValidator(TimeSkewOptions options)
public TimeSkewValidator(TimeSkewOptions options, TimeProvider? timeProvider = null)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -191,7 +193,7 @@ public sealed class TimeSkewValidator : ITimeSkewValidator
return TimeSkewValidationResult.Skipped("No integrated time available");
}
var now = localTime ?? DateTimeOffset.UtcNow;
var now = localTime ?? _timeProvider.GetUtcNow();
var skew = (now - integratedTime.Value).TotalSeconds;
// Future timestamp (integrated time is ahead of local time)

View File

@@ -0,0 +1,775 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
using System.Collections.Immutable;
using Microsoft.AspNetCore.Mvc;
using StellaOps.BinaryIndex.GoldenSet;
namespace StellaOps.BinaryIndex.WebService.Controllers;
/// <summary>
/// API endpoints for golden set curation and management.
/// </summary>
/// <remarks>
/// Provides CRUD operations for golden set definitions, review workflow,
/// and audit log access. Used by experts to author and maintain ground-truth
/// vulnerability signatures.
/// </remarks>
[ApiController]
[Route("api/v1/golden-sets")]
[Produces("application/json")]
public sealed class GoldenSetController : ControllerBase
{
    private readonly IGoldenSetStore _store;
    private readonly IGoldenSetValidator _validator;
    private readonly ILogger<GoldenSetController> _logger;
    private readonly TimeProvider _timeProvider;

    public GoldenSetController(
        IGoldenSetStore store,
        IGoldenSetValidator validator,
        ILogger<GoldenSetController> logger,
        TimeProvider? timeProvider = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _validator = validator ?? throw new ArgumentNullException(nameof(validator));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        // Injected clock (defaults to the system clock) keeps CreatedAt timestamps
        // testable, matching the TimeProvider pattern used by sibling services.
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// List golden sets with optional filtering.
    /// </summary>
    /// <remarks>
    /// Returns paginated list of golden set summaries matching the specified filters.
    ///
    /// Sample request:
    ///
    ///     GET /api/v1/golden-sets?component=openssl&amp;status=Approved&amp;limit=20
    ///
    /// Sample response:
    ///
    ///     {
    ///       "items": [
    ///         {
    ///           "id": "CVE-2024-0727",
    ///           "component": "openssl",
    ///           "status": "Approved",
    ///           "targetCount": 3,
    ///           "createdAt": "2024-01-15T10:30:00Z",
    ///           "reviewedAt": "2024-01-16T14:00:00Z",
    ///           "contentDigest": "sha256:abc123...",
    ///           "tags": ["memory-corruption", "heap-overflow"]
    ///         }
    ///       ],
    ///       "totalCount": 42,
    ///       "offset": 0,
    ///       "limit": 20
    ///     }
    /// </remarks>
    /// <param name="component">Optional component name filter.</param>
    /// <param name="status">Optional status filter (Draft, InReview, Approved, Deprecated, Archived).</param>
    /// <param name="tags">Optional tags filter (comma-separated).</param>
    /// <param name="limit">Maximum results to return (1-500, default 100).</param>
    /// <param name="offset">Pagination offset (default 0).</param>
    /// <param name="orderBy">Sort order (IdAsc, IdDesc, CreatedAtAsc, CreatedAtDesc, ComponentAsc, ComponentDesc).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Paginated list of golden set summaries.</returns>
    /// <response code="200">Returns the list of golden sets.</response>
    /// <response code="400">Invalid parameters.</response>
    [HttpGet]
    [ProducesResponseType<GoldenSetListResponse>(StatusCodes.Status200OK)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status400BadRequest)]
    public async Task<ActionResult<GoldenSetListResponse>> ListAsync(
        [FromQuery] string? component = null,
        [FromQuery] string? status = null,
        [FromQuery] string? tags = null,
        [FromQuery] int limit = 100,
        [FromQuery] int offset = 0,
        [FromQuery] string? orderBy = null,
        CancellationToken ct = default)
    {
        if (limit < 1 || limit > 500)
        {
            return BadRequest(CreateProblem(
                "Limit must be between 1 and 500.",
                "InvalidLimit",
                StatusCodes.Status400BadRequest));
        }

        if (offset < 0)
        {
            return BadRequest(CreateProblem(
                "Offset must be non-negative.",
                "InvalidOffset",
                StatusCodes.Status400BadRequest));
        }

        GoldenSetStatus? statusFilter = null;
        if (!string.IsNullOrWhiteSpace(status))
        {
            if (!Enum.TryParse<GoldenSetStatus>(status, true, out var parsedStatus))
            {
                return BadRequest(CreateProblem(
                    "Invalid status. Must be one of: Draft, InReview, Approved, Deprecated, Archived.",
                    "InvalidStatus",
                    StatusCodes.Status400BadRequest));
            }

            statusFilter = parsedStatus;
        }

        GoldenSetOrderBy orderByValue = GoldenSetOrderBy.CreatedAtDesc;
        if (!string.IsNullOrWhiteSpace(orderBy))
        {
            if (!Enum.TryParse<GoldenSetOrderBy>(orderBy, true, out var parsedOrderBy))
            {
                return BadRequest(CreateProblem(
                    "Invalid orderBy. Must be one of: IdAsc, IdDesc, CreatedAtAsc, CreatedAtDesc, ComponentAsc, ComponentDesc.",
                    "InvalidOrderBy",
                    StatusCodes.Status400BadRequest));
            }

            orderByValue = parsedOrderBy;
        }

        ImmutableArray<string>? tagsFilter = null;
        if (!string.IsNullOrWhiteSpace(tags))
        {
            tagsFilter = tags.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
                .ToImmutableArray();
        }

        _logger.LogInformation(
            "ListGoldenSets: component={Component}, status={Status}, tags={Tags}, limit={Limit}, offset={Offset}",
            component, status, tags, limit, offset);

        try
        {
            var query = new GoldenSetListQuery
            {
                ComponentFilter = component,
                StatusFilter = statusFilter,
                TagsFilter = tagsFilter,
                Limit = limit,
                Offset = offset,
                OrderBy = orderByValue
            };

            var items = await _store.ListAsync(query, ct);

            return Ok(new GoldenSetListResponse
            {
                Items = items,
                // TODO: this is the page size, not the total match count; correct
                // pagination requires the store to return the total alongside the page.
                TotalCount = items.Length,
                Offset = offset,
                Limit = limit
            });
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            // Client aborted the request; let the framework handle it instead of returning 500.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to list golden sets");
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "ListError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Get a golden set by ID.
    /// </summary>
    /// <remarks>
    /// Returns the full golden set definition with current status.
    /// </remarks>
    /// <param name="id">Golden set ID (CVE/GHSA ID).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The golden set with status.</returns>
    /// <response code="200">Returns the golden set.</response>
    /// <response code="404">Golden set not found.</response>
    [HttpGet("{id}")]
    // MVC strips the "Async" suffix from method-derived action names by default, which
    // would make CreatedAtAction(nameof(GetByIdAsync), ...) in CreateAsync throw
    // "No route matches the supplied values" at runtime. Pinning the action name
    // explicitly keeps that link working regardless of SuppressAsyncSuffixInActionNames.
    [ActionName(nameof(GetByIdAsync))]
    [ProducesResponseType<GoldenSetResponse>(StatusCodes.Status200OK)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status404NotFound)]
    public async Task<ActionResult<GoldenSetResponse>> GetByIdAsync(
        [FromRoute] string id,
        CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(id))
        {
            return BadRequest(CreateProblem(
                "Golden set ID is required.",
                "MissingId",
                StatusCodes.Status400BadRequest));
        }

        _logger.LogInformation("GetGoldenSet: id={Id}", id);

        try
        {
            // NOTE(review): this class mixes _store.GetAsync (here, UpdateStatusAsync)
            // and _store.GetByIdAsync (CreateAsync, GetAuditLogAsync) for lookups —
            // confirm both members exist on IGoldenSetStore and are equivalent.
            var stored = await _store.GetAsync(id, ct);
            if (stored is null)
            {
                return NotFound(CreateProblem(
                    $"Golden set '{id}' not found.",
                    "NotFound",
                    StatusCodes.Status404NotFound));
            }

            return Ok(new GoldenSetResponse
            {
                Definition = stored.Definition,
                Status = stored.Status,
                CreatedAt = stored.CreatedAt,
                UpdatedAt = stored.UpdatedAt
            });
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to get golden set {Id}", id);
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "GetError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Create a new golden set.
    /// </summary>
    /// <remarks>
    /// Creates a new golden set definition in Draft status.
    /// The definition is validated before storage.
    ///
    /// Sample request:
    ///
    ///     POST /api/v1/golden-sets
    ///     {
    ///       "id": "CVE-2024-0727",
    ///       "component": "openssl",
    ///       "targets": [
    ///         {
    ///           "functionName": "PKCS7_verify",
    ///           "sinks": ["memcpy"],
    ///           "edges": [{"from": "bb3", "to": "bb7"}],
    ///           "taintInvariant": "attacker-controlled input reaches memcpy without bounds check"
    ///         }
    ///       ],
    ///       "metadata": {
    ///         "authorId": "user@example.com",
    ///         "sourceRef": "https://nvd.nist.gov/vuln/detail/CVE-2024-0727",
    ///         "tags": ["memory-corruption"]
    ///       }
    ///     }
    /// </remarks>
    /// <param name="request">Golden set creation request.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Created golden set with content digest.</returns>
    /// <response code="201">Golden set created successfully.</response>
    /// <response code="400">Validation failed.</response>
    /// <response code="409">Golden set with this ID already exists.</response>
    [HttpPost]
    [ProducesResponseType<GoldenSetCreateResponse>(StatusCodes.Status201Created)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status400BadRequest)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status409Conflict)]
    public async Task<ActionResult<GoldenSetCreateResponse>> CreateAsync(
        [FromBody] GoldenSetCreateRequest request,
        CancellationToken ct = default)
    {
        if (request is null)
        {
            return BadRequest(CreateProblem(
                "Request body is required.",
                "MissingBody",
                StatusCodes.Status400BadRequest));
        }

        _logger.LogInformation("CreateGoldenSet: id={Id}, component={Component}", request.Id, request.Component);

        try
        {
            // Check if already exists. NOTE: check-then-store is not atomic; a concurrent
            // create with the same ID can race past this guard — the store should also
            // enforce uniqueness.
            var existing = await _store.GetByIdAsync(request.Id, ct);
            if (existing is not null)
            {
                return Conflict(CreateProblem(
                    $"Golden set '{request.Id}' already exists.",
                    "AlreadyExists",
                    StatusCodes.Status409Conflict));
            }

            // Map the transport DTOs onto the domain definition.
            var definition = new GoldenSetDefinition
            {
                Id = request.Id,
                Component = request.Component,
                Targets = request.Targets.Select(t => new VulnerableTarget
                {
                    FunctionName = t.FunctionName,
                    Sinks = t.Sinks?.ToImmutableArray() ?? [],
                    Edges = t.Edges?.Select(e => new BasicBlockEdge { From = e.From, To = e.To }).ToImmutableArray() ?? [],
                    Constants = t.Constants?.ToImmutableArray() ?? [],
                    TaintInvariant = t.TaintInvariant,
                    SourceFile = t.SourceFile,
                    SourceLine = t.SourceLine
                }).ToImmutableArray(),
                Witness = request.Witness is not null ? new WitnessInput
                {
                    Arguments = request.Witness.Arguments?.ToImmutableArray() ?? [],
                    Invariant = request.Witness.Invariant,
                    PocFileRef = request.Witness.PocFileRef
                } : null,
                Metadata = new GoldenSetMetadata
                {
                    AuthorId = request.Metadata.AuthorId,
                    // Use the injected clock for determinism in tests (was DateTimeOffset.UtcNow).
                    CreatedAt = _timeProvider.GetUtcNow(),
                    SourceRef = request.Metadata.SourceRef,
                    Tags = request.Metadata.Tags?.ToImmutableArray() ?? []
                }
            };

            // Validate before persisting; reject with the validator's error list.
            var validationResult = _validator.Validate(definition);
            if (!validationResult.IsValid)
            {
                return BadRequest(CreateProblem(
                    $"Validation failed: {string.Join("; ", validationResult.Errors)}",
                    "ValidationFailed",
                    StatusCodes.Status400BadRequest));
            }

            // Store in Draft status; the review workflow promotes it from there.
            var result = await _store.StoreAsync(definition, GoldenSetStatus.Draft, ct);
            if (!result.Success)
            {
                return BadRequest(CreateProblem(
                    result.Error ?? "Failed to store golden set.",
                    "StoreError",
                    StatusCodes.Status400BadRequest));
            }

            var response = new GoldenSetCreateResponse
            {
                Id = definition.Id,
                ContentDigest = result.ContentDigest,
                Status = GoldenSetStatus.Draft
            };

            // Resolves because GetByIdAsync carries [ActionName(nameof(GetByIdAsync))].
            return CreatedAtAction(nameof(GetByIdAsync), new { id = definition.Id }, response);
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to create golden set {Id}", request.Id);
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "CreateError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Update golden set status (workflow transition).
    /// </summary>
    /// <remarks>
    /// Transitions a golden set through the review workflow:
    /// Draft -> InReview -> Approved -> Deprecated/Archived
    ///
    /// Sample request:
    ///
    ///     PATCH /api/v1/golden-sets/CVE-2024-0727/status
    ///     {
    ///       "status": "InReview",
    ///       "actorId": "reviewer@example.com",
    ///       "comment": "Submitting for expert review"
    ///     }
    /// </remarks>
    /// <param name="id">Golden set ID.</param>
    /// <param name="request">Status update request.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Updated status confirmation.</returns>
    /// <response code="200">Status updated successfully.</response>
    /// <response code="400">Invalid status transition.</response>
    /// <response code="404">Golden set not found.</response>
    [HttpPatch("{id}/status")]
    [ProducesResponseType<GoldenSetStatusResponse>(StatusCodes.Status200OK)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status400BadRequest)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status404NotFound)]
    public async Task<ActionResult<GoldenSetStatusResponse>> UpdateStatusAsync(
        [FromRoute] string id,
        [FromBody] GoldenSetStatusRequest request,
        CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(id))
        {
            return BadRequest(CreateProblem(
                "Golden set ID is required.",
                "MissingId",
                StatusCodes.Status400BadRequest));
        }

        if (request is null)
        {
            return BadRequest(CreateProblem(
                "Request body is required.",
                "MissingBody",
                StatusCodes.Status400BadRequest));
        }

        _logger.LogInformation(
            "UpdateGoldenSetStatus: id={Id}, status={Status}, actor={Actor}",
            id, request.Status, request.ActorId);

        try
        {
            var existing = await _store.GetAsync(id, ct);
            if (existing is null)
            {
                return NotFound(CreateProblem(
                    $"Golden set '{id}' not found.",
                    "NotFound",
                    StatusCodes.Status404NotFound));
            }

            // Enforce the workflow state machine before touching the store.
            if (!IsValidTransition(existing.Status, request.Status))
            {
                return BadRequest(CreateProblem(
                    $"Invalid status transition from {existing.Status} to {request.Status}.",
                    "InvalidTransition",
                    StatusCodes.Status400BadRequest));
            }

            var result = await _store.UpdateStatusAsync(
                id,
                request.Status,
                request.ActorId,
                request.Comment ?? string.Empty,
                ct);

            return Ok(new GoldenSetStatusResponse
            {
                Id = id,
                PreviousStatus = existing.Status,
                CurrentStatus = request.Status,
                ContentDigest = result.ContentDigest
            });
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to update status for {Id}", id);
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "UpdateStatusError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Get audit log for a golden set.
    /// </summary>
    /// <remarks>
    /// Returns the full audit history of status changes and modifications.
    /// </remarks>
    /// <param name="id">Golden set ID.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Audit log entries.</returns>
    /// <response code="200">Returns the audit log.</response>
    /// <response code="404">Golden set not found.</response>
    [HttpGet("{id}/audit")]
    [ProducesResponseType<GoldenSetAuditResponse>(StatusCodes.Status200OK)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status404NotFound)]
    public async Task<ActionResult<GoldenSetAuditResponse>> GetAuditLogAsync(
        [FromRoute] string id,
        CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(id))
        {
            return BadRequest(CreateProblem(
                "Golden set ID is required.",
                "MissingId",
                StatusCodes.Status400BadRequest));
        }

        _logger.LogInformation("GetGoldenSetAudit: id={Id}", id);

        try
        {
            var existing = await _store.GetByIdAsync(id, ct);
            if (existing is null)
            {
                return NotFound(CreateProblem(
                    $"Golden set '{id}' not found.",
                    "NotFound",
                    StatusCodes.Status404NotFound));
            }

            var entries = await _store.GetAuditLogAsync(id, ct);

            return Ok(new GoldenSetAuditResponse
            {
                Id = id,
                Entries = entries
            });
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to get audit log for {Id}", id);
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "AuditLogError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Delete (archive) a golden set.
    /// </summary>
    /// <remarks>
    /// Soft deletes a golden set by moving it to Archived status.
    /// NOTE(review): this delegates to <c>_store.DeleteAsync</c>; confirm the store
    /// implements archival rather than a hard delete.
    /// </remarks>
    /// <param name="id">Golden set ID.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>No content on success.</returns>
    /// <response code="204">Golden set archived successfully.</response>
    /// <response code="404">Golden set not found.</response>
    [HttpDelete("{id}")]
    [ProducesResponseType(StatusCodes.Status204NoContent)]
    [ProducesResponseType<ProblemDetails>(StatusCodes.Status404NotFound)]
    public async Task<IActionResult> DeleteAsync(
        [FromRoute] string id,
        CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(id))
        {
            return BadRequest(CreateProblem(
                "Golden set ID is required.",
                "MissingId",
                StatusCodes.Status400BadRequest));
        }

        _logger.LogInformation("DeleteGoldenSet: id={Id}", id);

        try
        {
            var deleted = await _store.DeleteAsync(id, ct);
            if (!deleted)
            {
                return NotFound(CreateProblem(
                    $"Golden set '{id}' not found.",
                    "NotFound",
                    StatusCodes.Status404NotFound));
            }

            return NoContent();
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to delete golden set {Id}", id);
            return StatusCode(StatusCodes.Status500InternalServerError,
                CreateProblem("Internal server error.", "DeleteError", StatusCodes.Status500InternalServerError));
        }
    }

    /// <summary>
    /// Workflow state machine: returns true when <paramref name="from"/> may
    /// legally transition to <paramref name="to"/>. Archived is terminal.
    /// </summary>
    private static bool IsValidTransition(GoldenSetStatus from, GoldenSetStatus to)
    {
        return (from, to) switch
        {
            (GoldenSetStatus.Draft, GoldenSetStatus.InReview) => true,
            (GoldenSetStatus.InReview, GoldenSetStatus.Approved) => true,
            (GoldenSetStatus.InReview, GoldenSetStatus.Draft) => true, // Reject back to draft
            (GoldenSetStatus.Approved, GoldenSetStatus.Deprecated) => true,
            (GoldenSetStatus.Approved, GoldenSetStatus.Archived) => true,
            (GoldenSetStatus.Deprecated, GoldenSetStatus.Archived) => true,
            (GoldenSetStatus.Draft, GoldenSetStatus.Archived) => true, // Can archive drafts
            _ => false
        };
    }

    /// <summary>
    /// Builds an RFC 7807 problem payload with a stable error-type URI suffix.
    /// </summary>
    private static ProblemDetails CreateProblem(string detail, string type, int statusCode)
    {
        return new ProblemDetails
        {
            Title = "Golden Set Error",
            Detail = detail,
            Type = $"https://stellaops.dev/errors/{type}",
            Status = statusCode
        };
    }
}
#region DTOs
/// <summary>
/// Response for listing golden sets. One page of results plus pagination echo.
/// </summary>
public sealed record GoldenSetListResponse
{
    /// <summary>List of golden set summaries for the requested page.</summary>
    public required ImmutableArray<GoldenSetSummary> Items { get; init; }

    /// <summary>Total count (for pagination). NOTE(review): the controller currently
    /// populates this with the page length rather than the full match count — verify
    /// once the store returns a true total.</summary>
    public required int TotalCount { get; init; }

    /// <summary>Offset that produced this page (echoed from the request).</summary>
    public required int Offset { get; init; }

    /// <summary>Limit that produced this page (controller enforces 1-500).</summary>
    public required int Limit { get; init; }
}
/// <summary>
/// Response for getting a single golden set: the full definition plus
/// workflow status and storage timestamps.
/// </summary>
public sealed record GoldenSetResponse
{
    /// <summary>The golden set definition.</summary>
    public required GoldenSetDefinition Definition { get; init; }

    /// <summary>Current workflow status (Draft/InReview/Approved/Deprecated/Archived).</summary>
    public required GoldenSetStatus Status { get; init; }

    /// <summary>Creation timestamp (from the store record).</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Last update timestamp (from the store record).</summary>
    public required DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>
/// Request to create a golden set. The controller maps this transport DTO onto
/// <see cref="GoldenSetDefinition"/> and stores it in Draft status.
/// </summary>
public sealed record GoldenSetCreateRequest
{
    /// <summary>Golden set ID (CVE/GHSA ID); must be unique, duplicates yield 409.</summary>
    public required string Id { get; init; }

    /// <summary>Component name (e.g. "openssl").</summary>
    public required string Component { get; init; }

    /// <summary>Vulnerable targets; mapped element-wise to <c>VulnerableTarget</c>.</summary>
    public required IReadOnlyList<VulnerableTargetDto> Targets { get; init; }

    /// <summary>Optional witness input (PoC/reproduction material).</summary>
    public WitnessInputDto? Witness { get; init; }

    /// <summary>Metadata (author, source reference, tags).</summary>
    public required GoldenSetMetadataDto Metadata { get; init; }
}
/// <summary>
/// Vulnerable target DTO for API. All optional collections are normalized to
/// empty arrays during mapping, so null and [] are equivalent on the wire.
/// </summary>
public sealed record VulnerableTargetDto
{
    /// <summary>Function name containing the vulnerable code path.</summary>
    public required string FunctionName { get; init; }

    /// <summary>Sink functions (e.g. "memcpy").</summary>
    public IReadOnlyList<string>? Sinks { get; init; }

    /// <summary>Basic block edges on the vulnerable path.</summary>
    public IReadOnlyList<BasicBlockEdgeDto>? Edges { get; init; }

    /// <summary>Constants/magic values characteristic of the target.</summary>
    public IReadOnlyList<string>? Constants { get; init; }

    /// <summary>Taint invariant description (free text).</summary>
    public string? TaintInvariant { get; init; }

    /// <summary>Source file hint (optional, advisory only).</summary>
    public string? SourceFile { get; init; }

    /// <summary>Source line hint (optional, advisory only).</summary>
    public int? SourceLine { get; init; }
}
/// <summary>
/// Basic block edge DTO: a directed edge in a function's control-flow graph,
/// identified by opaque block labels (e.g. "bb3" -> "bb7").
/// </summary>
public sealed record BasicBlockEdgeDto
{
    /// <summary>Source block label.</summary>
    public required string From { get; init; }

    /// <summary>Target block label.</summary>
    public required string To { get; init; }
}
/// <summary>
/// Witness input DTO: optional reproduction material attached to a golden set.
/// </summary>
public sealed record WitnessInputDto
{
    /// <summary>Command-line arguments for reproduction (null treated as empty).</summary>
    public IReadOnlyList<string>? Arguments { get; init; }

    /// <summary>Invariant/precondition that must hold for the witness to trigger.</summary>
    public string? Invariant { get; init; }

    /// <summary>PoC file reference — presumably a path or URI; confirm against the store's expectations.</summary>
    public string? PocFileRef { get; init; }
}
/// <summary>
/// Metadata DTO. The creation timestamp is NOT accepted from the client — the
/// controller stamps it server-side when mapping to <c>GoldenSetMetadata</c>.
/// </summary>
public sealed record GoldenSetMetadataDto
{
    /// <summary>Author ID (e.g. "user@example.com" per the API samples).</summary>
    public required string AuthorId { get; init; }

    /// <summary>Source reference URL (e.g. an NVD advisory link).</summary>
    public required string SourceRef { get; init; }

    /// <summary>Classification tags (null treated as empty).</summary>
    public IReadOnlyList<string>? Tags { get; init; }
}
/// <summary>
/// Response after creating a golden set. Status is always Draft on creation.
/// </summary>
public sealed record GoldenSetCreateResponse
{
    /// <summary>Golden set ID (echoes the request).</summary>
    public required string Id { get; init; }

    /// <summary>Content digest assigned by the store on write.</summary>
    public required string ContentDigest { get; init; }

    /// <summary>Initial status (Draft).</summary>
    public required GoldenSetStatus Status { get; init; }
}
/// <summary>
/// Request to update golden set status. The transition is checked against the
/// controller's workflow state machine before the store is updated.
/// </summary>
public sealed record GoldenSetStatusRequest
{
    /// <summary>New status to transition to.</summary>
    public required GoldenSetStatus Status { get; init; }

    /// <summary>Actor performing the change (recorded in the audit log).</summary>
    public required string ActorId { get; init; }

    /// <summary>Comment explaining the change; empty string is stored when omitted.</summary>
    public string? Comment { get; init; }
}
/// <summary>
/// Response after status update, confirming the performed transition.
/// </summary>
public sealed record GoldenSetStatusResponse
{
    /// <summary>Golden set ID.</summary>
    public required string Id { get; init; }

    /// <summary>Status before the transition.</summary>
    public required GoldenSetStatus PreviousStatus { get; init; }

    /// <summary>New current status after the transition.</summary>
    public required GoldenSetStatus CurrentStatus { get; init; }

    /// <summary>Content digest returned by the store after the update.</summary>
    public required string ContentDigest { get; init; }
}
/// <summary>
/// Response with audit log: the full history of status changes and
/// modifications for one golden set.
/// </summary>
public sealed record GoldenSetAuditResponse
{
    /// <summary>Golden set ID.</summary>
    public required string Id { get; init; }

    /// <summary>Audit log entries, as returned by the store.</summary>
    public required ImmutableArray<GoldenSetAuditEntry> Entries { get; init; }
}
#endregion

View File

@@ -21,6 +21,7 @@
<ProjectReference Include="../__Libraries/StellaOps.BinaryIndex.Contracts/StellaOps.BinaryIndex.Contracts.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.BinaryIndex.Persistence/StellaOps.BinaryIndex.Persistence.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.BinaryIndex.VexBridge/StellaOps.BinaryIndex.VexBridge.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.BinaryIndex.GoldenSet/StellaOps.BinaryIndex.GoldenSet.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,218 @@
// Licensed under AGPL-3.0-or-later. Copyright (C) 2026 StellaOps Contributors.
// Sprint: SPRINT_20260110_012_004_BINDEX
// Task: GSD-007 - IDiffResultStore Interface
using System.Collections.Immutable;
namespace StellaOps.BinaryIndex.Diff;
/// <summary>
/// Storage interface for patch diff results.
/// Provides persistence and caching for verification results.
/// Implementations should be safe for concurrent use — TODO confirm; the
/// in-memory implementation uses concurrent dictionaries.
/// </summary>
public interface IDiffResultStore
{
    /// <summary>
    /// Stores a patch diff result.
    /// </summary>
    /// <param name="result">The diff result to store.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Unique ID of the stored result (generated by the store).</returns>
    Task<Guid> StoreAsync(PatchDiffResult result, CancellationToken ct = default);

    /// <summary>
    /// Retrieves a diff result by ID.
    /// </summary>
    /// <param name="id">The result ID.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The diff result, or null if not found.</returns>
    Task<PatchDiffResult?> GetByIdAsync(Guid id, CancellationToken ct = default);

    /// <summary>
    /// Finds diff results for a specific binary pair.
    /// Digest comparison is case-insensitive in the in-memory implementation —
    /// other implementations should match.
    /// </summary>
    /// <param name="preBinaryDigest">Pre-patch binary digest.</param>
    /// <param name="postBinaryDigest">Post-patch binary digest.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of matching diff results.</returns>
    Task<ImmutableArray<StoredDiffResult>> FindByBinariesAsync(
        string preBinaryDigest,
        string postBinaryDigest,
        CancellationToken ct = default);

    /// <summary>
    /// Finds diff results for a specific golden set.
    /// </summary>
    /// <param name="goldenSetId">Golden set ID.</param>
    /// <param name="limit">Maximum results to return.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of diff results for the golden set.</returns>
    Task<ImmutableArray<StoredDiffResult>> FindByGoldenSetAsync(
        string goldenSetId,
        int limit = 100,
        CancellationToken ct = default);

    /// <summary>
    /// Gets a cached single binary check result.
    /// </summary>
    /// <param name="binaryDigest">Binary digest.</param>
    /// <param name="vulnerabilityId">Vulnerability ID (golden set ID).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Cached result, or null if not cached.</returns>
    Task<SingleBinaryCheckResult?> GetCachedCheckAsync(
        string binaryDigest,
        string vulnerabilityId,
        CancellationToken ct = default);

    /// <summary>
    /// Caches a single binary check result.
    /// </summary>
    /// <param name="binaryDigest">Binary digest.</param>
    /// <param name="vulnerabilityId">Vulnerability ID (golden set ID).</param>
    /// <param name="result">Check result to cache.</param>
    /// <param name="ttl">Time-to-live for the cache entry; null presumably means an
    /// implementation-chosen default — confirm per implementation.</param>
    /// <param name="ct">Cancellation token.</param>
    Task CacheCheckAsync(
        string binaryDigest,
        string vulnerabilityId,
        SingleBinaryCheckResult result,
        TimeSpan? ttl = null,
        CancellationToken ct = default);

    /// <summary>
    /// Queries stored diff results.
    /// </summary>
    /// <param name="query">Query parameters.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Matching diff results with pagination metadata.</returns>
    Task<DiffResultQueryResponse> QueryAsync(
        DiffResultQuery query,
        CancellationToken ct = default);

    /// <summary>
    /// Gets statistics about stored diff results.
    /// </summary>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Storage statistics.</returns>
    Task<DiffResultStoreStats> GetStatsAsync(CancellationToken ct = default);
}
/// <summary>
/// Stored diff result with metadata: the wrapped <see cref="PatchDiffResult"/>
/// plus its store-assigned identity and write timestamp.
/// </summary>
public sealed record StoredDiffResult
{
    /// <summary>Unique ID assigned by the store at write time.</summary>
    public required Guid Id { get; init; }

    /// <summary>The diff result.</summary>
    public required PatchDiffResult Result { get; init; }

    /// <summary>When the result was stored (store clock, UTC).</summary>
    public required DateTimeOffset StoredAt { get; init; }
}
/// <summary>
/// Query parameters for searching diff results. All filters are optional;
/// presumably absent filters match everything and present filters combine
/// with AND — confirm against the store implementation.
/// </summary>
public sealed record DiffResultQuery
{
    /// <summary>Filter by golden set ID.</summary>
    public string? GoldenSetId { get; init; }

    /// <summary>Filter by verdict.</summary>
    public PatchVerdict? Verdict { get; init; }

    /// <summary>Filter by minimum confidence (inclusive — TODO confirm).</summary>
    public decimal? MinConfidence { get; init; }

    /// <summary>Filter by pre-binary digest.</summary>
    public string? PreBinaryDigest { get; init; }

    /// <summary>Filter by post-binary digest.</summary>
    public string? PostBinaryDigest { get; init; }

    /// <summary>Filter by comparison date (after).</summary>
    public DateTimeOffset? ComparedAfter { get; init; }

    /// <summary>Filter by comparison date (before).</summary>
    public DateTimeOffset? ComparedBefore { get; init; }

    /// <summary>Maximum results to return (default 100).</summary>
    public int Limit { get; init; } = 100;

    /// <summary>Pagination offset (default 0).</summary>
    public int Offset { get; init; } = 0;

    /// <summary>Order by field (default: newest comparison first).</summary>
    public DiffResultOrderBy OrderBy { get; init; } = DiffResultOrderBy.ComparedAtDesc;
}
/// <summary>
/// Response from a diff result query: one page of results plus the total
/// match count and the pagination parameters that produced the page.
/// </summary>
public sealed record DiffResultQueryResponse
{
    /// <summary>Matching results for the current page.</summary>
    public required ImmutableArray<StoredDiffResult> Results { get; init; }

    /// <summary>Total count of matching results (for pagination).</summary>
    public required int TotalCount { get; init; }

    /// <summary>Query offset (echoed from the request).</summary>
    public required int Offset { get; init; }

    /// <summary>Query limit (echoed from the request).</summary>
    public required int Limit { get; init; }
}
/// <summary>
/// Ordering options for diff result queries.
/// <see cref="ComparedAtDesc"/> is the default used by <see cref="DiffResultQuery"/>.
/// </summary>
public enum DiffResultOrderBy
{
    /// <summary>Order by comparison date ascending (oldest first).</summary>
    ComparedAtAsc,

    /// <summary>Order by comparison date descending (newest first).</summary>
    ComparedAtDesc,

    /// <summary>Order by confidence ascending.</summary>
    ConfidenceAsc,

    /// <summary>Order by confidence descending.</summary>
    ConfidenceDesc,

    /// <summary>Order by golden set ID ascending.</summary>
    GoldenSetIdAsc,

    /// <summary>Order by golden set ID descending.</summary>
    GoldenSetIdDesc
}
/// <summary>
/// Statistics about the diff result store, as reported by
/// <see cref="IDiffResultStore.GetStatsAsync"/>.
/// </summary>
public sealed record DiffResultStoreStats
{
    /// <summary>Total number of stored results.</summary>
    public required long TotalResults { get; init; }

    /// <summary>Results grouped by verdict.</summary>
    public required ImmutableDictionary<PatchVerdict, long> ResultsByVerdict { get; init; }

    /// <summary>Number of unique golden sets represented in the store.</summary>
    public required int UniqueGoldenSets { get; init; }

    /// <summary>Number of unique (pre, post) binary pairs.</summary>
    public required long UniqueBinaryPairs { get; init; }

    /// <summary>Number of cached single-binary check results.</summary>
    public required long CachedChecks { get; init; }

    /// <summary>Oldest result timestamp; null presumably when the store is empty — confirm.</summary>
    public DateTimeOffset? OldestResult { get; init; }

    /// <summary>Newest result timestamp; null presumably when the store is empty — confirm.</summary>
    public DateTimeOffset? NewestResult { get; init; }
}

View File

@@ -0,0 +1,245 @@
// Licensed under AGPL-3.0-or-later. Copyright (C) 2026 StellaOps Contributors.
// Sprint: SPRINT_20260110_012_004_BINDEX
// Task: GSD-007 - IDiffResultStore Interface - InMemory Implementation
using System.Collections.Concurrent;
using System.Collections.Immutable;
namespace StellaOps.BinaryIndex.Diff;
/// <summary>
/// In-memory implementation of <see cref="IDiffResultStore"/> for testing.
/// Results and cached check results are held in concurrent dictionaries.
/// Cached checks honour the optional TTL using the injected <see cref="TimeProvider"/>:
/// expired entries are evicted lazily on read and excluded from stats.
/// </summary>
public sealed class InMemoryDiffResultStore : IDiffResultStore
{
    private readonly ConcurrentDictionary<Guid, StoredDiffResult> _results = new();

    // Keyed by "{binaryDigest}:{vulnerabilityId}" (see GetCacheKey).
    // ExpiresAt == null means the entry never expires.
    private readonly ConcurrentDictionary<string, CacheEntry> _checkCache = new();
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the store. Pass a fake <paramref name="timeProvider"/> in tests to make
    /// TTL expiry deterministic; defaults to <see cref="TimeProvider.System"/>.
    /// </summary>
    public InMemoryDiffResultStore(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public Task<Guid> StoreAsync(PatchDiffResult result, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(result);
        ct.ThrowIfCancellationRequested();

        // NOTE(review): Guid.NewGuid() is non-deterministic; consider injecting a
        // GUID provider (as done elsewhere in this codebase) if replayability matters here.
        var id = Guid.NewGuid();
        var stored = new StoredDiffResult
        {
            Id = id,
            Result = result,
            StoredAt = _timeProvider.GetUtcNow()
        };
        _results[id] = stored;
        return Task.FromResult(id);
    }

    /// <inheritdoc />
    public Task<PatchDiffResult?> GetByIdAsync(Guid id, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        return Task.FromResult(_results.TryGetValue(id, out var stored) ? stored.Result : null);
    }

    /// <inheritdoc />
    public Task<ImmutableArray<StoredDiffResult>> FindByBinariesAsync(
        string preBinaryDigest,
        string postBinaryDigest,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(preBinaryDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(postBinaryDigest);
        ct.ThrowIfCancellationRequested();

        // Digests are compared case-insensitively; newest results first.
        var matches = _results.Values
            .Where(s => string.Equals(s.Result.PreBinaryDigest, preBinaryDigest, StringComparison.OrdinalIgnoreCase)
                && string.Equals(s.Result.PostBinaryDigest, postBinaryDigest, StringComparison.OrdinalIgnoreCase))
            .OrderByDescending(s => s.StoredAt)
            .ToImmutableArray();
        return Task.FromResult(matches);
    }

    /// <inheritdoc />
    public Task<ImmutableArray<StoredDiffResult>> FindByGoldenSetAsync(
        string goldenSetId,
        int limit = 100,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(goldenSetId);
        ct.ThrowIfCancellationRequested();

        // Newest results first, capped at the caller-supplied limit.
        var matches = _results.Values
            .Where(s => string.Equals(s.Result.GoldenSetId, goldenSetId, StringComparison.OrdinalIgnoreCase))
            .OrderByDescending(s => s.StoredAt)
            .Take(limit)
            .ToImmutableArray();
        return Task.FromResult(matches);
    }

    /// <inheritdoc />
    public Task<SingleBinaryCheckResult?> GetCachedCheckAsync(
        string binaryDigest,
        string vulnerabilityId,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(binaryDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        ct.ThrowIfCancellationRequested();

        var key = GetCacheKey(binaryDigest, vulnerabilityId);
        if (!_checkCache.TryGetValue(key, out var entry))
        {
            return Task.FromResult<SingleBinaryCheckResult?>(null);
        }

        // Lazily evict entries whose TTL has elapsed so callers never see stale checks.
        if (entry.ExpiresAt is { } expiresAt && expiresAt <= _timeProvider.GetUtcNow())
        {
            _checkCache.TryRemove(key, out _);
            return Task.FromResult<SingleBinaryCheckResult?>(null);
        }

        return Task.FromResult<SingleBinaryCheckResult?>(entry.Result);
    }

    /// <inheritdoc />
    public Task CacheCheckAsync(
        string binaryDigest,
        string vulnerabilityId,
        SingleBinaryCheckResult result,
        TimeSpan? ttl = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(binaryDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        ArgumentNullException.ThrowIfNull(result);
        ct.ThrowIfCancellationRequested();

        var key = GetCacheKey(binaryDigest, vulnerabilityId);

        // TTL is honoured via the injected TimeProvider; a null TTL means no expiry.
        // (Previously the parameter was silently ignored for the in-memory store.)
        var expiresAt = ttl.HasValue ? _timeProvider.GetUtcNow() + ttl.Value : (DateTimeOffset?)null;
        _checkCache[key] = new CacheEntry(result, expiresAt);
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<DiffResultQueryResponse> QueryAsync(
        DiffResultQuery query,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(query);
        ct.ThrowIfCancellationRequested();

        IEnumerable<StoredDiffResult> results = _results.Values;

        // Apply filters (string filters are case-insensitive ordinal).
        if (!string.IsNullOrWhiteSpace(query.GoldenSetId))
        {
            results = results.Where(r =>
                string.Equals(r.Result.GoldenSetId, query.GoldenSetId, StringComparison.OrdinalIgnoreCase));
        }
        if (query.Verdict.HasValue)
        {
            results = results.Where(r => r.Result.Verdict == query.Verdict.Value);
        }
        if (query.MinConfidence.HasValue)
        {
            results = results.Where(r => r.Result.Confidence >= query.MinConfidence.Value);
        }
        if (!string.IsNullOrWhiteSpace(query.PreBinaryDigest))
        {
            results = results.Where(r =>
                string.Equals(r.Result.PreBinaryDigest, query.PreBinaryDigest, StringComparison.OrdinalIgnoreCase));
        }
        if (!string.IsNullOrWhiteSpace(query.PostBinaryDigest))
        {
            results = results.Where(r =>
                string.Equals(r.Result.PostBinaryDigest, query.PostBinaryDigest, StringComparison.OrdinalIgnoreCase));
        }
        if (query.ComparedAfter.HasValue)
        {
            results = results.Where(r => r.Result.Metadata.ComparedAt >= query.ComparedAfter.Value);
        }
        if (query.ComparedBefore.HasValue)
        {
            results = results.Where(r => r.Result.Metadata.ComparedAt <= query.ComparedBefore.Value);
        }

        // Apply ordering; unrecognized values fall back to newest-stored-first.
        results = query.OrderBy switch
        {
            DiffResultOrderBy.ComparedAtAsc => results.OrderBy(r => r.Result.Metadata.ComparedAt),
            DiffResultOrderBy.ComparedAtDesc => results.OrderByDescending(r => r.Result.Metadata.ComparedAt),
            DiffResultOrderBy.ConfidenceAsc => results.OrderBy(r => r.Result.Confidence),
            DiffResultOrderBy.ConfidenceDesc => results.OrderByDescending(r => r.Result.Confidence),
            DiffResultOrderBy.GoldenSetIdAsc => results.OrderBy(r => r.Result.GoldenSetId, StringComparer.OrdinalIgnoreCase),
            DiffResultOrderBy.GoldenSetIdDesc => results.OrderByDescending(r => r.Result.GoldenSetId, StringComparer.OrdinalIgnoreCase),
            _ => results.OrderByDescending(r => r.StoredAt)
        };

        // Materialize once so TotalCount and the page come from the same snapshot.
        var allResults = results.ToList();
        var totalCount = allResults.Count;
        var pagedResults = allResults
            .Skip(query.Offset)
            .Take(query.Limit)
            .ToImmutableArray();

        return Task.FromResult(new DiffResultQueryResponse
        {
            Results = pagedResults,
            TotalCount = totalCount,
            Offset = query.Offset,
            Limit = query.Limit
        });
    }

    /// <inheritdoc />
    public Task<DiffResultStoreStats> GetStatsAsync(CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();

        var results = _results.Values.ToList();
        var resultsByVerdict = results
            .GroupBy(r => r.Result.Verdict)
            .ToImmutableDictionary(g => g.Key, g => (long)g.Count());
        var uniqueGoldenSets = results
            .Select(r => r.Result.GoldenSetId)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .Count();
        var uniqueBinaryPairs = results
            .Select(r => $"{r.Result.PreBinaryDigest}:{r.Result.PostBinaryDigest}")
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .Count();
        var oldestResult = results.MinBy(r => r.StoredAt)?.StoredAt;
        var newestResult = results.MaxBy(r => r.StoredAt)?.StoredAt;

        // Count only live (non-expired) cache entries so stats match what readers can see.
        var now = _timeProvider.GetUtcNow();
        var cachedChecks = _checkCache.Values.Count(e => e.ExpiresAt is null || e.ExpiresAt > now);

        return Task.FromResult(new DiffResultStoreStats
        {
            TotalResults = results.Count,
            ResultsByVerdict = resultsByVerdict,
            UniqueGoldenSets = uniqueGoldenSets,
            UniqueBinaryPairs = uniqueBinaryPairs,
            CachedChecks = cachedChecks,
            OldestResult = oldestResult,
            NewestResult = newestResult
        });
    }

    /// <summary>
    /// Clears all stored results and cache.
    /// </summary>
    public void Clear()
    {
        _results.Clear();
        _checkCache.Clear();
    }

    private static string GetCacheKey(string binaryDigest, string vulnerabilityId)
        => $"{binaryDigest}:{vulnerabilityId}";

    /// <summary>Cache slot pairing a check result with its optional absolute expiry.</summary>
    private sealed record CacheEntry(SingleBinaryCheckResult Result, DateTimeOffset? ExpiresAt);
}

View File

@@ -0,0 +1,390 @@
// Licensed under AGPL-3.0-or-later. Copyright (C) 2026 StellaOps Contributors.
// Sprint: SPRINT_20260110_012_002_BINDEX
// Task: GSA-009 - Integration Tests for Golden Set Authoring Flow
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using Xunit;
namespace StellaOps.BinaryIndex.GoldenSet.Tests.Integration.Authoring;
/// <summary>
/// Integration tests for the golden set authoring workflow.
/// Tests the end-to-end flow from extraction to review.
/// </summary>
[Trait("Category", "Integration")]
public sealed class GoldenSetAuthoringIntegrationTests
{
    // Fixed epoch so timestamps (and anything derived from them, such as content
    // digests) are reproducible across runs. Seeding FakeTimeProvider with
    // DateTimeOffset.UtcNow would reintroduce wall-clock non-determinism.
    private static readonly DateTimeOffset TestEpoch =
        new(2026, 1, 10, 12, 0, 0, TimeSpan.Zero);

    private readonly FakeTimeProvider _timeProvider;
    private readonly GoldenSetValidator _validator;
    private readonly SinkRegistry _sinkRegistry;
    private readonly GoldenSetEnrichmentService _enrichmentService;
    private readonly GoldenSetReviewService _reviewService;

    public GoldenSetAuthoringIntegrationTests()
    {
        _timeProvider = new FakeTimeProvider(TestEpoch);
        _validator = new GoldenSetValidator(new CveValidator());
        _sinkRegistry = new SinkRegistry(
            Options.Create(new SinkRegistryOptions()),
            NullLogger<SinkRegistry>.Instance);
        _enrichmentService = new GoldenSetEnrichmentService(
            _sinkRegistry,
            NullLogger<GoldenSetEnrichmentService>.Instance);
        _reviewService = new GoldenSetReviewService(
            _validator,
            _timeProvider,
            NullLogger<GoldenSetReviewService>.Instance);
    }

    #region Full Authoring Workflow Tests

    [Fact]
    public async Task FullAuthoringWorkflow_ValidCve_CompletesSuccessfully()
    {
        // Step 1: Create initial definition
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-001",
            Component = "openssl",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "PKCS7_verify",
                    Sinks = ["memcpy"],
                    TaintInvariant = "Attacker-controlled PKCS7 data flows to unbounded memcpy"
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://nvd.nist.gov/vuln/detail/CVE-2024-TEST-001",
                Tags = ["memory-corruption"]
            }
        };

        // Step 2: Validate initial definition
        var validationResult = await _validator.ValidateAsync(definition);
        validationResult.IsValid.Should().BeTrue("initial definition should be valid");

        // Step 3: Enrich with sink context
        var enriched = await _enrichmentService.EnrichAsync(definition);
        enriched.Should().NotBeNull();

        // Step 4: Validate enriched definition
        validationResult = await _validator.ValidateAsync(enriched);
        validationResult.IsValid.Should().BeTrue("enriched definition should be valid");

        // Step 5: Submit for review
        var reviewSubmission = await _reviewService.SubmitForReviewAsync(
            definition,
            "author@test.com",
            "Initial submission for review");
        reviewSubmission.Should().NotBeNull();
        reviewSubmission.ReviewId.Should().NotBeEmpty();

        // Step 6: Approve review
        var approval = await _reviewService.ApproveAsync(
            reviewSubmission.ReviewId,
            "reviewer@test.com",
            "LGTM - verified against patch diff");
        approval.Should().NotBeNull();
        approval.Approved.Should().BeTrue();
    }

    [Fact]
    public async Task ReviewWorkflow_RejectionAndResubmit_CompletesSuccessfully()
    {
        // Create initial incomplete definition
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-002",
            Component = "glibc",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "strcpy", // Missing proper context
                    Sinks = ImmutableArray<string>.Empty // Empty sinks
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://nvd.nist.gov/vuln/detail/CVE-2024-TEST-002"
            }
        };

        // Submit for review
        var reviewSubmission = await _reviewService.SubmitForReviewAsync(
            definition,
            "author@test.com",
            "First attempt");

        // Reject with feedback
        var rejection = await _reviewService.RejectAsync(
            reviewSubmission.ReviewId,
            "reviewer@test.com",
            "Missing sink definitions. Please add vulnerable sinks.");
        rejection.Should().NotBeNull();
        rejection.Rejected.Should().BeTrue();

        // Fix the issues and resubmit
        var fixedDefinition = definition with
        {
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "strcpy",
                    Sinks = ["gets", "strcpy"],
                    TaintInvariant = "User input flows to strcpy without length check",
                    SourceFile = "glibc/string/strcpy.c"
                }
            ]
        };

        // Resubmit
        var resubmission = await _reviewService.SubmitForReviewAsync(
            fixedDefinition,
            "author@test.com",
            "Fixed: Added sinks and taint invariant");
        resubmission.ReviewId.Should().NotBe(reviewSubmission.ReviewId);

        // Now approve
        var approval = await _reviewService.ApproveAsync(
            resubmission.ReviewId,
            "reviewer@test.com",
            "Approved after fixes");
        approval.Approved.Should().BeTrue();
    }

    #endregion

    #region Enrichment Tests

    [Fact]
    public async Task EnrichAsync_WithKnownSinks_AddsContext()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-003",
            Component = "openssl",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "X509_NAME_oneline",
                    Sinks = ["memcpy"]
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://example.com"
            }
        };

        var enriched = await _enrichmentService.EnrichAsync(definition);

        enriched.Should().NotBeNull();
        // Enrichment should preserve original data
        enriched.Id.Should().Be(definition.Id);
        enriched.Targets.Should().HaveCount(1);
        enriched.Targets[0].Sinks.Should().Contain("memcpy");
    }

    [Fact]
    public async Task EnrichAsync_EmptyTargets_ReturnsOriginal()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-004",
            Component = "unknown-component",
            Targets = [],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://example.com"
            }
        };

        var enriched = await _enrichmentService.EnrichAsync(definition);

        enriched.Should().NotBeNull();
        enriched.Targets.Should().BeEmpty();
    }

    #endregion

    #region Validation Integration Tests

    [Fact]
    public async Task ValidateAsync_InvalidCveId_ReturnsErrors()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "INVALID-CVE-FORMAT",
            Component = "openssl",
            Targets = [],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://example.com"
            }
        };

        var result = await _validator.ValidateAsync(definition);

        result.IsValid.Should().BeFalse();
        result.Errors.Should().NotBeEmpty();
    }

    [Fact]
    public async Task ValidateAsync_ContentDigest_IsDeterministic()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-005",
            Component = "openssl",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "SSL_read",
                    Sinks = ["recv"]
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = new DateTimeOffset(2024, 1, 15, 10, 30, 0, TimeSpan.Zero),
                SourceRef = "https://example.com"
            }
        };

        var result1 = await _validator.ValidateAsync(definition);
        var result2 = await _validator.ValidateAsync(definition);

        result1.ContentDigest.Should().Be(result2.ContentDigest);
    }

    #endregion

    #region Sink Registry Integration Tests

    [Fact]
    public void SinkRegistry_LookupKnownSink_ReturnsContext()
    {
        var context = _sinkRegistry.GetSinkContext("memcpy");

        context.Should().NotBeNull();
        context!.Category.Should().Be("memory");
        context.CweIds.Should().Contain("CWE-120");
    }

    [Fact]
    public void SinkRegistry_LookupUnknownSink_ReturnsNull()
    {
        var context = _sinkRegistry.GetSinkContext("unknown_function_xyz");

        context.Should().BeNull();
    }

    [Fact]
    public void SinkRegistry_GetSinksByCategory_ReturnsMatching()
    {
        var memorySinks = _sinkRegistry.GetSinksByCategory("memory");

        memorySinks.Should().NotBeEmpty();
        memorySinks.Should().Contain("memcpy");
        memorySinks.Should().Contain("strcpy");
    }

    #endregion

    #region Edge Cases

    [Fact]
    public async Task FullWorkflow_WithGhsaId_CompletesSuccessfully()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "GHSA-abcd-1234-efgh",
            Component = "nodejs-package",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "parseInput",
                    Sinks = ["eval"]
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "author@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://github.com/advisories/GHSA-abcd-1234-efgh"
            }
        };

        var validationResult = await _validator.ValidateAsync(definition);

        validationResult.IsValid.Should().BeTrue();
    }

    [Fact]
    public async Task FullWorkflow_WithMultipleTargets_CompletesSuccessfully()
    {
        var definition = new GoldenSetDefinition
        {
            Id = "CVE-2024-TEST-006",
            Component = "libxml2",
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "xmlParseEntity",
                    Sinks = ["memcpy"],
                    TaintInvariant = "XML entity expansion leads to buffer overflow"
                },
                new VulnerableTarget
                {
                    FunctionName = "xmlStringGetNodeList",
                    Sinks = ["realloc"],
                    TaintInvariant = "Malformed entity reference causes realloc with wrong size"
                },
                new VulnerableTarget
                {
                    FunctionName = "xmlNodeAddContent",
                    Sinks = ["strcpy"],
                    TaintInvariant = "Entity content copied without bounds check"
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "security-team@test.com",
                CreatedAt = _timeProvider.GetUtcNow(),
                SourceRef = "https://nvd.nist.gov/vuln/detail/CVE-2024-TEST-006",
                Tags = ["xxe", "xml-entity-expansion", "memory-corruption"]
            }
        };

        var validationResult = await _validator.ValidateAsync(definition);
        validationResult.IsValid.Should().BeTrue();
        validationResult.ContentDigest.Should().NotBeNullOrEmpty();

        var enriched = await _enrichmentService.EnrichAsync(definition);
        enriched.Targets.Should().HaveCount(3);
    }

    #endregion
}

View File

@@ -0,0 +1,415 @@
// Licensed under AGPL-3.0-or-later. Copyright (C) 2026 StellaOps Contributors.
// Sprint: SPRINT_20260110_012_001_BINDEX
// Task: GSF-010 - PostgreSQL Integration Tests for Golden Set Store
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using Npgsql;
using Testcontainers.PostgreSql;
using Xunit;
namespace StellaOps.BinaryIndex.GoldenSet.Tests.Integration;
/// <summary>
/// Integration tests for <see cref="PostgresGoldenSetStore"/> using Testcontainers.
/// </summary>
[Trait("Category", "Integration")]
public sealed class PostgresGoldenSetStoreTests : IAsyncLifetime
{
    // Fixed epoch so test timestamps (and derived content digests) are
    // reproducible across runs; seeding from DateTimeOffset.UtcNow would
    // reintroduce the wall-clock non-determinism this suite exists to catch.
    private static readonly DateTimeOffset TestEpoch =
        new(2026, 1, 10, 12, 0, 0, TimeSpan.Zero);

    private PostgreSqlContainer _postgres = null!;
    private NpgsqlDataSource _dataSource = null!;
    private PostgresGoldenSetStore _store = null!;
    private FakeTimeProvider _timeProvider = null!;

    public async Task InitializeAsync()
    {
        _postgres = new PostgreSqlBuilder()
            .WithImage("postgres:16-alpine")
            .WithDatabase("goldensets_test")
            .WithUsername("test")
            .WithPassword("test")
            .Build();
        await _postgres.StartAsync();

        var connectionString = _postgres.GetConnectionString();
        _dataSource = NpgsqlDataSource.Create(connectionString);

        // Run migration
        await RunMigrationAsync();

        _timeProvider = new FakeTimeProvider(TestEpoch);
        var validator = new GoldenSetValidator(new CveValidator());
        var options = Options.Create(new GoldenSetOptions());
        var logger = NullLogger<PostgresGoldenSetStore>.Instance;
        _store = new PostgresGoldenSetStore(
            _dataSource,
            validator,
            _timeProvider,
            options,
            logger);
    }

    public async Task DisposeAsync()
    {
        // Guard against partial initialization: if InitializeAsync failed before
        // assigning these fields they are still null and must not be dereferenced.
        if (_dataSource is not null)
        {
            await _dataSource.DisposeAsync();
        }
        if (_postgres is not null)
        {
            await _postgres.DisposeAsync();
        }
    }

    // Applies the initial schema migration to the freshly started container.
    private async Task RunMigrationAsync()
    {
        var migrationSql = await File.ReadAllTextAsync(GetMigrationPath());
        await using var conn = await _dataSource.OpenConnectionAsync();
        await using var cmd = new NpgsqlCommand(migrationSql, conn);
        await cmd.ExecuteNonQueryAsync();
    }

    private static string GetMigrationPath()
    {
        // Navigate from bin/Debug/net10.0 to the Migrations folder.
        // NOTE(review): this relative walk is brittle if the build output layout
        // changes — consider copying the migration as test content instead.
        var baseDir = AppContext.BaseDirectory;
        var projectDir = Path.GetFullPath(Path.Combine(baseDir, "..", "..", "..", "..", ".."));
        return Path.Combine(projectDir, "__Libraries", "StellaOps.BinaryIndex.GoldenSet", "Migrations", "V1_0_0__initial_schema.sql");
    }

    #region Store Tests

    [Fact]
    public async Task StoreAsync_ValidDefinition_ReturnsSuccessWithDigest()
    {
        // Arrange
        var definition = CreateTestDefinition("CVE-2024-0001");

        // Act
        var result = await _store.StoreAsync(definition);

        // Assert
        result.Success.Should().BeTrue();
        result.ContentDigest.Should().NotBeNullOrEmpty();
        result.WasUpdated.Should().BeFalse();
    }

    [Fact]
    public async Task StoreAsync_DuplicateId_UpdatesAndReturnsWasUpdated()
    {
        // Arrange
        var definition1 = CreateTestDefinition("CVE-2024-0002");
        await _store.StoreAsync(definition1);

        var definition2 = definition1 with
        {
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "different_function",
                    Sinks = ["strcat"]
                }
            ]
        };

        // Act
        var result = await _store.StoreAsync(definition2);

        // Assert
        result.Success.Should().BeTrue();
        result.WasUpdated.Should().BeTrue();
    }

    #endregion

    #region GetById Tests

    [Fact]
    public async Task GetByIdAsync_ExistingId_ReturnsDefinition()
    {
        // Arrange
        var definition = CreateTestDefinition("CVE-2024-0003");
        await _store.StoreAsync(definition);

        // Act
        var retrieved = await _store.GetByIdAsync("CVE-2024-0003");

        // Assert
        retrieved.Should().NotBeNull();
        retrieved!.Id.Should().Be("CVE-2024-0003");
        retrieved.Component.Should().Be(definition.Component);
        retrieved.Targets.Should().HaveCount(definition.Targets.Length);
    }

    [Fact]
    public async Task GetByIdAsync_NonExistingId_ReturnsNull()
    {
        // Act
        var retrieved = await _store.GetByIdAsync("CVE-NONEXISTENT");

        // Assert
        retrieved.Should().BeNull();
    }

    #endregion

    #region GetByDigest Tests

    [Fact]
    public async Task GetByDigestAsync_ExistingDigest_ReturnsDefinition()
    {
        // Arrange
        var definition = CreateTestDefinition("CVE-2024-0004");
        var storeResult = await _store.StoreAsync(definition);

        // Act
        var retrieved = await _store.GetByDigestAsync(storeResult.ContentDigest);

        // Assert
        retrieved.Should().NotBeNull();
        retrieved!.Id.Should().Be("CVE-2024-0004");
    }

    #endregion

    #region List Tests

    [Fact]
    public async Task ListAsync_WithComponentFilter_ReturnsMatching()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0010", "openssl"));
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0011", "glibc"));
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0012", "openssl"));

        var query = new GoldenSetListQuery { ComponentFilter = "openssl" };

        // Act
        var results = await _store.ListAsync(query);

        // Assert
        results.Should().HaveCount(2);
        results.Should().AllSatisfy(r => r.Component.Should().Be("openssl"));
    }

    [Fact]
    public async Task ListAsync_WithStatusFilter_ReturnsMatching()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0020"), GoldenSetStatus.Draft);
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0021"), GoldenSetStatus.Approved);
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0022"), GoldenSetStatus.Draft);

        var query = new GoldenSetListQuery { StatusFilter = GoldenSetStatus.Draft };

        // Act
        var results = await _store.ListAsync(query);

        // Assert
        results.Should().HaveCountGreaterThanOrEqualTo(2);
        results.Should().AllSatisfy(r => r.Status.Should().Be(GoldenSetStatus.Draft));
    }

    [Fact]
    public async Task ListAsync_WithPagination_ReturnsCorrectPage()
    {
        // Arrange
        for (var i = 0; i < 5; i++)
        {
            await _store.StoreAsync(CreateTestDefinition($"CVE-2024-003{i}"));
        }

        var query = new GoldenSetListQuery
        {
            Limit = 2,
            Offset = 2,
            OrderBy = GoldenSetOrderBy.IdAsc
        };

        // Act
        var results = await _store.ListAsync(query);

        // Assert
        results.Should().HaveCount(2);
    }

    #endregion

    #region UpdateStatus Tests

    [Fact]
    public async Task UpdateStatusAsync_ValidTransition_UpdatesStatus()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0040"), GoldenSetStatus.Draft);

        // Act
        var result = await _store.UpdateStatusAsync(
            "CVE-2024-0040",
            GoldenSetStatus.InReview,
            "reviewer@test.com",
            "Submitting for review");

        // Assert
        result.Success.Should().BeTrue();
        var stored = await _store.GetAsync("CVE-2024-0040");
        stored.Should().NotBeNull();
        stored!.Status.Should().Be(GoldenSetStatus.InReview);
    }

    [Fact]
    public async Task UpdateStatusAsync_NonExistingId_ReturnsFailure()
    {
        // Act
        var result = await _store.UpdateStatusAsync(
            "CVE-NONEXISTENT",
            GoldenSetStatus.Approved,
            "reviewer@test.com",
            "Approving");

        // Assert
        result.Success.Should().BeFalse();
        result.Error.Should().Contain("not found");
    }

    #endregion

    #region GetByComponent Tests

    [Fact]
    public async Task GetByComponentAsync_ReturnsMatchingDefinitions()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0050", "libcurl"), GoldenSetStatus.Approved);
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0051", "libcurl"), GoldenSetStatus.Approved);
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0052", "zlib"), GoldenSetStatus.Approved);

        // Act
        var results = await _store.GetByComponentAsync("libcurl", GoldenSetStatus.Approved);

        // Assert
        results.Should().HaveCount(2);
        results.Should().AllSatisfy(d => d.Component.Should().Be("libcurl"));
    }

    #endregion

    #region Delete Tests

    [Fact]
    public async Task DeleteAsync_ExistingId_ArchivesAndReturnsTrue()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0060"));

        // Act
        var deleted = await _store.DeleteAsync("CVE-2024-0060");

        // Assert
        deleted.Should().BeTrue();
        var stored = await _store.GetAsync("CVE-2024-0060");
        stored.Should().NotBeNull();
        stored!.Status.Should().Be(GoldenSetStatus.Archived);
    }

    [Fact]
    public async Task DeleteAsync_NonExistingId_ReturnsFalse()
    {
        // Act
        var deleted = await _store.DeleteAsync("CVE-NONEXISTENT");

        // Assert
        deleted.Should().BeFalse();
    }

    #endregion

    #region AuditLog Tests

    [Fact]
    public async Task GetAuditLogAsync_ReturnsAuditEntries()
    {
        // Arrange
        await _store.StoreAsync(CreateTestDefinition("CVE-2024-0070"));
        await _store.UpdateStatusAsync(
            "CVE-2024-0070",
            GoldenSetStatus.InReview,
            "reviewer1@test.com",
            "First review");
        await _store.UpdateStatusAsync(
            "CVE-2024-0070",
            GoldenSetStatus.Approved,
            "reviewer2@test.com",
            "Approved after review");

        // Act
        var auditLog = await _store.GetAuditLogAsync("CVE-2024-0070");

        // Assert
        auditLog.Should().HaveCountGreaterThanOrEqualTo(3); // created + 2 status changes
    }

    #endregion

    #region Content Addressability Tests

    [Fact]
    public async Task ContentDigest_IsDeterministic()
    {
        // Arrange
        var definition = CreateTestDefinition("CVE-2024-0080");

        // Act
        var result1 = await _store.StoreAsync(definition);
        var retrieved = await _store.GetByIdAsync("CVE-2024-0080");

        // Delete and re-store the same definition
        await _store.DeleteAsync("CVE-2024-0080");

        // Need to store again with same content - digest should match
        var definition2 = retrieved!;
        var result2 = await _store.StoreAsync(definition2);

        // Assert
        result1.ContentDigest.Should().Be(result2.ContentDigest);
    }

    #endregion

    #region Helpers

    /// <summary>
    /// Builds a minimal valid definition with a fixed CreatedAt so that
    /// content digests for the same id/component are identical across runs.
    /// </summary>
    private static GoldenSetDefinition CreateTestDefinition(string id, string component = "openssl")
    {
        return new GoldenSetDefinition
        {
            Id = id,
            Component = component,
            Targets =
            [
                new VulnerableTarget
                {
                    FunctionName = "vulnerable_function",
                    Sinks = ["memcpy", "strcpy"],
                    Edges =
                    [
                        new BasicBlockEdge { From = "bb0", To = "bb1" },
                        new BasicBlockEdge { From = "bb1", To = "bb2" }
                    ],
                    TaintInvariant = "attacker-controlled input reaches sink without bounds check"
                }
            ],
            Metadata = new GoldenSetMetadata
            {
                AuthorId = "test@example.com",
                // Fixed timestamp (not DateTimeOffset.UtcNow) keeps test data deterministic.
                CreatedAt = TestEpoch,
                SourceRef = $"https://nvd.nist.gov/vuln/detail/{id}",
                Tags = ["memory-corruption", "heap-overflow"]
            }
        };
    }

    #endregion
}

View File

@@ -16,6 +16,7 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
<PackageReference Include="Npgsql" />
<PackageReference Include="NSubstitute" />
<PackageReference Include="Testcontainers.PostgreSql" />
</ItemGroup>

View File

@@ -11,6 +11,7 @@ using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.BackportProof.Models;
using StellaOps.Concelier.BackportProof.Repositories;
using StellaOps.Determinism;
namespace StellaOps.Concelier.BackportProof.Services;
@@ -22,6 +23,8 @@ public sealed class FixIndexService : IFixIndexService
{
private readonly IFixRuleRepository _repository;
private readonly ILogger<FixIndexService> _logger;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
// Active in-memory index
private FixIndexState? _activeIndex;
@@ -32,10 +35,14 @@ public sealed class FixIndexService : IFixIndexService
public FixIndexService(
IFixRuleRepository repository,
ILogger<FixIndexService> logger)
ILogger<FixIndexService> logger,
TimeProvider? timeProvider = null,
IGuidProvider? guidProvider = null)
{
_repository = repository;
_logger = logger;
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
public ValueTask<string?> GetActiveSnapshotIdAsync(CancellationToken ct = default)
@@ -52,7 +59,7 @@ public sealed class FixIndexService : IFixIndexService
{
_logger.LogInformation("Creating fix index snapshot: {Label}", sourceLabel);
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
// Load all rules from repository
// In a real implementation, this would need pagination for large datasets
@@ -66,7 +73,7 @@ public sealed class FixIndexService : IFixIndexService
var index = BuildIndex(allRules);
// Generate snapshot ID and digest
var snapshotId = $"fix-index-{DateTimeOffset.UtcNow:yyyyMMddHHmmss}-{Guid.NewGuid():N}";
var snapshotId = $"fix-index-{_timeProvider.GetUtcNow():yyyyMMddHHmmss}-{_guidProvider.NewGuid():N}";
var digest = ComputeIndexDigest(allRules);
var snapshot = new FixIndexSnapshot(
@@ -84,7 +91,7 @@ public sealed class FixIndexService : IFixIndexService
// Store snapshot
_snapshots[snapshotId] = indexState;
var elapsed = DateTimeOffset.UtcNow - startTime;
var elapsed = _timeProvider.GetUtcNow() - startTime;
_logger.LogInformation(
"Created snapshot {SnapshotId} with {Count} rules in {Elapsed}ms",
snapshotId, allRules.Count, elapsed.TotalMilliseconds);

View File

@@ -15,6 +15,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Determinism.Abstractions/StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.VersionComparison/StellaOps.VersionComparison.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.DistroIntel/StellaOps.DistroIntel.csproj" />

View File

@@ -20,15 +20,18 @@ public sealed partial class ProvenanceScopeService : IProvenanceScopeService
private readonly IProvenanceScopeStore _store;
private readonly IBackportEvidenceResolver? _evidenceResolver;
private readonly ILogger<ProvenanceScopeService> _logger;
private readonly TimeProvider _timeProvider;
public ProvenanceScopeService(
IProvenanceScopeStore store,
ILogger<ProvenanceScopeService> logger,
IBackportEvidenceResolver? evidenceResolver = null)
IBackportEvidenceResolver? evidenceResolver = null,
TimeProvider? timeProvider = null)
{
_store = store ?? throw new ArgumentNullException(nameof(store));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_evidenceResolver = evidenceResolver; // Optional - if not provided, uses advisory data only
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -89,8 +92,8 @@ public sealed partial class ProvenanceScopeService : IProvenanceScopeService
PatchOrigin = evidence?.PatchOrigin ?? DeterminePatchOrigin(request.Source),
EvidenceRef = null, // Will be linked separately
Confidence = evidence?.Confidence ?? DetermineDefaultConfidence(request.Source),
CreatedAt = existing?.CreatedAt ?? DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow
CreatedAt = existing?.CreatedAt ?? _timeProvider.GetUtcNow(),
UpdatedAt = _timeProvider.GetUtcNow()
};
// 5. Upsert scope
@@ -154,8 +157,8 @@ public sealed partial class ProvenanceScopeService : IProvenanceScopeService
PatchOrigin = evidence.PatchOrigin,
EvidenceRef = null,
Confidence = evidence.Confidence,
CreatedAt = existing?.CreatedAt ?? DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow
CreatedAt = existing?.CreatedAt ?? _timeProvider.GetUtcNow(),
UpdatedAt = _timeProvider.GetUtcNow()
};
var scopeId = await _store.UpsertAsync(scope, ct).ConfigureAwait(false);

View File

@@ -1,6 +1,7 @@
using System.Text.Json;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Persistence.Postgres.Models;
using StellaOps.Determinism;
namespace StellaOps.Concelier.Persistence.Postgres.Conversion;
@@ -15,6 +16,20 @@ public sealed class AdvisoryConverter
WriteIndented = false
};
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
/// <summary>
/// Initializes a new instance of the <see cref="AdvisoryConverter"/> class.
/// </summary>
/// <param name="timeProvider">Time provider for deterministic timestamps.</param>
/// <param name="guidProvider">GUID provider for deterministic ID generation.</param>
public AdvisoryConverter(TimeProvider? timeProvider = null, IGuidProvider? guidProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <summary>
/// Converts an Advisory domain model to PostgreSQL entities.
/// </summary>
@@ -22,8 +37,8 @@ public sealed class AdvisoryConverter
{
ArgumentNullException.ThrowIfNull(advisory);
var advisoryId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
var advisoryId = _guidProvider.NewGuid();
var now = _timeProvider.GetUtcNow();
var primaryVulnId = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
@@ -62,7 +77,7 @@ public sealed class AdvisoryConverter
aliasEntities.Add(new AdvisoryAliasEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
AliasType = aliasType,
AliasValue = alias,
@@ -78,7 +93,7 @@ public sealed class AdvisoryConverter
{
cvssEntities.Add(new AdvisoryCvssEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
CvssVersion = metric.Version,
VectorString = metric.Vector,
@@ -103,7 +118,7 @@ public sealed class AdvisoryConverter
affectedEntities.Add(new AdvisoryAffectedEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
Ecosystem = ecosystem,
PackageName = pkg.Identifier,
@@ -119,7 +134,7 @@ public sealed class AdvisoryConverter
// References
var referenceEntities = advisory.References.Select(reference => new AdvisoryReferenceEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
RefType = reference.Kind ?? "web",
Url = reference.Url,
@@ -129,7 +144,7 @@ public sealed class AdvisoryConverter
// Credits
var creditEntities = advisory.Credits.Select(credit => new AdvisoryCreditEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
Name = credit.DisplayName,
Contact = credit.Contacts.FirstOrDefault(),
@@ -140,7 +155,7 @@ public sealed class AdvisoryConverter
// Weaknesses
var weaknessEntities = advisory.Cwes.Select(weakness => new AdvisoryWeaknessEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
CweId = weakness.Identifier,
Description = weakness.Name,
@@ -157,7 +172,7 @@ public sealed class AdvisoryConverter
{
kevFlags.Add(new KevFlagEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
AdvisoryId = advisoryId,
CveId = cveId,
VendorProject = null,

View File

@@ -9,6 +9,7 @@ using System.Globalization;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Persistence.Postgres.Models;
using StellaOps.Determinism;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Persistence.Postgres.Repositories;
@@ -19,10 +20,18 @@ namespace StellaOps.Concelier.Persistence.Postgres.Repositories;
public sealed class SyncLedgerRepository : RepositoryBase<ConcelierDataSource>, ISyncLedgerRepository
{
private const string SystemTenantId = "_system";
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public SyncLedgerRepository(ConcelierDataSource dataSource, ILogger<SyncLedgerRepository> logger)
public SyncLedgerRepository(
ConcelierDataSource dataSource,
ILogger<SyncLedgerRepository> logger,
TimeProvider? timeProvider = null,
IGuidProvider? guidProvider = null)
: base(dataSource, logger)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
#region Ledger Operations
@@ -93,7 +102,7 @@ public sealed class SyncLedgerRepository : RepositoryBase<ConcelierDataSource>,
RETURNING id
""";
var id = entry.Id == Guid.Empty ? Guid.NewGuid() : entry.Id;
var id = entry.Id == Guid.Empty ? _guidProvider.NewGuid() : entry.Id;
await ExecuteAsync(
SystemTenantId,
@@ -106,7 +115,7 @@ public sealed class SyncLedgerRepository : RepositoryBase<ConcelierDataSource>,
AddParameter(cmd, "bundle_hash", entry.BundleHash);
AddParameter(cmd, "items_count", entry.ItemsCount);
AddParameter(cmd, "signed_at", entry.SignedAt);
AddParameter(cmd, "imported_at", entry.ImportedAt == default ? DateTimeOffset.UtcNow : entry.ImportedAt);
AddParameter(cmd, "imported_at", entry.ImportedAt == default ? _timeProvider.GetUtcNow() : entry.ImportedAt);
},
ct).ConfigureAwait(false);
@@ -144,13 +153,13 @@ public sealed class SyncLedgerRepository : RepositoryBase<ConcelierDataSource>,
{
var entry = new SyncLedgerEntity
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
SiteId = siteId,
Cursor = newCursor,
BundleHash = bundleHash,
ItemsCount = itemsCount,
SignedAt = signedAt,
ImportedAt = DateTimeOffset.UtcNow
ImportedAt = _timeProvider.GetUtcNow()
};
await InsertAsync(entry, ct).ConfigureAwait(false);

View File

@@ -18,13 +18,16 @@ public sealed class SitePolicyEnforcementService
{
private readonly ISyncLedgerRepository _repository;
private readonly ILogger<SitePolicyEnforcementService> _logger;
private readonly TimeProvider _timeProvider;
public SitePolicyEnforcementService(
ISyncLedgerRepository repository,
ILogger<SitePolicyEnforcementService> logger)
ILogger<SitePolicyEnforcementService> logger,
TimeProvider? timeProvider = null)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
@@ -301,7 +304,7 @@ public sealed class SitePolicyEnforcementService
WindowHours: windowHours);
}
var windowStart = DateTimeOffset.UtcNow.AddHours(-windowHours);
var windowStart = _timeProvider.GetUtcNow().AddHours(-windowHours);
var recentHistory = history.Where(h => h.ImportedAt >= windowStart).ToList();
return new SiteBudgetInfo(

View File

@@ -29,6 +29,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.EfCore\StellaOps.Infrastructure.EfCore.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />

View File

@@ -19,19 +19,22 @@ public sealed class BackportProofService
private readonly ISourceArtifactRepository _sourceRepo;
private readonly IPatchRepository _patchRepo;
private readonly BinaryFingerprintFactory _fingerprintFactory;
private readonly TimeProvider _timeProvider;
public BackportProofService(
ILogger<BackportProofService> logger,
IDistroAdvisoryRepository advisoryRepo,
ISourceArtifactRepository sourceRepo,
IPatchRepository patchRepo,
BinaryFingerprintFactory fingerprintFactory)
BinaryFingerprintFactory fingerprintFactory,
TimeProvider? timeProvider = null)
{
_logger = logger;
_advisoryRepo = advisoryRepo;
_sourceRepo = sourceRepo;
_patchRepo = patchRepo;
_fingerprintFactory = fingerprintFactory;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
@@ -251,7 +254,7 @@ public sealed class BackportProofService
EvidenceId = $"evidence:binary:{matchResult.Method}:{matchResult.MatchedFingerprintId}",
Type = EvidenceType.BinaryFingerprint,
Source = matchResult.Method.ToString(),
Timestamp = DateTimeOffset.UtcNow,
Timestamp = _timeProvider.GetUtcNow(),
Data = fingerprintData,
DataHash = dataHash
});

View File

@@ -21,13 +21,16 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
{
private readonly ICanonicalAdvisoryService _canonicalService;
private readonly ILogger<SbomAdvisoryMatcher> _logger;
private readonly TimeProvider _timeProvider;
public SbomAdvisoryMatcher(
ICanonicalAdvisoryService canonicalService,
ILogger<SbomAdvisoryMatcher> logger)
ILogger<SbomAdvisoryMatcher> logger,
TimeProvider? timeProvider = null)
{
_canonicalService = canonicalService ?? throw new ArgumentNullException(nameof(canonicalService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -142,7 +145,7 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
Method = DetermineMatchMethod(purl),
IsReachable = false,
IsDeployed = false,
MatchedAt = DateTimeOffset.UtcNow
MatchedAt = _timeProvider.GetUtcNow()
};
}
@@ -167,6 +170,7 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
var isReachable = reachabilityMap?.TryGetValue(purl, out var reachable) == true && reachable;
var isDeployed = deploymentMap?.TryGetValue(purl, out var deployed) == true && deployed;
var matchMethod = DetermineMatchMethod(purl);
var matchedAt = _timeProvider.GetUtcNow();
return advisories.Select(advisory => new SbomAdvisoryMatch
{
@@ -178,7 +182,7 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
Method = matchMethod,
IsReachable = isReachable,
IsDeployed = isDeployed,
MatchedAt = DateTimeOffset.UtcNow
MatchedAt = matchedAt
}).ToList();
}
catch (Exception ex)

View File

@@ -21,13 +21,16 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
{
private readonly ICanonicalAdvisoryService _canonicalService;
private readonly ILogger<SbomAdvisoryMatcher> _logger;
private readonly TimeProvider _timeProvider;
public SbomAdvisoryMatcher(
ICanonicalAdvisoryService canonicalService,
ILogger<SbomAdvisoryMatcher> logger)
ILogger<SbomAdvisoryMatcher> logger,
TimeProvider? timeProvider = null)
{
_canonicalService = canonicalService ?? throw new ArgumentNullException(nameof(canonicalService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -142,7 +145,7 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
Method = DetermineMatchMethod(purl),
IsReachable = false,
IsDeployed = false,
MatchedAt = DateTimeOffset.UtcNow
MatchedAt = _timeProvider.GetUtcNow()
};
}
@@ -168,6 +171,8 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
var isDeployed = deploymentMap?.TryGetValue(purl, out var deployed) == true && deployed;
var matchMethod = DetermineMatchMethod(purl);
var matchedAt = _timeProvider.GetUtcNow();
return advisories.Select(advisory => new SbomAdvisoryMatch
{
Id = ComputeDeterministicMatchId(sbomDigest, purl, advisory.Id),
@@ -178,7 +183,7 @@ public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
Method = matchMethod,
IsReachable = isReachable,
IsDeployed = isDeployed,
MatchedAt = DateTimeOffset.UtcNow
MatchedAt = matchedAt
}).ToList();
}
catch (Exception ex)

View File

@@ -26,19 +26,22 @@ public sealed class SbomRegistryService : ISbomRegistryService
private readonly IInterestScoringService _scoringService;
private readonly IEventStream<SbomLearnedEvent>? _eventStream;
private readonly ILogger<SbomRegistryService> _logger;
private readonly TimeProvider _timeProvider;
public SbomRegistryService(
ISbomRegistryRepository repository,
ISbomAdvisoryMatcher matcher,
IInterestScoringService scoringService,
ILogger<SbomRegistryService> logger,
IEventStream<SbomLearnedEvent>? eventStream = null)
IEventStream<SbomLearnedEvent>? eventStream = null,
TimeProvider? timeProvider = null)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_matcher = matcher ?? throw new ArgumentNullException(nameof(matcher));
_scoringService = scoringService ?? throw new ArgumentNullException(nameof(scoringService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_eventStream = eventStream;
_timeProvider = timeProvider ?? TimeProvider.System;
}
#region Registration
@@ -72,7 +75,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
PrimaryVersion = input.PrimaryVersion,
ComponentCount = input.Purls.Count,
Purls = input.Purls,
RegisteredAt = DateTimeOffset.UtcNow,
RegisteredAt = _timeProvider.GetUtcNow(),
Source = input.Source,
TenantId = input.TenantId
};
@@ -161,7 +164,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
// Step 4: Update registration metadata
await _repository.UpdateAffectedCountAsync(registration.Digest, matches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(registration.Digest, DateTimeOffset.UtcNow, cancellationToken)
await _repository.UpdateLastMatchedAsync(registration.Digest, _timeProvider.GetUtcNow(), cancellationToken)
.ConfigureAwait(false);
// Step 5: Update interest scores for affected canonicals
@@ -210,7 +213,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
Registration = registration with
{
AffectedCount = matches.Count,
LastMatchedAt = DateTimeOffset.UtcNow
LastMatchedAt = _timeProvider.GetUtcNow()
},
Matches = matches,
ScoresUpdated = scoresUpdated,
@@ -270,7 +273,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
await _repository.UpdateAffectedCountAsync(digest, matches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(digest, DateTimeOffset.UtcNow, cancellationToken)
await _repository.UpdateLastMatchedAsync(digest, _timeProvider.GetUtcNow(), cancellationToken)
.ConfigureAwait(false);
sw.Stop();
@@ -289,7 +292,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
Registration = registration with
{
AffectedCount = matches.Count,
LastMatchedAt = DateTimeOffset.UtcNow
LastMatchedAt = _timeProvider.GetUtcNow()
},
Matches = matches,
ScoresUpdated = 0, // Rematch doesn't update scores
@@ -374,7 +377,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
await _repository.UpdateAffectedCountAsync(digest, allMatches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(digest, DateTimeOffset.UtcNow, cancellationToken)
await _repository.UpdateLastMatchedAsync(digest, _timeProvider.GetUtcNow(), cancellationToken)
.ConfigureAwait(false);
// Update interest scores only for newly added matches
@@ -424,7 +427,7 @@ public sealed class SbomRegistryService : ISbomRegistryService
{
ComponentCount = newPurls.Count,
AffectedCount = allMatches.Count,
LastMatchedAt = DateTimeOffset.UtcNow,
LastMatchedAt = _timeProvider.GetUtcNow(),
Purls = newPurls
},
Matches = allMatches,

View File

@@ -20,6 +20,7 @@ public sealed class CiscoProviderMetadataLoader
private readonly ILogger<CiscoProviderMetadataLoader> _logger;
private readonly CiscoConnectorOptions _options;
private readonly IFileSystem _fileSystem;
private readonly TimeProvider _timeProvider;
private readonly JsonSerializerOptions _serializerOptions;
private readonly SemaphoreSlim _semaphore = new(1, 1);
@@ -28,7 +29,8 @@ public sealed class CiscoProviderMetadataLoader
IMemoryCache memoryCache,
IOptions<CiscoConnectorOptions> options,
ILogger<CiscoProviderMetadataLoader> logger,
IFileSystem? fileSystem = null)
IFileSystem? fileSystem = null,
TimeProvider? timeProvider = null)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_memoryCache = memoryCache ?? throw new ArgumentNullException(nameof(memoryCache));
@@ -36,6 +38,7 @@ public sealed class CiscoProviderMetadataLoader
ArgumentNullException.ThrowIfNull(options);
_options = options.Value ?? throw new ArgumentNullException(nameof(options));
_fileSystem = fileSystem ?? new FileSystem();
_timeProvider = timeProvider ?? TimeProvider.System;
_serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
@@ -45,7 +48,8 @@ public sealed class CiscoProviderMetadataLoader
public async Task<CiscoProviderMetadataResult> LoadAsync(CancellationToken cancellationToken)
{
if (_memoryCache.TryGetValue<CacheEntry>(CacheKey, out var cached) && cached is not null && !cached.IsExpired())
var now = _timeProvider.GetUtcNow();
if (_memoryCache.TryGetValue<CacheEntry>(CacheKey, out var cached) && cached is not null && !cached.IsExpired(now))
{
_logger.LogDebug("Returning cached Cisco provider metadata (expires {Expires}).", cached.ExpiresAt);
return new CiscoProviderMetadataResult(cached.Provider, cached.FetchedAt, cached.FromOffline, true);
@@ -54,7 +58,8 @@ public sealed class CiscoProviderMetadataLoader
await _semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_memoryCache.TryGetValue<CacheEntry>(CacheKey, out cached) && cached is not null && !cached.IsExpired())
now = _timeProvider.GetUtcNow();
if (_memoryCache.TryGetValue<CacheEntry>(CacheKey, out cached) && cached is not null && !cached.IsExpired(now))
{
return new CiscoProviderMetadataResult(cached.Provider, cached.FetchedAt, cached.FromOffline, true);
}
@@ -76,8 +81,8 @@ public sealed class CiscoProviderMetadataLoader
{
var entry = offline with
{
FetchedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow + _options.MetadataCacheDuration,
FetchedAt = _timeProvider.GetUtcNow(),
ExpiresAt = _timeProvider.GetUtcNow() + _options.MetadataCacheDuration,
FromOffline = true,
};
StoreCache(entry);
@@ -115,8 +120,8 @@ public sealed class CiscoProviderMetadataLoader
_logger.LogDebug("Cisco provider metadata not modified (etag {ETag}).", previous.ETag);
return previous with
{
FetchedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow + _options.MetadataCacheDuration,
FetchedAt = _timeProvider.GetUtcNow(),
ExpiresAt = _timeProvider.GetUtcNow() + _options.MetadataCacheDuration,
};
}
@@ -140,8 +145,8 @@ public sealed class CiscoProviderMetadataLoader
return new CacheEntry(
provider,
DateTimeOffset.UtcNow,
DateTimeOffset.UtcNow + _options.MetadataCacheDuration,
_timeProvider.GetUtcNow(),
_timeProvider.GetUtcNow() + _options.MetadataCacheDuration,
etagHeader,
FromOffline: false);
}
@@ -169,7 +174,7 @@ public sealed class CiscoProviderMetadataLoader
{
var payload = _fileSystem.File.ReadAllText(_options.OfflineSnapshotPath);
var provider = ParseProvider(payload);
return new CacheEntry(provider, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow + _options.MetadataCacheDuration, null, true);
return new CacheEntry(provider, _timeProvider.GetUtcNow(), _timeProvider.GetUtcNow() + _options.MetadataCacheDuration, null, true);
}
catch (Exception ex)
{
@@ -242,7 +247,7 @@ public sealed class CiscoProviderMetadataLoader
string? ETag,
bool FromOffline)
{
public bool IsExpired() => DateTimeOffset.UtcNow >= ExpiresAt;
public bool IsExpired(DateTimeOffset now) => now >= ExpiresAt;
}
}

View File

@@ -25,6 +25,7 @@ public sealed class RancherHubMetadataLoader
private readonly RancherHubTokenProvider _tokenProvider;
private readonly IFileSystem _fileSystem;
private readonly ILogger<RancherHubMetadataLoader> _logger;
private readonly TimeProvider _timeProvider;
private readonly SemaphoreSlim _semaphore = new(1, 1);
private readonly JsonDocumentOptions _documentOptions;
@@ -33,13 +34,15 @@ public sealed class RancherHubMetadataLoader
IMemoryCache memoryCache,
RancherHubTokenProvider tokenProvider,
IFileSystem fileSystem,
ILogger<RancherHubMetadataLoader> logger)
ILogger<RancherHubMetadataLoader> logger,
TimeProvider? timeProvider = null)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_memoryCache = memoryCache ?? throw new ArgumentNullException(nameof(memoryCache));
_tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider));
_fileSystem = fileSystem ?? throw new ArgumentNullException(nameof(fileSystem));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_documentOptions = new JsonDocumentOptions
{
CommentHandling = JsonCommentHandling.Skip,
@@ -52,7 +55,7 @@ public sealed class RancherHubMetadataLoader
ArgumentNullException.ThrowIfNull(options);
var cacheKey = CreateCacheKey(options);
if (_memoryCache.TryGetValue<CacheEntry>(cacheKey, out var cached) && cached is not null && !cached.IsExpired())
if (_memoryCache.TryGetValue<CacheEntry>(cacheKey, out var cached) && cached is not null && !cached.IsExpired(_timeProvider.GetUtcNow()))
{
_logger.LogDebug("Returning cached Rancher hub metadata (expires {Expires}).", cached.ExpiresAt);
return new RancherHubMetadataResult(cached.Metadata, cached.FetchedAt, FromCache: true, cached.FromOfflineSnapshot);
@@ -61,7 +64,7 @@ public sealed class RancherHubMetadataLoader
await _semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_memoryCache.TryGetValue<CacheEntry>(cacheKey, out cached) && cached is not null && !cached.IsExpired())
if (_memoryCache.TryGetValue<CacheEntry>(cacheKey, out cached) && cached is not null && !cached.IsExpired(_timeProvider.GetUtcNow()))
{
return new RancherHubMetadataResult(cached.Metadata, cached.FetchedAt, FromCache: true, cached.FromOfflineSnapshot);
}
@@ -131,8 +134,8 @@ public sealed class RancherHubMetadataLoader
_logger.LogDebug("Rancher hub discovery document not modified (etag {ETag}).", previous.ETag);
return previous with
{
FetchedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow + options.MetadataCacheDuration,
FetchedAt = _timeProvider.GetUtcNow(),
ExpiresAt = _timeProvider.GetUtcNow() + options.MetadataCacheDuration,
FromOfflineSnapshot = false,
};
}
@@ -142,8 +145,8 @@ public sealed class RancherHubMetadataLoader
var metadata = ParseMetadata(payload, options);
var entry = new CacheEntry(
metadata,
DateTimeOffset.UtcNow,
DateTimeOffset.UtcNow + options.MetadataCacheDuration,
_timeProvider.GetUtcNow(),
_timeProvider.GetUtcNow() + options.MetadataCacheDuration,
response.Headers.ETag?.ToString(),
FromOfflineSnapshot: false,
Payload: payload);
@@ -177,8 +180,8 @@ public sealed class RancherHubMetadataLoader
var metadata = ParseMetadata(payload, options);
return new CacheEntry(
metadata,
DateTimeOffset.UtcNow,
DateTimeOffset.UtcNow + options.MetadataCacheDuration,
_timeProvider.GetUtcNow(),
_timeProvider.GetUtcNow() + options.MetadataCacheDuration,
ETag: null,
FromOfflineSnapshot: true,
Payload: payload);
@@ -422,7 +425,7 @@ public sealed class RancherHubMetadataLoader
bool FromOfflineSnapshot,
string? Payload)
{
public bool IsExpired() => DateTimeOffset.UtcNow >= ExpiresAt;
public bool IsExpired(DateTimeOffset now) => now >= ExpiresAt;
}
}

View File

@@ -103,6 +103,7 @@ public sealed class FindingScoringService : IFindingScoringService
private readonly IMemoryCache _cache;
private readonly FindingScoringOptions _options;
private readonly ILogger<FindingScoringService> _logger;
private readonly TimeProvider _timeProvider;
private static readonly TimeSpan DefaultCacheDuration = TimeSpan.FromMinutes(60);
@@ -116,7 +117,8 @@ public sealed class FindingScoringService : IFindingScoringService
IScoreHistoryStore historyStore,
IMemoryCache cache,
IOptions<FindingScoringOptions> options,
ILogger<FindingScoringService> logger)
ILogger<FindingScoringService> logger,
TimeProvider? timeProvider = null)
{
_normalizer = normalizer;
_calculator = calculator;
@@ -126,6 +128,7 @@ public sealed class FindingScoringService : IFindingScoringService
_cache = cache;
_options = options.Value;
_logger = logger;
_timeProvider = timeProvider ?? TimeProvider.System;
_environment = Environment.GetEnvironmentVariable("STELLAOPS_ENVIRONMENT") ?? "production";
}
@@ -160,7 +163,7 @@ public sealed class FindingScoringService : IFindingScoringService
var input = _normalizer.Aggregate(evidence);
var result = _calculator.Calculate(input, policy);
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var cacheDuration = TimeSpan.FromMinutes(_options.CacheTtlMinutes);
var response = MapToResponse(result, request.IncludeBreakdown, now, cacheDuration);
@@ -288,7 +291,7 @@ public sealed class FindingScoringService : IFindingScoringService
Summary = summary,
Errors = errors.Count > 0 ? errors : null,
PolicyDigest = policy.ComputeDigest(),
CalculatedAt = DateTimeOffset.UtcNow
CalculatedAt = _timeProvider.GetUtcNow()
};
}

View File

@@ -46,6 +46,12 @@ public sealed class InMemoryScoreHistoryStore : IScoreHistoryStore
private readonly ConcurrentDictionary<string, List<ScoreRecord>> _history = new();
private readonly TimeSpan _retentionPeriod = TimeSpan.FromDays(90);
private readonly int _maxEntriesPerFinding = 1000;
private readonly TimeProvider _timeProvider;
public InMemoryScoreHistoryStore(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public void RecordScore(ScoreRecord record)
{
@@ -68,7 +74,7 @@ public sealed class InMemoryScoreHistoryStore : IScoreHistoryStore
entries.Add(record);
// Prune old entries
var cutoff = DateTimeOffset.UtcNow - _retentionPeriod;
var cutoff = _timeProvider.GetUtcNow() - _retentionPeriod;
entries.RemoveAll(e => e.CalculatedAt < cutoff);
// Limit total entries

View File

@@ -12,8 +12,14 @@ public sealed class VexConsensusService
private readonly ConcurrentDictionary<string, VexProjectionRecord> _projections = new();
private readonly ConcurrentDictionary<string, VexIssuerRecord> _issuers = new();
private readonly ConcurrentDictionary<string, List<VexStatementRecord>> _statements = new();
private readonly TimeProvider _timeProvider;
private long _projectionCounter = 0;
public VexConsensusService(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Computes consensus for a vulnerability-product pair.
/// </summary>
@@ -84,7 +90,7 @@ public sealed class VexConsensusService
Contributions: [],
Conflicts: null,
ProjectionId: null,
ComputedAt: DateTimeOffset.UtcNow);
ComputedAt: _timeProvider.GetUtcNow());
return Task.FromResult(defaultResponse);
}
@@ -121,7 +127,7 @@ public sealed class VexConsensusService
Contributions: contributions,
Conflicts: null,
ProjectionId: projectionId,
ComputedAt: DateTimeOffset.UtcNow);
ComputedAt: _timeProvider.GetUtcNow());
return Task.FromResult(response);
}
@@ -163,7 +169,7 @@ public sealed class VexConsensusService
TotalCount: request.Targets.Count,
SuccessCount: results.Count,
FailureCount: failures,
CompletedAt: DateTimeOffset.UtcNow);
CompletedAt: _timeProvider.GetUtcNow());
}
/// <summary>
@@ -299,7 +305,7 @@ public sealed class VexConsensusService
: 0;
var withConflicts = projections.Count(p => p.ConflictCount > 0);
var last24h = DateTimeOffset.UtcNow.AddDays(-1);
var last24h = _timeProvider.GetUtcNow().AddDays(-1);
var changesLast24h = projections.Count(p => p.StatusChanged && p.ComputedAt >= last24h);
return Task.FromResult(new VexConsensusStatisticsResponse(
@@ -309,7 +315,7 @@ public sealed class VexConsensusService
AverageConfidence: avgConfidence,
ProjectionsWithConflicts: withConflicts,
StatusChangesLast24h: changesLast24h,
ComputedAt: DateTimeOffset.UtcNow));
ComputedAt: _timeProvider.GetUtcNow()));
}
/// <summary>
@@ -367,6 +373,7 @@ public sealed class VexConsensusService
RegisterVexIssuerRequest request,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var record = new VexIssuerRecord(
IssuerId: request.IssuerId,
Name: request.Name,
@@ -378,14 +385,14 @@ public sealed class VexConsensusService
KeyType: k.KeyType,
Algorithm: k.Algorithm,
Status: "active",
RegisteredAt: DateTimeOffset.UtcNow,
RegisteredAt: now,
ExpiresAt: k.ExpiresAt)).ToList() ?? [],
Metadata: request.Metadata != null ? new VexIssuerMetadata(
Description: request.Metadata.Description,
Uri: request.Metadata.Uri,
Email: request.Metadata.Email,
Tags: request.Metadata.Tags?.ToList()) : null,
RegisteredAt: DateTimeOffset.UtcNow,
RegisteredAt: now,
LastUpdatedAt: null,
RevokedAt: null,
RevocationReason: null);
@@ -425,7 +432,7 @@ public sealed class VexConsensusService
string status, string? justification, double confidence, string outcome, int statementCount)
{
var id = $"proj-{Interlocked.Increment(ref _projectionCounter):D8}";
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var record = new VexProjectionRecord(
ProjectionId: id,

View File

@@ -4,6 +4,7 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Determinism;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Services;
@@ -74,18 +75,26 @@ public interface IWebhookDeliveryService
public sealed class InMemoryWebhookStore : IWebhookStore
{
private readonly ConcurrentDictionary<Guid, WebhookRegistration> _webhooks = new();
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public InMemoryWebhookStore(TimeProvider? timeProvider = null, IGuidProvider? guidProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
public WebhookRegistration Register(RegisterWebhookRequest request)
{
var registration = new WebhookRegistration
{
Id = Guid.NewGuid(),
Id = _guidProvider.NewGuid(),
Url = request.Url,
Secret = request.Secret,
FindingPatterns = request.FindingPatterns,
MinScoreChange = request.MinScoreChange,
TriggerOnBucketChange = request.TriggerOnBucketChange,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
IsActive = true
};
@@ -171,6 +180,7 @@ public sealed class WebhookDeliveryService : IWebhookDeliveryService
private readonly IWebhookStore _store;
private readonly IHttpClientFactory _httpClientFactory;
private readonly ILogger<WebhookDeliveryService> _logger;
private readonly TimeProvider _timeProvider;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
@@ -182,11 +192,13 @@ public sealed class WebhookDeliveryService : IWebhookDeliveryService
public WebhookDeliveryService(
IWebhookStore store,
IHttpClientFactory httpClientFactory,
ILogger<WebhookDeliveryService> logger)
ILogger<WebhookDeliveryService> logger,
TimeProvider? timeProvider = null)
{
_store = store;
_httpClientFactory = httpClientFactory;
_logger = logger;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task NotifyScoreChangeAsync(
@@ -219,7 +231,7 @@ public sealed class WebhookDeliveryService : IWebhookDeliveryService
ScoreChange = scoreChange,
BucketChanged = bucketChanged,
PolicyDigest = policyDigest,
Timestamp = DateTimeOffset.UtcNow
Timestamp = _timeProvider.GetUtcNow()
};
var payloadJson = JsonSerializer.Serialize(payload, JsonOptions);
@@ -258,7 +270,7 @@ public sealed class WebhookDeliveryService : IWebhookDeliveryService
}
request.Headers.TryAddWithoutValidation("X-Webhook-Id", webhook.Id.ToString());
request.Headers.TryAddWithoutValidation("X-Webhook-Timestamp", DateTimeOffset.UtcNow.ToUnixTimeSeconds().ToString());
request.Headers.TryAddWithoutValidation("X-Webhook-Timestamp", _timeProvider.GetUtcNow().ToUnixTimeSeconds().ToString());
using var response = await client.SendAsync(request, ct).ConfigureAwait(false);

View File

@@ -16,6 +16,7 @@ public sealed class IntegrationService
private readonly IIntegrationEventPublisher _eventPublisher;
private readonly IIntegrationAuditLogger _auditLogger;
private readonly IAuthRefResolver _authRefResolver;
private readonly TimeProvider _timeProvider;
private readonly ILogger<IntegrationService> _logger;
public IntegrationService(
@@ -24,6 +25,7 @@ public sealed class IntegrationService
IIntegrationEventPublisher eventPublisher,
IIntegrationAuditLogger auditLogger,
IAuthRefResolver authRefResolver,
TimeProvider timeProvider,
ILogger<IntegrationService> logger)
{
_repository = repository;
@@ -31,11 +33,13 @@ public sealed class IntegrationService
_eventPublisher = eventPublisher;
_auditLogger = auditLogger;
_authRefResolver = authRefResolver;
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger;
}
public async Task<IntegrationResponse> CreateAsync(CreateIntegrationRequest request, string? userId, string? tenantId, CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var integration = new Integration
{
Id = Guid.NewGuid(),
@@ -51,7 +55,9 @@ public sealed class IntegrationService
Tags = request.Tags?.ToList() ?? [],
CreatedBy = userId,
UpdatedBy = userId,
TenantId = tenantId
TenantId = tenantId,
CreatedAt = now,
UpdatedAt = now
};
var created = await _repository.CreateAsync(integration, cancellationToken);
@@ -62,7 +68,7 @@ public sealed class IntegrationService
created.Type,
created.Provider,
userId,
DateTimeOffset.UtcNow), cancellationToken);
_timeProvider.GetUtcNow()), cancellationToken);
await _auditLogger.LogAsync("integration.created", created.Id, userId, new { created.Name, created.Type, created.Provider }, cancellationToken);
@@ -119,7 +125,7 @@ public sealed class IntegrationService
if (request.Tags is not null) integration.Tags = request.Tags.ToList();
if (request.Status.HasValue) integration.Status = request.Status.Value;
integration.UpdatedAt = DateTimeOffset.UtcNow;
integration.UpdatedAt = _timeProvider.GetUtcNow();
integration.UpdatedBy = userId;
var updated = await _repository.UpdateAsync(integration, cancellationToken);
@@ -128,7 +134,7 @@ public sealed class IntegrationService
updated.Id,
updated.Name,
userId,
DateTimeOffset.UtcNow), cancellationToken);
_timeProvider.GetUtcNow()), cancellationToken);
if (oldStatus != updated.Status)
{
@@ -136,7 +142,7 @@ public sealed class IntegrationService
updated.Id,
oldStatus,
updated.Status,
DateTimeOffset.UtcNow), cancellationToken);
_timeProvider.GetUtcNow()), cancellationToken);
}
await _auditLogger.LogAsync("integration.updated", updated.Id, userId, new { updated.Name, OldStatus = oldStatus, NewStatus = updated.Status }, cancellationToken);
@@ -156,7 +162,7 @@ public sealed class IntegrationService
await _eventPublisher.PublishAsync(new IntegrationDeletedEvent(
id,
userId,
DateTimeOffset.UtcNow), cancellationToken);
_timeProvider.GetUtcNow()), cancellationToken);
await _auditLogger.LogAsync("integration.deleted", id, userId, new { integration.Name }, cancellationToken);
@@ -180,7 +186,7 @@ public sealed class IntegrationService
$"No connector plugin available for provider {integration.Provider}",
null,
TimeSpan.Zero,
DateTimeOffset.UtcNow);
_timeProvider.GetUtcNow());
}
var resolvedSecret = integration.AuthRefUri is not null
@@ -189,9 +195,9 @@ public sealed class IntegrationService
var config = BuildConfig(integration, resolvedSecret);
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
var result = await plugin.TestConnectionAsync(config, cancellationToken);
var endTime = DateTimeOffset.UtcNow;
var endTime = _timeProvider.GetUtcNow();
// Update integration status based on result
var newStatus = result.Success ? IntegrationStatus.Active : IntegrationStatus.Failed;
@@ -233,7 +239,7 @@ public sealed class IntegrationService
HealthStatus.Unknown,
$"No connector plugin available for provider {integration.Provider}",
null,
DateTimeOffset.UtcNow,
_timeProvider.GetUtcNow(),
TimeSpan.Zero);
}

View File

@@ -66,12 +66,12 @@ public sealed class Integration
/// <summary>
/// UTC timestamp when the integration was created.
/// </summary>
public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// UTC timestamp when the integration was last updated.
/// </summary>
public DateTimeOffset UpdatedAt { get; set; } = DateTimeOffset.UtcNow;
public required DateTimeOffset UpdatedAt { get; set; }
/// <summary>
/// User or system that created this integration.

View File

@@ -10,10 +10,12 @@ namespace StellaOps.Integrations.Persistence;
public sealed class PostgresIntegrationRepository : IIntegrationRepository
{
private readonly IntegrationDbContext _context;
private readonly TimeProvider _timeProvider;
public PostgresIntegrationRepository(IntegrationDbContext context)
public PostgresIntegrationRepository(IntegrationDbContext context, TimeProvider? timeProvider = null)
{
_context = context;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<Integration?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default)
@@ -93,7 +95,7 @@ public sealed class PostgresIntegrationRepository : IIntegrationRepository
{
entity.IsDeleted = true;
entity.Status = IntegrationStatus.Archived;
entity.UpdatedAt = DateTimeOffset.UtcNow;
entity.UpdatedAt = _timeProvider.GetUtcNow();
await _context.SaveChangesAsync(cancellationToken);
}
}

View File

@@ -11,6 +11,13 @@ namespace StellaOps.Integrations.Plugin.GitHubApp;
/// </summary>
public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
{
private readonly TimeProvider _timeProvider;
public GitHubAppConnectorPlugin(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public string Name => "github-app";
public IntegrationType Type => IntegrationType.Scm;
@@ -21,7 +28,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
public async Task<TestConnectionResult> TestConnectionAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
using var client = CreateHttpClient(config);
@@ -29,7 +36,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
{
// Call GitHub API to verify authentication
var response = await client.GetAsync("/app", cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
if (response.IsSuccessStatusCode)
{
@@ -63,7 +70,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
}
catch (Exception ex)
{
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new TestConnectionResult(
Success: false,
Message: $"Connection failed: {ex.Message}",
@@ -78,7 +85,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
public async Task<HealthCheckResult> CheckHealthAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
using var client = CreateHttpClient(config);
@@ -86,7 +93,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
{
// Check GitHub API status
var response = await client.GetAsync("/rate_limit", cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
if (response.IsSuccessStatusCode)
{
@@ -113,7 +120,7 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
["limit"] = limit.ToString(),
["percentUsed"] = percentUsed.ToString()
},
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
@@ -121,17 +128,17 @@ public sealed class GitHubAppConnectorPlugin : IIntegrationConnectorPlugin
Status: HealthStatus.Unhealthy,
Message: $"GitHub returned {response.StatusCode}",
Details: new Dictionary<string, string> { ["statusCode"] = ((int)response.StatusCode).ToString() },
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
catch (Exception ex)
{
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new HealthCheckResult(
Status: HealthStatus.Unhealthy,
Message: $"Health check failed: {ex.Message}",
Details: new Dictionary<string, string> { ["error"] = ex.GetType().Name },
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
}

View File

@@ -12,6 +12,13 @@ namespace StellaOps.Integrations.Plugin.Harbor;
/// </summary>
public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
{
private readonly TimeProvider _timeProvider;
public HarborConnectorPlugin(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public string Name => "harbor";
public IntegrationType Type => IntegrationType.Registry;
@@ -22,7 +29,7 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
public async Task<TestConnectionResult> TestConnectionAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
using var client = CreateHttpClient(config);
@@ -30,7 +37,7 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
{
// Call Harbor health endpoint
var response = await client.GetAsync("/api/v2.0/health", cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
if (response.IsSuccessStatusCode)
{
@@ -63,7 +70,7 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
}
catch (Exception ex)
{
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new TestConnectionResult(
Success: false,
Message: $"Connection failed: {ex.Message}",
@@ -78,14 +85,14 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
public async Task<HealthCheckResult> CheckHealthAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
using var client = CreateHttpClient(config);
try
{
var response = await client.GetAsync("/api/v2.0/health", cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
if (response.IsSuccessStatusCode)
{
@@ -103,7 +110,7 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
Status: status,
Message: $"Harbor status: {health?.Status}",
Details: health?.Components?.ToDictionary(c => c.Name, c => c.Status) ?? new Dictionary<string, string>(),
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
@@ -111,17 +118,17 @@ public sealed class HarborConnectorPlugin : IIntegrationConnectorPlugin
Status: HealthStatus.Unhealthy,
Message: $"Harbor returned {response.StatusCode}",
Details: new Dictionary<string, string> { ["statusCode"] = ((int)response.StatusCode).ToString() },
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
catch (Exception ex)
{
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new HealthCheckResult(
Status: HealthStatus.Unhealthy,
Message: $"Health check failed: {ex.Message}",
Details: new Dictionary<string, string> { ["error"] = ex.GetType().Name },
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
}

View File

@@ -9,6 +9,13 @@ namespace StellaOps.Integrations.Plugin.InMemory;
/// </summary>
public sealed class InMemoryConnectorPlugin : IIntegrationConnectorPlugin
{
private readonly TimeProvider _timeProvider;
public InMemoryConnectorPlugin(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public string Name => "inmemory";
public IntegrationType Type => IntegrationType.Registry;
@@ -19,12 +26,12 @@ public sealed class InMemoryConnectorPlugin : IIntegrationConnectorPlugin
public async Task<TestConnectionResult> TestConnectionAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
// Simulate network delay
await Task.Delay(100, cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new TestConnectionResult(
Success: true,
@@ -40,12 +47,12 @@ public sealed class InMemoryConnectorPlugin : IIntegrationConnectorPlugin
public async Task<HealthCheckResult> CheckHealthAsync(IntegrationConfig config, CancellationToken cancellationToken = default)
{
var startTime = DateTimeOffset.UtcNow;
var startTime = _timeProvider.GetUtcNow();
// Simulate health check
await Task.Delay(50, cancellationToken);
var duration = DateTimeOffset.UtcNow - startTime;
var duration = _timeProvider.GetUtcNow() - startTime;
return new HealthCheckResult(
Status: HealthStatus.Healthy,
@@ -55,7 +62,7 @@ public sealed class InMemoryConnectorPlugin : IIntegrationConnectorPlugin
["endpoint"] = config.Endpoint,
["uptime"] = "simulated"
},
CheckedAt: DateTimeOffset.UtcNow,
CheckedAt: _timeProvider.GetUtcNow(),
Duration: duration);
}
}

View File

@@ -32,6 +32,7 @@ public class IntegrationServiceTests
_eventPublisherMock.Object,
_auditLoggerMock.Object,
_authRefResolverMock.Object,
TimeProvider.System,
NullLogger<IntegrationService>.Instance);
}
@@ -327,6 +328,7 @@ public class IntegrationServiceTests
IntegrationType type = IntegrationType.Registry,
IntegrationProvider provider = IntegrationProvider.Harbor)
{
var now = DateTimeOffset.UtcNow;
return new Integration
{
Id = Guid.NewGuid(),
@@ -337,7 +339,9 @@ public class IntegrationServiceTests
Endpoint = "https://example.com",
Description = "Test description",
Tags = ["test"],
CreatedBy = "test-user"
CreatedBy = "test-user",
CreatedAt = now,
UpdatedAt = now
};
}
}

View File

@@ -189,15 +189,18 @@ public sealed class DefaultBackfillSafetyValidator : IBackfillSafetyValidator
{
private readonly ISourceValidator _sourceValidator;
private readonly IOverlapChecker _overlapChecker;
private readonly TimeProvider _timeProvider;
private readonly BackfillManagerOptions _options;
public DefaultBackfillSafetyValidator(
ISourceValidator sourceValidator,
IOverlapChecker overlapChecker,
TimeProvider timeProvider,
BackfillManagerOptions options)
{
_sourceValidator = sourceValidator;
_overlapChecker = overlapChecker;
_timeProvider = timeProvider ?? TimeProvider.System;
_options = options;
}
@@ -236,7 +239,7 @@ public sealed class DefaultBackfillSafetyValidator : IBackfillSafetyValidator
}
// Check retention period
var retentionLimit = DateTimeOffset.UtcNow - _options.RetentionPeriod;
var retentionLimit = _timeProvider.GetUtcNow() - _options.RetentionPeriod;
var withinRetention = request.WindowStart >= retentionLimit;
if (!withinRetention)
{
@@ -325,6 +328,7 @@ public sealed class BackfillManager : IBackfillManager
private readonly IBackfillSafetyValidator _safetyValidator;
private readonly IBackfillEventCounter _eventCounter;
private readonly IDuplicateSuppressor _duplicateSuppressor;
private readonly TimeProvider _timeProvider;
private readonly BackfillManagerOptions _options;
private readonly ILogger<BackfillManager> _logger;
@@ -333,6 +337,7 @@ public sealed class BackfillManager : IBackfillManager
IBackfillSafetyValidator safetyValidator,
IBackfillEventCounter eventCounter,
IDuplicateSuppressor duplicateSuppressor,
TimeProvider timeProvider,
BackfillManagerOptions options,
ILogger<BackfillManager> logger)
{
@@ -340,6 +345,7 @@ public sealed class BackfillManager : IBackfillManager
_safetyValidator = safetyValidator;
_eventCounter = eventCounter;
_duplicateSuppressor = duplicateSuppressor;
_timeProvider = timeProvider ?? TimeProvider.System;
_options = options;
_logger = logger;
}
@@ -367,6 +373,7 @@ public sealed class BackfillManager : IBackfillManager
windowEnd: windowEnd,
reason: reason,
createdBy: createdBy,
timestamp: _timeProvider.GetUtcNow(),
batchSize: batchSize,
dryRun: dryRun,
forceReprocess: forceReprocess,
@@ -446,7 +453,7 @@ public sealed class BackfillManager : IBackfillManager
// Run safety checks
var tempRequest = BackfillRequest.Create(
tenantId, sourceId, jobType, windowStart, windowEnd,
"preview", "system", batchSize);
"preview", "system", _timeProvider.GetUtcNow(), batchSize);
var safetyChecks = await _safetyValidator.ValidateAsync(
tempRequest, estimatedEvents, estimatedDuration, cancellationToken);
@@ -473,7 +480,7 @@ public sealed class BackfillManager : IBackfillManager
var request = await _backfillRepository.GetByIdAsync(tenantId, backfillId, cancellationToken)
?? throw new InvalidOperationException($"Backfill request {backfillId} not found.");
request = request.Start(updatedBy);
request = request.Start(updatedBy, _timeProvider.GetUtcNow());
await _backfillRepository.UpdateAsync(request, cancellationToken);
_logger.LogInformation("Started backfill request {BackfillId}", backfillId);
@@ -524,7 +531,7 @@ public sealed class BackfillManager : IBackfillManager
var request = await _backfillRepository.GetByIdAsync(tenantId, backfillId, cancellationToken)
?? throw new InvalidOperationException($"Backfill request {backfillId} not found.");
request = request.Cancel(updatedBy);
request = request.Cancel(updatedBy, _timeProvider.GetUtcNow());
await _backfillRepository.UpdateAsync(request, cancellationToken);
_logger.LogInformation("Canceled backfill request {BackfillId}", backfillId);

View File

@@ -90,8 +90,18 @@ public sealed record ProcessedEvent(
public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
{
private readonly Dictionary<string, Dictionary<string, ProcessedEventEntry>> _store = new();
private readonly TimeProvider _timeProvider;
private readonly object _lock = new();
/// <summary>
/// Creates a new in-memory duplicate suppressor.
/// </summary>
/// <param name="timeProvider">Time provider for deterministic time.</param>
public InMemoryDuplicateSuppressor(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
private sealed record ProcessedEventEntry(
DateTimeOffset EventTime,
DateTimeOffset ProcessedAt,
@@ -109,7 +119,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
return Task.FromResult(false);
// Check if expired
if (entry.ExpiresAt < DateTimeOffset.UtcNow)
if (entry.ExpiresAt < _timeProvider.GetUtcNow())
{
scopeStore.Remove(eventKey);
return Task.FromResult(false);
@@ -121,7 +131,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
public Task<IReadOnlySet<string>> GetProcessedAsync(string scopeKey, IEnumerable<string> eventKeys, CancellationToken cancellationToken)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var result = new HashSet<string>();
lock (_lock)
@@ -149,7 +159,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
TimeSpan ttl,
CancellationToken cancellationToken)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var entry = new ProcessedEventEntry(eventTime, now, batchId, now + ttl);
lock (_lock)
@@ -173,7 +183,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
TimeSpan ttl,
CancellationToken cancellationToken)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var expiresAt = now + ttl;
lock (_lock)
@@ -195,7 +205,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
public Task<long> CountProcessedAsync(string scopeKey, DateTimeOffset from, DateTimeOffset to, CancellationToken cancellationToken)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
long count = 0;
lock (_lock)
@@ -212,7 +222,7 @@ public sealed class InMemoryDuplicateSuppressor : IDuplicateSuppressor
public Task<int> CleanupExpiredAsync(int batchLimit, CancellationToken cancellationToken)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var removed = 0;
lock (_lock)

View File

@@ -71,19 +71,17 @@ public sealed record EventTimeWindow(
/// <summary>
/// Creates a window covering the last N hours from now.
/// </summary>
public static EventTimeWindow LastHours(int hours, DateTimeOffset? now = null)
public static EventTimeWindow LastHours(int hours, DateTimeOffset now)
{
var endTime = now ?? DateTimeOffset.UtcNow;
return FromDuration(endTime, TimeSpan.FromHours(hours));
return FromDuration(now, TimeSpan.FromHours(hours));
}
/// <summary>
/// Creates a window covering the last N days from now.
/// </summary>
public static EventTimeWindow LastDays(int days, DateTimeOffset? now = null)
public static EventTimeWindow LastDays(int days, DateTimeOffset now)
{
var endTime = now ?? DateTimeOffset.UtcNow;
return FromDuration(endTime, TimeSpan.FromDays(days));
return FromDuration(now, TimeSpan.FromDays(days));
}
}

View File

@@ -44,6 +44,7 @@ public sealed record NotificationRule(
NotificationChannel channel,
string endpoint,
string createdBy,
DateTimeOffset createdAt,
string? jobTypePattern = null,
string? errorCodePattern = null,
ErrorCategory? category = null,
@@ -52,7 +53,6 @@ public sealed record NotificationRule(
int maxPerHour = 10,
bool aggregate = true)
{
var now = DateTimeOffset.UtcNow;
return new NotificationRule(
RuleId: Guid.NewGuid(),
TenantId: tenantId,
@@ -68,8 +68,8 @@ public sealed record NotificationRule(
Aggregate: aggregate,
LastNotifiedAt: null,
NotificationsSent: 0,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}

View File

@@ -80,6 +80,7 @@ public sealed record AuditEntry(
string actorId,
ActorType actorType,
string description,
DateTimeOffset occurredAt,
string? oldState = null,
string? newState = null,
string? actorIp = null,
@@ -94,7 +95,6 @@ public sealed record AuditEntry(
ArgumentNullException.ThrowIfNull(hasher);
var entryId = Guid.NewGuid();
var occurredAt = DateTimeOffset.UtcNow;
// Compute canonical hash from immutable content
// Use the same property names and fields as VerifyIntegrity to keep the hash stable.

View File

@@ -113,6 +113,7 @@ public sealed record BackfillRequest(
DateTimeOffset windowEnd,
string reason,
string createdBy,
DateTimeOffset timestamp,
int batchSize = 100,
bool dryRun = false,
bool forceReprocess = false,
@@ -133,7 +134,6 @@ public sealed record BackfillRequest(
_ => throw new ArgumentException("Either sourceId or jobType must be specified.")
};
var now = DateTimeOffset.UtcNow;
return new BackfillRequest(
BackfillId: Guid.NewGuid(),
TenantId: tenantId,
@@ -156,7 +156,7 @@ public sealed record BackfillRequest(
SafetyChecks: null,
Reason: reason,
Ticket: ticket,
CreatedAt: now,
CreatedAt: timestamp,
StartedAt: null,
CompletedAt: null,
CreatedBy: createdBy,
@@ -196,7 +196,7 @@ public sealed record BackfillRequest(
/// <summary>
/// Transitions to running status.
/// </summary>
public BackfillRequest Start(string updatedBy)
public BackfillRequest Start(string updatedBy, DateTimeOffset timestamp)
{
if (Status != BackfillStatus.Validating)
throw new InvalidOperationException($"Cannot start from status {Status}.");
@@ -207,7 +207,7 @@ public sealed record BackfillRequest(
return this with
{
Status = BackfillStatus.Running,
StartedAt = DateTimeOffset.UtcNow,
StartedAt = timestamp,
CurrentPosition = WindowStart,
UpdatedBy = updatedBy
};
@@ -269,7 +269,7 @@ public sealed record BackfillRequest(
/// <summary>
/// Completes the backfill successfully.
/// </summary>
public BackfillRequest Complete(string updatedBy)
public BackfillRequest Complete(string updatedBy, DateTimeOffset timestamp)
{
if (Status != BackfillStatus.Running)
throw new InvalidOperationException($"Cannot complete from status {Status}.");
@@ -277,7 +277,7 @@ public sealed record BackfillRequest(
return this with
{
Status = BackfillStatus.Completed,
CompletedAt = DateTimeOffset.UtcNow,
CompletedAt = timestamp,
CurrentPosition = WindowEnd,
UpdatedBy = updatedBy
};
@@ -286,12 +286,12 @@ public sealed record BackfillRequest(
/// <summary>
/// Fails the backfill with an error.
/// </summary>
public BackfillRequest Fail(string error, string updatedBy)
public BackfillRequest Fail(string error, string updatedBy, DateTimeOffset timestamp)
{
return this with
{
Status = BackfillStatus.Failed,
CompletedAt = DateTimeOffset.UtcNow,
CompletedAt = timestamp,
ErrorMessage = error,
UpdatedBy = updatedBy
};
@@ -300,7 +300,7 @@ public sealed record BackfillRequest(
/// <summary>
/// Cancels the backfill.
/// </summary>
public BackfillRequest Cancel(string updatedBy)
public BackfillRequest Cancel(string updatedBy, DateTimeOffset timestamp)
{
if (IsTerminal)
throw new InvalidOperationException($"Cannot cancel from terminal status {Status}.");
@@ -308,7 +308,7 @@ public sealed record BackfillRequest(
return this with
{
Status = BackfillStatus.Canceled,
CompletedAt = DateTimeOffset.UtcNow,
CompletedAt = timestamp,
UpdatedBy = updatedBy
};
}

View File

@@ -58,6 +58,7 @@ public sealed record EventEnvelope(
OrchestratorEventType eventType,
string tenantId,
EventActor actor,
DateTimeOffset occurredAt,
string? correlationId = null,
string? projectId = null,
EventJob? job = null,
@@ -65,14 +66,14 @@ public sealed record EventEnvelope(
EventNotifier? notifier = null,
JsonElement? payload = null)
{
var eventId = GenerateEventId();
var eventId = GenerateEventId(occurredAt);
var idempotencyKey = GenerateIdempotencyKey(eventType, job?.Id, job?.Attempt ?? 0);
return new EventEnvelope(
SchemaVersion: CurrentSchemaVersion,
EventId: eventId,
EventType: eventType,
OccurredAt: DateTimeOffset.UtcNow,
OccurredAt: occurredAt,
IdempotencyKey: idempotencyKey,
CorrelationId: correlationId,
TenantId: tenantId,
@@ -90,6 +91,7 @@ public sealed record EventEnvelope(
string tenantId,
EventActor actor,
EventJob job,
DateTimeOffset occurredAt,
string? correlationId = null,
string? projectId = null,
EventMetrics? metrics = null,
@@ -99,6 +101,7 @@ public sealed record EventEnvelope(
eventType: eventType,
tenantId: tenantId,
actor: actor,
occurredAt: occurredAt,
correlationId: correlationId,
projectId: projectId,
job: job,
@@ -112,6 +115,7 @@ public sealed record EventEnvelope(
string tenantId,
EventActor actor,
EventJob exportJob,
DateTimeOffset occurredAt,
string? correlationId = null,
string? projectId = null,
EventMetrics? metrics = null,
@@ -122,6 +126,7 @@ public sealed record EventEnvelope(
tenantId: tenantId,
actor: actor,
job: exportJob,
occurredAt: occurredAt,
correlationId: correlationId,
projectId: projectId,
metrics: metrics,
@@ -133,6 +138,7 @@ public sealed record EventEnvelope(
OrchestratorEventType eventType,
string tenantId,
EventActor actor,
DateTimeOffset occurredAt,
string? correlationId = null,
string? projectId = null,
JsonElement? payload = null)
@@ -141,18 +147,19 @@ public sealed record EventEnvelope(
eventType: eventType,
tenantId: tenantId,
actor: actor,
occurredAt: occurredAt,
correlationId: correlationId,
projectId: projectId,
payload: payload);
}
/// <summary>Generates a UUIDv7-style event ID.</summary>
private static string GenerateEventId()
private static string GenerateEventId(DateTimeOffset timestamp)
{
// UUIDv7: timestamp-based with random suffix
var timestamp = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
var timestampMs = timestamp.ToUnixTimeMilliseconds();
var random = Guid.NewGuid().ToString("N")[..16];
return $"urn:orch:event:{timestamp:x}-{random}";
return $"urn:orch:event:{timestampMs:x}-{random}";
}
/// <summary>Generates an idempotency key for deduplication.</summary>

View File

@@ -188,8 +188,15 @@ public sealed class NullEventPublisher : IEventPublisher
public sealed class InMemoryIdempotencyStore : IIdempotencyStore
{
private readonly Dictionary<string, DateTimeOffset> _keys = new();
private readonly TimeProvider _timeProvider;
private readonly object _lock = new();
/// <summary>Creates a new in-memory idempotency store.</summary>
public InMemoryIdempotencyStore(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task<bool> TryMarkAsync(string key, TimeSpan ttl, CancellationToken cancellationToken = default)
{
lock (_lock)
@@ -198,7 +205,7 @@ public sealed class InMemoryIdempotencyStore : IIdempotencyStore
if (_keys.ContainsKey(key))
return Task.FromResult(false);
_keys[key] = DateTimeOffset.UtcNow.Add(ttl);
_keys[key] = _timeProvider.GetUtcNow().Add(ttl);
return Task.FromResult(true);
}
}
@@ -223,7 +230,7 @@ public sealed class InMemoryIdempotencyStore : IIdempotencyStore
private void CleanupExpired()
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var expired = _keys.Where(kv => kv.Value <= now).Select(kv => kv.Key).ToList();
foreach (var key in expired)
{

View File

@@ -273,10 +273,10 @@ public sealed record ExportDistribution(
}
/// <summary>Creates a download URL with expiration.</summary>
public ExportDistribution WithDownloadUrl(string url, TimeSpan validity) => this with
public ExportDistribution WithDownloadUrl(string url, TimeSpan validity, DateTimeOffset timestamp) => this with
{
DownloadUrl = url,
DownloadUrlExpiresAt = DateTimeOffset.UtcNow.Add(validity)
DownloadUrlExpiresAt = timestamp.Add(validity)
};
/// <summary>Adds a replication target.</summary>
@@ -432,29 +432,29 @@ public sealed record ExportRetention(
ExtensionCount: 0,
Metadata: null);
/// <summary>Whether the export is expired.</summary>
public bool IsExpired => ExpiresAt.HasValue && DateTimeOffset.UtcNow >= ExpiresAt.Value && !LegalHold;
/// <summary>Whether the export is expired at the given timestamp.</summary>
public bool IsExpiredAt(DateTimeOffset timestamp) => ExpiresAt.HasValue && timestamp >= ExpiresAt.Value && !LegalHold;
/// <summary>Whether the export should be archived.</summary>
public bool ShouldArchive => ArchiveAt.HasValue && DateTimeOffset.UtcNow >= ArchiveAt.Value && !ArchivedAt.HasValue;
/// <summary>Whether the export should be archived at the given timestamp.</summary>
public bool ShouldArchiveAt(DateTimeOffset timestamp) => ArchiveAt.HasValue && timestamp >= ArchiveAt.Value && !ArchivedAt.HasValue;
/// <summary>Whether the export can be deleted.</summary>
public bool CanDelete => IsExpired && (!RequiresRelease || ReleasedAt.HasValue) && !LegalHold;
/// <summary>Whether the export can be deleted at the given timestamp.</summary>
public bool CanDeleteAt(DateTimeOffset timestamp) => IsExpiredAt(timestamp) && (!RequiresRelease || ReleasedAt.HasValue) && !LegalHold;
/// <summary>Extends the retention period.</summary>
public ExportRetention ExtendRetention(TimeSpan extension, string? reason = null)
public ExportRetention ExtendRetention(TimeSpan extension, DateTimeOffset timestamp, string? reason = null)
{
var metadata = Metadata is null
? new Dictionary<string, string>()
: new Dictionary<string, string>(Metadata);
metadata[$"extension_{ExtensionCount + 1}_at"] = DateTimeOffset.UtcNow.ToString("o");
metadata[$"extension_{ExtensionCount + 1}_at"] = timestamp.ToString("o");
if (reason is not null)
metadata[$"extension_{ExtensionCount + 1}_reason"] = reason;
return this with
{
ExpiresAt = (ExpiresAt ?? DateTimeOffset.UtcNow).Add(extension),
ExpiresAt = (ExpiresAt ?? timestamp).Add(extension),
ArchiveAt = ArchiveAt?.Add(extension),
ExtensionCount = ExtensionCount + 1,
Metadata = metadata
@@ -476,22 +476,22 @@ public sealed record ExportRetention(
};
/// <summary>Releases the export for deletion.</summary>
public ExportRetention Release(string releasedBy) => this with
public ExportRetention Release(string releasedBy, DateTimeOffset timestamp) => this with
{
ReleasedBy = releasedBy,
ReleasedAt = DateTimeOffset.UtcNow
ReleasedAt = timestamp
};
/// <summary>Marks the export as archived.</summary>
public ExportRetention MarkArchived() => this with
public ExportRetention MarkArchived(DateTimeOffset timestamp) => this with
{
ArchivedAt = DateTimeOffset.UtcNow
ArchivedAt = timestamp
};
/// <summary>Marks the export as deleted.</summary>
public ExportRetention MarkDeleted() => this with
public ExportRetention MarkDeleted(DateTimeOffset timestamp) => this with
{
DeletedAt = DateTimeOffset.UtcNow
DeletedAt = timestamp
};
/// <summary>Serializes retention to JSON.</summary>

View File

@@ -127,6 +127,18 @@ public static class ExportJobPolicy
string tenantId,
string? jobType = null,
string createdBy = "system")
{
throw new NotImplementedException("ExportJobPolicy.CreateDefaultQuota requires a timestamp parameter for deterministic behavior. Use the overload with DateTimeOffset now parameter.");
}
/// <summary>
/// Creates a default quota for export jobs with explicit timestamp.
/// </summary>
public static Quota CreateDefaultQuota(
string tenantId,
DateTimeOffset now,
string? jobType = null,
string createdBy = "system")
{
var rateLimit = jobType is not null && ExportJobTypes.IsExportJob(jobType)
? RateLimits.GetForJobType(jobType)
@@ -135,8 +147,6 @@ public static class ExportJobPolicy
QuotaDefaults.MaxPerHour,
QuotaDefaults.DefaultLeaseSeconds);
var now = DateTimeOffset.UtcNow;
return new Quota(
QuotaId: Guid.NewGuid(),
TenantId: tenantId,

View File

@@ -87,6 +87,7 @@ public sealed record ExportSchedule(
string cronExpression,
ExportJobPayload payloadTemplate,
string createdBy,
DateTimeOffset timestamp,
string? description = null,
string timezone = "UTC",
string retentionPolicy = "default",
@@ -94,8 +95,6 @@ public sealed record ExportSchedule(
int maxConcurrent = 1,
bool skipIfRunning = true)
{
var now = DateTimeOffset.UtcNow;
return new ExportSchedule(
ScheduleId: Guid.NewGuid(),
TenantId: tenantId,
@@ -117,8 +116,8 @@ public sealed record ExportSchedule(
TotalRuns: 0,
SuccessfulRuns: 0,
FailedRuns: 0,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: timestamp,
UpdatedAt: timestamp,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}
@@ -129,63 +128,63 @@ public sealed record ExportSchedule(
: 0;
/// <summary>Enables the schedule and stamps the update time.</summary>
/// <param name="timestamp">Explicit update time (injected for deterministic behavior).</param>
public ExportSchedule Enable(DateTimeOffset timestamp) => this with
{
    Enabled = true,
    UpdatedAt = timestamp
};
/// <summary>Disables the schedule and stamps the update time.</summary>
/// <param name="timestamp">Explicit update time (injected for deterministic behavior).</param>
public ExportSchedule Disable(DateTimeOffset timestamp) => this with
{
    Enabled = false,
    UpdatedAt = timestamp
};
/// <summary>Records a successful run: stamps last-run info, bumps counters, and sets the next run.</summary>
/// <param name="jobId">The job that executed this run.</param>
/// <param name="timestamp">Explicit run/update time (injected for deterministic behavior).</param>
/// <param name="nextRun">Optional next scheduled run; null clears it.</param>
public ExportSchedule RecordSuccess(Guid jobId, DateTimeOffset timestamp, DateTimeOffset? nextRun = null) => this with
{
    LastRunAt = timestamp,
    LastJobId = jobId,
    LastRunStatus = "completed",
    NextRunAt = nextRun,
    TotalRuns = TotalRuns + 1,
    SuccessfulRuns = SuccessfulRuns + 1,
    UpdatedAt = timestamp
};
/// <summary>Records a failed run: stamps last-run info with the failure reason and bumps counters.</summary>
/// <param name="jobId">The job that executed this run.</param>
/// <param name="timestamp">Explicit run/update time (injected for deterministic behavior).</param>
/// <param name="reason">Optional human-readable failure reason; defaults to "unknown" in the status.</param>
/// <param name="nextRun">Optional next scheduled run; null clears it.</param>
public ExportSchedule RecordFailure(Guid jobId, DateTimeOffset timestamp, string? reason = null, DateTimeOffset? nextRun = null) => this with
{
    LastRunAt = timestamp,
    LastJobId = jobId,
    LastRunStatus = $"failed: {reason ?? "unknown"}",
    NextRunAt = nextRun,
    TotalRuns = TotalRuns + 1,
    FailedRuns = FailedRuns + 1,
    UpdatedAt = timestamp
};
/// <summary>Updates the next run time and stamps the update time.</summary>
/// <param name="nextRun">The next scheduled run.</param>
/// <param name="timestamp">Explicit update time (injected for deterministic behavior).</param>
public ExportSchedule WithNextRun(DateTimeOffset nextRun, DateTimeOffset timestamp) => this with
{
    NextRunAt = nextRun,
    UpdatedAt = timestamp
};
/// <summary>Updates the cron expression, recording who changed it and when.</summary>
/// <param name="cronExpression">The new cron expression.</param>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="timestamp">Explicit update time (injected for deterministic behavior).</param>
public ExportSchedule WithCron(string cronExpression, string updatedBy, DateTimeOffset timestamp) => this with
{
    CronExpression = cronExpression,
    UpdatedAt = timestamp,
    UpdatedBy = updatedBy
};
/// <summary>Updates the payload template, recording who changed it and when.</summary>
/// <param name="payload">The new payload template.</param>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="timestamp">Explicit update time (injected for deterministic behavior).</param>
public ExportSchedule WithPayload(ExportJobPayload payload, string updatedBy, DateTimeOffset timestamp) => this with
{
    PayloadTemplate = payload,
    UpdatedAt = timestamp,
    UpdatedBy = updatedBy
};
}
@@ -247,13 +246,12 @@ public sealed record RetentionPruneConfig(
/// <summary>Creates a default prune configuration.</summary>
public static RetentionPruneConfig Create(
DateTimeOffset timestamp,
string? tenantId = null,
string? exportType = null,
string? cronExpression = null,
int batchSize = DefaultBatchSize)
{
var now = DateTimeOffset.UtcNow;
return new RetentionPruneConfig(
PruneId: Guid.NewGuid(),
TenantId: tenantId,
@@ -268,17 +266,17 @@ public sealed record RetentionPruneConfig(
LastPruneAt: null,
LastPruneCount: 0,
TotalPruned: 0,
CreatedAt: now,
UpdatedAt: now);
CreatedAt: timestamp,
UpdatedAt: timestamp);
}
/// <summary>Records a prune operation: last-prune stats plus the running total.</summary>
/// <param name="count">Number of items pruned in this operation.</param>
/// <param name="timestamp">Explicit prune/update time (injected for deterministic behavior).</param>
public RetentionPruneConfig RecordPrune(int count, DateTimeOffset timestamp) => this with
{
    LastPruneAt = timestamp,
    LastPruneCount = count,
    TotalPruned = TotalPruned + count,
    UpdatedAt = timestamp
};
}
@@ -335,13 +333,12 @@ public sealed record ExportAlertConfig(
public static ExportAlertConfig Create(
string tenantId,
string name,
DateTimeOffset timestamp,
string? exportType = null,
int consecutiveFailuresThreshold = 3,
double failureRateThreshold = 50.0,
ExportAlertSeverity severity = ExportAlertSeverity.Warning)
{
var now = DateTimeOffset.UtcNow;
return new ExportAlertConfig(
AlertConfigId: Guid.NewGuid(),
TenantId: tenantId,
@@ -356,20 +353,20 @@ public sealed record ExportAlertConfig(
Cooldown: TimeSpan.FromMinutes(15),
LastAlertAt: null,
TotalAlerts: 0,
CreatedAt: now,
UpdatedAt: now);
CreatedAt: timestamp,
UpdatedAt: timestamp);
}
/// <summary>Whether an alert can be triggered at the given time (respects the cooldown window after the last alert).</summary>
/// <param name="timestamp">The evaluation time (injected for deterministic behavior).</param>
public bool CanAlertAt(DateTimeOffset timestamp) => !LastAlertAt.HasValue ||
    timestamp >= LastAlertAt.Value.Add(Cooldown);
/// <summary>Records an alert: stamps the last-alert time and bumps the total.</summary>
/// <param name="timestamp">Explicit alert/update time (injected for deterministic behavior).</param>
public ExportAlertConfig RecordAlert(DateTimeOffset timestamp) => this with
{
    LastAlertAt = timestamp,
    TotalAlerts = TotalAlerts + 1,
    UpdatedAt = timestamp
};
}
@@ -444,7 +441,8 @@ public sealed record ExportAlert(
string exportType,
ExportAlertSeverity severity,
IReadOnlyList<Guid> failedJobIds,
int consecutiveFailures)
int consecutiveFailures,
DateTimeOffset timestamp)
{
return new ExportAlert(
AlertId: Guid.NewGuid(),
@@ -456,7 +454,7 @@ public sealed record ExportAlert(
FailedJobIds: failedJobIds,
ConsecutiveFailures: consecutiveFailures,
FailureRate: 0,
TriggeredAt: DateTimeOffset.UtcNow,
TriggeredAt: timestamp,
AcknowledgedAt: null,
AcknowledgedBy: null,
ResolvedAt: null,
@@ -470,7 +468,8 @@ public sealed record ExportAlert(
string exportType,
ExportAlertSeverity severity,
double failureRate,
IReadOnlyList<Guid> recentFailedJobIds)
IReadOnlyList<Guid> recentFailedJobIds,
DateTimeOffset timestamp)
{
return new ExportAlert(
AlertId: Guid.NewGuid(),
@@ -482,7 +481,7 @@ public sealed record ExportAlert(
FailedJobIds: recentFailedJobIds,
ConsecutiveFailures: 0,
FailureRate: failureRate,
TriggeredAt: DateTimeOffset.UtcNow,
TriggeredAt: timestamp,
AcknowledgedAt: null,
AcknowledgedBy: null,
ResolvedAt: null,
@@ -490,16 +489,16 @@ public sealed record ExportAlert(
}
/// <summary>Acknowledges the alert, recording who acknowledged it and when.</summary>
/// <param name="acknowledgedBy">Identity of the acknowledging actor.</param>
/// <param name="timestamp">Explicit acknowledgement time (injected for deterministic behavior).</param>
public ExportAlert Acknowledge(string acknowledgedBy, DateTimeOffset timestamp) => this with
{
    AcknowledgedAt = timestamp,
    AcknowledgedBy = acknowledgedBy
};
/// <summary>Resolves the alert at the given time with optional resolution notes.</summary>
/// <param name="timestamp">Explicit resolution time (injected for deterministic behavior).</param>
/// <param name="notes">Optional free-form resolution notes.</param>
public ExportAlert Resolve(DateTimeOffset timestamp, string? notes = null) => this with
{
    ResolvedAt = timestamp,
    ResolutionNotes = notes
};

View File

@@ -246,17 +246,20 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
private readonly ITimelineEventEmitter _timelineEmitter;
private readonly IJobCapsuleGenerator _capsuleGenerator;
private readonly IMirrorEvidenceStore _evidenceStore;
private readonly TimeProvider _timeProvider;
private readonly ILogger<MirrorOperationRecorder> _logger;
/// <summary>
/// Creates the recorder; all collaborators (including the clock) are required.
/// </summary>
/// <exception cref="ArgumentNullException">When any dependency is null.</exception>
public MirrorOperationRecorder(
    ITimelineEventEmitter timelineEmitter,
    IJobCapsuleGenerator capsuleGenerator,
    IMirrorEvidenceStore evidenceStore,
    TimeProvider timeProvider,
    ILogger<MirrorOperationRecorder> logger)
{
    _timelineEmitter = timelineEmitter ?? throw new ArgumentNullException(nameof(timelineEmitter));
    _capsuleGenerator = capsuleGenerator ?? throw new ArgumentNullException(nameof(capsuleGenerator));
    _evidenceStore = evidenceStore ?? throw new ArgumentNullException(nameof(evidenceStore));
    // Clock is mandatory here (unlike some siblings that fall back to TimeProvider.System).
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -357,6 +360,7 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
try
{
// Create evidence entry
var now = _timeProvider.GetUtcNow();
var evidence = new MirrorOperationEvidence(
OperationId: context.OperationId,
OperationType: MirrorOperationType.BundleExport,
@@ -364,8 +368,8 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
ProjectId: context.ProjectId,
JobId: context.JobId,
Status: MirrorOperationStatus.Completed,
StartedAt: DateTimeOffset.UtcNow.AddSeconds(-result.DurationSeconds),
CompletedAt: DateTimeOffset.UtcNow,
StartedAt: now.AddSeconds(-result.DurationSeconds),
CompletedAt: now,
SourceEnvironment: context.SourceEnvironment,
TargetEnvironment: context.TargetEnvironment,
BundleDigest: result.BundleDigest,
@@ -471,6 +475,7 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
{
try
{
var now = _timeProvider.GetUtcNow();
var evidence = new MirrorOperationEvidence(
OperationId: context.OperationId,
OperationType: MirrorOperationType.BundleExport,
@@ -478,8 +483,8 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
ProjectId: context.ProjectId,
JobId: context.JobId,
Status: MirrorOperationStatus.Failed,
StartedAt: DateTimeOffset.UtcNow,
CompletedAt: DateTimeOffset.UtcNow,
StartedAt: now,
CompletedAt: now,
SourceEnvironment: context.SourceEnvironment,
TargetEnvironment: context.TargetEnvironment,
BundleDigest: null,
@@ -620,6 +625,7 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
{
try
{
var now = _timeProvider.GetUtcNow();
var evidence = new MirrorOperationEvidence(
OperationId: context.OperationId,
OperationType: MirrorOperationType.BundleImport,
@@ -627,8 +633,8 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
ProjectId: context.ProjectId,
JobId: context.JobId,
Status: MirrorOperationStatus.Completed,
StartedAt: DateTimeOffset.UtcNow.AddSeconds(-result.DurationSeconds),
CompletedAt: DateTimeOffset.UtcNow,
StartedAt: now.AddSeconds(-result.DurationSeconds),
CompletedAt: now,
SourceEnvironment: result.Provenance.SourceEnvironment,
TargetEnvironment: context.TargetEnvironment,
BundleDigest: result.Provenance.BundleDigest,
@@ -693,6 +699,7 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
{
try
{
var now = _timeProvider.GetUtcNow();
var evidence = new MirrorOperationEvidence(
OperationId: context.OperationId,
OperationType: MirrorOperationType.BundleImport,
@@ -700,8 +707,8 @@ public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
ProjectId: context.ProjectId,
JobId: context.JobId,
Status: MirrorOperationStatus.Failed,
StartedAt: DateTimeOffset.UtcNow,
CompletedAt: DateTimeOffset.UtcNow,
StartedAt: now,
CompletedAt: now,
SourceEnvironment: context.SourceEnvironment,
TargetEnvironment: context.TargetEnvironment,
BundleDigest: null,

View File

@@ -45,7 +45,10 @@ public sealed record Pack(
ArgumentException.ThrowIfNullOrWhiteSpace(displayName);
ArgumentException.ThrowIfNullOrWhiteSpace(createdBy);
var now = createdAt ?? DateTimeOffset.UtcNow;
if (createdAt is null)
throw new ArgumentNullException(nameof(createdAt), "createdAt must be provided for deterministic behavior.");
var now = createdAt.Value;
return new Pack(
PackId: packId,
@@ -96,15 +99,14 @@ public sealed record Pack(
/// <summary>
/// Creates a copy with updated status, stamping publish info when transitioning to Published.
/// </summary>
/// <param name="newStatus">The status to transition to.</param>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="updatedAt">Explicit update time (injected for deterministic behavior).</param>
public Pack WithStatus(PackStatus newStatus, string updatedBy, DateTimeOffset updatedAt)
{
    return this with
    {
        Status = newStatus,
        UpdatedAt = updatedAt,
        UpdatedBy = updatedBy,
        // Publish metadata is only stamped on the transition to Published; otherwise preserved.
        PublishedAt = newStatus == PackStatus.Published ? updatedAt : PublishedAt,
        PublishedBy = newStatus == PackStatus.Published ? updatedBy : PublishedBy
    };
}
@@ -112,14 +114,13 @@ public sealed record Pack(
/// <summary>
/// Creates a copy with incremented version count and the new latest version recorded.
/// </summary>
/// <param name="version">The version string that was added.</param>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="updatedAt">Explicit update time (injected for deterministic behavior).</param>
public Pack WithVersionAdded(string version, string updatedBy, DateTimeOffset updatedAt)
{
    return this with
    {
        VersionCount = VersionCount + 1,
        LatestVersion = version,
        UpdatedAt = updatedAt,
        UpdatedBy = updatedBy
    };
}
@@ -215,7 +216,10 @@ public sealed record PackVersion(
ArgumentException.ThrowIfNullOrWhiteSpace(artifactDigest);
ArgumentException.ThrowIfNullOrWhiteSpace(createdBy);
var now = createdAt ?? DateTimeOffset.UtcNow;
if (createdAt is null)
throw new ArgumentNullException(nameof(createdAt), "createdAt must be provided for deterministic behavior.");
var now = createdAt.Value;
return new PackVersion(
PackVersionId: packVersionId,
@@ -278,15 +282,14 @@ public sealed record PackVersion(
/// <summary>
/// Creates a copy with updated status, stamping publish info when transitioning to Published.
/// </summary>
/// <param name="newStatus">The status to transition to.</param>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="updatedAt">Explicit update time (injected for deterministic behavior).</param>
public PackVersion WithStatus(PackVersionStatus newStatus, string updatedBy, DateTimeOffset updatedAt)
{
    return this with
    {
        Status = newStatus,
        UpdatedAt = updatedAt,
        UpdatedBy = updatedBy,
        // Publish metadata is only stamped on the transition to Published; otherwise preserved.
        PublishedAt = newStatus == PackVersionStatus.Published ? updatedAt : PublishedAt,
        PublishedBy = newStatus == PackVersionStatus.Published ? updatedBy : PublishedBy
    };
}
@@ -294,15 +297,14 @@ public sealed record PackVersion(
/// <summary>
/// Creates a copy with deprecation info.
/// </summary>
public PackVersion WithDeprecation(string deprecatedBy, string? reason, DateTimeOffset? deprecatedAt = null)
public PackVersion WithDeprecation(string deprecatedBy, string? reason, DateTimeOffset deprecatedAt)
{
var now = deprecatedAt ?? DateTimeOffset.UtcNow;
return this with
{
Status = PackVersionStatus.Deprecated,
UpdatedAt = now,
UpdatedAt = deprecatedAt,
UpdatedBy = deprecatedBy,
DeprecatedAt = now,
DeprecatedAt = deprecatedAt,
DeprecatedBy = deprecatedBy,
DeprecationReason = reason
};
@@ -315,16 +317,15 @@ public sealed record PackVersion(
string signatureUri,
string signatureAlgorithm,
string signedBy,
DateTimeOffset? signedAt = null)
DateTimeOffset signedAt)
{
var now = signedAt ?? DateTimeOffset.UtcNow;
return this with
{
SignatureUri = signatureUri,
SignatureAlgorithm = signatureAlgorithm,
SignedBy = signedBy,
SignedAt = now,
UpdatedAt = now,
SignedAt = signedAt,
UpdatedAt = signedAt,
UpdatedBy = signedBy
};
}

View File

@@ -122,7 +122,7 @@ public sealed record PackRun(
LeaseId: null,
TaskRunnerId: null,
LeaseUntil: null,
CreatedAt: createdAt ?? DateTimeOffset.UtcNow,
CreatedAt: createdAt ?? throw new ArgumentNullException(nameof(createdAt), "createdAt must be provided for deterministic behavior."),
ScheduledAt: null,
LeasedAt: null,
StartedAt: null,

View File

@@ -71,7 +71,7 @@ public sealed record PackRunLog(
Message: message,
Digest: digest,
SizeBytes: sizeBytes,
Timestamp: timestamp ?? DateTimeOffset.UtcNow,
Timestamp: timestamp ?? throw new ArgumentNullException(nameof(timestamp), "timestamp must be provided for deterministic behavior."),
Data: data);
}

View File

@@ -29,7 +29,7 @@ public sealed record ReplayInputsLock(
return new ReplayInputsLock(
SchemaVersion: schemaVersion,
ManifestHash: manifest.ComputeHash(hasher),
CreatedAt: createdAt ?? DateTimeOffset.UtcNow,
CreatedAt: createdAt ?? throw new ArgumentNullException(nameof(createdAt), "createdAt must be provided for deterministic behavior."),
Inputs: manifest.Inputs,
Notes: string.IsNullOrWhiteSpace(notes) ? null : notes);
}

View File

@@ -34,7 +34,7 @@ public sealed record ReplayManifest(
SchemaVersion: schemaVersion,
JobId: jobId,
ReplayOf: replayOf,
CreatedAt: createdAt ?? DateTimeOffset.UtcNow,
CreatedAt: createdAt ?? throw new ArgumentNullException(nameof(createdAt), "createdAt must be provided for deterministic behavior."),
Reason: string.IsNullOrWhiteSpace(reason) ? null : reason,
Inputs: inputs,
Artifacts: artifacts is null ? ImmutableArray<ReplayArtifact>.Empty : ImmutableArray.CreateRange(artifacts));

View File

@@ -83,6 +83,7 @@ public sealed record RunLedgerEntry(
string inputDigest,
long sequenceNumber,
string? previousEntryHash,
DateTimeOffset ledgerCreatedAt,
string? metadata = null)
{
if (run.CompletedAt is null)
@@ -91,7 +92,6 @@ public sealed record RunLedgerEntry(
}
var ledgerId = Guid.NewGuid();
var ledgerCreatedAt = DateTimeOffset.UtcNow;
// Build artifact manifest
var artifactManifest = BuildArtifactManifest(artifacts);
@@ -259,6 +259,7 @@ public sealed record LedgerExport(
string tenantId,
string format,
string requestedBy,
DateTimeOffset requestedAt,
DateTimeOffset? startTime = null,
DateTimeOffset? endTime = null,
string? runTypeFilter = null,
@@ -289,7 +290,7 @@ public sealed record LedgerExport(
OutputDigest: null,
OutputSizeBytes: null,
RequestedBy: requestedBy,
RequestedAt: DateTimeOffset.UtcNow,
RequestedAt: requestedAt,
StartedAt: null,
CompletedAt: null,
ErrorMessage: null);
@@ -298,33 +299,33 @@ public sealed record LedgerExport(
/// <summary>
/// Marks the export as started (Processing) at the given time.
/// </summary>
/// <param name="startedAt">Explicit start time (injected for deterministic behavior).</param>
public LedgerExport Start(DateTimeOffset startedAt) => this with
{
    Status = LedgerExportStatus.Processing,
    StartedAt = startedAt
};
/// <summary>
/// Marks the export as completed, recording output location, digest, size, and entry count.
/// </summary>
/// <param name="outputUri">Where the export output was written.</param>
/// <param name="outputDigest">Content digest of the output.</param>
/// <param name="outputSizeBytes">Size of the output in bytes.</param>
/// <param name="entryCount">Number of ledger entries included.</param>
/// <param name="completedAt">Explicit completion time (injected for deterministic behavior).</param>
public LedgerExport Complete(string outputUri, string outputDigest, long outputSizeBytes, int entryCount, DateTimeOffset completedAt) => this with
{
    Status = LedgerExportStatus.Completed,
    OutputUri = outputUri,
    OutputDigest = outputDigest,
    OutputSizeBytes = outputSizeBytes,
    EntryCount = entryCount,
    CompletedAt = completedAt
};
/// <summary>
/// Marks the export as failed with the given error message.
/// </summary>
/// <param name="errorMessage">Why the export failed.</param>
/// <param name="failedAt">Explicit failure time, recorded as CompletedAt (injected for deterministic behavior).</param>
public LedgerExport Fail(string errorMessage, DateTimeOffset failedAt) => this with
{
    Status = LedgerExportStatus.Failed,
    ErrorMessage = errorMessage,
    CompletedAt = failedAt
};
}

View File

@@ -66,6 +66,7 @@ public sealed record SignedManifest(
/// </summary>
public static SignedManifest CreateFromLedgerEntry(
RunLedgerEntry ledger,
DateTimeOffset createdAt,
string? buildInfo = null,
string? metadata = null)
{
@@ -95,7 +96,7 @@ public sealed record SignedManifest(
SignatureAlgorithm: "none",
Signature: string.Empty,
KeyId: string.Empty,
CreatedAt: DateTimeOffset.UtcNow,
CreatedAt: createdAt,
ExpiresAt: null,
Metadata: metadata);
}
@@ -106,6 +107,7 @@ public sealed record SignedManifest(
public static SignedManifest CreateFromExport(
LedgerExport export,
IReadOnlyList<RunLedgerEntry> entries,
DateTimeOffset createdAt,
string? buildInfo = null,
string? metadata = null)
{
@@ -114,7 +116,7 @@ public sealed record SignedManifest(
throw new InvalidOperationException("Cannot create manifest from incomplete export.");
}
var statements = CreateStatementsFromExport(export, entries);
var statements = CreateStatementsFromExport(export, entries, createdAt);
var artifacts = CreateExportArtifacts(export);
var materials = CreateExportMaterials(entries);
@@ -140,7 +142,7 @@ public sealed record SignedManifest(
SignatureAlgorithm: "none",
Signature: string.Empty,
KeyId: string.Empty,
CreatedAt: DateTimeOffset.UtcNow,
CreatedAt: createdAt,
ExpiresAt: null,
Metadata: metadata);
}
@@ -180,9 +182,9 @@ public sealed record SignedManifest(
public bool IsSigned => !string.IsNullOrEmpty(Signature) && SignatureAlgorithm != "none";
/// <summary>
/// Checks if the manifest has expired at the given time. Manifests without an
/// expiry (ExpiresAt is null) never expire.
/// </summary>
/// <param name="now">The evaluation time (injected for deterministic behavior).</param>
public bool IsExpiredAt(DateTimeOffset now) => ExpiresAt.HasValue && ExpiresAt.Value < now;
/// <summary>
/// Verifies the payload digest integrity.
@@ -281,8 +283,9 @@ public sealed record SignedManifest(
return JsonSerializer.Serialize(materials);
}
private static string CreateStatementsFromExport(LedgerExport export, IReadOnlyList<RunLedgerEntry> entries)
private static string CreateStatementsFromExport(LedgerExport export, IReadOnlyList<RunLedgerEntry> entries, DateTimeOffset createdAt)
{
var timestamp = export.CompletedAt ?? createdAt;
var statements = new List<ProvenanceStatement>
{
new(
@@ -290,7 +293,7 @@ public sealed record SignedManifest(
Subject: $"export:{export.ExportId}",
Predicate: "contains",
Object: $"entries:{entries.Count}",
Timestamp: export.CompletedAt ?? DateTimeOffset.UtcNow,
Timestamp: timestamp,
Metadata: JsonSerializer.Serialize(new
{
export.Format,
@@ -314,7 +317,7 @@ public sealed record SignedManifest(
Subject: $"export:{export.ExportId}",
Predicate: "covers",
Object: $"sequence:{first.SequenceNumber}-{last.SequenceNumber}",
Timestamp: export.CompletedAt ?? DateTimeOffset.UtcNow,
Timestamp: timestamp,
Metadata: JsonSerializer.Serialize(new
{
FirstEntryHash = first.ContentHash,

View File

@@ -116,13 +116,13 @@ public sealed record Slo(
double target,
SloWindow window,
string createdBy,
DateTimeOffset createdAt,
string? description = null,
string? jobType = null,
Guid? sourceId = null)
{
ValidateTarget(target);
var now = DateTimeOffset.UtcNow;
return new Slo(
SloId: Guid.NewGuid(),
TenantId: tenantId,
@@ -137,8 +137,8 @@ public sealed record Slo(
LatencyTargetSeconds: null,
ThroughputMinimum: null,
Enabled: true,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}
@@ -152,6 +152,7 @@ public sealed record Slo(
double target,
SloWindow window,
string createdBy,
DateTimeOffset createdAt,
string? description = null,
string? jobType = null,
Guid? sourceId = null)
@@ -162,7 +163,6 @@ public sealed record Slo(
if (targetSeconds <= 0)
throw new ArgumentOutOfRangeException(nameof(targetSeconds), "Target latency must be positive");
var now = DateTimeOffset.UtcNow;
return new Slo(
SloId: Guid.NewGuid(),
TenantId: tenantId,
@@ -177,8 +177,8 @@ public sealed record Slo(
LatencyTargetSeconds: targetSeconds,
ThroughputMinimum: null,
Enabled: true,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}
@@ -191,6 +191,7 @@ public sealed record Slo(
double target,
SloWindow window,
string createdBy,
DateTimeOffset createdAt,
string? description = null,
string? jobType = null,
Guid? sourceId = null)
@@ -199,7 +200,6 @@ public sealed record Slo(
if (minimum <= 0)
throw new ArgumentOutOfRangeException(nameof(minimum), "Throughput minimum must be positive");
var now = DateTimeOffset.UtcNow;
return new Slo(
SloId: Guid.NewGuid(),
TenantId: tenantId,
@@ -214,14 +214,15 @@ public sealed record Slo(
LatencyTargetSeconds: null,
ThroughputMinimum: minimum,
Enabled: true,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}
/// <summary>Updates the SLO with new values.</summary>
public Slo Update(
DateTimeOffset updatedAt,
string? name = null,
string? description = null,
double? target = null,
@@ -237,26 +238,26 @@ public sealed record Slo(
Description = description ?? Description,
Target = target ?? Target,
Enabled = enabled ?? Enabled,
UpdatedAt = DateTimeOffset.UtcNow,
UpdatedAt = updatedAt,
UpdatedBy = updatedBy ?? UpdatedBy
};
}
/// <summary>Disables the SLO, recording who changed it and when.</summary>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="updatedAt">Explicit update time (injected for deterministic behavior).</param>
public Slo Disable(string updatedBy, DateTimeOffset updatedAt) =>
    this with
    {
        Enabled = false,
        UpdatedAt = updatedAt,
        UpdatedBy = updatedBy
    };
/// <summary>Enables the SLO, recording who changed it and when.</summary>
/// <param name="updatedBy">Identity of the actor making the change.</param>
/// <param name="updatedAt">Explicit update time (injected for deterministic behavior).</param>
public Slo Enable(string updatedBy, DateTimeOffset updatedAt) =>
    this with
    {
        Enabled = true,
        UpdatedAt = updatedAt,
        UpdatedBy = updatedBy
    };
@@ -414,6 +415,7 @@ public sealed record AlertBudgetThreshold(
double budgetConsumedThreshold,
AlertSeverity severity,
string createdBy,
DateTimeOffset createdAt,
double? burnRateThreshold = null,
string? notificationChannel = null,
string? notificationEndpoint = null,
@@ -422,7 +424,6 @@ public sealed record AlertBudgetThreshold(
if (budgetConsumedThreshold < 0 || budgetConsumedThreshold > 1)
throw new ArgumentOutOfRangeException(nameof(budgetConsumedThreshold), "Threshold must be between 0 and 1");
var now = DateTimeOffset.UtcNow;
return new AlertBudgetThreshold(
ThresholdId: Guid.NewGuid(),
SloId: sloId,
@@ -435,8 +436,8 @@ public sealed record AlertBudgetThreshold(
NotificationEndpoint: notificationEndpoint,
Cooldown: cooldown ?? TimeSpan.FromHours(1),
LastTriggeredAt: null,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
CreatedBy: createdBy,
UpdatedBy: createdBy);
}

View File

@@ -70,7 +70,8 @@ public sealed record Watermark(
Guid? sourceId,
string? jobType,
DateTimeOffset highWatermark,
string createdBy)
string createdBy,
DateTimeOffset createdAt)
{
var scopeKey = (sourceId, jobType) switch
{
@@ -80,7 +81,6 @@ public sealed record Watermark(
_ => throw new ArgumentException("Either sourceId or jobType must be specified.")
};
var now = DateTimeOffset.UtcNow;
return new Watermark(
WatermarkId: Guid.NewGuid(),
TenantId: tenantId,
@@ -92,8 +92,8 @@ public sealed record Watermark(
SequenceNumber: 0,
ProcessedCount: 0,
LastBatchHash: null,
CreatedAt: now,
UpdatedAt: now,
CreatedAt: createdAt,
UpdatedAt: createdAt,
UpdatedBy: createdBy);
}
@@ -104,7 +104,8 @@ public sealed record Watermark(
DateTimeOffset newHighWatermark,
long eventsProcessed,
string? batchHash,
string updatedBy)
string updatedBy,
DateTimeOffset updatedAt)
{
if (newHighWatermark < HighWatermark)
throw new ArgumentException("New high watermark cannot be before current high watermark.", nameof(newHighWatermark));
@@ -115,7 +116,7 @@ public sealed record Watermark(
SequenceNumber = SequenceNumber + 1,
ProcessedCount = ProcessedCount + eventsProcessed,
LastBatchHash = batchHash,
UpdatedAt = DateTimeOffset.UtcNow,
UpdatedAt = updatedAt,
UpdatedBy = updatedBy
};
}
@@ -123,7 +124,7 @@ public sealed record Watermark(
/// <summary>
/// Sets the event-time window bounds.
/// </summary>
public Watermark WithWindow(DateTimeOffset lowWatermark, DateTimeOffset highWatermark)
public Watermark WithWindow(DateTimeOffset lowWatermark, DateTimeOffset highWatermark, DateTimeOffset updatedAt)
{
if (highWatermark < lowWatermark)
throw new ArgumentException("High watermark cannot be before low watermark.");
@@ -132,7 +133,7 @@ public sealed record Watermark(
{
LowWatermark = lowWatermark,
HighWatermark = highWatermark,
UpdatedAt = DateTimeOffset.UtcNow
UpdatedAt = updatedAt
};
}
}

View File

@@ -38,7 +38,7 @@ public sealed record EventEnvelope(
ArgumentNullException.ThrowIfNull(job);
ArgumentNullException.ThrowIfNull(actor);
var occurred = occurredAt ?? DateTimeOffset.UtcNow;
var occurred = occurredAt ?? throw new ArgumentNullException(nameof(occurredAt), "occurredAt must be provided for deterministic behavior.");
var evtId = string.IsNullOrWhiteSpace(eventId) ? Guid.NewGuid().ToString() : eventId!;
var key = string.IsNullOrWhiteSpace(idempotencyKey)
? ComputeIdempotencyKey(eventType, job.Id, job.Attempt)

View File

@@ -195,17 +195,20 @@ public sealed class JobAttestationService : IJobAttestationService
private readonly IJobAttestationSigner _signer;
private readonly IJobAttestationStore _store;
private readonly ITimelineEventEmitter _timelineEmitter;
private readonly TimeProvider _timeProvider;
private readonly ILogger<JobAttestationService> _logger;
/// <summary>
/// Creates the service; signer, store, timeline emitter and logger are required.
/// </summary>
/// <exception cref="ArgumentNullException">When signer, store, timelineEmitter, or logger is null.</exception>
public JobAttestationService(
    IJobAttestationSigner signer,
    IJobAttestationStore store,
    ITimelineEventEmitter timelineEmitter,
    TimeProvider timeProvider,
    ILogger<JobAttestationService> logger)
{
    _signer = signer ?? throw new ArgumentNullException(nameof(signer));
    _store = store ?? throw new ArgumentNullException(nameof(store));
    _timelineEmitter = timelineEmitter ?? throw new ArgumentNullException(nameof(timelineEmitter));
    // NOTE(review): parameter is declared non-nullable yet null-coalesced to the system clock,
    // while MirrorOperationRecorder throws on a null TimeProvider — confirm which is intended.
    _timeProvider = timeProvider ?? TimeProvider.System;
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -229,7 +232,7 @@ public sealed class JobAttestationService : IJobAttestationService
Status: request.Status,
ExitCode: request.ExitCode,
StartedAt: request.StartedAt,
CompletedAt: request.CompletedAt ?? DateTimeOffset.UtcNow,
CompletedAt: request.CompletedAt ?? _timeProvider.GetUtcNow(),
DurationSeconds: request.DurationSeconds,
InputHash: ComputePayloadHash(request.InputPayloadJson),
OutputHash: ComputePayloadHash(request.OutputPayloadJson),
@@ -318,7 +321,7 @@ public sealed class JobAttestationService : IJobAttestationService
jobType = request.JobType,
tenantId = request.TenantId,
projectId = request.ProjectId,
scheduledAt = DateTimeOffset.UtcNow,
scheduledAt = _timeProvider.GetUtcNow(),
inputHash = ComputePayloadHash(request.InputPayloadJson)
};
@@ -379,7 +382,7 @@ public sealed class JobAttestationService : IJobAttestationService
runId,
tenantId,
projectId,
completedAt = DateTimeOffset.UtcNow,
completedAt = _timeProvider.GetUtcNow(),
jobCount = jobAttestations.Count,
jobs = jobAttestations.Select(a => new
{
@@ -486,7 +489,7 @@ public sealed class JobAttestationService : IJobAttestationService
var keyId = primarySignature?.KeyId;
// Check age
var age = DateTimeOffset.UtcNow - attestation.CreatedAt;
var age = _timeProvider.GetUtcNow() - attestation.CreatedAt;
if (age > TimeSpan.FromDays(365))
{
warnings.Add($"Attestation is older than 1 year ({age.Days} days)");
@@ -557,7 +560,7 @@ public sealed class JobAttestationService : IJobAttestationService
PredicateType: predicateType,
Subjects: subjects.Select(s => new AttestationSubject(s.Name, s.Digest)).ToList(),
Envelope: envelope,
CreatedAt: DateTimeOffset.UtcNow,
CreatedAt: _timeProvider.GetUtcNow(),
PayloadDigest: payloadDigest,
EvidencePointer: null);
}

View File

@@ -76,6 +76,7 @@ public sealed record JobCapsule(
string jobType,
JobCapsuleKind kind,
JobCapsuleInputs inputs,
DateTimeOffset createdAt,
JobCapsuleOutputs? outputs = null,
IReadOnlyList<JobCapsuleArtifact>? artifacts = null,
IReadOnlyList<JobCapsuleTimelineEntry>? timelineEntries = null,
@@ -85,7 +86,6 @@ public sealed record JobCapsule(
IReadOnlyDictionary<string, string>? metadata = null)
{
var capsuleId = Guid.NewGuid();
var createdAt = DateTimeOffset.UtcNow;
// Compute root hash from all materials
var rootHash = ComputeRootHash(

View File

@@ -113,12 +113,14 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
private readonly IJobCapsuleStore _store;
private readonly ITimelineEventEmitter? _timelineEmitter;
private readonly ISnapshotHookInvoker? _snapshotHooks;
private readonly TimeProvider _timeProvider;
private readonly ILogger<JobCapsuleGenerator> _logger;
private readonly JobCapsuleGeneratorOptions _options;
public JobCapsuleGenerator(
IJobRedactionGuard redactionGuard,
IJobCapsuleStore store,
TimeProvider timeProvider,
ILogger<JobCapsuleGenerator> logger,
ITimelineEventEmitter? timelineEmitter = null,
ISnapshotHookInvoker? snapshotHooks = null,
@@ -126,6 +128,7 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
{
_redactionGuard = redactionGuard ?? throw new ArgumentNullException(nameof(redactionGuard));
_store = store ?? throw new ArgumentNullException(nameof(store));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timelineEmitter = timelineEmitter;
_snapshotHooks = snapshotHooks;
@@ -164,6 +167,7 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
jobType: request.JobType,
kind: JobCapsuleKind.JobScheduling,
inputs: inputs,
createdAt: _timeProvider.GetUtcNow(),
timelineEntries: timelineEntries,
policyResults: request.PolicyResults,
projectId: request.ProjectId,
@@ -239,6 +243,7 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
jobType: request.JobType,
kind: JobCapsuleKind.JobCompletion,
inputs: inputs,
createdAt: _timeProvider.GetUtcNow(),
outputs: outputs,
artifacts: artifacts,
timelineEntries: timelineEntries,
@@ -323,6 +328,7 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
jobType: request.JobType,
kind: JobCapsuleKind.JobFailure,
inputs: inputs,
createdAt: _timeProvider.GetUtcNow(),
outputs: outputs,
timelineEntries: timelineEntries,
policyResults: request.PolicyResults,
@@ -409,6 +415,7 @@ public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
jobType: "run.completion",
kind: JobCapsuleKind.RunCompletion,
inputs: inputs,
createdAt: _timeProvider.GetUtcNow(),
artifacts: jobRefs,
projectId: projectId,
runId: runId,

View File

@@ -212,6 +212,7 @@ public sealed record IncidentModeHooksOptions
public sealed class IncidentModeHooks : IIncidentModeHooks
{
private readonly ITimelineEventEmitter _eventEmitter;
private readonly TimeProvider _timeProvider;
private readonly ILogger<IncidentModeHooks> _logger;
private readonly IncidentModeHooksOptions _options;
private readonly Dictionary<string, IncidentModeState> _tenantStates = new();
@@ -220,10 +221,12 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
/// <summary>
/// Initializes the incident-mode hooks with a timeline event emitter, a clock source,
/// a logger, and optional behavior options.
/// </summary>
/// <param name="eventEmitter">Emitter used to publish incident-mode timeline events; must not be null.</param>
/// <param name="timeProvider">Clock source used for deterministic timestamps; falls back to the system clock when null.</param>
/// <param name="logger">Logger for incident-mode diagnostics; must not be null.</param>
/// <param name="options">Optional tuning options; defaults to a new <see cref="IncidentModeHooksOptions"/> when omitted.</param>
public IncidentModeHooks(
ITimelineEventEmitter eventEmitter,
TimeProvider timeProvider,
ILogger<IncidentModeHooks> logger,
IncidentModeHooksOptions? options = null)
{
_eventEmitter = eventEmitter ?? throw new ArgumentNullException(nameof(eventEmitter));
// NOTE(review): parameter is declared non-nullable yet is null-coalesced to TimeProvider.System,
// while the sibling arguments throw ArgumentNullException — confirm which contract is intended
// (other classes in this change, e.g. JobCapsuleGenerator, throw for a null TimeProvider).
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? new IncidentModeHooksOptions();
}
@@ -250,7 +253,7 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
{
if (_lastActivations.TryGetValue(tenantId, out var lastActivation))
{
var timeSinceLastActivation = DateTimeOffset.UtcNow - lastActivation;
var timeSinceLastActivation = _timeProvider.GetUtcNow() - lastActivation;
if (timeSinceLastActivation < _options.ReactivationCooldown)
{
_logger.LogDebug(
@@ -298,7 +301,7 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
IncidentModeSource source,
TimeSpan ttl)
{
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var expiresAt = now + ttl;
var newState = new IncidentModeState(
@@ -372,7 +375,7 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
{
// Check if expired
if (state.IsActive && state.ExpiresAt.HasValue &&
DateTimeOffset.UtcNow >= state.ExpiresAt.Value)
_timeProvider.GetUtcNow() >= state.ExpiresAt.Value)
{
_tenantStates[tenantId] = IncidentModeState.Inactive;
return IncidentModeState.Inactive;
@@ -422,7 +425,7 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
TenantId: tenantId,
EventType: eventType,
Source: "orchestrator",
OccurredAt: DateTimeOffset.UtcNow,
OccurredAt: _timeProvider.GetUtcNow(),
ReceivedAt: null,
CorrelationId: Guid.NewGuid().ToString(),
TraceId: null,
@@ -462,8 +465,9 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
string actor,
string reason)
{
var now = _timeProvider.GetUtcNow();
var duration = previousState.ActivatedAt.HasValue
? DateTimeOffset.UtcNow - previousState.ActivatedAt.Value
? now - previousState.ActivatedAt.Value
: TimeSpan.Zero;
var @event = new TimelineEvent(
@@ -472,7 +476,7 @@ public sealed class IncidentModeHooks : IIncidentModeHooks
TenantId: tenantId,
EventType: "orchestrator.incident_mode.deactivated",
Source: "orchestrator",
OccurredAt: DateTimeOffset.UtcNow,
OccurredAt: now,
ReceivedAt: null,
CorrelationId: Guid.NewGuid().ToString(),
TraceId: null,

View File

@@ -361,7 +361,11 @@ public sealed class HourlyCounter
MaxPerHour = maxPerHour;
_currentCount = currentCount;
_hourStart = hourStart ?? TruncateToHour(DateTimeOffset.UtcNow);
if (hourStart is null)
throw new ArgumentNullException(nameof(hourStart), "hourStart must be provided for deterministic behavior.");
_hourStart = hourStart.Value;
}
/// <summary>

View File

@@ -69,7 +69,11 @@ public sealed class TokenBucket
BurstCapacity = burstCapacity;
RefillRate = refillRate;
_currentTokens = Math.Min(initialTokens ?? burstCapacity, burstCapacity);
_lastRefillAt = lastRefillAt ?? DateTimeOffset.UtcNow;
if (lastRefillAt is null)
throw new ArgumentNullException(nameof(lastRefillAt), "lastRefillAt must be provided for deterministic behavior.");
_lastRefillAt = lastRefillAt.Value;
}
/// <summary>

View File

@@ -6,15 +6,18 @@ namespace StellaOps.Orchestrator.Core.Scale;
public sealed class LoadShedder
{
private readonly ScaleMetrics _scaleMetrics;
private readonly TimeProvider _timeProvider;
private readonly LoadShedderOptions _options;
private volatile LoadShedState _currentState = LoadShedState.Normal;
private DateTimeOffset _lastStateChange = DateTimeOffset.UtcNow;
private DateTimeOffset _lastStateChange;
private readonly object _lock = new();
public LoadShedder(ScaleMetrics scaleMetrics, LoadShedderOptions? options = null)
public LoadShedder(ScaleMetrics scaleMetrics, TimeProvider? timeProvider = null, LoadShedderOptions? options = null)
{
_scaleMetrics = scaleMetrics;
_timeProvider = timeProvider ?? TimeProvider.System;
_options = options ?? LoadShedderOptions.Default;
_lastStateChange = _timeProvider.GetUtcNow();
}
/// <summary>
@@ -108,7 +111,7 @@ public sealed class LoadShedder
lock (_lock)
{
// Hysteresis: require sustained condition for state changes
var timeSinceLastChange = DateTimeOffset.UtcNow - _lastStateChange;
var timeSinceLastChange = _timeProvider.GetUtcNow() - _lastStateChange;
// Going up (worse) is immediate; going down (better) requires cooldown
var isImproving = newState < _currentState;
@@ -119,7 +122,7 @@ public sealed class LoadShedder
}
_currentState = newState;
_lastStateChange = DateTimeOffset.UtcNow;
_lastStateChange = _timeProvider.GetUtcNow();
}
}
@@ -131,7 +134,7 @@ public sealed class LoadShedder
lock (_lock)
{
_currentState = state;
_lastStateChange = DateTimeOffset.UtcNow;
_lastStateChange = _timeProvider.GetUtcNow();
}
}

View File

@@ -11,12 +11,22 @@ public sealed class ScaleMetrics
private readonly ConcurrentQueue<LatencySample> _dispatchLatencies = new();
private readonly ConcurrentDictionary<string, long> _queueDepths = new();
private readonly ConcurrentDictionary<string, long> _activeJobs = new();
private readonly TimeProvider _timeProvider;
private readonly object _lock = new();
// Keep samples for the last 5 minutes
private static readonly TimeSpan SampleWindow = TimeSpan.FromMinutes(5);
private const int MaxSamples = 10000;
/// <summary>
/// Creates a new ScaleMetrics instance.
/// </summary>
/// <param name="timeProvider">Time provider for deterministic time.</param>
/// <remarks>
/// When <paramref name="timeProvider"/> is null the wall clock (<see cref="TimeProvider.System"/>)
/// is used, which makes latency sampling non-deterministic; inject a fake provider in tests.
/// </remarks>
public ScaleMetrics(TimeProvider? timeProvider = null)
{
// Default to the system clock when no provider is supplied.
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Records a dispatch latency sample.
/// </summary>
@@ -26,7 +36,7 @@ public sealed class ScaleMetrics
public void RecordDispatchLatency(TimeSpan latency, string tenantId, string? jobType = null)
{
var sample = new LatencySample(
Timestamp: DateTimeOffset.UtcNow,
Timestamp: _timeProvider.GetUtcNow(),
LatencyMs: latency.TotalMilliseconds,
TenantId: tenantId,
JobType: jobType);
@@ -88,7 +98,7 @@ public sealed class ScaleMetrics
/// <param name="window">Time window for samples (default: 1 minute).</param>
public LatencyPercentiles GetDispatchLatencyPercentiles(string? tenantId = null, TimeSpan? window = null)
{
var cutoff = DateTimeOffset.UtcNow - (window ?? TimeSpan.FromMinutes(1));
var cutoff = _timeProvider.GetUtcNow() - (window ?? TimeSpan.FromMinutes(1));
var samples = _dispatchLatencies
.Where(s => s.Timestamp >= cutoff)
@@ -122,7 +132,7 @@ public sealed class ScaleMetrics
var totalActiveJobs = _activeJobs.Values.Sum();
return new ScaleSnapshot(
Timestamp: DateTimeOffset.UtcNow,
Timestamp: _timeProvider.GetUtcNow(),
TotalQueueDepth: totalQueueDepth,
TotalActiveJobs: totalActiveJobs,
DispatchLatency: percentiles,
@@ -189,7 +199,7 @@ public sealed class ScaleMetrics
// Double-check after acquiring lock
if (_dispatchLatencies.Count <= MaxSamples) return;
var cutoff = DateTimeOffset.UtcNow - SampleWindow;
var cutoff = _timeProvider.GetUtcNow() - SampleWindow;
var toRemove = _dispatchLatencies.Count - MaxSamples / 2;
for (var i = 0; i < toRemove; i++)

View File

@@ -98,13 +98,16 @@ public sealed class ExportJobService : IExportJobService
{
private readonly IJobRepository _jobRepository;
private readonly IQuotaRepository _quotaRepository;
private readonly TimeProvider _timeProvider;
public ExportJobService(
IJobRepository jobRepository,
IQuotaRepository quotaRepository)
IQuotaRepository quotaRepository,
TimeProvider? timeProvider = null)
{
_jobRepository = jobRepository;
_quotaRepository = quotaRepository;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<Job> CreateExportJobAsync(
@@ -128,7 +131,7 @@ public sealed class ExportJobService : IExportJobService
var payloadJson = payload.ToJson();
var payloadDigest = payload.ComputeDigest();
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var job = new Job(
JobId: Guid.NewGuid(),

View File

@@ -26,6 +26,7 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
{
private readonly NpgsqlDataSource _dataSource;
private readonly ILogger<PostgresFacetSealStore> _logger;
private readonly TimeProvider _timeProvider;
private const string SelectColumns = """
combined_merkle_root, image_digest, schema_version, created_at,
@@ -96,12 +97,15 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
/// </summary>
/// <param name="dataSource">The Npgsql data source.</param>
/// <param name="logger">Logger instance.</param>
/// <param name="timeProvider">Time provider for deterministic time.</param>
public PostgresFacetSealStore(
NpgsqlDataSource dataSource,
ILogger<PostgresFacetSealStore>? logger = null)
ILogger<PostgresFacetSealStore>? logger = null,
TimeProvider? timeProvider = null)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<PostgresFacetSealStore>.Instance;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
@@ -241,7 +245,7 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ct.ThrowIfCancellationRequested();
ArgumentOutOfRangeException.ThrowIfNegativeOrZero(keepAtLeast);
var cutoff = DateTimeOffset.UtcNow - retentionPeriod;
var cutoff = _timeProvider.GetUtcNow() - retentionPeriod;
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(PurgeSql, conn);

View File

@@ -108,6 +108,9 @@ builder.Services.AddSingleton<IPackRunIncidentModeService, PackRunIncidentModeSe
builder.Services.AddOpenApi();
// Determinism: TimeProvider injection
builder.Services.AddSingleton(TimeProvider.System);
// Stella Router integration
var routerOptions = builder.Configuration.GetSection("TaskRunner:Router").Get<StellaRouterOptionsBase>();
builder.Services.TryAddStellaRouter(
@@ -259,6 +262,7 @@ async Task<IResult> HandleCreateRun(
IPackRunJobScheduler scheduler,
ISealedInstallEnforcer sealedInstallEnforcer,
ISealedInstallAuditLogger auditLogger,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
if (request is null || string.IsNullOrWhiteSpace(request.Manifest))
@@ -315,7 +319,7 @@ async Task<IResult> HandleCreateRun(
}
},
status = "rejected",
rejected_at = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture)
rejected_at = timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
}, statusCode: StatusCodes.Status403Forbidden);
}
@@ -340,7 +344,7 @@ async Task<IResult> HandleCreateRun(
return Results.Conflict(new { error = "Run already exists." });
}
var requestedAt = DateTimeOffset.UtcNow;
var requestedAt = timeProvider.GetUtcNow();
var context = new PackRunExecutionContext(runId, plan, requestedAt, request.TenantId);
var graph = executionGraphBuilder.Build(plan);
@@ -355,7 +359,7 @@ async Task<IResult> HandleCreateRun(
{
await logStore.AppendAsync(
runId,
new PackRunLogEntry(DateTimeOffset.UtcNow, "error", "run.schedule-failed", ex.Message, null, null),
new PackRunLogEntry(timeProvider.GetUtcNow(), "error", "run.schedule-failed", ex.Message, null, null),
cancellationToken).ConfigureAwait(false);
return Results.StatusCode(StatusCodes.Status500InternalServerError);
@@ -373,7 +377,7 @@ async Task<IResult> HandleCreateRun(
await logStore.AppendAsync(
runId,
new PackRunLogEntry(DateTimeOffset.UtcNow, "info", "run.created", "Run created via API.", null, metadata),
new PackRunLogEntry(timeProvider.GetUtcNow(), "info", "run.created", "Run created via API.", null, metadata),
cancellationToken).ConfigureAwait(false);
var response = RunStateMapper.ToResponse(state);
@@ -510,6 +514,7 @@ async Task<IResult> HandleCancelRun(
string runId,
IPackRunStateStore stateStore,
IPackRunLogStore logStore,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(runId))
@@ -523,7 +528,7 @@ async Task<IResult> HandleCancelRun(
return Results.NotFound();
}
var now = DateTimeOffset.UtcNow;
var now = timeProvider.GetUtcNow();
var updatedSteps = state.Steps.Values
.Select(step => step.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped
? step
@@ -550,7 +555,7 @@ async Task<IResult> HandleCancelRun(
};
await logStore.AppendAsync(runId, new PackRunLogEntry(now, "warn", "run.cancel-requested", "Run cancellation requested.", null, metadata), cancellationToken).ConfigureAwait(false);
await logStore.AppendAsync(runId, new PackRunLogEntry(DateTimeOffset.UtcNow, "info", "run.cancelled", "Run cancelled; remaining steps marked as skipped.", null, metadata), cancellationToken).ConfigureAwait(false);
await logStore.AppendAsync(runId, new PackRunLogEntry(timeProvider.GetUtcNow(), "info", "run.cancelled", "Run cancelled; remaining steps marked as skipped.", null, metadata), cancellationToken).ConfigureAwait(false);
return Results.Accepted($"/v1/task-runner/runs/{runId}", new { status = "cancelled" });
}

View File

@@ -28,6 +28,7 @@ public sealed class PackRunWorkerService : BackgroundService
private readonly IPackRunArtifactUploader artifactUploader;
private readonly IPackRunProvenanceWriter provenanceWriter;
private readonly IPackRunLogStore logStore;
private readonly TimeProvider timeProvider;
private readonly ILogger<PackRunWorkerService> logger;
private readonly UpDownCounter<long> runningSteps;
@@ -42,6 +43,7 @@ public sealed class PackRunWorkerService : BackgroundService
IPackRunProvenanceWriter provenanceWriter,
IPackRunLogStore logStore,
IOptions<PackRunWorkerOptions> options,
TimeProvider timeProvider,
ILogger<PackRunWorkerService> logger)
{
this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher));
@@ -54,6 +56,7 @@ public sealed class PackRunWorkerService : BackgroundService
this.provenanceWriter = provenanceWriter ?? throw new ArgumentNullException(nameof(provenanceWriter));
this.logStore = logStore ?? throw new ArgumentNullException(nameof(logStore));
this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
this.timeProvider = timeProvider ?? TimeProvider.System;
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
runningSteps = TaskRunnerTelemetry.RunningSteps;
@@ -141,7 +144,7 @@ public sealed class PackRunWorkerService : BackgroundService
return;
}
var gateUpdate = PackRunGateStateUpdater.Apply(state, graph, processorResult.ApprovalCoordinator, DateTimeOffset.UtcNow);
var gateUpdate = PackRunGateStateUpdater.Apply(state, graph, processorResult.ApprovalCoordinator, timeProvider.GetUtcNow());
state = gateUpdate.State;
if (gateUpdate.HasBlockingFailure)
@@ -189,7 +192,7 @@ public sealed class PackRunWorkerService : BackgroundService
PackRunExecutionGraph graph,
CancellationToken cancellationToken)
{
var timestamp = DateTimeOffset.UtcNow;
var timestamp = timeProvider.GetUtcNow();
var state = PackRunStateFactory.CreateInitialState(context, graph, simulationEngine, timestamp);
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
return state;
@@ -204,7 +207,7 @@ public sealed class PackRunWorkerService : BackgroundService
string? stepId = null,
IReadOnlyDictionary<string, string>? metadata = null)
{
var entry = new PackRunLogEntry(DateTimeOffset.UtcNow, level, eventType, message, stepId, metadata);
var entry = new PackRunLogEntry(timeProvider.GetUtcNow(), level, eventType, message, stepId, metadata);
return logStore.AppendAsync(runId, entry, cancellationToken);
}
@@ -230,7 +233,7 @@ public sealed class PackRunWorkerService : BackgroundService
var updated = new ReadOnlyDictionary<string, PackRunStepStateRecord>(mutable);
return state with
{
UpdatedAt = DateTimeOffset.UtcNow,
UpdatedAt = timeProvider.GetUtcNow(),
Steps = updated
};
}
@@ -256,7 +259,7 @@ public sealed class PackRunWorkerService : BackgroundService
return StepExecutionOutcome.Continue;
}
if (record.NextAttemptAt is { } scheduled && scheduled > DateTimeOffset.UtcNow)
if (record.NextAttemptAt is { } scheduled && scheduled > timeProvider.GetUtcNow())
{
logger.LogInformation(
"Run {RunId} step {StepId} waiting until {NextAttempt} for retry.",
@@ -287,7 +290,7 @@ public sealed class PackRunWorkerService : BackgroundService
{
Status = PackRunStepExecutionStatus.Succeeded,
StatusReason = null,
LastTransitionAt = DateTimeOffset.UtcNow,
LastTransitionAt = timeProvider.GetUtcNow(),
NextAttemptAt = null
};
await AppendLogAsync(
@@ -317,7 +320,7 @@ public sealed class PackRunWorkerService : BackgroundService
{
Status = PackRunStepExecutionStatus.Skipped,
StatusReason = "unsupported-kind",
LastTransitionAt = DateTimeOffset.UtcNow
LastTransitionAt = timeProvider.GetUtcNow()
};
await AppendLogAsync(
executionContext.RunId,
@@ -339,7 +342,7 @@ public sealed class PackRunWorkerService : BackgroundService
ExecutionContext executionContext)
{
var record = executionContext.Steps[step.Id];
var now = DateTimeOffset.UtcNow;
var now = timeProvider.GetUtcNow();
var currentState = new PackRunStepState(record.Status, record.Attempts, record.LastTransitionAt, record.NextAttemptAt);
if (currentState.Status == PackRunStepExecutionStatus.Pending)
@@ -378,7 +381,7 @@ public sealed class PackRunWorkerService : BackgroundService
if (result.Succeeded)
{
currentState = PackRunStepStateMachine.CompleteSuccess(currentState, DateTimeOffset.UtcNow);
currentState = PackRunStepStateMachine.CompleteSuccess(currentState, timeProvider.GetUtcNow());
executionContext.Steps[step.Id] = record with
{
Status = currentState.Status,
@@ -410,7 +413,7 @@ public sealed class PackRunWorkerService : BackgroundService
step.Id,
result.Error ?? "unknown error");
var failure = PackRunStepStateMachine.RegisterFailure(currentState, DateTimeOffset.UtcNow, executionContext.FailurePolicy);
var failure = PackRunStepStateMachine.RegisterFailure(currentState, timeProvider.GetUtcNow(), executionContext.FailurePolicy);
var updatedRecord = record with
{
Status = failure.State.Status,
@@ -603,7 +606,7 @@ public sealed class PackRunWorkerService : BackgroundService
{
Status = PackRunStepExecutionStatus.Succeeded,
StatusReason = null,
LastTransitionAt = DateTimeOffset.UtcNow,
LastTransitionAt = timeProvider.GetUtcNow(),
NextAttemptAt = null
};
}
@@ -619,7 +622,7 @@ public sealed class PackRunWorkerService : BackgroundService
{
Status = PackRunStepExecutionStatus.Failed,
StatusReason = reason,
LastTransitionAt = DateTimeOffset.UtcNow
LastTransitionAt = timeProvider.GetUtcNow()
};
}
@@ -634,7 +637,7 @@ public sealed class PackRunWorkerService : BackgroundService
{
Status = PackRunStepExecutionStatus.Pending,
StatusReason = reason,
LastTransitionAt = DateTimeOffset.UtcNow
LastTransitionAt = timeProvider.GetUtcNow()
};
}