feat(zastava): add evidence locker plan and schema examples

- Introduced README.md for Zastava Evidence Locker Plan detailing artifacts to sign and post-signing steps.
- Added example JSON schemas for observer events and webhook admissions.
- Updated implementor guidelines with checklist for CI linting, determinism, secrets management, and schema control.
- Created alert rules for Vuln Explorer to monitor API latency and projection errors.
- Developed analytics ingestion plan for Vuln Explorer, focusing on telemetry and PII guardrails.
- Implemented Grafana dashboard configuration for Vuln Explorer metrics visualization.
- Added expected projection SHA256 for vulnerability events.
- Created k6 load testing script for Vuln Explorer API.
- Added sample projection and replay event data for testing.
- Implemented ReplayInputsLock for deterministic replay inputs management.
- Developed tests for ReplayInputsLock to ensure stable hash computation.
- Created SurfaceManifestDeterminismVerifier to validate manifest determinism and integrity.
- Added unit tests for SurfaceManifestDeterminismVerifier to ensure correct functionality.
- Implemented Angular tests for VulnerabilityHttpClient and VulnerabilityDetailComponent to verify API interactions and UI rendering.
This commit is contained in:
StellaOps Bot
2025-12-02 09:27:31 +02:00
parent 885ce86af4
commit 2d08f52715
74 changed files with 1690 additions and 131 deletions

View File

@@ -93,20 +93,21 @@ public sealed record AuditEntry(
var occurredAt = DateTimeOffset.UtcNow;
// Compute canonical hash from immutable content
// Use the same property names and fields as VerifyIntegrity to keep the hash stable.
var contentHash = CanonicalJsonHasher.ComputeCanonicalSha256(new
{
entryId,
tenantId,
eventType,
resourceType,
resourceId,
actorId,
actorType,
description,
oldState,
newState,
occurredAt,
sequenceNumber
EntryId = entryId,
TenantId = tenantId,
EventType = eventType,
ResourceType = resourceType,
ResourceId = resourceId,
ActorId = actorId,
ActorType = actorType,
Description = description,
OldState = oldState,
NewState = newState,
OccurredAt = occurredAt,
SequenceNumber = sequenceNumber
});
return new AuditEntry(

View File

@@ -2,6 +2,7 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Core.Domain.Events;
@@ -180,8 +181,8 @@ public sealed record EventEnvelope(
/// <summary>Computes a digest of the envelope for signing.</summary>
public string ComputeDigest()
{
var json = ToJson();
var bytes = Encoding.UTF8.GetBytes(json);
var canonicalJson = CanonicalJsonHasher.ToCanonicalJson(new { envelope = this });
var bytes = Encoding.UTF8.GetBytes(canonicalJson);
var hash = SHA256.HashData(bytes);
return $"sha256:{Convert.ToHexStringLower(hash)}";
}

View File

@@ -82,7 +82,10 @@ public sealed record EventPublishOptions(
bool CompressLargePayloads,
/// <summary>Threshold for payload compression (bytes).</summary>
int CompressionThreshold)
int CompressionThreshold,
/// <summary>Maximum number of events to fan out in a single batch to avoid backpressure.</summary>
int MaxBatchSize)
{
/// <summary>Default publishing options.</summary>
public static EventPublishOptions Default => new(
@@ -92,7 +95,8 @@ public sealed record EventPublishOptions(
IdempotencyTtl: TimeSpan.FromHours(24),
IncludeProvenance: true,
CompressLargePayloads: true,
CompressionThreshold: 64 * 1024);
CompressionThreshold: 64 * 1024,
MaxBatchSize: 500);
}
/// <summary>

View File

@@ -26,6 +26,12 @@ public sealed record PackRunLog(
/// <summary>Log message content.</summary>
string Message,
/// <summary>Canonical SHA-256 digest of the log payload (message+data+metadata).</summary>
string Digest,
/// <summary>Size of the log payload in bytes (UTF-8).</summary>
long SizeBytes,
/// <summary>When the log entry was created.</summary>
DateTimeOffset Timestamp,
@@ -45,6 +51,8 @@ public sealed record PackRunLog(
string? data = null,
DateTimeOffset? timestamp = null)
{
var (digest, sizeBytes) = ComputeDigest(message, data, tenantId, packRunId, sequence, level, source);
return new PackRunLog(
LogId: Guid.NewGuid(),
TenantId: tenantId,
@@ -53,6 +61,8 @@ public sealed record PackRunLog(
Level: level,
Source: source,
Message: message,
Digest: digest,
SizeBytes: sizeBytes,
Timestamp: timestamp ?? DateTimeOffset.UtcNow,
Data: data);
}
@@ -188,4 +198,19 @@ public sealed record PackRunLogCursor(
/// Advances the cursor to a new sequence.
/// </summary>
public PackRunLogCursor Advance(long newSequence)
{
    // Non-destructive update: produce a copy of the cursor whose
    // LastSequence is replaced with the supplied value.
    return this with { LastSequence = newSequence };
}
/// <summary>
/// Computes the canonical SHA-256 digest and UTF-8 byte size of a log entry's
/// content (message + data + identifying metadata).
/// </summary>
/// <remarks>
/// Each field is length-prefixed ("&lt;length&gt;:&lt;value&gt;", with a bare "-" for null)
/// before hashing. The previous '|'-joined payload was ambiguous: a field value
/// containing '|' (e.g. message "m|d" with null data vs. message "m" with data "d|")
/// could produce the same payload, and null data was indistinguishable from "".
/// NOTE(review): this changes the digest values produced for a given entry;
/// safe if digests are only computed and stored per-entry — confirm no
/// persisted digests or fixtures predate this encoding.
/// </remarks>
private static (string Digest, long SizeBytes) ComputeDigest(
    string message,
    string? data,
    string tenantId,
    Guid packRunId,
    long sequence,
    LogLevel level,
    string source)
{
    // Unambiguous field encoder: "-" marks null; otherwise "<charCount>:<value>".
    static string Encode(string? value) => value is null ? "-" : $"{value.Length}:{value}";

    var payload = string.Concat(
        Encode(tenantId),
        Encode(packRunId.ToString("D")),
        Encode(sequence.ToString(System.Globalization.CultureInfo.InvariantCulture)),
        Encode(level.ToString()),
        Encode(source),
        Encode(message),
        Encode(data));
    var bytes = System.Text.Encoding.UTF8.GetBytes(payload);
    var hash = System.Security.Cryptography.SHA256.HashData(bytes);
    return (Convert.ToHexString(hash).ToLowerInvariant(), bytes.LongLength);
}
}

View File

@@ -0,0 +1,39 @@
using System.Text.Json.Serialization;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Core.Domain.Replay;
/// <summary>
/// Snapshot of the exact inputs a replay ran with (tool images, policy/graph
/// hashes, seeds, environment), pinned to the canonical hash of the originating
/// replay manifest so the replay can be reproduced deterministically.
/// </summary>
public sealed record ReplayInputsLock(
    [property: JsonPropertyName("schemaVersion")] string SchemaVersion,
    [property: JsonPropertyName("manifestHash")] string ManifestHash,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("inputs")] ReplayInputs Inputs,
    [property: JsonPropertyName("notes")] string? Notes = null)
{
    /// <summary>Schema identifier stamped on newly created locks.</summary>
    public const string DefaultSchemaVersion = "orch.replay.lock.v1";

    /// <summary>
    /// Builds a lock from <paramref name="manifest"/>, copying its inputs and
    /// recording its canonical hash. Whitespace-only notes are normalized to null.
    /// </summary>
    public static ReplayInputsLock Create(
        ReplayManifest manifest,
        string? notes = null,
        DateTimeOffset? createdAt = null,
        string schemaVersion = DefaultSchemaVersion)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        var normalizedNotes = string.IsNullOrWhiteSpace(notes) ? null : notes;
        var timestamp = createdAt ?? DateTimeOffset.UtcNow;

        return new ReplayInputsLock(
            schemaVersion,
            manifest.ComputeHash(),
            timestamp,
            manifest.Inputs,
            normalizedNotes);
    }

    /// <summary>
    /// Canonical SHA-256 over the lock's content; identical content yields an
    /// identical hash regardless of property ordering.
    /// </summary>
    public string ComputeHash() => CanonicalJsonHasher.ComputeCanonicalSha256(this);
}

View File

@@ -27,7 +27,7 @@ public static class CanonicalJsonHasher
{
var node = JsonSerializer.SerializeToNode(value, SerializerOptions) ?? new JsonObject();
// Work on a detached copy to avoid parent conflicts.
var ordered = OrderNode(node.Clone());
var ordered = OrderNode(node.DeepClone());
return ordered.ToJsonString(SerializerOptions);
}
@@ -50,18 +50,18 @@ public static class CanonicalJsonHasher
var orderedObj = new JsonObject();
foreach (var kvp in obj.OrderBy(x => x.Key, StringComparer.Ordinal))
{
orderedObj.Add(kvp.Key, kvp.Value is null ? null : OrderNode(kvp.Value.Clone()));
orderedObj.Add(kvp.Key, kvp.Value is null ? null : OrderNode(kvp.Value.DeepClone()));
}
return orderedObj;
case JsonArray arr:
var orderedArr = new JsonArray();
foreach (var item in arr)
{
orderedArr.Add(item is null ? null : OrderNode(item.Clone()));
orderedArr.Add(item is null ? null : OrderNode(item.DeepClone()));
}
return orderedArr;
default:
return node.Clone(); // primitives stay as-is
return node.DeepClone(); // primitives stay as-is
}
}
}

View File

@@ -79,20 +79,42 @@ public sealed class OrchestratorEventPublisher : IEventPublisher
var failed = 0;
var errors = new List<string>();
foreach (var envelope in envelopes)
// Stable ordering + pre-deduplication to enforce deterministic fan-out and reduce backpressure.
var ordered = envelopes
.OrderBy(e => e.OccurredAt)
.ThenBy(e => e.EventId, StringComparer.Ordinal)
.ToList();
var seenKeys = new HashSet<string>(StringComparer.Ordinal);
var workItems = new List<EventEnvelope>();
foreach (var envelope in ordered)
{
try
if (!seenKeys.Add(envelope.IdempotencyKey))
{
var result = await PublishAsync(envelope, cancellationToken);
if (result)
published++;
else
deduplicated++;
deduplicated++;
continue;
}
catch (Exception ex)
workItems.Add(envelope);
}
foreach (var chunk in workItems.Chunk(_options.MaxBatchSize))
{
foreach (var envelope in chunk)
{
failed++;
errors.Add($"{envelope.EventId}: {ex.Message}");
try
{
var result = await PublishAsync(envelope, cancellationToken);
if (result)
published++;
else
deduplicated++;
}
catch (Exception ex)
{
failed++;
errors.Add($"{envelope.EventId}: {ex.Message}");
}
}
}

View File

@@ -824,6 +824,66 @@ public class EventPublishingTests
Assert.Equal(1, result2.Deduplicated);
}
[Fact]
public async Task OrchestratorEventPublisher_PublishBatch_OrdersAndDeduplicatesBeforeSend()
{
// Verifies that PublishBatchAsync (a) orders envelopes by OccurredAt before
// fan-out and (b) drops later envelopes sharing an IdempotencyKey with an
// earlier one, counting them as deduplicated rather than published.
// NOTE(review): NullNotifierBus.Instance appears to be a shared singleton;
// bus.Clear() resets it per test, but this could race under parallel test
// execution — confirm these tests run serialized or the bus is per-test.
var bus = NullNotifierBus.Instance;
bus.Clear();
var store = new InMemoryIdempotencyStore();
// MaxBatchSize = 2 forces the three-envelope input across two chunks,
// exercising the chunked fan-out path; DSSE signing is disabled so the raw
// envelope JSON lands on the bus and can be deserialized below.
var options = Options.Create(EventPublishOptions.Default with
{
SignWithDsse = false,
MaxBatchSize = 2
});
var publisher = new OrchestratorEventPublisher(
store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);
var actor = EventActor.Service("test");
var baseEnvelope = EventEnvelope.Create(
eventType: OrchestratorEventType.JobCreated,
tenantId: "tenant-1",
actor: actor);
// Three envelopes: "earliest" and "laterDuplicate" share "dup-key", so after
// OccurredAt ordering only "earliest" should survive deduplication.
var earliest = baseEnvelope with
{
EventId = "urn:orch:event:earliest",
OccurredAt = new DateTimeOffset(2025, 1, 1, 0, 0, 5, TimeSpan.Zero),
IdempotencyKey = "dup-key"
};
var laterDuplicate = baseEnvelope with
{
EventId = "urn:orch:event:later-duplicate",
OccurredAt = new DateTimeOffset(2025, 1, 1, 0, 0, 10, TimeSpan.Zero),
IdempotencyKey = "dup-key"
};
var latest = baseEnvelope with
{
EventId = "urn:orch:event:latest",
OccurredAt = new DateTimeOffset(2025, 1, 1, 0, 0, 20, TimeSpan.Zero),
IdempotencyKey = "unique-key"
};
// Deliberately submit out of order: the publisher must sort before fan-out.
var result = await publisher.PublishBatchAsync(
new[] { laterDuplicate, latest, earliest },
CT);
Assert.Equal(2, result.Published);
Assert.Equal(1, result.Deduplicated);
var messages = bus.GetMessages("orch.jobs");
Assert.Equal(2, messages.Count);
var deserialized = messages
.Select(EventEnvelope.FromJson)
.Where(e => e is not null)
.ToList();
// Bus order proves chronological fan-out: earliest first, latest second,
// and the duplicate never reached the bus.
Assert.Equal("urn:orch:event:earliest", deserialized[0]!.EventId);
Assert.Equal("urn:orch:event:latest", deserialized[1]!.EventId);
}
#endregion
#region BatchPublishResult Tests
@@ -905,6 +965,7 @@ public class EventPublishingTests
Assert.True(options.IncludeProvenance);
Assert.True(options.CompressLargePayloads);
Assert.Equal(64 * 1024, options.CompressionThreshold);
Assert.Equal(500, options.MaxBatchSize);
}
#endregion

View File

@@ -29,6 +29,8 @@ public sealed class PackRunLogTests
Assert.Equal(LogLevel.Info, log.Level);
Assert.Equal("stdout", log.Source);
Assert.Equal("Test message", log.Message);
Assert.False(string.IsNullOrWhiteSpace(log.Digest));
Assert.True(log.SizeBytes > 0);
Assert.Equal(now, log.Timestamp);
Assert.Equal("{\"key\":\"value\"}", log.Data);
}

View File

@@ -0,0 +1,49 @@
using System.Collections.Immutable;
using StellaOps.Orchestrator.Core.Domain.Replay;
using Xunit;
namespace StellaOps.Orchestrator.Tests;
/// <summary>
/// Covers <see cref="ReplayInputsLock"/>: hash stability for identical content
/// and propagation of the source manifest's canonical hash.
/// </summary>
public class ReplayInputsLockTests
{
    [Fact]
    public void ReplayInputsLock_ComputesStableHash()
    {
        // Two locks built from the same manifest at the same timestamp must
        // hash identically — the canonical hash depends only on content.
        var manifestCreation = new DateTimeOffset(2025, 01, 01, 0, 0, 0, TimeSpan.Zero);
        var manifest = ReplayManifest.Create(
            jobId: "job-1",
            replayOf: "orig-1",
            inputs: new ReplayInputs(
                PolicyHash: "sha256:policy",
                GraphRevisionId: "graph-1",
                LatticeHash: "sha256:lattice",
                ToolImages: new[] { "img:v1", "img:v2" }.ToImmutableArray(),
                Seeds: new ReplaySeeds(Rng: 42, Sampling: 5),
                TimeSource: ReplayTimeSource.monotonic,
                Env: new Dictionary<string, string> { { "TZ", "UTC" } }.ToImmutableDictionary()),
            artifacts: null,
            createdAt: manifestCreation);

        var lockTimestamp = new DateTimeOffset(2025, 01, 01, 0, 0, 5, TimeSpan.Zero);
        var first = ReplayInputsLock.Create(manifest, createdAt: lockTimestamp);
        var second = ReplayInputsLock.Create(manifest, createdAt: lockTimestamp);

        Assert.Equal(first.ComputeHash(), second.ComputeHash());
    }

    [Fact]
    public void ReplayInputsLock_TracksManifestHash()
    {
        // The lock's ManifestHash must equal the canonical hash of the
        // manifest it was built from.
        var manifest = ReplayManifest.Create(
            jobId: "job-1",
            replayOf: "orig-1",
            inputs: new ReplayInputs(
                PolicyHash: "sha256:policy",
                GraphRevisionId: "graph-1",
                LatticeHash: null,
                ToolImages: new[] { "img:v1" }.ToImmutableArray(),
                Seeds: new ReplaySeeds(Rng: null, Sampling: null),
                TimeSource: ReplayTimeSource.wall,
                Env: ImmutableDictionary<string, string>.Empty));

        var subject = ReplayInputsLock.Create(manifest);

        Assert.Equal(manifest.ComputeHash(), subject.ManifestHash);
    }
}