Merge branch 'main' of https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
@@ -1,6 +1,7 @@
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
@@ -28,16 +29,17 @@ using StellaOps.Authority.Storage.Mongo.Initialization;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.RateLimiting;
using StellaOps.Configuration;
using StellaOps.Plugin.DependencyInjection;
using StellaOps.Plugin.Hosting;
using StellaOps.Authority.OpenIddict.Handlers;
using System.Linq;
using StellaOps.Cryptography.Audit;
using StellaOps.Cryptography.DependencyInjection;
using StellaOps.Authority.Permalinks;
using StellaOps.Authority.Revocation;
using StellaOps.Authority.Signing;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Security;
using StellaOps.Auth.Abstractions;
@@ -162,15 +164,36 @@ else
builder.Services.AddScoped<ValidateDpopProofHandler>();
#endif

builder.Services.AddRateLimiter(rateLimiterOptions =>
{
    AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions);
});

builder.Services.AddStellaOpsCrypto();
builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>());
builder.Services.AddSingleton<AuthoritySigningKeyManager>();
builder.Services.AddSingleton<VulnPermalinkService>();
builder.Services.AddRateLimiter(rateLimiterOptions =>
{
    AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions);
});

var requiresKms = string.Equals(authorityOptions.Signing.KeySource, "kms", StringComparison.OrdinalIgnoreCase)
    || authorityOptions.Signing.AdditionalKeys.Any(k => string.Equals(k.Source, "kms", StringComparison.OrdinalIgnoreCase));

if (requiresKms)
{
    if (string.IsNullOrWhiteSpace(authorityOptions.Signing.KeyPassphrase))
    {
        throw new InvalidOperationException("Authority signing with source 'kms' requires signing.keyPassphrase to be configured.");
    }

    var kmsRoot = Path.Combine(builder.Environment.ContentRootPath, "kms");
    builder.Services.AddFileKms(options =>
    {
        options.RootPath = kmsRoot;
        options.Password = authorityOptions.Signing.KeyPassphrase!;
        options.Algorithm = authorityOptions.Signing.Algorithm;
    });

    builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, KmsAuthoritySigningKeySource>());
}

builder.Services.AddStellaOpsCrypto();
builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>());
builder.Services.AddSingleton<AuthoritySigningKeyManager>();
builder.Services.AddSingleton<VulnPermalinkService>();

AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader
    .Load(authorityOptions, builder.Environment.ContentRootPath)
@@ -0,0 +1,59 @@
using System.Collections.Generic;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;

namespace StellaOps.Authority.Signing;

internal sealed class KmsAuthoritySigningKeySource : IAuthoritySigningKeySource
{
    private readonly IKmsClient _kmsClient;

    public KmsAuthoritySigningKeySource(IKmsClient kmsClient)
        => _kmsClient = kmsClient ?? throw new ArgumentNullException(nameof(kmsClient));

    public bool CanLoad(string source)
        => string.Equals(source, "kms", StringComparison.OrdinalIgnoreCase);

    public CryptoSigningKey Load(AuthoritySigningKeyRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (!CanLoad(request.Source))
        {
            throw new InvalidOperationException($"KMS signing key source cannot load '{request.Source}'.");
        }

        var keyId = (request.Location ?? string.Empty).Trim();
        if (string.IsNullOrWhiteSpace(keyId))
        {
            throw new InvalidOperationException("KMS signing keys require signing.keyPath/location to specify the key identifier.");
        }

        request.AdditionalMetadata?.TryGetValue(KmsMetadataKeys.Version, out var versionId);
        var material = _kmsClient.ExportAsync(keyId, versionId).GetAwaiter().GetResult();

        var parameters = new ECParameters
        {
            Curve = ECCurve.NamedCurves.nistP256,
            D = material.D.ToArray(),
            Q = new ECPoint
            {
                X = material.Qx.ToArray(),
                Y = material.Qy.ToArray(),
            },
        };

        var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
        {
            [KmsMetadataKeys.Version] = material.VersionId
        };

        var reference = new CryptoKeyReference(request.KeyId, request.Provider);
        return new CryptoSigningKey(reference, material.Algorithm, in parameters, material.CreatedAt, request.ExpiresAt, metadata: metadata);
    }

    internal static class KmsMetadataKeys
    {
        public const string Version = "kms.version";
    }
}
@@ -28,6 +28,7 @@
    <ProjectReference Include="..\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography.Kms/StellaOps.Cryptography.Kms.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
  </ItemGroup>
@@ -36,4 +37,4 @@
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </Content>
  </ItemGroup>
</Project>
@@ -9,4 +9,4 @@
|FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**DONE (2025-10-15)** – Added `CccsDiagnostics` meter (fetch/parse/map counters), enriched connector logs with document counts, and published `docs/modules/concelier/operations/connectors/cccs.md` covering config, telemetry, and sanitiser guidance.|
|FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**DONE (2025-10-15)** – Measured `/api/cccs/threats/v1/get` inventory (~5.1k rows/lang; earliest 2018-06-08), documented backfill workflow + language split strategy, and linked the runbook for Offline Kit execution.|
|FEEDCONN-CCCS-02-008 Raw DOM parsing refinement|BE-Conn-CCCS|Source.Common|**DONE (2025-10-15)** – Parser now walks unsanitised DOM (heading + nested list coverage), sanitizer keeps `<h#>`/`section` nodes, and regression fixtures/tests assert EN/FR list handling + preserved HTML structure.|
|FEEDCONN-CCCS-02-009 Normalized versions rollout (Oct 2025)|BE-Conn-CCCS|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** – Implement trailing-version split helper per Merge guidance (see `../Merge/RANGE_PRIMITIVES_COORDINATION.md` “Helper snippets”) to emit `NormalizedVersions` via `SemVerRangeRuleBuilder`; refresh mapper tests/fixtures to assert provenance notes (`cccs:{serial}:{index}`) and confirm merge counters drop.|
|FEEDCONN-CCCS-02-009 Normalized versions rollout (Oct 2025)|BE-Conn-CCCS|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** – Implement trailing-version split helper per Merge guidance (see `../Merge/RANGE_PRIMITIVES_COORDINATION.md` “Helper snippets”) to emit `NormalizedVersions` via `SemVerRangeRuleBuilder`; refresh mapper tests/fixtures to assert provenance notes (`cccs:{serial}:{index}`) and confirm merge counters drop.<br>2025-10-29: See `docs/dev/normalized-rule-recipes.md` for ready-made helper + regex snippet; wire into `BuildPackages` and update fixtures with `UPDATE_CCCS_FIXTURES=1`.|
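The FEEDCONN-CCCS-02-009 rows above call for a trailing-version split helper feeding `SemVerRangeRuleBuilder` with `cccs:{serial}:{index}` provenance notes. As a rough illustration only — the `NormalizedVersionRule` record and regex below are hypothetical stand-ins, not the connector's actual types — the idea is to peel an exact version off the product string and attach the provenance note:

```csharp
using System.Text.RegularExpressions;

// Hypothetical stand-ins; the real SemVerRangeRuleBuilder lives in the Merge guidance.
record NormalizedVersionRule(string Scheme, string Type, string Value, string Notes);

static class TrailingVersionSplitter
{
    // Matches a trailing SemVer-looking token, e.g. "Acme Gateway 2.14.1" -> ("Acme Gateway", "2.14.1").
    private static readonly Regex TrailingVersion =
        new(@"^(?<product>.+?)\s+v?(?<version>\d+(\.\d+){1,3})$", RegexOptions.Compiled);

    public static NormalizedVersionRule? TrySplit(string rawProduct, string serial, int index)
    {
        var match = TrailingVersion.Match(rawProduct.Trim());
        if (!match.Success)
        {
            return null; // no trailing version: leave the vendor string alone, emit no normalized rule
        }

        return new NormalizedVersionRule(
            Scheme: "semver",
            Type: "exact",
            Value: match.Groups["version"].Value,
            Notes: $"cccs:{serial}:{index}"); // provenance note the fixtures assert on
    }
}
```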
@@ -13,4 +13,4 @@
|Express unaffected/investigation statuses without overloading range fields|BE-Conn-RH|Models|**DONE** – Introduced AffectedPackageStatus collection and updated mapper/tests.|
|Reference dedupe & ordering in mapper|BE-Conn-RH|Models|DONE – mapper consolidates by URL, merges metadata, deterministic ordering validated in tests.|
|Hydra summary fetch through SourceFetchService|BE-Conn-RH|Source.Common|DONE – summary pages now fetched via SourceFetchService with cache + conditional headers.|
|Fixture validation sweep|QA|None|**DOING (2025-10-19)** – Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.|
|Fixture validation sweep|QA|None|**DOING (2025-10-19)** – Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.<br>2025-10-29: Added `scripts/update-redhat-fixtures.sh` to regenerate golden snapshots with `UPDATE_GOLDENS=1`; run it before reviews to capture CSAF contract deltas.|
@@ -12,4 +12,4 @@
|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** – GovDelivery onboarding runbook captured in `docs/modules/concelier/operations/connectors/ics-cisa.md`; secret vault path and Offline Kit handling documented.|
|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** – Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).|
|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** – Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.|
|FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** – Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.|
|FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** – Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.<br>2025-10-29: Follow `docs/dev/normalized-rule-recipes.md` §2 to call `ToNormalizedVersionRule` and ensure mixed firmware strings log a Models ticket when regex extraction fails.|
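The FEEDCONN-ICSCISA-02-012 rows above describe promoting exact `SemVerPrimitive` values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`. A minimal sketch of that shape — the records and extension method below are hypothetical stand-ins for illustration, not the actual Models/Merge APIs:

```csharp
// Hypothetical shapes; only the call pattern comes from the task note above.
record SemVerPrimitive(string? ExactValue);
record NormalizedVersionRule(string Scheme, string Type, string Value, string Notes);

static class SemVerPrimitiveExtensions
{
    public static NormalizedVersionRule? ToNormalizedVersionRule(this SemVerPrimitive primitive, string provenanceNote)
        => string.IsNullOrWhiteSpace(primitive.ExactValue)
            ? null // non-SemVer firmware strings fall through; log them and raise a Models ticket for a new scheme
            : new NormalizedVersionRule("semver", "exact", primitive.ExactValue!, provenanceNote);
}

// Usage sketch inside a mapper:
// var rule = primitive.ToNormalizedVersionRule($"ics-cisa:{advisoryId}:{product}");
// if (rule is null) { /* record the firmware string for the Models decision */ }
```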
@@ -336,46 +336,67 @@ internal sealed class AdvisoryRawService : IAdvisoryRawService
            string.IsNullOrWhiteSpace(content.Encoding) ? null : content.Encoding.Trim());
    }

    private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers)
    {
        var normalizedAliases = identifiers.Aliases
            .Where(static alias => !string.IsNullOrWhiteSpace(alias))
            .Select(static alias => alias.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();

        return new RawIdentifiers(
            normalizedAliases,
            identifiers.PrimaryId?.Trim() ?? string.Empty);
    }

    private static RawLinkset NormalizeLinkset(RawLinkset linkset)
    {
        return new RawLinkset
        {
            Aliases = NormalizeStringArray(linkset.Aliases, StringComparer.OrdinalIgnoreCase),
            PackageUrls = NormalizeStringArray(linkset.PackageUrls, StringComparer.Ordinal),
            Cpes = NormalizeStringArray(linkset.Cpes, StringComparer.Ordinal),
            References = NormalizeReferences(linkset.References),
            ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom, StringComparer.Ordinal),
            Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty,
        };
    }

    private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values, StringComparer comparer)
    {
        if (values.IsDefaultOrEmpty)
        {
            return EmptyArray;
        }

        return values
            .Where(static value => !string.IsNullOrWhiteSpace(value))
            .Select(static value => value.Trim())
            .Distinct(comparer)
            .OrderBy(static value => value, comparer)
            .ToImmutableArray();
    }
    private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers)
    {
        var aliases = identifiers.Aliases;
        if (!aliases.IsDefaultOrEmpty)
        {
            var builder = ImmutableArray.CreateBuilder<string>(aliases.Length);
            foreach (var alias in aliases)
            {
                if (string.IsNullOrWhiteSpace(alias))
                {
                    continue;
                }

                builder.Add(alias.Trim());
            }

            aliases = builder.ToImmutable();
        }
        else
        {
            aliases = ImmutableArray<string>.Empty;
        }

        return new RawIdentifiers(
            aliases,
            identifiers.PrimaryId?.Trim() ?? string.Empty);
    }

    private static RawLinkset NormalizeLinkset(RawLinkset linkset)
    {
        return new RawLinkset
        {
            Aliases = NormalizeStringArray(linkset.Aliases),
            PackageUrls = NormalizeStringArray(linkset.PackageUrls),
            Cpes = NormalizeStringArray(linkset.Cpes),
            References = NormalizeReferences(linkset.References),
            ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom),
            Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty,
        };
    }

    private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values)
    {
        if (values.IsDefaultOrEmpty)
        {
            return ImmutableArray<string>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<string>(values.Length);
        foreach (var value in values)
        {
            if (string.IsNullOrWhiteSpace(value))
            {
                continue;
            }

            builder.Add(value.Trim());
        }

        return builder.ToImmutable();
    }

    private static ImmutableArray<RawReference> NormalizeReferences(ImmutableArray<RawReference> references)
    {
@@ -12,8 +12,9 @@
| CONCELIER-CORE-AOC-19-003 `Idempotent append-only upsert` | DONE (2025-10-28) | Concelier Core Guild | CONCELIER-STORE-AOC-19-002 | Implement idempotent upsert path using `(vendor, upstreamId, contentHash, tenant)` key, emitting supersedes pointers for new revisions and preventing duplicate inserts. |
> 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics.
> Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure.
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. |
> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout.
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. |
> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout.
> 2025-10-29: `AdvisoryRawService` now preserves upstream alias/linkset ordering (trim-only) and updated AOC documentation reflects the behaviour; follow-up to ensure policy consumers handle duplicates remains open.
| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. |
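The CONCELIER-CORE-AOC-19-003 row above describes the idempotent append-only upsert keyed by `(vendor, upstreamId, contentHash, tenant)`. A minimal sketch of that pattern with the MongoDB driver — the collection and field names here are assumptions for illustration, not the repository's actual schema:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;

static class RawAdvisoryUpsertSketch
{
    // Replaying the same (vendor, upstreamId, contentHash, tenant) tuple leaves the stored
    // document untouched, which is what makes the ingest path append-only and idempotent.
    public static async Task<bool> TryInsertRawAsync(
        IMongoCollection<BsonDocument> rawAdvisories,
        string vendor, string upstreamId, string contentHash, string tenant,
        BsonDocument payload,
        CancellationToken cancellationToken)
    {
        var filter = Builders<BsonDocument>.Filter.And(
            Builders<BsonDocument>.Filter.Eq("vendor", vendor),
            Builders<BsonDocument>.Filter.Eq("upstreamId", upstreamId),
            Builders<BsonDocument>.Filter.Eq("contentHash", contentHash),
            Builders<BsonDocument>.Filter.Eq("tenant", tenant));

        // $setOnInsert only applies when the upsert actually inserts, so duplicates are no-ops.
        var update = Builders<BsonDocument>.Update
            .SetOnInsert("payload", payload)
            .SetOnInsert("ingestedAt", DateTime.UtcNow);

        var result = await rawAdvisories.UpdateOneAsync(
            filter, update, new UpdateOptions { IsUpsert = true }, cancellationToken);

        return result.UpsertedId != null; // false => duplicate; caller surfaces the supersedes pointer instead
    }
}
```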
## Policy Engine v2
@@ -27,10 +28,12 @@
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. |
> 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically.
| CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. |
> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending.
| CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. |
> 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically.
> 2025-10-29: Cross-guild handshake captured in `docs/dev/cartographer-graph-handshake.md`; begin drafting enrichment plan once Cartographer ships the inspector schema/query patterns.
| CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. |
> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending.
> 2025-10-29: Action item from handshake doc — prepare sample `sbom.relationship.changed` payload + replay notes once schema lands; coordinate with Scheduler for queue semantics.
## Link-Not-Merge v1
@@ -92,6 +92,7 @@ Until these blocks land, connectors should stage changes behind a feature flag o
## Tracking & follow-up
- Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900.
- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`).
- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge.
- Precedence merge emits `Normalized version rules missing` warnings (source + package type) whenever we encounter ranges without normalized output—watch CI/staging logs for those signals to prioritise backlog fixes.
- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots.
- If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing.
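The monitoring bullets above point at the `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` counters. For local spot-checks, a .NET `MeterListener` can subscribe to those instrument names; a sketch, with only the counter names taken from this document and everything else illustrative:

```csharp
using System;
using System.Diagnostics.Metrics;

static class NormalizedRuleCounterWatcher
{
    public static MeterListener Start()
    {
        var listener = new MeterListener
        {
            // Enable only the merge counters we care about.
            InstrumentPublished = (instrument, l) =>
            {
                if (instrument.Name.StartsWith("concelier.merge.normalized_rules", StringComparison.Ordinal))
                {
                    l.EnableMeasurementEvents(instrument);
                }
            },
        };

        // Print each increment so missing-rule spikes are visible during a staging run.
        listener.SetMeasurementEventCallback<long>((instrument, value, tags, state) =>
            Console.WriteLine($"{instrument.Name} += {value}"));

        listener.Start();
        return listener; // dispose the listener to stop observing
    }
}
```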
@@ -162,7 +162,7 @@ public sealed class AdvisoryPrecedenceMerger
            .ToArray();

        var packageResult = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages));
        RecordNormalizedRuleMetrics(packageResult.Packages);
        RecordNormalizedRuleMetrics(advisoryKey, packageResult.Packages);
        var affectedPackages = packageResult.Packages;
        var cvssMetrics = ordered
            .SelectMany(entry => entry.Advisory.CvssMetrics)
@@ -217,13 +217,16 @@ public sealed class AdvisoryPrecedenceMerger
        return new PrecedenceMergeResult(merged, conflicts);
    }

    private static void RecordNormalizedRuleMetrics(IReadOnlyList<AffectedPackage> packages)
    private void RecordNormalizedRuleMetrics(string advisoryKey, IReadOnlyList<AffectedPackage> packages)
    {
        if (packages.Count == 0)
        {
            return;
        }

        var missingSources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var missingPackageTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var package in packages)
        {
            var packageType = package.Type ?? string.Empty;
@@ -249,8 +252,41 @@ public sealed class AdvisoryPrecedenceMerger
            };

            MissingNormalizedRuleCounter.Add(1, tags);

            if (package.Provenance.Length > 0)
            {
                foreach (var provenance in package.Provenance)
                {
                    if (string.IsNullOrWhiteSpace(provenance.Source))
                    {
                        continue;
                    }

                    if (!string.Equals(provenance.Source, "merge", StringComparison.OrdinalIgnoreCase))
                    {
                        missingSources.Add(provenance.Source);
                    }
                }
            }

            if (!string.IsNullOrWhiteSpace(packageType))
            {
                missingPackageTypes.Add(packageType);
            }
        }
    }

        if (missingSources.Count > 0)
        {
            var sources = string.Join(",", missingSources.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase));
            var packageTypes = string.Join(",", missingPackageTypes.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase));

            _logger.LogWarning(
                "Normalized version rules missing for {AdvisoryKey}; sources={Sources}; packageTypes={PackageTypes}",
                advisoryKey,
                sources,
                packageTypes);
        }
    }

    private string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector)
@@ -15,14 +15,14 @@
|FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE – `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.|
|Override audit logging|BE-Merge|Observability|DONE – override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.|
|Configurable precedence table|BE-Merge|Architecture|DONE – precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-29: Added merge-time warnings highlighting sources/package types when ranges emit without normalized rules to accelerate backlog triage.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.|
|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) – merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.|
|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** – GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.|
|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** – Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.|
> Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests.
|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** – Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.|
|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.|
|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** – Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.<br>2025-10-29: Merge now emits `Normalized version rules missing...` warnings (see `docs/dev/normalized-rule-recipes.md` §4); include zero-warning excerpt plus Grafana counter snapshot when closing this task.|
|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.<br>2025-10-29: Recipes doc (§2–§3) outlines SemVer promotion + fallback logging—attach decision summary + log sample when handing off to Models.|
|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.|
## Link-Not-Merge v1 Transition
@@ -0,0 +1,212 @@
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common.State;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace StellaOps.Concelier.Connector.Common.Tests;

public sealed class SourceStateSeedProcessorTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private readonly MongoClient _client;
    private readonly IMongoDatabase _database;
    private readonly DocumentStore _documentStore;
    private readonly RawDocumentStorage _rawStorage;
    private readonly MongoSourceStateRepository _stateRepository;
    private readonly FakeTimeProvider _timeProvider;

    public SourceStateSeedProcessorTests()
    {
        _runner = MongoDbRunner.Start(singleNodeReplSet: true);
        _client = new MongoClient(_runner.ConnectionString);
        _database = _client.GetDatabase($"source-state-seed-{Guid.NewGuid():N}");
        _documentStore = new DocumentStore(_database, NullLogger<DocumentStore>.Instance);
        _rawStorage = new RawDocumentStorage(_database);
        _stateRepository = new MongoSourceStateRepository(_database, NullLogger<MongoSourceStateRepository>.Instance);
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 28, 12, 0, 0, TimeSpan.Zero));
    }

    [Fact]
    public async Task ProcessAsync_PersistsDocumentsAndUpdatesCursor()
    {
        var processor = CreateProcessor();
        var documentId = Guid.NewGuid();

        var specification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-1",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-1\"}"),
                    ContentType = "application/json",
                    Headers = new Dictionary<string, string> { ["X-Test"] = "true" },
                    Metadata = new Dictionary<string, string> { ["test.meta"] = "value" },
                    FetchedAt = _timeProvider.GetUtcNow().AddMinutes(-5),
                    AddToPendingDocuments = true,
                    AddToPendingMappings = true,
                    KnownIdentifiers = new[] { "ADV-1" },
                }
            },
            Cursor = new SourceStateSeedCursor
            {
                LastModifiedCursor = _timeProvider.GetUtcNow().AddDays(-1),
                LastFetchAt = _timeProvider.GetUtcNow().AddMinutes(-10),
                Additional = new Dictionary<string, string> { ["custom"] = "value" },
            },
            KnownAdvisories = new[] { "ADV-0" },
        };

        var result = await processor.ProcessAsync(specification, CancellationToken.None);

        Assert.Equal(1, result.DocumentsProcessed);
        Assert.Single(result.PendingDocumentIds);
        Assert.Contains(documentId, result.PendingDocumentIds);
        Assert.Single(result.PendingMappingIds);
        Assert.Contains(documentId, result.PendingMappingIds);
        Assert.Equal(2, result.KnownAdvisoriesAdded.Count);
        Assert.Contains("ADV-0", result.KnownAdvisoriesAdded);
        Assert.Contains("ADV-1", result.KnownAdvisoriesAdded);
        Assert.Equal(_timeProvider.GetUtcNow(), result.CompletedAt);

        var storedDocument = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-1",
            CancellationToken.None);

        Assert.NotNull(storedDocument);
        Assert.Equal(documentId, storedDocument!.Id);
        Assert.Equal("application/json", storedDocument.ContentType);
        Assert.Equal(DocumentStatuses.PendingParse, storedDocument.Status);
        Assert.NotNull(storedDocument.GridFsId);
        Assert.NotNull(storedDocument.Headers);
        Assert.Equal("true", storedDocument.Headers!["X-Test"]);
        Assert.NotNull(storedDocument.Metadata);
        Assert.Equal("value", storedDocument.Metadata!["test.meta"]);

        var filesCollection = _database.GetCollection<BsonDocument>("documents.files");
        var fileCount = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
        Assert.Equal(1, fileCount);

        var state = await _stateRepository.TryGetAsync("vndr.test", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal(_timeProvider.GetUtcNow().UtcDateTime, state!.LastSuccess);

        var cursor = state.Cursor;
        var pendingDocs = cursor["pendingDocuments"].AsBsonArray.Select(v => Guid.Parse(v.AsString)).ToList();
        Assert.Contains(documentId, pendingDocs);

        var pendingMappings = cursor["pendingMappings"].AsBsonArray.Select(v => Guid.Parse(v.AsString)).ToList();
        Assert.Contains(documentId, pendingMappings);

        var knownAdvisories = cursor["knownAdvisories"].AsBsonArray.Select(v => v.AsString).ToList();
        Assert.Contains("ADV-0", knownAdvisories);
        Assert.Contains("ADV-1", knownAdvisories);

        Assert.Equal(_timeProvider.GetUtcNow().UtcDateTime, cursor["lastSeededAt"].ToUniversalTime());
        Assert.Equal("value", cursor["custom"].AsString);
    }

    [Fact]
    public async Task ProcessAsync_ReplacesExistingDocumentAndCleansPreviousRawPayload()
    {
        var processor = CreateProcessor();
        var documentId = Guid.NewGuid();

        var initialSpecification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-2",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-2\",\"rev\":1}"),
                    ContentType = "application/json",
                    AddToPendingDocuments = true,
                }
            },
            KnownAdvisories = new[] { "ADV-2" },
        };

        await processor.ProcessAsync(initialSpecification, CancellationToken.None);

        var existingRecord = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-2",
            CancellationToken.None);

        Assert.NotNull(existingRecord);
        var previousGridId = existingRecord!.GridFsId;
        Assert.NotNull(previousGridId);

        var filesCollection = _database.GetCollection<BsonDocument>("documents.files");
        var initialFiles = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Single(initialFiles);

        var updatedSpecification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-2",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-2\",\"rev\":2}"),
                    ContentType = "application/json",
                    AddToPendingDocuments = true,
                }
            }
        };

        var secondResult = await processor.ProcessAsync(updatedSpecification, CancellationToken.None);

        Assert.Equal(1, secondResult.DocumentsProcessed);
        Assert.Empty(secondResult.PendingDocumentIds); // already present in cursor
        Assert.Empty(secondResult.PendingMappingIds);

        var refreshedRecord = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-2",
            CancellationToken.None);

        Assert.NotNull(refreshedRecord);
        Assert.Equal(documentId, refreshedRecord!.Id);
        Assert.NotNull(refreshedRecord.GridFsId);
        Assert.NotEqual(previousGridId, refreshedRecord.GridFsId);

        var files = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Single(files);
        Assert.NotEqual(previousGridId, files[0]["_id"].AsObjectId);
    }

    private SourceStateSeedProcessor CreateProcessor()
        => new(
            _documentStore,
            _rawStorage,
            _stateRepository,
            _timeProvider,
            NullLogger<SourceStateSeedProcessor>.Instance);

    public Task InitializeAsync() => Task.CompletedTask;

    public async Task DisposeAsync()
    {
        await _client.DropDatabaseAsync(_database.DatabaseNamespace.DatabaseName);
        _runner.Dispose();
    }
}
@@ -4,8 +4,21 @@
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="Mongo2Go" Version="4.1.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>
</Project>
  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
  </ItemGroup>
</Project>
@@ -34,21 +34,41 @@ public sealed class AdvisoryRawServiceTests
    }

    [Fact]
    public async Task IngestAsync_PropagatesRepositoryDuplicateResult()
    {
        var repository = new RecordingRepository();
        var service = CreateService(repository);

        var existingDocument = CreateDocument();
        var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument));
        repository.NextResult = expectedResult;

        var result = await service.IngestAsync(CreateDocument(), CancellationToken.None);

        Assert.False(result.Inserted);
        Assert.Same(expectedResult.Record, result.Record);
    }

    public async Task IngestAsync_PropagatesRepositoryDuplicateResult()
    {
        var repository = new RecordingRepository();
        var service = CreateService(repository);

        var existingDocument = CreateDocument();
        var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument));
        repository.NextResult = expectedResult;

        var result = await service.IngestAsync(CreateDocument(), CancellationToken.None);

        Assert.False(result.Inserted);
        Assert.Same(expectedResult.Record, result.Record);
    }

    [Fact]
    public async Task IngestAsync_PreservesAliasOrderAndDuplicates()
    {
        var repository = new RecordingRepository();
        var service = CreateService(repository);

        var aliasSeries = ImmutableArray.Create("CVE-2025-0001", "CVE-2025-0001", "GHSA-xxxx", "cve-2025-0001");
        var document = CreateDocument() with
        {
            Identifiers = new RawIdentifiers(aliasSeries, "GHSA-xxxx"),
        };

        repository.NextResult = new AdvisoryRawUpsertResult(true, CreateRecord(document));

        await service.IngestAsync(document, CancellationToken.None);

        Assert.NotNull(repository.CapturedDocument);
        Assert.Equal(aliasSeries, repository.CapturedDocument!.Identifiers.Aliases);
    }

    private static AdvisoryRawService CreateService(RecordingRepository repository)
    {
        var writeGuard = new AdvisoryRawWriteGuard(new AocWriteGuard());
@@ -20,10 +20,12 @@
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. |
> 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target.
| EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. |
> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented.
| EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. |
> 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target.
> 2025-10-29: Handshake actions in `docs/dev/cartographer-graph-handshake.md` — draft batch linkout API skeleton + fixture plan once Cartographer delivers query patterns.
| EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. |
> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented.
> 2025-10-29: Align overlay schema work with the handshake doc once Policy Guild publishes the overlay additions; collect sample payloads for review.
## Link-Not-Merge v1
@@ -17,8 +17,9 @@
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. |
> 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined.
| EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. |
> 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined.
> 2025-10-29: Per `docs/dev/cartographer-graph-handshake.md`, prepare index sizing doc once Cartographer shares query shapes; include perf targets + migration plan before unblocking.
## Link-Not-Merge v1
@@ -26,6 +26,7 @@
| SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003, SCANNER-ANALYZERS-JAVA-21-004, SCANNER-ANALYZERS-JAVA-21-005 | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Observation JSON for fixtures deterministic; includes entrypoints, edges, warnings; passes AOC compliance lint. |
| SCANNER-ANALYZERS-JAVA-21-009 | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-008 | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Fixture suite committed under `fixtures/lang/java/ep`; determinism + benchmark gates (<300ms fat jar) configured in CI. |
| SCANNER-ANALYZERS-JAVA-21-010 | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-008 | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. | Runtime harness produces scrubbed events for sample app; edges merge with static output; docs describe sandbox & privacy. |
| SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. |
> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria.
| SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. |
> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria.
> 2025-10-29 — See `docs/dev/java-analyzer-observation-plan.md` for prerequisite checklist and target dates; unblock once reflection/config/JNI tasks land and observation schema is frozen.
src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.cs (new file, 593 lines)
@@ -0,0 +1,593 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
namespace StellaOps.Cryptography.Kms;
|
||||
|
||||
/// <summary>
|
||||
/// File-backed KMS implementation that stores encrypted key material on disk.
|
||||
/// </summary>
|
||||
public sealed class FileKmsClient : IKmsClient, IDisposable
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = true,
|
||||
Converters =
|
||||
{
|
||||
new JsonStringEnumConverter(),
|
||||
},
|
||||
};
|
||||
|
||||
private readonly FileKmsOptions _options;
|
||||
private readonly SemaphoreSlim _mutex = new(1, 1);
|
||||
|
||||
public FileKmsClient(FileKmsOptions options)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
if (string.IsNullOrWhiteSpace(options.RootPath))
|
||||
{
|
||||
            throw new ArgumentException("Root path must be provided.", nameof(options));
        }

        if (string.IsNullOrWhiteSpace(options.Password))
        {
            throw new ArgumentException("Password must be provided.", nameof(options));
        }

        _options = options;
        Directory.CreateDirectory(_options.RootPath);
    }

    public async Task<KmsSignResult> SignAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        if (data.IsEmpty)
        {
            throw new ArgumentException("Data cannot be empty.", nameof(data));
        }

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            if (record.State == KmsKeyState.Revoked)
            {
                throw new InvalidOperationException($"Key '{keyId}' is revoked and cannot be used for signing.");
            }

            var version = ResolveVersion(record, keyVersion);
            if (version.State != KmsKeyState.Active)
            {
                throw new InvalidOperationException($"Key version '{version.VersionId}' is not active. Current state: {version.State}");
            }

            var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false);
            var signature = SignData(privateKey, data.Span);
            return new KmsSignResult(record.KeyId, version.VersionId, record.Algorithm, signature);
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task<bool> VerifyAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        ReadOnlyMemory<byte> signature,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        if (data.IsEmpty || signature.IsEmpty)
        {
            return false;
        }

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false);
            if (record is null)
            {
                return false;
            }

            var version = ResolveVersion(record, keyVersion);
            if (string.IsNullOrWhiteSpace(version.PublicKey))
            {
                return false;
            }

            return VerifyData(version.CurveName, version.PublicKey, data.Span, signature.Span);
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task<KmsKeyMetadata> GetMetadataAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");
            return ToMetadata(record);
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task<KmsKeyMaterial> ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            var version = ResolveVersion(record, keyVersion);
            if (string.IsNullOrWhiteSpace(version.PublicKey))
            {
                throw new InvalidOperationException($"Key '{keyId}' version '{version.VersionId}' does not have public key material.");
            }

            var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false);
            return new KmsKeyMaterial(
                record.KeyId,
                version.VersionId,
                record.Algorithm,
                version.CurveName,
                Convert.FromBase64String(privateKey.D),
                Convert.FromBase64String(privateKey.Qx),
                Convert.FromBase64String(privateKey.Qy),
                version.CreatedAt);
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task<KmsKeyMetadata> RotateAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false)
                ?? throw new InvalidOperationException("Failed to create or load key metadata.");

            if (record.State == KmsKeyState.Revoked)
            {
                throw new InvalidOperationException($"Key '{keyId}' has been revoked and cannot be rotated.");
            }

            var timestamp = DateTimeOffset.UtcNow;
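            // Version ids are UTC timestamps ("yyyyMMddTHHmmssfffZ"), so newer versions sort lexically after older ones.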
            var versionId = $"{timestamp:yyyyMMddTHHmmssfffZ}";
            var keyData = CreateKeyMaterial(record.Algorithm);

            try
            {
                var envelope = EncryptPrivateKey(keyData.PrivateBlob);
                var fileName = $"{versionId}.key.json";
                var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName);
                await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false);

                foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active))
                {
                    existing.State = KmsKeyState.PendingRotation;
                }

                record.Versions.Add(new KeyVersionRecord
                {
                    VersionId = versionId,
                    State = KmsKeyState.Active,
                    CreatedAt = timestamp,
                    PublicKey = keyData.PublicKey,
                    CurveName = keyData.Curve,
                    FileName = fileName,
                });

                record.CreatedAt ??= timestamp;
                record.State = KmsKeyState.Active;
                record.ActiveVersion = versionId;

                await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
                return ToMetadata(record);
            }
            finally
            {
                CryptographicOperations.ZeroMemory(keyData.PrivateBlob);
            }
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task RevokeAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            var timestamp = DateTimeOffset.UtcNow;
            record.State = KmsKeyState.Revoked;
            foreach (var version in record.Versions)
            {
                if (version.State != KmsKeyState.Revoked)
                {
                    version.State = KmsKeyState.Revoked;
                    version.DeactivatedAt = timestamp;
                }
            }

            await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
        }
        finally
        {
            _mutex.Release();
        }
    }

    private static string GetMetadataPath(string root, string keyId)
        => Path.Combine(root, keyId, "metadata.json");

    private string GetKeyDirectory(string keyId)
    {
        var path = Path.Combine(_options.RootPath, keyId);
        Directory.CreateDirectory(path);
        return path;
    }

    private async Task<KeyMetadataRecord?> LoadOrCreateMetadataAsync(
        string keyId,
        CancellationToken cancellationToken,
        bool createIfMissing)
    {
        var metadataPath = GetMetadataPath(_options.RootPath, keyId);
        if (!File.Exists(metadataPath))
        {
            if (!createIfMissing)
            {
                return null;
            }

            var record = new KeyMetadataRecord
            {
                KeyId = keyId,
                Algorithm = _options.Algorithm,
                State = KmsKeyState.Active,
                CreatedAt = DateTimeOffset.UtcNow,
            };

            await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
            return record;
        }

        await using var stream = File.Open(metadataPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        var loadedRecord = await JsonSerializer.DeserializeAsync<KeyMetadataRecord>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        if (loadedRecord is null)
        {
            return null;
        }

        if (string.IsNullOrWhiteSpace(loadedRecord.Algorithm))
        {
            loadedRecord.Algorithm = KmsAlgorithms.Es256;
        }

        foreach (var version in loadedRecord.Versions)
        {
            if (string.IsNullOrWhiteSpace(version.CurveName))
            {
                version.CurveName = "nistP256";
            }
        }

        return loadedRecord;
    }

    private async Task SaveMetadataAsync(KeyMetadataRecord record, CancellationToken cancellationToken)
    {
        var metadataPath = GetMetadataPath(_options.RootPath, record.KeyId);
        Directory.CreateDirectory(Path.GetDirectoryName(metadataPath)!);
        await using var stream = File.Open(metadataPath, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, record, JsonOptions, cancellationToken).ConfigureAwait(false);
    }

    private async Task<EcdsaPrivateKeyRecord> LoadPrivateKeyAsync(KeyMetadataRecord record, KeyVersionRecord version, CancellationToken cancellationToken)
    {
        var keyPath = Path.Combine(GetKeyDirectory(record.KeyId), version.FileName);
        if (!File.Exists(keyPath))
        {
            throw new InvalidOperationException($"Key material for version '{version.VersionId}' was not found.");
        }

        await using var stream = File.Open(keyPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        var envelope = await JsonSerializer.DeserializeAsync<KeyEnvelope>(stream, JsonOptions, cancellationToken).ConfigureAwait(false)
            ?? throw new InvalidOperationException("Key envelope could not be deserialized.");

        var payload = DecryptPrivateKey(envelope);
        try
        {
            return JsonSerializer.Deserialize<EcdsaPrivateKeyRecord>(payload, JsonOptions)
                ?? throw new InvalidOperationException("Key payload could not be deserialized.");
        }
        finally
        {
            CryptographicOperations.ZeroMemory(payload);
        }
    }

    private static KeyVersionRecord ResolveVersion(KeyMetadataRecord record, string? keyVersion)
    {
        KeyVersionRecord? version = null;
        if (!string.IsNullOrWhiteSpace(keyVersion))
        {
            version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, keyVersion, StringComparison.Ordinal));
            if (version is null)
            {
                throw new InvalidOperationException($"Key version '{keyVersion}' does not exist for key '{record.KeyId}'.");
            }
        }
        else if (!string.IsNullOrWhiteSpace(record.ActiveVersion))
        {
            version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, record.ActiveVersion, StringComparison.Ordinal));
        }

        version ??= record.Versions
            .Where(v => v.State == KmsKeyState.Active)
            .OrderByDescending(v => v.CreatedAt)
            .FirstOrDefault();

        if (version is null)
        {
            throw new InvalidOperationException($"Key '{record.KeyId}' does not have an active version.");
        }

        return version;
    }

    private EcdsaKeyData CreateKeyMaterial(string algorithm)
    {
        if (!string.Equals(algorithm, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase))
        {
            throw new NotSupportedException($"Algorithm '{algorithm}' is not supported by the file KMS driver.");
        }

        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var parameters = ecdsa.ExportParameters(true);

        var keyRecord = new EcdsaPrivateKeyRecord
        {
            Curve = "nistP256",
            D = Convert.ToBase64String(parameters.D ?? Array.Empty<byte>()),
            Qx = Convert.ToBase64String(parameters.Q.X ?? Array.Empty<byte>()),
            Qy = Convert.ToBase64String(parameters.Q.Y ?? Array.Empty<byte>()),
        };

        var privateBlob = JsonSerializer.SerializeToUtf8Bytes(keyRecord, JsonOptions);

        var qx = parameters.Q.X ?? Array.Empty<byte>();
        var qy = parameters.Q.Y ?? Array.Empty<byte>();
        var publicKey = new byte[qx.Length + qy.Length];
        Buffer.BlockCopy(qx, 0, publicKey, 0, qx.Length);
        Buffer.BlockCopy(qy, 0, publicKey, qx.Length, qy.Length);

        return new EcdsaKeyData(privateBlob, Convert.ToBase64String(publicKey), keyRecord.Curve);
    }

    private byte[] SignData(EcdsaPrivateKeyRecord privateKey, ReadOnlySpan<byte> data)
    {
        var parameters = new ECParameters
        {
            Curve = ResolveCurve(privateKey.Curve),
            D = Convert.FromBase64String(privateKey.D),
            Q = new ECPoint
            {
                X = Convert.FromBase64String(privateKey.Qx),
                Y = Convert.FromBase64String(privateKey.Qy),
            },
        };

        using var ecdsa = ECDsa.Create();
        ecdsa.ImportParameters(parameters);
        return ecdsa.SignData(data.ToArray(), HashAlgorithmName.SHA256);
    }

    private bool VerifyData(string curveName, string publicKeyBase64, ReadOnlySpan<byte> data, ReadOnlySpan<byte> signature)
    {
        var publicKey = Convert.FromBase64String(publicKeyBase64);
        if (publicKey.Length % 2 != 0)
        {
            return false;
        }

        var half = publicKey.Length / 2;
        var qx = publicKey[..half];
        var qy = publicKey[half..];

        var parameters = new ECParameters
        {
            Curve = ResolveCurve(curveName),
            Q = new ECPoint
            {
                X = qx,
                Y = qy,
            },
        };

        using var ecdsa = ECDsa.Create();
        ecdsa.ImportParameters(parameters);
        return ecdsa.VerifyData(data.ToArray(), signature.ToArray(), HashAlgorithmName.SHA256);
    }

    private KeyEnvelope EncryptPrivateKey(ReadOnlySpan<byte> privateKey)
    {
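        // Envelope scheme: the serialized private key is sealed with AES-256-GCM using a key derived
        // from the configured password (see DeriveKey); salt and nonce are generated per envelope and
        // stored base64-encoded alongside the ciphertext and tag in KeyEnvelope.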
        var salt = RandomNumberGenerator.GetBytes(16);
        var nonce = RandomNumberGenerator.GetBytes(12);
        var key = DeriveKey(salt);

        try
        {
            var ciphertext = new byte[privateKey.Length];
            var tag = new byte[16];
            var plaintextCopy = privateKey.ToArray();

            try
            {
                // AesGcm exposes instance Encrypt/Decrypt methods; 16 matches the tag buffer allocated above.
                using var aesGcm = new AesGcm(key, 16);
                aesGcm.Encrypt(nonce, plaintextCopy, ciphertext, tag);
            }
            finally
            {
                CryptographicOperations.ZeroMemory(plaintextCopy);
            }

            return new KeyEnvelope(
                Ciphertext: Convert.ToBase64String(ciphertext),
                Nonce: Convert.ToBase64String(nonce),
                Tag: Convert.ToBase64String(tag),
                Salt: Convert.ToBase64String(salt));
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key);
        }
    }

    private byte[] DecryptPrivateKey(KeyEnvelope envelope)
    {
        var salt = Convert.FromBase64String(envelope.Salt);
        var nonce = Convert.FromBase64String(envelope.Nonce);
        var tag = Convert.FromBase64String(envelope.Tag);
        var ciphertext = Convert.FromBase64String(envelope.Ciphertext);

        var key = DeriveKey(salt);
        try
        {
            var plaintext = new byte[ciphertext.Length];
            using var aesGcm = new AesGcm(key, 16); // same 16-byte tag size as EncryptPrivateKey
            aesGcm.Decrypt(nonce, ciphertext, tag, plaintext);

            return plaintext;
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key);
        }
    }

    private byte[] DeriveKey(byte[] salt)
    {
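        // Derives a 32-byte AES key from the configured password via PBKDF2-SHA256, using the
        // per-envelope salt and the iteration count from FileKmsOptions.KeyDerivationIterations.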
        var key = new byte[32];
        try
        {
            var passwordBytes = Encoding.UTF8.GetBytes(_options.Password);
            try
            {
                var derived = Rfc2898DeriveBytes.Pbkdf2(passwordBytes, salt, _options.KeyDerivationIterations, HashAlgorithmName.SHA256, key.Length);
                derived.CopyTo(key.AsSpan());
                CryptographicOperations.ZeroMemory(derived);
                return key;
            }
            finally
            {
                CryptographicOperations.ZeroMemory(passwordBytes);
            }
        }
        catch
        {
            CryptographicOperations.ZeroMemory(key);
            throw;
        }
    }

    private static async Task WriteJsonAsync<T>(string path, T value, CancellationToken cancellationToken)
    {
        await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, value, JsonOptions, cancellationToken).ConfigureAwait(false);
    }

    private static KmsKeyMetadata ToMetadata(KeyMetadataRecord record)
    {
        var versions = record.Versions
            .Select(v => new KmsKeyVersionMetadata(
                v.VersionId,
                v.State,
                v.CreatedAt,
                v.DeactivatedAt,
                v.PublicKey,
                v.CurveName))
            .ToImmutableArray();

        var createdAt = record.CreatedAt ?? (versions.Length > 0 ? versions.Min(v => v.CreatedAt) : DateTimeOffset.UtcNow);
        return new KmsKeyMetadata(record.KeyId, record.Algorithm, record.State, createdAt, versions);
    }

    private sealed class KeyMetadataRecord
    {
        public string KeyId { get; set; } = string.Empty;
        public string Algorithm { get; set; } = KmsAlgorithms.Es256;
        public KmsKeyState State { get; set; } = KmsKeyState.Active;
        public DateTimeOffset? CreatedAt { get; set; }
        public string? ActiveVersion { get; set; }
        public List<KeyVersionRecord> Versions { get; set; } = new();
    }

    private sealed class KeyVersionRecord
    {
        public string VersionId { get; set; } = string.Empty;
        public KmsKeyState State { get; set; } = KmsKeyState.Active;
        public DateTimeOffset CreatedAt { get; set; }
        public DateTimeOffset? DeactivatedAt { get; set; }
        public string PublicKey { get; set; } = string.Empty;
        public string FileName { get; set; } = string.Empty;
        public string CurveName { get; set; } = string.Empty;
    }

    private sealed record KeyEnvelope(
        string Ciphertext,
        string Nonce,
        string Tag,
        string Salt);

    private sealed record EcdsaKeyData(byte[] PrivateBlob, string PublicKey, string Curve);

    private sealed class EcdsaPrivateKeyRecord
    {
        public string Curve { get; set; } = string.Empty;
        public string D { get; set; } = string.Empty;
        public string Qx { get; set; } = string.Empty;
        public string Qy { get; set; } = string.Empty;
    }

    private static ECCurve ResolveCurve(string curveName) => curveName switch
    {
        "nistP256" or "P-256" or "ES256" => ECCurve.NamedCurves.nistP256,
        _ => throw new NotSupportedException($"Curve '{curveName}' is not supported."),
    };

    public void Dispose() => _mutex.Dispose();
}

27 src/__Libraries/StellaOps.Cryptography.Kms/FileKmsOptions.cs Normal file
@@ -0,0 +1,27 @@
namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Options for the <see cref="FileKmsClient"/>.
/// </summary>
public sealed class FileKmsOptions
{
    /// <summary>
    /// Root directory for storing key material.
    /// </summary>
    public string RootPath { get; set; } = string.Empty;

    /// <summary>
    /// Password used to encrypt private key material at rest.
    /// </summary>
    public required string Password { get; set; }

    /// <summary>
    /// Signing algorithm identifier (defaults to ES256).
    /// </summary>
    public string Algorithm { get; set; } = KmsAlgorithms.Es256;

    /// <summary>
    /// PBKDF2 iteration count for envelope encryption.
    /// </summary>
    public int KeyDerivationIterations { get; set; } = 100_000;
}

51 src/__Libraries/StellaOps.Cryptography.Kms/IKmsClient.cs Normal file
@@ -0,0 +1,51 @@
using System.Threading;

namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Provides signing key operations backed by a key management system (KMS).
/// </summary>
public interface IKmsClient
{
    /// <summary>
    /// Signs the supplied data with the specified key version (or the active version when none is supplied).
    /// </summary>
    Task<KmsSignResult> SignAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a signature produced by <see cref="SignAsync"/>.
    /// </summary>
    Task<bool> VerifyAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        ReadOnlyMemory<byte> signature,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves metadata for the current key and versions.
    /// </summary>
    Task<KmsKeyMetadata> GetMetadataAsync(string keyId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports the key material required for local verification.
    /// </summary>
    Task<KmsKeyMaterial> ExportAsync(
        string keyId,
        string? keyVersion,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Generates a new active key version for the specified key.
    /// </summary>
    Task<KmsKeyMetadata> RotateAsync(string keyId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Revokes a key, preventing future signing operations.
    /// </summary>
    Task RevokeAsync(string keyId, CancellationToken cancellationToken = default);
}
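
For orientation, a caller might drive this interface roughly as in the sketch below (illustrative only, not part of this commit); the key id "authority-signing" and the helper class name are made up for the example:

// Illustrative sketch, not part of this commit. "authority-signing" is a hypothetical key id.
internal static class KmsUsageExample
{
    public static async Task<bool> SignAndVerifyAsync(IKmsClient kms, byte[] payload, CancellationToken ct)
    {
        // Ensure the key exists and has an active version.
        await kms.RotateAsync("authority-signing", ct);

        // Sign with the active version (keyVersion: null), then verify against the version that produced the signature.
        var result = await kms.SignAsync("authority-signing", keyVersion: null, payload, ct);
        return await kms.VerifyAsync("authority-signing", result.VersionId, payload, result.Signature, ct);
    }
}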

@@ -0,0 +1,9 @@
namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Supported algorithm identifiers for the KMS abstraction.
/// </summary>
public static class KmsAlgorithms
{
    public const string Es256 = "ES256";
}

120 src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.cs Normal file
@@ -0,0 +1,120 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;

namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Crypto provider that delegates signing operations to a KMS backend.
/// </summary>
public sealed class KmsCryptoProvider : ICryptoProvider
{
    private readonly IKmsClient _kmsClient;
    private readonly ConcurrentDictionary<string, KmsSigningRegistration> _registrations = new(StringComparer.OrdinalIgnoreCase);

    public KmsCryptoProvider(IKmsClient kmsClient)
        => _kmsClient = kmsClient ?? throw new ArgumentNullException(nameof(kmsClient));

    public string Name => "kms";

    public bool Supports(CryptoCapability capability, string algorithmId)
    {
        if (!string.Equals(algorithmId, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        return capability is CryptoCapability.Signing or CryptoCapability.Verification;
    }

    public IPasswordHasher GetPasswordHasher(string algorithmId)
        => throw new InvalidOperationException($"Provider '{Name}' does not support password hashing.");

    public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference)
    {
        ArgumentNullException.ThrowIfNull(keyReference);

        if (!Supports(CryptoCapability.Signing, algorithmId))
        {
            throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider '{Name}'.");
        }

        if (!_registrations.TryGetValue(keyReference.KeyId, out var registration))
        {
            throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'.");
        }

        return new KmsSigner(_kmsClient, registration);
    }

    public void UpsertSigningKey(CryptoSigningKey signingKey)
    {
        ArgumentNullException.ThrowIfNull(signingKey);

        if (!string.Equals(signingKey.AlgorithmId, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Provider '{Name}' only supports {KmsAlgorithms.Es256} signing keys.");
        }

        if (signingKey.Metadata is null ||
            !signingKey.Metadata.TryGetValue(KmsMetadataKeys.Version, out var versionId) ||
            string.IsNullOrWhiteSpace(versionId))
        {
            throw new InvalidOperationException("KMS signing keys must include metadata entry 'kms.version'.");
        }

        var registration = new KmsSigningRegistration(signingKey.Reference.KeyId, versionId!, signingKey.AlgorithmId);
        _registrations.AddOrUpdate(signingKey.Reference.KeyId, registration, (_, _) => registration);
    }

    public bool RemoveSigningKey(string keyId)
    {
        if (string.IsNullOrWhiteSpace(keyId))
        {
            return false;
        }

        return _registrations.TryRemove(keyId, out _);
    }

    public IReadOnlyCollection<CryptoSigningKey> GetSigningKeys()
    {
        var list = new List<CryptoSigningKey>();
        foreach (var registration in _registrations.Values)
        {
            var material = _kmsClient.ExportAsync(registration.KeyId, registration.VersionId).GetAwaiter().GetResult();
            var parameters = new ECParameters
            {
                Curve = ECCurve.NamedCurves.nistP256,
                D = material.D,
                Q = new ECPoint
                {
                    X = material.Qx,
                    Y = material.Qy,
                },
            };

            var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
            {
                [KmsMetadataKeys.Version] = material.VersionId
            };

            list.Add(new CryptoSigningKey(
                new CryptoKeyReference(material.KeyId, Name),
                material.Algorithm,
                in parameters,
                material.CreatedAt,
                metadata: metadata));
        }

        return list;
    }

    internal static class KmsMetadataKeys
    {
        public const string Version = "kms.version";
    }
}

internal sealed record KmsSigningRegistration(string KeyId, string VersionId, string Algorithm);
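
The 'kms.version' metadata requirement is easiest to see in a registration flow. The following is a minimal sketch (illustrative only, not part of this commit) that mirrors the CryptoSigningKey construction used in GetSigningKeys above; the key id "authority-signing", the helper class, and the literal "kms.version" dictionary key are assumptions for the example:

// Illustrative sketch, not part of this commit; mirrors the construction in GetSigningKeys above.
internal static class KmsProviderExample
{
    public static async Task<ICryptoSigner> RegisterExportedKeyAsync(
        KmsCryptoProvider provider, IKmsClient kmsClient, CancellationToken ct)
    {
        // "authority-signing" is a hypothetical key id.
        var material = await kmsClient.ExportAsync("authority-signing", keyVersion: null, ct);

        var parameters = new ECParameters
        {
            Curve = ECCurve.NamedCurves.nistP256,
            D = material.D,
            Q = new ECPoint { X = material.Qx, Y = material.Qy },
        };

        var signingKey = new CryptoSigningKey(
            new CryptoKeyReference(material.KeyId, "kms"),
            material.Algorithm,
            in parameters,
            material.CreatedAt,
            metadata: new Dictionary<string, string?> { ["kms.version"] = material.VersionId });

        provider.UpsertSigningKey(signingKey); // rejected without the 'kms.version' metadata entry
        return provider.GetSigner(KmsAlgorithms.Es256, signingKey.Reference);
    }
}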

14 src/__Libraries/StellaOps.Cryptography.Kms/KmsKeyMaterial.cs Normal file
@@ -0,0 +1,14 @@
namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Represents exported key material for verification and registration.
/// </summary>
public sealed record KmsKeyMaterial(
    string KeyId,
    string VersionId,
    string Algorithm,
    string Curve,
    byte[] D,
    byte[] Qx,
    byte[] Qy,
    DateTimeOffset CreatedAt);

24 src/__Libraries/StellaOps.Cryptography.Kms/KmsKeyMetadata.cs Normal file
@@ -0,0 +1,24 @@
using System.Collections.Immutable;

namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Describes a logical KMS key and its versions.
/// </summary>
public sealed record KmsKeyMetadata(
    string KeyId,
    string Algorithm,
    KmsKeyState State,
    DateTimeOffset CreatedAt,
    ImmutableArray<KmsKeyVersionMetadata> Versions);

/// <summary>
/// Describes a specific key version.
/// </summary>
public sealed record KmsKeyVersionMetadata(
    string VersionId,
    KmsKeyState State,
    DateTimeOffset CreatedAt,
    DateTimeOffset? DeactivatedAt,
    string PublicKey,
    string Curve);

11 src/__Libraries/StellaOps.Cryptography.Kms/KmsKeyState.cs Normal file
@@ -0,0 +1,11 @@
namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Represents the lifecycle state of a KMS key or key version.
/// </summary>
public enum KmsKeyState
{
    Active = 0,
    PendingRotation = 1,
    Revoked = 2,
}

10 src/__Libraries/StellaOps.Cryptography.Kms/KmsSignResult.cs Normal file
@@ -0,0 +1,10 @@
namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Represents the output of a signing operation.
/// </summary>
public sealed record KmsSignResult(
    string KeyId,
    string VersionId,
    string Algorithm,
    byte[] Signature);

55 src/__Libraries/StellaOps.Cryptography.Kms/KmsSigner.cs Normal file
@@ -0,0 +1,55 @@
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;

namespace StellaOps.Cryptography.Kms;

internal sealed class KmsSigner : ICryptoSigner
{
    private readonly IKmsClient _client;
    private readonly string _keyId;
    private readonly string _versionId;
    private readonly string _algorithm;

    public KmsSigner(IKmsClient client, KmsSigningRegistration registration)
    {
        _client = client;
        _keyId = registration.KeyId;
        _versionId = registration.VersionId;
        _algorithm = registration.Algorithm;
    }

    public string KeyId => _keyId;

    public string AlgorithmId => _algorithm;

    public async ValueTask<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
    {
        var result = await _client.SignAsync(_keyId, _versionId, data, cancellationToken).ConfigureAwait(false);
        return result.Signature;
    }

    public ValueTask<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default)
        => new(_client.VerifyAsync(_keyId, _versionId, data, signature, cancellationToken));

    public JsonWebKey ExportPublicJsonWebKey()
    {
        var material = _client.ExportAsync(_keyId, _versionId).GetAwaiter().GetResult();
        var jwk = new JsonWebKey
        {
            Kid = material.KeyId,
            Alg = material.Algorithm,
            Kty = JsonWebAlgorithmsKeyTypes.EllipticCurve,
            Use = JsonWebKeyUseNames.Sig,
            Crv = JsonWebKeyECTypes.P256,
        };

        jwk.KeyOps.Add("sign");
        jwk.KeyOps.Add("verify");
        jwk.X = Base64UrlEncoder.Encode(material.Qx);
        jwk.Y = Base64UrlEncoder.Encode(material.Qy);
        return jwk;
    }
}

@@ -0,0 +1,32 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;

namespace StellaOps.Cryptography.Kms;

/// <summary>
/// Dependency injection helpers for the KMS client and crypto provider.
/// </summary>
public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddFileKms(
        this IServiceCollection services,
        Action<FileKmsOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);

        services.TryAddSingleton<IKmsClient>(sp =>
        {
            var options = sp.GetRequiredService<IOptions<FileKmsOptions>>().Value;
            return new FileKmsClient(options);
        });

        services.TryAddEnumerable(ServiceDescriptor.Singleton<ICryptoProvider, KmsCryptoProvider>());

        return services;
    }
}
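
Once AddFileKms has run at startup, the registrations resolve roughly as in the sketch below (illustrative only, not part of this commit); the key id "demo-key" and the helper class are assumptions for the example:

// Illustrative sketch, not part of this commit. Assumes AddFileKms(...) was called during service registration.
internal static class FileKmsResolutionExample
{
    public static async Task<byte[]> SignWithResolvedClientAsync(IServiceProvider serviceProvider, byte[] payload)
    {
        // The file driver is the single IKmsClient; KmsCryptoProvider is added to the ICryptoProvider collection.
        var kms = serviceProvider.GetRequiredService<IKmsClient>();

        await kms.RotateAsync("demo-key");                               // "demo-key" is a hypothetical key id
        var result = await kms.SignAsync("demo-key", keyVersion: null, payload);
        return result.Signature;
    }
}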

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="9.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
  </ItemGroup>
</Project>

@@ -3,7 +3,7 @@
## Sprint 72 – Abstractions & File Driver
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| KMS-72-001 | TODO | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes. |
| KMS-72-001 | DOING (2025-10-29) | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes.<br>2025-10-29: `FileKmsClient` (ES256) file driver scaffolding committed under `StellaOps.Cryptography.Kms`; includes disk encryption + unit tests. Follow-up: address PBKDF2/AesGcm warnings and wire into Authority services. |
| KMS-72-002 | TODO | KMS Guild | KMS-72-001 | Add CLI support for importing/exporting file-based keys with password protection. | CLI commands functional; docs updated; integration tests pass. |

## Sprint 73 – Cloud & HSM Integration

@@ -0,0 +1,112 @@
using System.Security.Cryptography;
using StellaOps.Cryptography.Kms;

namespace StellaOps.Cryptography.Kms.Tests;

public sealed class FileKmsClientTests : IDisposable
{
    private readonly string _rootPath;

    public FileKmsClientTests()
    {
        _rootPath = Path.Combine(Path.GetTempPath(), $"kms-tests-{Guid.NewGuid():N}");
    }

    [Fact]
    public async Task RotateSignVerifyLifecycle_Works()
    {
        using var client = CreateClient();
        var keyId = "kms-test-key";

        // Initial rotate creates the key.
        var metadata = await client.RotateAsync(keyId);
        Assert.Equal(keyId, metadata.KeyId);
        Assert.Single(metadata.Versions);
        Assert.Equal(KmsKeyState.Active, metadata.State);
        var version = metadata.Versions[0];
        Assert.Equal(KmsKeyState.Active, version.State);

        var firstData = RandomNumberGenerator.GetBytes(256);
        var firstSignature = await client.SignAsync(keyId, null, firstData);
        Assert.Equal(keyId, firstSignature.KeyId);
        Assert.Equal(KmsAlgorithms.Es256, firstSignature.Algorithm);
        Assert.True(await client.VerifyAsync(keyId, firstSignature.VersionId, firstData, firstSignature.Signature));

        // Rotate again and ensure metadata reflects both versions.
        var rotated = await client.RotateAsync(keyId);
        Assert.Equal(2, rotated.Versions.Length);
        var activeVersion = rotated.Versions.Single(v => v.State == KmsKeyState.Active);
        Assert.Equal(rotated.Versions.Max(v => v.VersionId), activeVersion.VersionId);
        var previousVersion = rotated.Versions.Single(v => v.State != KmsKeyState.Active);
        Assert.Equal(KmsKeyState.PendingRotation, previousVersion.State);

        var newData = RandomNumberGenerator.GetBytes(128);
        var activeSignature = await client.SignAsync(keyId, null, newData);
        Assert.Equal(activeVersion.VersionId, activeSignature.VersionId);
        Assert.True(await client.VerifyAsync(keyId, null, newData, activeSignature.Signature));

        // Explicit version verify should still pass for previous version using the old signature.
        Assert.True(await client.VerifyAsync(keyId, previousVersion.VersionId, firstData, firstSignature.Signature));
    }

    [Fact]
    public async Task RevokePreventsSigning()
    {
        using var client = CreateClient();
        var keyId = "kms-revoke";

        await client.RotateAsync(keyId);
        await client.RevokeAsync(keyId);

        var metadata = await client.GetMetadataAsync(keyId);
        Assert.Equal(KmsKeyState.Revoked, metadata.State);
        Assert.All(metadata.Versions, v => Assert.Equal(KmsKeyState.Revoked, v.State));

        var data = RandomNumberGenerator.GetBytes(32);
        await Assert.ThrowsAsync<InvalidOperationException>(() => client.SignAsync(keyId, null, data));
    }

    [Fact]
    public async Task ExportAsync_ReturnsKeyMaterial()
    {
        using var client = CreateClient();
        var keyId = "kms-export";

        await client.RotateAsync(keyId);
        var material = await client.ExportAsync(keyId, null);

        Assert.Equal(keyId, material.KeyId);
        Assert.Equal(KmsAlgorithms.Es256, material.Algorithm);
        Assert.Equal("nistP256", material.Curve);
        Assert.NotEmpty(material.D);
        Assert.NotEmpty(material.Qx);
        Assert.NotEmpty(material.Qy);
    }

    private FileKmsClient CreateClient()
    {
        var options = new FileKmsOptions
        {
            RootPath = _rootPath,
            Password = "P@ssw0rd!",
            Algorithm = KmsAlgorithms.Es256,
        };

        return new FileKmsClient(options);
    }

    public void Dispose()
    {
        try
        {
            if (Directory.Exists(_rootPath))
            {
                Directory.Delete(_rootPath, recursive: true);
            }
        }
        catch
        {
            // ignore cleanup errors
        }
    }
}

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>
  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../StellaOps.Cryptography.Kms/StellaOps.Cryptography.Kms.csproj" />
  </ItemGroup>
</Project>