audit work, fixed StellaOps.sln warnings/errors, fixed tests, sprint work, new advisories

This commit is contained in:
master
2026-01-07 18:49:59 +02:00
parent 04ec098046
commit 608a7f85c0
866 changed files with 56323 additions and 6231 deletions

View File

@@ -689,7 +689,7 @@ static Task<IResult> HandlePolicyCompile(
BundleName = request.BundleName,
Version = "1.0.0",
RuleCount = request.RuleIds.Count,
CompiledAt = now.ToString("O"),
CompiledAt = now.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ContentHash = $"sha256:{contentHash}",
SignatureId = null // Would be signed in production
};
@@ -751,10 +751,10 @@ static async Task<IResult> HandleGetConsent(
return Results.Ok(new AiConsentStatusResponse
{
Consented = record.Consented,
ConsentedAt = record.ConsentedAt?.ToString("O"),
ConsentedAt = record.ConsentedAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ConsentedBy = record.UserId,
Scope = record.Scope,
ExpiresAt = record.ExpiresAt?.ToString("O"),
ExpiresAt = record.ExpiresAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
SessionLevel = record.SessionLevel
});
}
@@ -763,6 +763,7 @@ static async Task<IResult> HandleGrantConsent(
HttpContext httpContext,
AiConsentGrantRequest request,
IAiConsentStore consentStore,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
if (!request.DataShareAcknowledged)
@@ -786,8 +787,8 @@ static async Task<IResult> HandleGrantConsent(
return Results.Ok(new AiConsentGrantResponse
{
Consented = record.Consented,
ConsentedAt = record.ConsentedAt?.ToString("O") ?? DateTimeOffset.UtcNow.ToString("O"),
ExpiresAt = record.ExpiresAt?.ToString("O")
ConsentedAt = record.ConsentedAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture) ?? timeProvider.GetUtcNow().ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ExpiresAt = record.ExpiresAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture)
});
}
@@ -863,7 +864,7 @@ static async Task<IResult> HandleJustify(
ConfidenceScore = result.ConfidenceScore,
EvidenceSuggestions = result.EvidenceSuggestions,
ModelVersion = result.ModelVersion,
GeneratedAt = result.GeneratedAt.ToString("O"),
GeneratedAt = result.GeneratedAt.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
TraceId = result.TraceId
});
}
@@ -919,7 +920,7 @@ static Task<IResult> HandleGetRateLimits(
Feature = l.Feature,
Limit = l.Limit,
Remaining = l.Remaining,
ResetsAt = l.ResetsAt.ToString("O")
ResetsAt = l.ResetsAt.ToString("O", System.Globalization.CultureInfo.InvariantCulture)
}).ToList();
return Task.FromResult(Results.Ok(response));
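
The change repeated across these handlers is uniform: timestamps are rendered with the round-trip "O" specifier plus CultureInfo.InvariantCulture, and ambient DateTime/DateTimeOffset.UtcNow calls give way to an injected TimeProvider. A minimal, standalone sketch of the pattern (not tied to any specific handler above):

using System.Globalization;

// Culture-pinned, clock-injected timestamp formatting. The "O" (round-trip) specifier
// is already culture-invariant, so passing InvariantCulture mainly satisfies analyzer
// rules such as CA1305 and makes the intent explicit.
static string FormatUtcNow(TimeProvider clock) =>
    clock.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);

// e.g. FormatUtcNow(TimeProvider.System) -> "2026-01-07T16:49:59.1234567+00:00"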

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -16,6 +17,7 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
private readonly IExplanationInferenceClient _inferenceClient;
private readonly ICitationExtractor _citationExtractor;
private readonly IExplanationStore _store;
private readonly TimeProvider _timeProvider;
private const double EvidenceBackedThreshold = 0.8;
@@ -24,13 +26,15 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
IExplanationPromptService promptService,
IExplanationInferenceClient inferenceClient,
ICitationExtractor citationExtractor,
IExplanationStore store)
IExplanationStore store,
TimeProvider? timeProvider = null)
{
_evidenceService = evidenceService;
_promptService = promptService;
_inferenceClient = inferenceClient;
_citationExtractor = citationExtractor;
_store = store;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<ExplanationResult> GenerateAsync(ExplanationRequest request, CancellationToken cancellationToken = default)
@@ -91,7 +95,7 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
ModelId = inferenceResult.ModelId,
PromptTemplateVersion = prompt.TemplateVersion,
InputHashes = inputHashes,
GeneratedAt = DateTime.UtcNow.ToString("O"),
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
OutputHash = outputHash
};

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -14,6 +15,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
private readonly IPolicyPromptService _promptService;
private readonly IPolicyInferenceClient _inferenceClient;
private readonly IPolicyIntentStore _intentStore;
private readonly TimeProvider _timeProvider;
private static readonly string[] FewShotExamples = new[]
{
@@ -27,11 +29,13 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
public AiPolicyIntentParser(
IPolicyPromptService promptService,
IPolicyInferenceClient inferenceClient,
IPolicyIntentStore intentStore)
IPolicyIntentStore intentStore,
TimeProvider? timeProvider = null)
{
_promptService = promptService;
_inferenceClient = inferenceClient;
_intentStore = intentStore;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<PolicyParseResult> ParseAsync(
@@ -61,7 +65,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
Success = intent.Confidence >= 0.7,
ErrorMessage = intent.Confidence < 0.7 ? "Ambiguous input - clarification needed" : null,
ModelId = inferenceResult.ModelId,
ParsedAt = DateTime.UtcNow.ToString("O")
ParsedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}
@@ -93,7 +97,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
Intent = clarifiedIntent,
Success = clarifiedIntent.Confidence >= 0.8,
ModelId = inferenceResult.ModelId,
ParsedAt = DateTime.UtcNow.ToString("O")
ParsedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
@@ -10,6 +11,13 @@ namespace StellaOps.AdvisoryAI.PolicyStudio;
/// </summary>
public sealed class LatticeRuleGenerator : IPolicyRuleGenerator
{
private readonly TimeProvider _timeProvider;
public LatticeRuleGenerator(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task<RuleGenerationResult> GenerateAsync(
PolicyIntent intent,
CancellationToken cancellationToken = default)
@@ -58,7 +66,7 @@ public sealed class LatticeRuleGenerator : IPolicyRuleGenerator
Success = true,
Warnings = warnings,
IntentId = intent.IntentId,
GeneratedAt = DateTime.UtcNow.ToString("O")
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
});
}
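
These generators default to TimeProvider.System but accept an injected clock, which is what makes the emitted GeneratedAt values reproducible in tests. A minimal test-side sketch, assuming the FakeTimeProvider from the Microsoft.Extensions.TimeProvider.Testing package:

using System.Globalization;
using Microsoft.Extensions.Time.Testing; // Microsoft.Extensions.TimeProvider.Testing package
using StellaOps.AdvisoryAI.PolicyStudio;

// Pin the clock so GeneratedAt is deterministic.
var clock = new FakeTimeProvider(DateTimeOffset.Parse("2026-01-07T00:00:00Z", CultureInfo.InvariantCulture));
var generator = new LatticeRuleGenerator(clock);
// Any result.GeneratedAt produced by generator.GenerateAsync(...) now renders as
// "2026-01-07T00:00:00.0000000+00:00" regardless of machine clock or culture.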

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -391,7 +392,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Warnings = warnings,
ValidationReport = validationReport,
TestReport = testReport,
CompiledAt = _timeProvider.GetUtcNow().ToString("O")
CompiledAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}
@@ -412,7 +413,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Warnings = warnings,
ValidationReport = validationReport,
TestReport = testReport,
CompiledAt = DateTime.UtcNow.ToString("O"),
CompiledAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
BundleDigest = bundleDigest
};
}
@@ -492,7 +493,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
ContentDigest = contentDigest,
Signature = string.Empty,
Algorithm = "none",
SignedAt = _timeProvider.GetUtcNow().ToString("O")
SignedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}
@@ -509,7 +510,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Algorithm = signature.Algorithm,
KeyId = options.KeyId,
SignerIdentity = options.SignerIdentity,
SignedAt = _timeProvider.GetUtcNow().ToString("O"),
SignedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
CertificateChain = signature.CertificateChain
};
}

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
@@ -10,6 +11,13 @@ namespace StellaOps.AdvisoryAI.PolicyStudio;
/// </summary>
public sealed class PropertyBasedTestSynthesizer : ITestCaseSynthesizer
{
private readonly TimeProvider _timeProvider;
public PropertyBasedTestSynthesizer(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task<IReadOnlyList<PolicyTestCase>> SynthesizeAsync(
IReadOnlyList<LatticeRule> rules,
CancellationToken cancellationToken = default)
@@ -53,7 +61,7 @@ public sealed class PropertyBasedTestSynthesizer : ITestCaseSynthesizer
Passed = results.Count(r => r.Passed),
Failed = results.Count(r => !r.Passed),
Results = results,
RunAt = DateTime.UtcNow.ToString("O")
RunAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
});
}

View File

@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Globalization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
@@ -106,7 +107,7 @@ internal sealed class ConcelierAdvisoryDocumentProvider : IAdvisoryDocumentProvi
["vendor"] = record.Document.Source.Vendor,
["connector"] = record.Document.Source.Connector,
["content_hash"] = record.Document.Upstream.ContentHash,
["ingested_at"] = record.IngestedAt.UtcDateTime.ToString("O"),
["ingested_at"] = record.IngestedAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
};
if (!string.IsNullOrWhiteSpace(record.Document.Source.Stream))

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -88,7 +89,7 @@ public sealed class AiRemediationPlanner : IRemediationPlanner
NotReadyReason = notReadyReason,
ConfidenceScore = inferenceResult.Confidence,
ModelId = inferenceResult.ModelId,
GeneratedAt = _timeProvider.GetUtcNow().ToString("O"),
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
InputHashes = inputHashes,
EvidenceRefs = new List<string> { versionResult.CurrentVersion, versionResult.RecommendedVersion }
};

View File

@@ -1,3 +1,5 @@
using System.Globalization;
namespace StellaOps.AdvisoryAI.Remediation;
/// <summary>
@@ -7,42 +9,56 @@ namespace StellaOps.AdvisoryAI.Remediation;
/// </summary>
public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
{
private readonly TimeProvider _timeProvider;
private readonly Func<Guid> _guidFactory;
private readonly Func<int, int, int> _randomFactory;
public AzureDevOpsPullRequestGenerator(
TimeProvider? timeProvider = null,
Func<Guid>? guidFactory = null,
Func<int, int, int>? randomFactory = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidFactory = guidFactory ?? Guid.NewGuid;
_randomFactory = randomFactory ?? Random.Shared.Next;
}
public string ScmType => "azure-devops";
public Task<PullRequestResult> CreatePullRequestAsync(
RemediationPlan plan,
CancellationToken cancellationToken = default)
{
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
if (!plan.PrReady)
{
return Task.FromResult(new PullRequestResult
{
PrId = $"ado-pr-{Guid.NewGuid():N}",
PrId = $"ado-pr-{_guidFactory():N}",
PrNumber = 0,
Url = string.Empty,
BranchName = string.Empty,
Status = PullRequestStatus.Failed,
StatusMessage = plan.NotReadyReason ?? "Plan is not PR-ready",
CreatedAt = DateTime.UtcNow.ToString("O"),
UpdatedAt = DateTime.UtcNow.ToString("O")
CreatedAt = nowStr,
UpdatedAt = nowStr
});
}
var branchName = GenerateBranchName(plan);
var prId = $"ado-pr-{Guid.NewGuid():N}";
var now = DateTime.UtcNow.ToString("O");
var branchName = GenerateBranchName(plan, _timeProvider);
var prId = $"ado-pr-{_guidFactory():N}";
// In a real implementation, this would use Azure DevOps REST API
return Task.FromResult(new PullRequestResult
{
PrId = prId,
PrNumber = new Random().Next(1000, 9999),
PrNumber = _randomFactory(1000, 9999),
Url = $"https://dev.azure.com/{ExtractOrgProject(plan.Request.RepositoryUrl)}/_git/{ExtractRepoName(plan.Request.RepositoryUrl)}/pullrequest/{prId}",
BranchName = branchName,
Status = PullRequestStatus.Creating,
StatusMessage = "Pull request is being created",
CreatedAt = now,
UpdatedAt = now
CreatedAt = nowStr,
UpdatedAt = nowStr
});
}
@@ -50,7 +66,7 @@ public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
string prId,
CancellationToken cancellationToken = default)
{
var now = DateTime.UtcNow.ToString("O");
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
return Task.FromResult(new PullRequestResult
{
PrId = prId,
@@ -80,10 +96,10 @@ public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
return Task.CompletedTask;
}
private static string GenerateBranchName(RemediationPlan plan)
private static string GenerateBranchName(RemediationPlan plan, TimeProvider timeProvider)
{
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
var timestamp = DateTime.UtcNow.ToString("yyyyMMdd");
var timestamp = timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
return $"stellaops/fix-{vulnId}-{timestamp}";
}
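
With Guid and Random factories injectable alongside the clock, every identifier the generator produces can be pinned in a test. A sketch with illustrative values (these are not the repository's own fixtures):

using System.Globalization;
using Microsoft.Extensions.Time.Testing;
using StellaOps.AdvisoryAI.Remediation;

// Fixed clock, fixed GUID, and a random factory that always returns its lower bound,
// so PrNumber comes out as 1000 and branch names end in "-20260107".
var generator = new AzureDevOpsPullRequestGenerator(
    timeProvider: new FakeTimeProvider(DateTimeOffset.Parse("2026-01-07T00:00:00Z", CultureInfo.InvariantCulture)),
    guidFactory: () => Guid.Parse("00000000-0000-0000-0000-000000000001"),
    randomFactory: (min, max) => min);
// PrId becomes "ado-pr-00000000000000000000000000000001", which assertions can match verbatim.

The GitHub and GitLab generators below share the same constructor shape, so the same technique applies to them.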

View File

@@ -1,3 +1,5 @@
using System.Globalization;
namespace StellaOps.AdvisoryAI.Remediation;
/// <summary>
@@ -9,11 +11,19 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
{
private readonly IRemediationPlanStore _planStore;
private readonly TimeProvider _timeProvider;
private readonly Func<Guid> _guidFactory;
private readonly Func<int, int, int> _randomFactory;
public GitHubPullRequestGenerator(IRemediationPlanStore planStore, TimeProvider? timeProvider = null)
public GitHubPullRequestGenerator(
IRemediationPlanStore planStore,
TimeProvider? timeProvider = null,
Func<Guid>? guidFactory = null,
Func<int, int, int>? randomFactory = null)
{
_planStore = planStore;
_timeProvider = timeProvider ?? TimeProvider.System;
_guidFactory = guidFactory ?? Guid.NewGuid;
_randomFactory = randomFactory ?? Random.Shared.Next;
}
public string ScmType => "github";
@@ -22,19 +32,20 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
RemediationPlan plan,
CancellationToken cancellationToken = default)
{
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
// Validate plan is PR-ready
if (!plan.PrReady)
{
return new PullRequestResult
{
PrId = $"pr-{Guid.NewGuid():N}",
PrId = $"pr-{_guidFactory():N}",
PrNumber = 0,
Url = string.Empty,
BranchName = string.Empty,
Status = PullRequestStatus.Failed,
StatusMessage = plan.NotReadyReason ?? "Plan is not PR-ready",
CreatedAt = _timeProvider.GetUtcNow().ToString("O"),
UpdatedAt = _timeProvider.GetUtcNow().ToString("O")
CreatedAt = nowStr,
UpdatedAt = nowStr
};
}
@@ -47,19 +58,18 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
// 3. Commit changes
// 4. Create PR via GitHub API
var prId = $"gh-pr-{Guid.NewGuid():N}";
var now = _timeProvider.GetUtcNow().ToString("O");
var prId = $"gh-pr-{_guidFactory():N}";
return new PullRequestResult
{
PrId = prId,
PrNumber = new Random().Next(1000, 9999), // Placeholder
PrNumber = _randomFactory(1000, 9999), // Placeholder
Url = $"https://github.com/{ExtractOwnerRepo(plan.Request.RepositoryUrl)}/pull/{prId}",
BranchName = branchName,
Status = PullRequestStatus.Creating,
StatusMessage = "Pull request is being created",
CreatedAt = now,
UpdatedAt = now
CreatedAt = nowStr,
UpdatedAt = nowStr
};
}
@@ -68,7 +78,7 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
CancellationToken cancellationToken = default)
{
// In a real implementation, this would query GitHub API
var now = _timeProvider.GetUtcNow().ToString("O");
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
return Task.FromResult(new PullRequestResult
{
@@ -104,7 +114,7 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
private string GenerateBranchName(RemediationPlan plan)
{
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd");
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
return $"stellaops/fix-{vulnId}-{timestamp}";
}

View File

@@ -1,3 +1,5 @@
using System.Globalization;
namespace StellaOps.AdvisoryAI.Remediation;
/// <summary>
@@ -8,10 +10,17 @@ namespace StellaOps.AdvisoryAI.Remediation;
public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
{
private readonly TimeProvider _timeProvider;
private readonly Func<Guid> _guidFactory;
private readonly Func<int, int, int> _randomFactory;
public GitLabMergeRequestGenerator(TimeProvider? timeProvider = null)
public GitLabMergeRequestGenerator(
TimeProvider? timeProvider = null,
Func<Guid>? guidFactory = null,
Func<int, int, int>? randomFactory = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidFactory = guidFactory ?? Guid.NewGuid;
_randomFactory = randomFactory ?? Random.Shared.Next;
}
public string ScmType => "gitlab";
@@ -20,36 +29,36 @@ public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
RemediationPlan plan,
CancellationToken cancellationToken = default)
{
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
if (!plan.PrReady)
{
return Task.FromResult(new PullRequestResult
{
PrId = $"mr-{Guid.NewGuid():N}",
PrId = $"mr-{_guidFactory():N}",
PrNumber = 0,
Url = string.Empty,
BranchName = string.Empty,
Status = PullRequestStatus.Failed,
StatusMessage = plan.NotReadyReason ?? "Plan is not MR-ready",
CreatedAt = _timeProvider.GetUtcNow().ToString("O"),
UpdatedAt = _timeProvider.GetUtcNow().ToString("O")
CreatedAt = nowStr,
UpdatedAt = nowStr
});
}
var branchName = GenerateBranchName(plan);
var mrId = $"gl-mr-{Guid.NewGuid():N}";
var now = _timeProvider.GetUtcNow().ToString("O");
var mrId = $"gl-mr-{_guidFactory():N}";
// In a real implementation, this would use GitLab API
return Task.FromResult(new PullRequestResult
{
PrId = mrId,
PrNumber = new Random().Next(1000, 9999),
PrNumber = _randomFactory(1000, 9999),
Url = $"https://gitlab.com/{ExtractProjectPath(plan.Request.RepositoryUrl)}/-/merge_requests/{mrId}",
BranchName = branchName,
Status = PullRequestStatus.Creating,
StatusMessage = "Merge request is being created",
CreatedAt = now,
UpdatedAt = now
CreatedAt = nowStr,
UpdatedAt = nowStr
});
}
@@ -57,7 +66,7 @@ public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
string prId,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow().ToString("O");
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
return Task.FromResult(new PullRequestResult
{
PrId = prId,
@@ -90,7 +99,7 @@ public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
private string GenerateBranchName(RemediationPlan plan)
{
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd");
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
return $"stellaops/fix-{vulnId}-{timestamp}";
}

View File

@@ -30,7 +30,7 @@ public sealed class SemanticVersionTests
[InlineData("1.0.0-")]
[InlineData("")]
[InlineData(null)]
public void Parse_InvalidInputs_Throws(string value)
public void Parse_InvalidInputs_Throws(string? value)
{
var act = () => SemanticVersion.Parse(value!);
act.Should().Throw<FormatException>();

View File

@@ -12,12 +12,6 @@
<PackageReference Include="Microsoft.Extensions.Configuration" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />

View File

@@ -4,6 +4,7 @@
// Part of Step 5: Graph Emission
// =============================================================================
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -27,7 +28,7 @@ public sealed class EvidenceGraph
/// Generation timestamp in ISO 8601 UTC format.
/// </summary>
[JsonPropertyName("generatedAt")]
public string GeneratedAt { get; init; } = DateTimeOffset.UnixEpoch.ToString("O");
public string GeneratedAt { get; init; } = DateTimeOffset.UnixEpoch.ToString("O", CultureInfo.InvariantCulture);
/// <summary>
/// Generator tool identifier.

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
using StellaOps.AirGap.Importer.Reconciliation.Signing;
@@ -229,7 +230,7 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
return new EvidenceGraph
{
GeneratedAt = generatedAtUtc.ToString("O"),
GeneratedAt = generatedAtUtc.ToString("O", CultureInfo.InvariantCulture),
Nodes = nodes,
Edges = edges,
Metadata = new EvidenceGraphMetadata

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Importer.Contracts;
@@ -274,7 +275,7 @@ public sealed class ImportValidator
["bundleType"] = request.BundleType,
["bundleDigest"] = request.BundleDigest,
["manifestVersion"] = request.ManifestVersion,
["manifestCreatedAt"] = request.ManifestCreatedAt.ToString("O"),
["manifestCreatedAt"] = request.ManifestCreatedAt.ToString("O", CultureInfo.InvariantCulture),
["forceActivate"] = request.ForceActivate.ToString()
};

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -20,7 +21,7 @@ public sealed record TimeStatusDto(
public static TimeStatusDto FromStatus(TimeStatus status)
{
return new TimeStatusDto(
status.Anchor.AnchorTime.ToUniversalTime().ToString("O"),
status.Anchor.AnchorTime.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture),
status.Anchor.Format,
status.Anchor.Source,
status.Anchor.SignatureFingerprint,
@@ -31,7 +32,7 @@ public sealed record TimeStatusDto(
status.Staleness.IsWarning,
status.Staleness.IsBreach,
status.ContentStaleness,
status.EvaluatedAtUtc.ToUniversalTime().ToString("O"));
status.EvaluatedAtUtc.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture));
}
public string ToJson()

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -93,7 +94,7 @@ public sealed class ExcititorVexImportTarget : IVexImportTarget
Content: contentBytes,
Metadata: ImmutableDictionary<string, string>.Empty
.Add("importSource", "airgap-snapshot")
.Add("snapshotAt", data.SnapshotAt.ToString("O")));
.Add("snapshotAt", data.SnapshotAt.ToString("O", CultureInfo.InvariantCulture)));
await _sink.StoreAsync(document, cancellationToken);
created++;

View File

@@ -59,6 +59,9 @@ public static class AirGapSyncServiceCollectionExtensions
// Bundle exporter
services.TryAddSingleton<IAirGapBundleExporter, AirGapBundleExporter>();
// Bundle DSSE signer (OMP-010)
services.TryAddSingleton<IAirGapBundleDsseSigner, AirGapBundleDsseSigner>();
return services;
}

View File

@@ -0,0 +1,275 @@
// <copyright file="AirGapBundleDsseSigner.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.Canonical.Json;
namespace StellaOps.AirGap.Sync.Services;
/// <summary>
/// Options for air-gap bundle DSSE signing.
/// </summary>
public sealed class AirGapBundleDsseOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "AirGap:BundleSigning";
/// <summary>
/// Gets or sets the signing mode: "hmac" for HMAC-SHA256, "none" to disable.
/// </summary>
public string Mode { get; set; } = "none";
/// <summary>
/// Gets or sets the HMAC secret key as Base64.
/// Required when Mode is "hmac".
/// </summary>
public string? SecretBase64 { get; set; }
/// <summary>
/// Gets or sets the key identifier for the signature.
/// </summary>
public string KeyId { get; set; } = "airgap-bundle-signer";
/// <summary>
/// Gets or sets the payload type for DSSE envelope.
/// </summary>
public string PayloadType { get; set; } = "application/vnd.stellaops.airgap.bundle+json";
}
/// <summary>
/// Result of a bundle signature operation.
/// </summary>
/// <param name="KeyId">The key ID used for signing.</param>
/// <param name="Signature">The signature bytes.</param>
/// <param name="SignatureBase64">The signature as Base64 string.</param>
public sealed record AirGapBundleSignatureResult(
string KeyId,
byte[] Signature,
string SignatureBase64);
/// <summary>
/// Interface for air-gap bundle DSSE signing.
/// </summary>
public interface IAirGapBundleDsseSigner
{
/// <summary>
/// Signs an air-gap bundle manifest and returns the signature result.
/// </summary>
/// <param name="bundle">The bundle to sign.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Signature result with key ID and signature.</returns>
Task<AirGapBundleSignatureResult?> SignAsync(
AirGapBundle bundle,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies an air-gap bundle signature.
/// </summary>
/// <param name="bundle">The bundle to verify.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if signature is valid or signing is disabled; false if invalid.</returns>
Task<AirGapBundleVerificationResult> VerifyAsync(
AirGapBundle bundle,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets whether signing is enabled.
/// </summary>
bool IsEnabled { get; }
}
/// <summary>
/// Result of bundle signature verification.
/// </summary>
/// <param name="IsValid">Whether the signature is valid.</param>
/// <param name="Reason">The reason for the result.</param>
public sealed record AirGapBundleVerificationResult(bool IsValid, string Reason)
{
/// <summary>
/// Verification succeeded.
/// </summary>
public static AirGapBundleVerificationResult Valid { get; } = new(true, "Signature verified");
/// <summary>
/// Signing is disabled, so verification is skipped.
/// </summary>
public static AirGapBundleVerificationResult SigningDisabled { get; } = new(true, "Signing disabled");
/// <summary>
/// Bundle has no signature but signing is enabled.
/// </summary>
public static AirGapBundleVerificationResult MissingSignature { get; } = new(false, "Bundle is not signed");
/// <summary>
/// Signature verification failed.
/// </summary>
public static AirGapBundleVerificationResult InvalidSignature { get; } = new(false, "Signature verification failed");
}
/// <summary>
/// DSSE signer for air-gap bundles using HMAC-SHA256.
/// </summary>
public sealed class AirGapBundleDsseSigner : IAirGapBundleDsseSigner
{
private const string DssePrefix = "DSSEv1 ";
private readonly IOptions<AirGapBundleDsseOptions> _options;
private readonly ILogger<AirGapBundleDsseSigner> _logger;
/// <summary>
/// Initializes a new instance of the <see cref="AirGapBundleDsseSigner"/> class.
/// </summary>
public AirGapBundleDsseSigner(
IOptions<AirGapBundleDsseOptions> options,
ILogger<AirGapBundleDsseSigner> logger)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc/>
public bool IsEnabled => string.Equals(_options.Value.Mode, "hmac", StringComparison.OrdinalIgnoreCase);
/// <inheritdoc/>
public Task<AirGapBundleSignatureResult?> SignAsync(
AirGapBundle bundle,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundle);
cancellationToken.ThrowIfCancellationRequested();
var opts = _options.Value;
if (!IsEnabled)
{
_logger.LogDebug("Air-gap bundle DSSE signing is disabled");
return Task.FromResult<AirGapBundleSignatureResult?>(null);
}
if (string.IsNullOrWhiteSpace(opts.SecretBase64))
{
throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured");
}
byte[] secret;
try
{
secret = Convert.FromBase64String(opts.SecretBase64);
}
catch (FormatException ex)
{
throw new InvalidOperationException("SecretBase64 is not valid Base64", ex);
}
// Compute PAE (Pre-Authentication Encoding) per DSSE spec
var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest);
var signature = ComputeHmacSha256(secret, pae);
var signatureBase64 = Convert.ToBase64String(signature);
_logger.LogInformation(
"Signed air-gap bundle {BundleId} with key {KeyId}",
bundle.BundleId,
opts.KeyId);
return Task.FromResult<AirGapBundleSignatureResult?>(
new AirGapBundleSignatureResult(opts.KeyId, signature, signatureBase64));
}
/// <inheritdoc/>
public Task<AirGapBundleVerificationResult> VerifyAsync(
AirGapBundle bundle,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundle);
cancellationToken.ThrowIfCancellationRequested();
var opts = _options.Value;
if (!IsEnabled)
{
_logger.LogDebug("Air-gap bundle DSSE signing is disabled, skipping verification");
return Task.FromResult(AirGapBundleVerificationResult.SigningDisabled);
}
if (string.IsNullOrWhiteSpace(bundle.Signature))
{
_logger.LogWarning("Air-gap bundle {BundleId} has no signature", bundle.BundleId);
return Task.FromResult(AirGapBundleVerificationResult.MissingSignature);
}
if (string.IsNullOrWhiteSpace(opts.SecretBase64))
{
throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured");
}
byte[] secret;
try
{
secret = Convert.FromBase64String(opts.SecretBase64);
}
catch (FormatException ex)
{
throw new InvalidOperationException("SecretBase64 is not valid Base64", ex);
}
byte[] expectedSignature;
try
{
expectedSignature = Convert.FromBase64String(bundle.Signature);
}
catch (FormatException)
{
_logger.LogWarning("Air-gap bundle {BundleId} has invalid Base64 signature", bundle.BundleId);
return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature);
}
// Compute PAE and expected signature
var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest);
var computedSignature = ComputeHmacSha256(secret, pae);
if (!CryptographicOperations.FixedTimeEquals(expectedSignature, computedSignature))
{
_logger.LogWarning(
"Air-gap bundle {BundleId} signature verification failed",
bundle.BundleId);
return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature);
}
_logger.LogDebug(
"Air-gap bundle {BundleId} signature verified successfully",
bundle.BundleId);
return Task.FromResult(AirGapBundleVerificationResult.Valid);
}
/// <summary>
/// Computes DSSE Pre-Authentication Encoding (PAE).
/// PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload
/// where len() returns ASCII decimal length, and SP is a space character.
/// </summary>
private static byte[] ComputePreAuthenticationEncoding(string payloadType, string manifestDigest)
{
var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);
var manifestDigestBytes = Encoding.UTF8.GetBytes(manifestDigest);
// Format: "DSSEv1 {payloadType.Length} {payloadType} {payload.Length} {payload}"
var paeString = string.Create(
CultureInfo.InvariantCulture,
$"{DssePrefix}{payloadTypeBytes.Length} {payloadType} {manifestDigestBytes.Length} {manifestDigest}");
return Encoding.UTF8.GetBytes(paeString);
}
private static byte[] ComputeHmacSha256(byte[] key, byte[] data)
{
using var hmac = new HMACSHA256(key);
return hmac.ComputeHash(data);
}
}
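
The service-collection extension earlier in the diff registers the signer with TryAddSingleton; what it does not show is the options binding. A minimal host-side sketch follows — the helper name and appsettings layout are assumptions, only the section name, option keys, and registration call come from the code above:

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.AirGap.Sync.Services;

public static class AirGapBundleSigningSetupSketch
{
    // Hypothetical helper: bind AirGapBundleDsseOptions from "AirGap:BundleSigning"
    // and register the HMAC DSSE signer (mirrors the TryAddSingleton call in
    // AirGapSyncServiceCollectionExtensions).
    public static IServiceCollection AddAirGapBundleSigning(
        this IServiceCollection services, IConfiguration configuration)
    {
        services.AddOptions<AirGapBundleDsseOptions>()
            .Bind(configuration.GetSection(AirGapBundleDsseOptions.SectionName));
        services.TryAddSingleton<IAirGapBundleDsseSigner, AirGapBundleDsseSigner>();
        return services;
    }
}

// Expected configuration shape (keys follow the options class; values illustrative):
// { "AirGap": { "BundleSigning": { "Mode": "hmac", "SecretBase64": "<base64 32-byte key>", "KeyId": "airgap-bundle-signer" } } }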

View File

@@ -11,10 +11,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -0,0 +1,242 @@
// <copyright file="AirGapBundleDsseSignerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Security.Cryptography;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.AirGap.Sync.Tests;
/// <summary>
/// Unit tests for <see cref="AirGapBundleDsseSigner"/>.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class AirGapBundleDsseSignerTests
{
private static readonly string TestSecretBase64 = Convert.ToBase64String(
RandomNumberGenerator.GetBytes(32));
[Fact]
public async Task SignAsync_WhenDisabled_ReturnsNull()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" });
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Act
var result = await signer.SignAsync(bundle);
// Assert
result.Should().BeNull();
signer.IsEnabled.Should().BeFalse();
}
[Fact]
public async Task SignAsync_WhenEnabled_ReturnsValidSignature()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64,
KeyId = "test-key"
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Act
var result = await signer.SignAsync(bundle);
// Assert
result.Should().NotBeNull();
result!.KeyId.Should().Be("test-key");
result.Signature.Should().NotBeEmpty();
result.SignatureBase64.Should().NotBeNullOrWhiteSpace();
signer.IsEnabled.Should().BeTrue();
}
[Fact]
public async Task SignAsync_DeterministicForSameInput()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Act
var result1 = await signer.SignAsync(bundle);
var result2 = await signer.SignAsync(bundle);
// Assert
result1!.SignatureBase64.Should().Be(result2!.SignatureBase64);
}
[Fact]
public async Task SignAsync_DifferentForDifferentManifest()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle1 = CreateTestBundle(manifestDigest: "sha256:aaa");
var bundle2 = CreateTestBundle(manifestDigest: "sha256:bbb");
// Act
var result1 = await signer.SignAsync(bundle1);
var result2 = await signer.SignAsync(bundle2);
// Assert
result1!.SignatureBase64.Should().NotBe(result2!.SignatureBase64);
}
[Fact]
public async Task VerifyAsync_WhenDisabled_ReturnsSigningDisabled()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" });
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Act
var result = await signer.VerifyAsync(bundle);
// Assert
result.Should().Be(AirGapBundleVerificationResult.SigningDisabled);
}
[Fact]
public async Task VerifyAsync_WhenNoSignature_ReturnsMissingSignature()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle(signature: null);
// Act
var result = await signer.VerifyAsync(bundle);
// Assert
result.Should().Be(AirGapBundleVerificationResult.MissingSignature);
}
[Fact]
public async Task VerifyAsync_WithValidSignature_ReturnsValid()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Sign the bundle first
var signResult = await signer.SignAsync(bundle);
var signedBundle = bundle with { Signature = signResult!.SignatureBase64, SignedBy = signResult.KeyId };
// Act
var verifyResult = await signer.VerifyAsync(signedBundle);
// Assert
verifyResult.Should().Be(AirGapBundleVerificationResult.Valid);
}
[Fact]
public async Task VerifyAsync_WithTamperedSignature_ReturnsInvalid()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Sign and then tamper
var signResult = await signer.SignAsync(bundle);
var tamperedBundle = bundle with
{
Signature = signResult!.SignatureBase64,
ManifestDigest = "sha256:tampered"
};
// Act
var verifyResult = await signer.VerifyAsync(tamperedBundle);
// Assert
verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature);
}
[Fact]
public async Task VerifyAsync_WithInvalidBase64Signature_ReturnsInvalid()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = TestSecretBase64
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle(signature: "not-valid-base64!!!");
// Act
var verifyResult = await signer.VerifyAsync(bundle);
// Assert
verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature);
}
[Fact]
public async Task SignAsync_WithMissingSecret_ThrowsInvalidOperation()
{
// Arrange
var options = Options.Create(new AirGapBundleDsseOptions
{
Mode = "hmac",
SecretBase64 = null
});
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
var bundle = CreateTestBundle();
// Act & Assert
var act = async () => await signer.SignAsync(bundle);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage("*SecretBase64*");
}
private static AirGapBundle CreateTestBundle(
string? manifestDigest = null,
string? signature = null)
{
return new AirGapBundle
{
BundleId = Guid.Parse("11111111-1111-1111-1111-111111111111"),
TenantId = "test-tenant",
CreatedAt = DateTimeOffset.Parse("2026-01-07T12:00:00Z"),
CreatedByNodeId = "test-node",
JobLogs = new List<NodeJobLog>(),
ManifestDigest = manifestDigest ?? "sha256:abc123def456",
Signature = signature
};
}
}

View File

@@ -11,10 +11,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -95,6 +95,7 @@ public class Rfc3161VerifierTests
Assert.False(result.IsValid);
// Reason should carry an rfc3161-prefixed decode or error code
Assert.True(result.Reason?.Contains("rfc3161-") ?? false);
Assert.NotNull(result.Reason);
Assert.Contains("rfc3161-", result.Reason);
}
}

View File

@@ -11,10 +11,6 @@
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" PrivateAssets="all" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -13,9 +13,6 @@
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.v3" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />

View File

@@ -21,9 +21,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.v3" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />

View File

@@ -12,10 +12,6 @@
<ItemGroup>
<PackageReference Include="FsCheck.Xunit.v3" />
<PackageReference Include="FsCheck" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,5 +1,6 @@
using System.Collections.Immutable;
using System.Formats.Asn1;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
@@ -626,7 +627,7 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
if (entry.Proof?.Checkpoint?.Timestamp is not null)
{
attributes = attributes.Add("checkpointTs", entry.Proof.Checkpoint.Timestamp.Value.ToString("O"));
attributes = attributes.Add("checkpointTs", entry.Proof.Checkpoint.Timestamp.Value.ToString("O", CultureInfo.InvariantCulture));
}
return new PolicyEvaluationResult

View File

@@ -26,25 +26,28 @@ public class DistributedVerificationProvider : IVerificationProvider
private readonly ILogger<DistributedVerificationProvider> _logger;
private readonly DistributedVerificationOptions _options;
private readonly HttpClient _httpClient;
private readonly TimeProvider _timeProvider;
private readonly ConcurrentDictionary<string, CircuitBreakerState> _circuitStates = new();
private readonly ConsistentHashRing _hashRing;
public DistributedVerificationProvider(
ILogger<DistributedVerificationProvider> logger,
IOptions<DistributedVerificationOptions> options,
HttpClient httpClient)
HttpClient httpClient,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_timeProvider = timeProvider ?? TimeProvider.System;
if (_options.Nodes == null || _options.Nodes.Count == 0)
{
throw new ArgumentException("At least one verification node must be configured");
}
_hashRing = new ConsistentHashRing(_options.Nodes, _options.VirtualNodeMultiplier);
_logger.LogInformation("Initialized distributed verification provider with {NodeCount} nodes", _options.Nodes.Count);
}
@@ -106,7 +109,7 @@ public class DistributedVerificationProvider : IVerificationProvider
RequestId = request.RequestId,
Status = VerificationStatus.Error,
ErrorMessage = $"All verification nodes failed. {exceptions.Count} errors occurred.",
Timestamp = DateTimeOffset.UtcNow,
Timestamp = _timeProvider.GetUtcNow(),
};
}
@@ -144,7 +147,7 @@ public class DistributedVerificationProvider : IVerificationProvider
HealthyNodeCount = healthyCount,
TotalNodeCount = totalCount,
NodeStatuses = results.ToDictionary(r => r.Key, r => r.Value),
Timestamp = DateTimeOffset.UtcNow,
Timestamp = _timeProvider.GetUtcNow(),
};
}
@@ -237,8 +240,8 @@ public class DistributedVerificationProvider : IVerificationProvider
}
// Allow recovery after cooldown period
if (state.LastFailure.HasValue &&
DateTimeOffset.UtcNow - state.LastFailure.Value > _options.CircuitBreakerCooldown)
if (state.LastFailure.HasValue &&
_timeProvider.GetUtcNow() - state.LastFailure.Value > _options.CircuitBreakerCooldown)
{
state.FailureCount = 0;
state.LastFailure = null;
@@ -252,7 +255,7 @@ public class DistributedVerificationProvider : IVerificationProvider
{
var state = _circuitStates.GetOrAdd(node.Id, _ => new CircuitBreakerState());
state.FailureCount++;
state.LastFailure = DateTimeOffset.UtcNow;
state.LastFailure = _timeProvider.GetUtcNow();
if (state.FailureCount >= _options.CircuitBreakerThreshold)
{

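Swapping DateTimeOffset.UtcNow for the injected TimeProvider makes the circuit-breaker cooldown testable without real waiting. A small sketch of the idea (the provider's own construction is elided; names mirror the options referenced above):

using Microsoft.Extensions.Time.Testing;

// Drive the cooldown boundary with a fake clock instead of Thread.Sleep.
var clock = new FakeTimeProvider();
var cooldown = TimeSpan.FromMinutes(5);            // stands in for CircuitBreakerCooldown
var lastFailure = clock.GetUtcNow();               // what the failure path stores via _timeProvider.GetUtcNow()
clock.Advance(cooldown + TimeSpan.FromSeconds(1));
bool cooledDown = clock.GetUtcNow() - lastFailure > cooldown; // true -> breaker resets
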
View File

@@ -13,10 +13,6 @@
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" />
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -645,7 +646,7 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
Aggregator = witness.Aggregator,
Status = witness.Status,
RootHash = witness.RootHash,
RetrievedAt = witness.RetrievedAt == default ? null : witness.RetrievedAt.ToString("O"),
RetrievedAt = witness.RetrievedAt == default ? null : witness.RetrievedAt.ToString("O", CultureInfo.InvariantCulture),
Statement = witness.Statement,
Signature = witness.Signature,
KeyId = witness.KeyId,
@@ -667,7 +668,7 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
Origin = proof.Checkpoint.Origin,
Size = proof.Checkpoint.Size,
RootHash = proof.Checkpoint.RootHash,
Timestamp = proof.Checkpoint.Timestamp?.ToString("O")
Timestamp = proof.Checkpoint.Timestamp?.ToString("O", CultureInfo.InvariantCulture)
},
Inclusion = proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof
{

View File

@@ -13,10 +13,6 @@
<PackageReference Include="NSubstitute" />
<PackageReference Include="Testcontainers" />
<PackageReference Include="Testcontainers.PostgreSql" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Claims;
using System.Security.Cryptography.X509Certificates;
using System.Text.Json;
@@ -131,7 +132,7 @@ internal static class AttestorWebServiceEndpoints
Algorithm = result.Algorithm,
Mode = result.Mode,
Provider = result.Provider,
SignedAt = result.SignedAt.ToString("O")
SignedAt = result.SignedAt.ToString("O", CultureInfo.InvariantCulture)
}
};
return Results.Ok(response);
@@ -251,14 +252,14 @@ internal static class AttestorWebServiceEndpoints
{
KeyId = result.SignerKeyId,
Algorithm = result.Algorithm,
SignedAt = result.SignedAt.ToString("O")
SignedAt = result.SignedAt.ToString("O", CultureInfo.InvariantCulture)
},
Rekor = result.RekorEntry is null ? null : new InTotoRekorEntryDto
{
LogId = result.RekorEntry.LogId,
LogIndex = result.RekorEntry.LogIndex,
Uuid = result.RekorEntry.Uuid,
IntegratedTime = result.RekorEntry.IntegratedTime?.ToString("O")
IntegratedTime = result.RekorEntry.IntegratedTime?.ToString("O", CultureInfo.InvariantCulture)
}
};
@@ -424,7 +425,7 @@ internal static class AttestorWebServiceEndpoints
Origin = entry.Proof.Checkpoint.Origin,
Size = entry.Proof.Checkpoint.Size,
RootHash = entry.Proof.Checkpoint.RootHash,
Timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O")
Timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O", CultureInfo.InvariantCulture)
},
Inclusion = entry.Proof.Inclusion is null ? null : new AttestationInclusionDto
{
@@ -448,7 +449,7 @@ internal static class AttestorWebServiceEndpoints
Origin = entry.Mirror.Proof.Checkpoint.Origin,
Size = entry.Mirror.Proof.Checkpoint.Size,
RootHash = entry.Mirror.Proof.Checkpoint.RootHash,
Timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O")
Timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O", CultureInfo.InvariantCulture)
},
Inclusion = entry.Mirror.Proof.Inclusion is null ? null : new AttestationInclusionDto
{
@@ -474,7 +475,7 @@ internal static class AttestorWebServiceEndpoints
{
Uuid = entry.RekorUuid,
Status = entry.Status,
CreatedAt = entry.CreatedAt.ToString("O"),
CreatedAt = entry.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
Artifact = new AttestationArtifactDto
{
Sha256 = entry.Artifact.Sha256,

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.AspNetCore.Http;
using StellaOps.Attestor.Core.Bulk;
@@ -84,9 +85,9 @@ internal static class BulkVerificationContracts
{
Id = job.Id,
Status = job.Status.ToString().ToLowerInvariant(),
CreatedAt = job.CreatedAt.ToString("O"),
StartedAt = job.StartedAt?.ToString("O"),
CompletedAt = job.CompletedAt?.ToString("O"),
CreatedAt = job.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
StartedAt = job.StartedAt?.ToString("O", CultureInfo.InvariantCulture),
CompletedAt = job.CompletedAt?.ToString("O", CultureInfo.InvariantCulture),
Processed = job.ProcessedCount,
Succeeded = job.SucceededCount,
Failed = job.FailedCount,
@@ -112,8 +113,8 @@ internal static class BulkVerificationContracts
{
Index = item.Index,
Status = item.Status.ToString().ToLowerInvariant(),
StartedAt = item.StartedAt?.ToString("O"),
CompletedAt = item.CompletedAt?.ToString("O"),
StartedAt = item.StartedAt?.ToString("O", CultureInfo.InvariantCulture),
CompletedAt = item.CompletedAt?.ToString("O", CultureInfo.InvariantCulture),
Error = item.Error,
Request = new BulkVerificationItemRequestDto
{

View File

@@ -1,4 +1,5 @@
using System;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -154,7 +155,7 @@ public class VerdictController : ControllerBase
Envelope = Convert.ToBase64String(Encoding.UTF8.GetBytes(envelopeJson)),
RekorLogIndex = rekorLogIndex,
KeyId = signResult.KeyId ?? request.KeyId ?? "default",
CreatedAt = _timeProvider.GetUtcNow().ToString("O")
CreatedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
_logger.LogInformation(

View File

@@ -23,6 +23,7 @@ public sealed class KmsOrgKeySigner : IOrgKeySigner
private readonly IKmsProvider _kmsProvider;
private readonly ILogger<KmsOrgKeySigner> _logger;
private readonly OrgSigningOptions _options;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Create a new KMS organization key signer.
@@ -30,11 +31,13 @@ public sealed class KmsOrgKeySigner : IOrgKeySigner
public KmsOrgKeySigner(
IKmsProvider kmsProvider,
ILogger<KmsOrgKeySigner> logger,
IOptions<OrgSigningOptions> options)
IOptions<OrgSigningOptions> options,
TimeProvider? timeProvider = null)
{
_kmsProvider = kmsProvider ?? throw new ArgumentNullException(nameof(kmsProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new OrgSigningOptions();
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -62,7 +65,7 @@ public sealed class KmsOrgKeySigner : IOrgKeySigner
}
// Check key expiry
if (keyInfo.ValidUntil.HasValue && keyInfo.ValidUntil.Value < DateTimeOffset.UtcNow)
if (keyInfo.ValidUntil.HasValue && keyInfo.ValidUntil.Value < _timeProvider.GetUtcNow())
{
throw new InvalidOperationException($"Signing key '{keyId}' has expired.");
}
@@ -87,7 +90,7 @@ public sealed class KmsOrgKeySigner : IOrgKeySigner
KeyId = keyId,
Algorithm = keyInfo.Algorithm,
Signature = Convert.ToBase64String(signatureBytes),
SignedAt = DateTimeOffset.UtcNow,
SignedAt = _timeProvider.GetUtcNow(),
CertificateChain = certChain
};
}
@@ -140,9 +143,10 @@ public sealed class KmsOrgKeySigner : IOrgKeySigner
// List keys and find the active one based on rotation policy
var keys = await ListKeysAsync(cancellationToken);
var now = _timeProvider.GetUtcNow();
var activeKey = keys
.Where(k => k.IsActive)
.Where(k => !k.ValidUntil.HasValue || k.ValidUntil.Value > DateTimeOffset.UtcNow)
.Where(k => !k.ValidUntil.HasValue || k.ValidUntil.Value > now)
.OrderByDescending(k => k.ValidFrom)
.FirstOrDefault();
@@ -253,14 +257,16 @@ public sealed class LocalOrgKeySigner : IOrgKeySigner
{
private readonly Dictionary<string, (ECDsa Key, OrgKeyInfo Info)> _keys = new();
private readonly ILogger<LocalOrgKeySigner> _logger;
private readonly TimeProvider _timeProvider;
private string? _activeKeyId;
/// <summary>
/// Create a new local key signer.
/// </summary>
public LocalOrgKeySigner(ILogger<LocalOrgKeySigner> logger)
public LocalOrgKeySigner(ILogger<LocalOrgKeySigner> logger, TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
@@ -276,7 +282,7 @@ public sealed class LocalOrgKeySigner : IOrgKeySigner
keyId,
"ECDSA_P256",
fingerprint,
DateTimeOffset.UtcNow,
_timeProvider.GetUtcNow(),
null,
isActive);
@@ -308,7 +314,7 @@ public sealed class LocalOrgKeySigner : IOrgKeySigner
KeyId = keyId,
Algorithm = "ECDSA_P256",
Signature = Convert.ToBase64String(signature),
SignedAt = DateTimeOffset.UtcNow,
SignedAt = _timeProvider.GetUtcNow(),
CertificateChain = null
});
}

View File

@@ -4,6 +4,7 @@
// Task: Implement OCI registry attachment via ORAS
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
@@ -327,7 +328,7 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
{
var annotations = new Dictionary<string, string>(StringComparer.Ordinal)
{
[AnnotationKeys.Created] = createdAt.ToString("O"),
[AnnotationKeys.Created] = createdAt.ToString("O", CultureInfo.InvariantCulture),
[AnnotationKeys.PredicateType] = predicateType,
[AnnotationKeys.CosignSignature] = "" // Cosign compatibility placeholder
};

View File

@@ -12,7 +12,6 @@
<ItemGroup>
<PackageReference Include="coverlet.collector" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
<PackageReference Include="Moq" />
<PackageReference Include="FluentAssertions" />

View File

@@ -11,10 +11,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Moq" />
<PackageReference Include="Testcontainers" />

View File

@@ -15,10 +15,6 @@
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="NSubstitute" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -14,10 +14,6 @@
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="NSubstitute" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -12,11 +12,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="JsonSchema.Net" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -12,11 +12,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@@ -16,9 +16,5 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

View File

@@ -22,6 +22,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
private readonly IOptionsMonitor<LdapPluginOptions> optionsMonitor;
private readonly LdapClientProvisioningStore clientProvisioningStore;
private readonly ILogger<LdapIdentityProviderPlugin> logger;
private readonly TimeProvider timeProvider;
private readonly LdapCapabilityProbe capabilityProbe;
private readonly AuthorityIdentityProviderCapabilities manifestCapabilities;
private readonly SemaphoreSlim capabilityGate = new(1, 1);
@@ -38,7 +39,8 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
ILdapConnectionFactory connectionFactory,
IOptionsMonitor<LdapPluginOptions> optionsMonitor,
LdapClientProvisioningStore clientProvisioningStore,
ILogger<LdapIdentityProviderPlugin> logger)
ILogger<LdapIdentityProviderPlugin> logger,
TimeProvider? timeProvider = null)
{
this.pluginContext = pluginContext ?? throw new ArgumentNullException(nameof(pluginContext));
this.credentialStore = credentialStore ?? throw new ArgumentNullException(nameof(credentialStore));
@@ -47,6 +49,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
this.clientProvisioningStore = clientProvisioningStore ?? throw new ArgumentNullException(nameof(clientProvisioningStore));
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
this.timeProvider = timeProvider ?? TimeProvider.System;
capabilityProbe = new LdapCapabilityProbe(pluginContext.Manifest.Name, connectionFactory, logger);
@@ -142,7 +145,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
var checkBootstrap = manifestCapabilities.SupportsBootstrap && options.Bootstrap.Enabled;
var fingerprint = LdapCapabilitySnapshotCache.ComputeFingerprint(options, checkProvisioning, checkBootstrap);
if (LdapCapabilitySnapshotCache.TryGet(Name, fingerprint, DateTimeOffset.UtcNow, out var snapshot))
if (LdapCapabilitySnapshotCache.TryGet(Name, fingerprint, timeProvider.GetUtcNow(), out var snapshot))
{
UpdateCapabilities(snapshot, checkProvisioning, checkBootstrap, logDegrade: true);
}
@@ -158,7 +161,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
var checkProvisioning = manifestCapabilities.SupportsClientProvisioning && options.ClientProvisioning.Enabled;
var checkBootstrap = manifestCapabilities.SupportsBootstrap && options.Bootstrap.Enabled;
var fingerprint = LdapCapabilitySnapshotCache.ComputeFingerprint(options, checkProvisioning, checkBootstrap);
var now = DateTimeOffset.UtcNow;
var now = timeProvider.GetUtcNow();
if (LdapCapabilitySnapshotCache.TryGet(Name, fingerprint, now, out var cached))
{
@@ -169,7 +172,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
await capabilityGate.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (LdapCapabilitySnapshotCache.TryGet(Name, fingerprint, DateTimeOffset.UtcNow, out cached))
if (LdapCapabilitySnapshotCache.TryGet(Name, fingerprint, timeProvider.GetUtcNow(), out cached))
{
UpdateCapabilities(cached, checkProvisioning, checkBootstrap, logDegrade: true);
return;
@@ -183,7 +186,7 @@ internal sealed class LdapIdentityProviderPlugin : IIdentityProviderPlugin
cancellationToken)
.ConfigureAwait(false);
LdapCapabilitySnapshotCache.Set(Name, fingerprint, DateTimeOffset.UtcNow, options.CapabilityProbe.CacheTtl, snapshot);
LdapCapabilitySnapshotCache.Set(Name, fingerprint, timeProvider.GetUtcNow(), options.CapabilityProbe.CacheTtl, snapshot);
UpdateCapabilities(snapshot, checkProvisioning, checkBootstrap, logDegrade: true);
}
finally

View File

@@ -3,6 +3,7 @@
// Credential store for validating OIDC tokens.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.IdentityModel.Tokens.Jwt;
using System.Security.Claims;
using Microsoft.Extensions.Caching.Memory;
@@ -161,7 +162,7 @@ internal sealed class OidcCredentialStore : IUserCredentialStore
new[]
{
new AuthEventProperty { Name = "oidc_issuer", Value = ClassifiedString.Public(jwtToken.Issuer) },
new AuthEventProperty { Name = "token_valid_until", Value = ClassifiedString.Public(jwtToken.ValidTo.ToString("O")) }
new AuthEventProperty { Name = "token_valid_until", Value = ClassifiedString.Public(jwtToken.ValidTo.ToString("O", CultureInfo.InvariantCulture)) }
});
}
catch (SecurityTokenExpiredException ex)

View File

@@ -3,6 +3,7 @@
// Credential store for validating SAML assertions.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Security.Claims;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
@@ -31,6 +32,7 @@ internal sealed class SamlCredentialStore : IUserCredentialStore
private readonly IMemoryCache sessionCache;
private readonly ILogger<SamlCredentialStore> logger;
private readonly IHttpClientFactory httpClientFactory;
private readonly TimeProvider timeProvider;
private readonly Saml2SecurityTokenHandler tokenHandler;
private X509Certificate2? idpSigningCertificate;
private string? certificateCacheKey;
@@ -42,13 +44,15 @@ internal sealed class SamlCredentialStore : IUserCredentialStore
IOptionsMonitor<SamlPluginOptions> optionsMonitor,
IMemoryCache sessionCache,
ILogger<SamlCredentialStore> logger,
IHttpClientFactory httpClientFactory)
IHttpClientFactory httpClientFactory,
TimeProvider? timeProvider = null)
{
this.pluginName = pluginName ?? throw new ArgumentNullException(nameof(pluginName));
this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
this.sessionCache = sessionCache ?? throw new ArgumentNullException(nameof(sessionCache));
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
this.httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
this.timeProvider = timeProvider ?? TimeProvider.System;
tokenHandler = new Saml2SecurityTokenHandler();
@@ -162,7 +166,7 @@ internal sealed class SamlCredentialStore : IUserCredentialStore
["email"] = email,
["issuer"] = token.Assertion.Issuer?.Value,
["session_index"] = token.Assertion.Id?.Value,
["auth_instant"] = token.Assertion.IssueInstant.ToString("O")
["auth_instant"] = token.Assertion.IssueInstant.ToString("O", CultureInfo.InvariantCulture)
};
var user = new AuthorityUserDescriptor(
@@ -398,7 +402,7 @@ internal sealed class SamlCredentialStore : IUserCredentialStore
{
idpSigningCertificate = certificate;
certificateCacheKey = key;
lastMetadataRefresh = DateTimeOffset.UtcNow;
lastMetadataRefresh = timeProvider.GetUtcNow();
return;
}
@@ -427,7 +431,7 @@ internal sealed class SamlCredentialStore : IUserCredentialStore
return true;
}
return DateTimeOffset.UtcNow - lastMetadataRefresh.Value >= options.MetadataRefreshInterval;
return timeProvider.GetUtcNow() - lastMetadataRefresh.Value >= options.MetadataRefreshInterval;
}
private static string BuildCertificateCacheKey(SamlPluginOptions options)

View File

@@ -14,10 +14,6 @@
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" />
<PackageReference Include="Microsoft.AspNetCore.TestHost" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Authority\StellaOps.Authority.csproj" />

View File

@@ -1,6 +1,7 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.Json;
@@ -98,8 +99,8 @@ internal sealed class AckTokenPayload
writer.WriteString("channel", Channel);
writer.WriteString("webhook", Webhook);
writer.WriteString("nonce", Nonce);
writer.WriteString("issuedAt", IssuedAt.UtcDateTime.ToString("O"));
writer.WriteString("expiresAt", ExpiresAt.UtcDateTime.ToString("O"));
writer.WriteString("issuedAt", IssuedAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture));
writer.WriteString("expiresAt", ExpiresAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture));
writer.WritePropertyName("actions");
writer.WriteStartArray();

View File

@@ -1,4 +1,5 @@
using System.Collections.Concurrent;
using System.Globalization;
using System.Text.Json;
using StellaOps.Authority.Persistence.Documents;
using StellaOps.Authority.Persistence.Sessions;
@@ -453,7 +454,7 @@ internal sealed class PostgresTokenStore : IAuthorityTokenStore, IAuthorityRefre
if (document.RevokedAt is not null)
{
properties["revoked_at"] = document.RevokedAt.Value.ToUniversalTime().ToString("O");
properties["revoked_at"] = document.RevokedAt.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
}
if (!string.IsNullOrWhiteSpace(document.RevokedReason))

View File

@@ -0,0 +1,22 @@
# Authority ConfigDiff Tests Charter
## Mission
- Maintain deterministic tests for Authority configuration diffing.
## Responsibilities
- Validate config diff output stability and edge cases.
- Keep fixtures deterministic and offline-safe.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/authority/architecture.md
## Definition of Done
- Tests are deterministic and offline-safe.
- Coverage includes mismatch detection and ordering behavior.
## Working Agreement
- Use fixed time and ids in fixtures.
- Avoid non-deterministic ordering; assert sorted output.
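As an illustrative sketch only (the type names below are hypothetical stand-ins, not the real Authority test API), a fixture that follows this working agreement might look like:

using System;
using System.Linq;
using Xunit;

public sealed class ConfigDiffDeterminismTests
{
    // Fixed clock keeps fixtures deterministic and offline-safe.
    private static readonly DateTimeOffset FixedNow = new(2026, 1, 1, 0, 0, 0, TimeSpan.Zero);

    [Fact]
    public void Diff_entries_are_emitted_in_sorted_order()
    {
        // ConfigDiffer is a hypothetical stand-in for the differ under test.
        var diff = ConfigDiffer.Compute(
            baseline: new[] { "a=1", "b=2" },
            candidate: new[] { "a=1", "c=3" },
            asOf: FixedNow);

        // Assert the output is already in stable, sorted order.
        Assert.Equal(diff.Entries.OrderBy(e => e, StringComparer.Ordinal), diff.Entries);
    }
}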

View File

@@ -8,10 +8,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
@@ -44,7 +45,7 @@ internal static class VexObservationGenerator
var fetchedAt = baseTime.AddMinutes(revision);
var receivedAt = fetchedAt.AddSeconds(2);
var documentVersion = fetchedAt.AddSeconds(15).ToString("O");
var documentVersion = fetchedAt.AddSeconds(15).ToString("O", CultureInfo.InvariantCulture);
var products = CreateProducts(group, revision, productsPerObservation);
var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
@@ -138,7 +139,7 @@ internal static class VexObservationGenerator
.Append(statement.Status).Append('|')
.Append(statement.Product.Purl).Append('|')
.Append(statement.Justification).Append('|')
.Append(statement.LastUpdated.ToUniversalTime().ToString("O")).Append('|');
.Append(statement.LastUpdated.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)).Append('|');
}
var data = Encoding.UTF8.GetBytes(builder.ToString());

View File

@@ -8,10 +8,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
@@ -35,7 +36,7 @@ internal static class ObservationGenerator
var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}";
var upstreamId = primaryAlias;
var documentVersion = baseTime.AddMinutes(revision).ToString("O");
var documentVersion = baseTime.AddMinutes(revision).ToString("O", CultureInfo.InvariantCulture);
var fetchedAt = baseTime.AddSeconds(index % 1_800);
var receivedAt = fetchedAt.AddSeconds(1);

View File

@@ -8,10 +8,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -0,0 +1,207 @@
// -----------------------------------------------------------------------------
// AbiCompatibility.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Task: SYM-005 - Define AbiCompatibility assessment model
// Description: ABI compatibility assessment model
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// ABI compatibility assessment between two binaries.
/// </summary>
public sealed record AbiCompatibility
{
/// <summary>Overall compatibility level.</summary>
[JsonPropertyName("level")]
public required AbiCompatibilityLevel Level { get; init; }
/// <summary>Compatibility score (0.0 = incompatible, 1.0 = fully compatible).</summary>
[JsonPropertyName("score")]
public double Score { get; init; }
/// <summary>Whether the target is backward compatible with base.</summary>
[JsonPropertyName("is_backward_compatible")]
public bool IsBackwardCompatible { get; init; }
/// <summary>Whether the target is forward compatible with base.</summary>
[JsonPropertyName("is_forward_compatible")]
public bool IsForwardCompatible { get; init; }
/// <summary>List of breaking changes.</summary>
[JsonPropertyName("breaking_changes")]
public required IReadOnlyList<AbiBreakingChange> BreakingChanges { get; init; }
/// <summary>List of compatibility warnings.</summary>
[JsonPropertyName("warnings")]
public required IReadOnlyList<AbiWarning> Warnings { get; init; }
/// <summary>Summary statistics.</summary>
[JsonPropertyName("summary")]
public required AbiSummary Summary { get; init; }
}
/// <summary>ABI compatibility level.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AbiCompatibilityLevel
{
/// <summary>Fully compatible - no breaking changes.</summary>
FullyCompatible,
/// <summary>Compatible with warnings - minor changes detected.</summary>
CompatibleWithWarnings,
/// <summary>Minor incompatibility - some breaking changes.</summary>
MinorIncompatibility,
/// <summary>Major incompatibility - significant breaking changes.</summary>
MajorIncompatibility,
/// <summary>Not compatible - complete ABI break.</summary>
Incompatible
}
/// <summary>A specific ABI breaking change.</summary>
public sealed record AbiBreakingChange
{
/// <summary>Type of breaking change.</summary>
[JsonPropertyName("type")]
public required AbiBreakType Type { get; init; }
/// <summary>Severity of the break.</summary>
[JsonPropertyName("severity")]
public required ChangeSeverity Severity { get; init; }
/// <summary>Symbol or entity affected.</summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>Human-readable description.</summary>
[JsonPropertyName("description")]
public required string Description { get; init; }
/// <summary>Detailed context.</summary>
[JsonPropertyName("details")]
public string? Details { get; init; }
/// <summary>Potential impact.</summary>
[JsonPropertyName("impact")]
public string? Impact { get; init; }
/// <summary>Suggested mitigation.</summary>
[JsonPropertyName("mitigation")]
public string? Mitigation { get; init; }
}
/// <summary>Type of ABI breaking change.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AbiBreakType
{
/// <summary>Symbol was removed.</summary>
SymbolRemoved,
/// <summary>Symbol type changed.</summary>
SymbolTypeChanged,
/// <summary>Symbol size changed.</summary>
SymbolSizeChanged,
/// <summary>Symbol visibility reduced.</summary>
VisibilityReduced,
/// <summary>Symbol binding changed.</summary>
BindingChanged,
/// <summary>Version removed.</summary>
VersionRemoved,
/// <summary>Version requirement added.</summary>
VersionRequirementAdded,
/// <summary>Library dependency removed.</summary>
LibraryRemoved,
/// <summary>Library dependency added.</summary>
LibraryAdded,
/// <summary>Function signature changed (inferred).</summary>
SignatureChanged,
/// <summary>Data layout changed (inferred).</summary>
DataLayoutChanged,
/// <summary>TLS model changed.</summary>
TlsModelChanged
}
/// <summary>An ABI warning (non-breaking but notable).</summary>
public sealed record AbiWarning
{
/// <summary>Warning type.</summary>
[JsonPropertyName("type")]
public required AbiWarningType Type { get; init; }
/// <summary>Symbol or entity affected.</summary>
[JsonPropertyName("symbol")]
public string? Symbol { get; init; }
/// <summary>Warning message.</summary>
[JsonPropertyName("message")]
public required string Message { get; init; }
}
/// <summary>Type of ABI warning.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AbiWarningType
{
/// <summary>Symbol was added.</summary>
SymbolAdded,
/// <summary>Symbol visibility increased.</summary>
VisibilityIncreased,
/// <summary>New version definition.</summary>
VersionAdded,
/// <summary>Symbol renamed.</summary>
SymbolRenamed,
/// <summary>Size increased (backward compatible).</summary>
SizeIncreased,
/// <summary>Address changed.</summary>
AddressChanged,
/// <summary>Section changed.</summary>
SectionChanged
}
/// <summary>Summary statistics for ABI assessment.</summary>
public sealed record AbiSummary
{
[JsonPropertyName("total_exports_base")]
public int TotalExportsBase { get; init; }
[JsonPropertyName("total_exports_target")]
public int TotalExportsTarget { get; init; }
[JsonPropertyName("exports_added")]
public int ExportsAdded { get; init; }
[JsonPropertyName("exports_removed")]
public int ExportsRemoved { get; init; }
[JsonPropertyName("exports_modified")]
public int ExportsModified { get; init; }
[JsonPropertyName("breaking_changes_count")]
public int BreakingChangesCount { get; init; }
[JsonPropertyName("warnings_count")]
public int WarningsCount { get; init; }
[JsonPropertyName("compatibility_percentage")]
public double CompatibilityPercentage { get; init; }
}
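For context, a minimal sketch of how a consumer might gate a CI step on this model. Only the types defined above are assumed; the AbiGate helper and its severity threshold are illustrative, not part of the sprint scope.

using System.Linq;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

// Illustrative only: turns an AbiCompatibility assessment into a block/allow decision.
public static class AbiGate
{
    public static bool ShouldBlock(AbiCompatibility abi, ChangeSeverity threshold = ChangeSeverity.High)
    {
        if (abi.Level is AbiCompatibilityLevel.FullyCompatible or AbiCompatibilityLevel.CompatibleWithWarnings)
        {
            return false;
        }

        // Block only when at least one breaking change reaches the configured severity.
        return abi.BreakingChanges.Any(change => change.Severity >= threshold);
    }
}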

View File

@@ -0,0 +1,244 @@
// -----------------------------------------------------------------------------
// DynamicLinkingDiff.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Task: SYM-004 - Define DynamicLinkingDiff records (GOT/PLT)
// Description: Dynamic linking diff model for GOT/PLT changes
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Diff of dynamic linking structures (GOT/PLT) between binaries.
/// </summary>
public sealed record DynamicLinkingDiff
{
/// <summary>GOT (Global Offset Table) changes.</summary>
[JsonPropertyName("got")]
public required GotDiff Got { get; init; }
/// <summary>PLT (Procedure Linkage Table) changes.</summary>
[JsonPropertyName("plt")]
public required PltDiff Plt { get; init; }
/// <summary>RPATH/RUNPATH changes.</summary>
[JsonPropertyName("rpath")]
public required RpathDiff Rpath { get; init; }
/// <summary>NEEDED library changes.</summary>
[JsonPropertyName("needed")]
public required NeededDiff Needed { get; init; }
/// <summary>Relocation changes.</summary>
[JsonPropertyName("relocations")]
public RelocationDiff? Relocations { get; init; }
}
/// <summary>GOT (Global Offset Table) diff.</summary>
public sealed record GotDiff
{
[JsonPropertyName("entries_added")]
public required IReadOnlyList<GotEntry> EntriesAdded { get; init; }
[JsonPropertyName("entries_removed")]
public required IReadOnlyList<GotEntry> EntriesRemoved { get; init; }
[JsonPropertyName("entries_modified")]
public required IReadOnlyList<GotEntryModification> EntriesModified { get; init; }
[JsonPropertyName("base_count")]
public int BaseCount { get; init; }
[JsonPropertyName("target_count")]
public int TargetCount { get; init; }
}
/// <summary>A GOT entry.</summary>
public sealed record GotEntry
{
[JsonPropertyName("index")]
public int Index { get; init; }
[JsonPropertyName("symbol_name")]
public required string SymbolName { get; init; }
[JsonPropertyName("address")]
public ulong Address { get; init; }
[JsonPropertyName("type")]
public required GotEntryType Type { get; init; }
}
/// <summary>A GOT entry modification.</summary>
public sealed record GotEntryModification
{
[JsonPropertyName("symbol_name")]
public required string SymbolName { get; init; }
[JsonPropertyName("base_address")]
public ulong BaseAddress { get; init; }
[JsonPropertyName("target_address")]
public ulong TargetAddress { get; init; }
[JsonPropertyName("base_type")]
public required GotEntryType BaseType { get; init; }
[JsonPropertyName("target_type")]
public required GotEntryType TargetType { get; init; }
}
/// <summary>GOT entry type.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum GotEntryType
{
GlobDat,
JumpSlot,
Relative,
Copy,
TlsDtpMod,
TlsDtpOff,
TlsTpOff,
Other
}
/// <summary>PLT (Procedure Linkage Table) diff.</summary>
public sealed record PltDiff
{
[JsonPropertyName("entries_added")]
public required IReadOnlyList<PltEntry> EntriesAdded { get; init; }
[JsonPropertyName("entries_removed")]
public required IReadOnlyList<PltEntry> EntriesRemoved { get; init; }
[JsonPropertyName("entries_reordered")]
public required IReadOnlyList<PltReorder> EntriesReordered { get; init; }
[JsonPropertyName("base_count")]
public int BaseCount { get; init; }
[JsonPropertyName("target_count")]
public int TargetCount { get; init; }
}
/// <summary>A PLT entry.</summary>
public sealed record PltEntry
{
[JsonPropertyName("index")]
public int Index { get; init; }
[JsonPropertyName("symbol_name")]
public required string SymbolName { get; init; }
[JsonPropertyName("address")]
public ulong Address { get; init; }
[JsonPropertyName("got_offset")]
public ulong GotOffset { get; init; }
}
/// <summary>A PLT entry reordering.</summary>
public sealed record PltReorder
{
[JsonPropertyName("symbol_name")]
public required string SymbolName { get; init; }
[JsonPropertyName("base_index")]
public int BaseIndex { get; init; }
[JsonPropertyName("target_index")]
public int TargetIndex { get; init; }
}
/// <summary>RPATH/RUNPATH diff.</summary>
public sealed record RpathDiff
{
[JsonPropertyName("rpath_base")]
public IReadOnlyList<string>? RpathBase { get; init; }
[JsonPropertyName("rpath_target")]
public IReadOnlyList<string>? RpathTarget { get; init; }
[JsonPropertyName("runpath_base")]
public IReadOnlyList<string>? RunpathBase { get; init; }
[JsonPropertyName("runpath_target")]
public IReadOnlyList<string>? RunpathTarget { get; init; }
[JsonPropertyName("paths_added")]
public required IReadOnlyList<string> PathsAdded { get; init; }
[JsonPropertyName("paths_removed")]
public required IReadOnlyList<string> PathsRemoved { get; init; }
[JsonPropertyName("has_changes")]
public bool HasChanges { get; init; }
}
/// <summary>NEEDED library diff.</summary>
public sealed record NeededDiff
{
[JsonPropertyName("libraries_added")]
public required IReadOnlyList<string> LibrariesAdded { get; init; }
[JsonPropertyName("libraries_removed")]
public required IReadOnlyList<string> LibrariesRemoved { get; init; }
[JsonPropertyName("base_libraries")]
public required IReadOnlyList<string> BaseLibraries { get; init; }
[JsonPropertyName("target_libraries")]
public required IReadOnlyList<string> TargetLibraries { get; init; }
}
/// <summary>Relocation diff (optional, detailed).</summary>
public sealed record RelocationDiff
{
[JsonPropertyName("relocations_added")]
public int RelocationsAdded { get; init; }
[JsonPropertyName("relocations_removed")]
public int RelocationsRemoved { get; init; }
[JsonPropertyName("relocations_modified")]
public int RelocationsModified { get; init; }
[JsonPropertyName("base_count")]
public int BaseCount { get; init; }
[JsonPropertyName("target_count")]
public int TargetCount { get; init; }
[JsonPropertyName("significant_changes")]
public required IReadOnlyList<RelocationChange> SignificantChanges { get; init; }
}
/// <summary>A significant relocation change.</summary>
public sealed record RelocationChange
{
[JsonPropertyName("symbol_name")]
public string? SymbolName { get; init; }
[JsonPropertyName("change_type")]
public required RelocationChangeType ChangeType { get; init; }
[JsonPropertyName("base_type")]
public string? BaseType { get; init; }
[JsonPropertyName("target_type")]
public string? TargetType { get; init; }
[JsonPropertyName("address")]
public ulong Address { get; init; }
}
/// <summary>Relocation change type.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum RelocationChangeType
{
Added,
Removed,
TypeChanged,
SymbolChanged
}
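As a usage sketch (not part of the model itself), the NEEDED, RPATH and GOT/PLT sections can be flattened into short report lines; the LinkingDiffReport helper below is hypothetical.

using System.Collections.Generic;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

// Illustrative reporter: flattens a DynamicLinkingDiff into human-readable notes.
public static class LinkingDiffReport
{
    public static IEnumerable<string> Describe(DynamicLinkingDiff diff)
    {
        foreach (var lib in diff.Needed.LibrariesAdded)
        {
            yield return $"NEEDED added: {lib}";
        }

        foreach (var lib in diff.Needed.LibrariesRemoved)
        {
            yield return $"NEEDED removed: {lib}";
        }

        if (diff.Rpath.HasChanges)
        {
            yield return $"RPATH/RUNPATH changed: +{diff.Rpath.PathsAdded.Count} / -{diff.Rpath.PathsRemoved.Count}";
        }

        yield return $"GOT entries: {diff.Got.BaseCount} -> {diff.Got.TargetCount}, PLT entries: {diff.Plt.BaseCount} -> {diff.Plt.TargetCount}";
    }
}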

View File

@@ -0,0 +1,162 @@
// -----------------------------------------------------------------------------
// ISymbolTableDiffAnalyzer.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Task: SYM-006 - Define ISymbolTableDiffAnalyzer interface
// Description: Interface for symbol table diff analysis
// -----------------------------------------------------------------------------
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Analyzes symbol table differences between two binaries.
/// </summary>
public interface ISymbolTableDiffAnalyzer
{
/// <summary>
/// Computes a complete symbol table diff between base and target binaries.
/// </summary>
/// <param name="basePath">Path to the base binary.</param>
/// <param name="targetPath">Path to the target binary.</param>
/// <param name="options">Analysis options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Symbol table diff.</returns>
Task<SymbolTableDiff> ComputeDiffAsync(
string basePath,
string targetPath,
SymbolDiffOptions? options = null,
CancellationToken ct = default);
/// <summary>
/// Extracts symbol table from a single binary.
/// </summary>
/// <param name="binaryPath">Path to the binary.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Extracted symbol table.</returns>
Task<SymbolTable> ExtractSymbolTableAsync(
string binaryPath,
CancellationToken ct = default);
/// <summary>
/// Computes ABI compatibility assessment.
/// </summary>
/// <param name="diff">The symbol table diff.</param>
/// <returns>ABI compatibility assessment.</returns>
AbiCompatibility AssessAbiCompatibility(SymbolTableDiff diff);
}
/// <summary>
/// Options for symbol diff analysis.
/// </summary>
public sealed record SymbolDiffOptions
{
/// <summary>Whether to include dynamic linking analysis (GOT/PLT).</summary>
public bool IncludeDynamicLinking { get; init; } = true;
/// <summary>Whether to detect symbol renames via fingerprinting.</summary>
public bool DetectRenames { get; init; } = true;
/// <summary>Minimum similarity threshold for rename detection (0.0-1.0).</summary>
public double RenameSimilarityThreshold { get; init; } = 0.7;
/// <summary>Whether to demangle C++/Rust names.</summary>
public bool DemangleNames { get; init; } = true;
/// <summary>Whether to compute function fingerprints for modified symbols.</summary>
public bool ComputeFingerprints { get; init; } = true;
/// <summary>Maximum symbols to process (for large binaries).</summary>
public int? MaxSymbols { get; init; }
}
/// <summary>
/// Extracted symbol table from a binary.
/// </summary>
public sealed record SymbolTable
{
/// <summary>Binary reference.</summary>
public required BinaryRef Binary { get; init; }
/// <summary>Exported symbols (.dynsym exports for ELF, exports for PE).</summary>
public required IReadOnlyList<ExtractedSymbol> Exports { get; init; }
/// <summary>Imported symbols.</summary>
public required IReadOnlyList<ExtractedSymbol> Imports { get; init; }
/// <summary>Version definitions (.gnu.version_d for ELF).</summary>
public required IReadOnlyList<VersionDefinition> VersionDefinitions { get; init; }
/// <summary>Version requirements (.gnu.version_r for ELF).</summary>
public required IReadOnlyList<VersionRequirement> VersionRequirements { get; init; }
/// <summary>GOT entries.</summary>
public IReadOnlyList<GotEntry>? GotEntries { get; init; }
/// <summary>PLT entries.</summary>
public IReadOnlyList<PltEntry>? PltEntries { get; init; }
/// <summary>NEEDED libraries.</summary>
public required IReadOnlyList<string> NeededLibraries { get; init; }
/// <summary>RPATH entries.</summary>
public IReadOnlyList<string>? Rpath { get; init; }
/// <summary>RUNPATH entries.</summary>
public IReadOnlyList<string>? Runpath { get; init; }
/// <summary>When extracted.</summary>
public required DateTimeOffset ExtractedAt { get; init; }
}
/// <summary>
/// An extracted symbol with all metadata.
/// </summary>
public sealed record ExtractedSymbol
{
/// <summary>Mangled name.</summary>
public required string Name { get; init; }
/// <summary>Demangled name (if available).</summary>
public string? DemangledName { get; init; }
/// <summary>Symbol type.</summary>
public required SymbolType Type { get; init; }
/// <summary>Symbol binding.</summary>
public required SymbolBinding Binding { get; init; }
/// <summary>Symbol visibility.</summary>
public required SymbolVisibility Visibility { get; init; }
/// <summary>Section name.</summary>
public string? Section { get; init; }
/// <summary>Section index.</summary>
public int SectionIndex { get; init; }
/// <summary>Virtual address.</summary>
public ulong Address { get; init; }
/// <summary>Symbol size.</summary>
public ulong Size { get; init; }
/// <summary>Version string (from .gnu.version).</summary>
public string? Version { get; init; }
/// <summary>Version index.</summary>
public int VersionIndex { get; init; }
/// <summary>Whether this is a hidden version.</summary>
public bool IsVersionHidden { get; init; }
/// <summary>Function fingerprint (for functions).</summary>
public string? Fingerprint { get; init; }
/// <summary>Whether this is a TLS symbol.</summary>
public bool IsTls { get; init; }
/// <summary>Whether this is a weak symbol.</summary>
public bool IsWeak => Binding == SymbolBinding.Weak;
/// <summary>Whether this is a function.</summary>
public bool IsFunction => Type == SymbolType.Function;
}
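A minimal call-site sketch for the interface above, assuming an analyzer has already been resolved from DI and the binary paths exist; the option values shown simply restate the defaults, and the wrapper type is illustrative.

using System.Threading;
using System.Threading.Tasks;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

// Illustrative call site: compute a diff and treat backward compatibility as the gate.
public static class AbiUpgradeCheck
{
    public static async Task<bool> IsAbiSafeUpgradeAsync(
        ISymbolTableDiffAnalyzer analyzer,
        string basePath,
        string targetPath,
        CancellationToken ct = default)
    {
        var options = new SymbolDiffOptions
        {
            IncludeDynamicLinking = true,   // also compare GOT/PLT/NEEDED
            DetectRenames = true,
            RenameSimilarityThreshold = 0.7
        };

        var diff = await analyzer.ComputeDiffAsync(basePath, targetPath, options, ct);
        return diff.AbiCompatibility.IsBackwardCompatible;
    }
}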

View File

@@ -0,0 +1,379 @@
// -----------------------------------------------------------------------------
// NameDemangler.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Tasks: SYM-016, SYM-017 - C++ and Rust name demangling support
// Description: Name demangler implementation for C++ and Rust symbols
// -----------------------------------------------------------------------------
using System.Text.RegularExpressions;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Demangles C++ and Rust symbol names.
/// </summary>
public sealed partial class NameDemangler : INameDemangler
{
/// <inheritdoc />
public string? Demangle(string mangledName)
{
if (string.IsNullOrEmpty(mangledName))
{
return null;
}
var scheme = DetectScheme(mangledName);
return scheme switch
{
ManglingScheme.ItaniumCxx => DemangleItaniumCxx(mangledName),
ManglingScheme.MicrosoftCxx => DemangleMicrosoftCxx(mangledName),
ManglingScheme.Rust => DemangleRust(mangledName),
ManglingScheme.Swift => DemangleSwift(mangledName),
_ => null
};
}
/// <inheritdoc />
public ManglingScheme DetectScheme(string name)
{
if (string.IsNullOrEmpty(name))
{
return ManglingScheme.None;
}
// Rust legacy piggybacks on the Itanium prefix (_ZN...17h<hash>E), so check it before the generic _Z test.
if (name.StartsWith("_ZN", StringComparison.Ordinal) && name.Contains("17h"))
{
return ManglingScheme.Rust;
}
// Rust v0: starts with _R
if (name.StartsWith("_R", StringComparison.Ordinal))
{
return ManglingScheme.Rust;
}
// Itanium C++ ABI: starts with _Z
if (name.StartsWith("_Z", StringComparison.Ordinal))
{
return ManglingScheme.ItaniumCxx;
}
// Microsoft C++ ABI: starts with ?
if (name.StartsWith('?'))
{
return ManglingScheme.MicrosoftCxx;
}
// Swift: starts with $s or _$s
if (name.StartsWith("$s", StringComparison.Ordinal) ||
name.StartsWith("_$s", StringComparison.Ordinal))
{
return ManglingScheme.Swift;
}
// Plain C or unknown
return ManglingScheme.None;
}
// SYM-016: C++ name demangling (Itanium ABI - GCC/Clang)
private static string? DemangleItaniumCxx(string mangledName)
{
// This is a simplified demangler for common patterns
// Production code should use a full demangler library (e.g., cxxfilt, llvm-cxxfilt)
if (!mangledName.StartsWith("_Z", StringComparison.Ordinal))
{
return null;
}
try
{
var result = new System.Text.StringBuilder();
var pos = 2; // Skip _Z
// Parse nested name if present
if (pos < mangledName.Length && mangledName[pos] == 'N')
{
pos++; // Skip N
// Parse CV qualifiers
while (pos < mangledName.Length && (mangledName[pos] == 'K' || mangledName[pos] == 'V' || mangledName[pos] == 'r'))
{
pos++;
}
var parts = new List<string>();
// Parse name parts until E
while (pos < mangledName.Length && mangledName[pos] != 'E')
{
var (name, newPos) = ParseNamePart(mangledName, pos);
if (name is null)
{
break;
}
parts.Add(name);
pos = newPos;
}
result.Append(string.Join("::", parts));
}
else
{
// Simple name
var (name, _) = ParseNamePart(mangledName, pos);
if (name is not null)
{
result.Append(name);
}
}
var demangled = result.ToString();
return string.IsNullOrEmpty(demangled) ? null : demangled;
}
catch
{
return null;
}
}
private static (string? Name, int NewPos) ParseNamePart(string mangled, int pos)
{
if (pos >= mangled.Length)
{
return (null, pos);
}
// Check for special prefixes
if (mangled[pos] == 'S')
{
// Substitution - simplified handling
pos++;
if (pos < mangled.Length && mangled[pos] == 't')
{
return ("std", pos + 1);
}
if (pos < mangled.Length && mangled[pos] == 's')
{
return ("std::string", pos + 1);
}
// Skip substitution index
while (pos < mangled.Length && char.IsLetterOrDigit(mangled[pos]) && mangled[pos] != '_')
{
pos++;
}
if (pos < mangled.Length && mangled[pos] == '_')
{
pos++;
}
return (null, pos);
}
// Parse length-prefixed name
var lengthStart = pos;
while (pos < mangled.Length && char.IsDigit(mangled[pos]))
{
pos++;
}
if (lengthStart == pos)
{
return (null, pos);
}
if (!int.TryParse(mangled.AsSpan(lengthStart, pos - lengthStart), out var length))
{
return (null, pos);
}
if (pos + length > mangled.Length)
{
return (null, pos);
}
var name = mangled.Substring(pos, length);
return (name, pos + length);
}
// Microsoft C++ demangling (simplified)
private static string? DemangleMicrosoftCxx(string mangledName)
{
// This is a very simplified demangler
// Production code should use undname.exe or a full implementation
if (!mangledName.StartsWith('?'))
{
return null;
}
try
{
// Extract the basic name between ? and @
var firstAt = mangledName.IndexOf('@', 1);
if (firstAt < 0)
{
return null;
}
var name = mangledName[1..firstAt];
// Find namespace parts (between @ symbols)
var parts = new List<string> { name };
var pos = firstAt + 1;
while (pos < mangledName.Length && mangledName[pos] != '@')
{
var nextAt = mangledName.IndexOf('@', pos);
if (nextAt < 0)
{
break;
}
var part = mangledName[pos..nextAt];
if (!string.IsNullOrEmpty(part) && char.IsLetter(part[0]))
{
parts.Insert(0, part);
}
pos = nextAt + 1;
}
return string.Join("::", parts);
}
catch
{
return null;
}
}
// SYM-017: Rust name demangling
private static string? DemangleRust(string mangledName)
{
// Rust legacy mangling: _ZN<len>name...<len>name17h<hash>E
// Rust v0 mangling: _R<...>
try
{
if (mangledName.StartsWith("_R", StringComparison.Ordinal))
{
return DemangleRustV0(mangledName);
}
if (mangledName.StartsWith("_ZN", StringComparison.Ordinal))
{
return DemangleRustLegacy(mangledName);
}
return null;
}
catch
{
return null;
}
}
private static string? DemangleRustLegacy(string mangledName)
{
// Format: _ZN<parts>17h<16-hex-digits>E
if (!mangledName.StartsWith("_ZN", StringComparison.Ordinal))
{
return null;
}
var pos = 3; // Skip _ZN
var parts = new List<string>();
while (pos < mangledName.Length && mangledName[pos] != 'E')
{
// Parse length
var lengthStart = pos;
while (pos < mangledName.Length && char.IsDigit(mangledName[pos]))
{
pos++;
}
if (lengthStart == pos)
{
break;
}
if (!int.TryParse(mangledName.AsSpan(lengthStart, pos - lengthStart), out var length))
{
break;
}
if (pos + length > mangledName.Length)
{
break;
}
var part = mangledName.Substring(pos, length);
pos += length;
// Skip hash part (17h + 16 hex digits)
if (part.StartsWith('h') && part.Length == 17 && IsHexString().IsMatch(part[1..]))
{
continue;
}
// Decode Rust escapes: $LT$ -> <, $GT$ -> >, $BP$ -> *, etc.
part = DecodeRustEscapes(part);
parts.Add(part);
}
return parts.Count > 0 ? string.Join("::", parts) : null;
}
private static string? DemangleRustV0(string mangledName)
{
// Rust v0 mangling is complex; provide basic support
// Full implementation would require the v0 demangling spec
if (!mangledName.StartsWith("_R", StringComparison.Ordinal))
{
return null;
}
// Very simplified: just extract what we can
// In production, use rust-demangle crate via P/Invoke or subprocess
return $"<rust-v0>{mangledName[2..]}";
}
private static string DecodeRustEscapes(string input)
{
return input
.Replace("$LT$", "<")
.Replace("$GT$", ">")
.Replace("$BP$", "*")
.Replace("$RF$", "&")
.Replace("$LP$", "(")
.Replace("$RP$", ")")
.Replace("$C$", ",")
.Replace("$SP$", "@")
.Replace("..", "::");
}
// Swift demangling (placeholder)
private static string? DemangleSwift(string mangledName)
{
// Swift demangling is very complex
// In production, use swift-demangle via subprocess
if (mangledName.StartsWith("$s", StringComparison.Ordinal))
{
return $"<swift>{mangledName[2..]}";
}
if (mangledName.StartsWith("_$s", StringComparison.Ordinal))
{
return $"<swift>{mangledName[3..]}";
}
return null;
}
[GeneratedRegex("^[0-9a-f]+$", RegexOptions.IgnoreCase)]
private static partial Regex IsHexString();
}
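For orientation, a short usage sketch. The expected outputs reflect the simplified demangler above (parameters are not decoded and the Rust hash segment is dropped); a full cxxfilt-style tool would produce richer results.

using System;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

var demangler = new NameDemangler();

// Itanium C++ nested name: _ZN3foo3barEv -> "foo::bar" with this simplified parser.
Console.WriteLine(demangler.Demangle("_ZN3foo3barEv"));

// Rust legacy: the trailing 17h<hash> segment is skipped -> "core::fmt::Arguments".
Console.WriteLine(demangler.Demangle("_ZN4core3fmt9Arguments17h1234567890abcdefE"));

// Plain C names are not mangled: DetectScheme returns None and Demangle returns null.
Console.WriteLine(demangler.Demangle("memcpy") ?? "memcpy (not mangled)");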

View File

@@ -0,0 +1,40 @@
// -----------------------------------------------------------------------------
// SymbolDiffServiceExtensions.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Task: SYM-019 - Add service registration extensions
// Description: DI registration extensions for symbol diff services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Service collection extensions for symbol diff analyzer.
/// </summary>
public static class SymbolDiffServiceExtensions
{
/// <summary>
/// Adds symbol table diff analyzer services.
/// </summary>
public static IServiceCollection AddSymbolTableDiffAnalyzer(this IServiceCollection services)
{
services.AddSingleton<INameDemangler, NameDemangler>();
services.AddSingleton<ISymbolTableDiffAnalyzer, SymbolTableDiffAnalyzer>();
return services;
}
/// <summary>
/// Adds symbol table diff analyzer with custom symbol extractor.
/// </summary>
public static IServiceCollection AddSymbolTableDiffAnalyzer<TExtractor>(this IServiceCollection services)
where TExtractor : class, ISymbolExtractor
{
services.AddSingleton<ISymbolExtractor, TExtractor>();
services.AddSingleton<INameDemangler, NameDemangler>();
services.AddSingleton<ISymbolTableDiffAnalyzer, SymbolTableDiffAnalyzer>();
return services;
}
}
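A hedged registration sketch: the analyzer implementation later in this commit takes TimeProvider and ILogger<> in its constructor, so the host is assumed to register both before resolving it. ElfSymbolExtractor is a placeholder name for whatever ISymbolExtractor implementation the host provides.

using Microsoft.Extensions.DependencyInjection;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

var services = new ServiceCollection();

// The analyzer needs TimeProvider and logging at construction time.
services.AddSingleton(TimeProvider.System);
services.AddLogging();

// ElfSymbolExtractor is a placeholder; plug in the host's ISymbolExtractor here.
services.AddSymbolTableDiffAnalyzer<ElfSymbolExtractor>();

using var provider = services.BuildServiceProvider();
var analyzer = provider.GetRequiredService<ISymbolTableDiffAnalyzer>();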

View File

@@ -0,0 +1,315 @@
// -----------------------------------------------------------------------------
// SymbolTableDiff.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Tasks: SYM-001, SYM-002, SYM-003, SYM-004, SYM-005
// Description: Symbol table diff model for comparing exports/imports between binaries
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Complete symbol table diff between two binaries.
/// </summary>
public sealed record SymbolTableDiff
{
/// <summary>Content-addressed diff ID (sha256 of canonical JSON).</summary>
[JsonPropertyName("diff_id")]
public required string DiffId { get; init; }
/// <summary>Base binary identity.</summary>
[JsonPropertyName("base")]
public required BinaryRef Base { get; init; }
/// <summary>Target binary identity.</summary>
[JsonPropertyName("target")]
public required BinaryRef Target { get; init; }
/// <summary>Exported symbol changes.</summary>
[JsonPropertyName("exports")]
public required SymbolChangeSummary Exports { get; init; }
/// <summary>Imported symbol changes.</summary>
[JsonPropertyName("imports")]
public required SymbolChangeSummary Imports { get; init; }
/// <summary>Version map changes.</summary>
[JsonPropertyName("versions")]
public required VersionMapDiff Versions { get; init; }
/// <summary>GOT/PLT changes (dynamic linking).</summary>
[JsonPropertyName("dynamic")]
public DynamicLinkingDiff? Dynamic { get; init; }
/// <summary>Overall ABI compatibility assessment.</summary>
[JsonPropertyName("abi_compatibility")]
public required AbiCompatibility AbiCompatibility { get; init; }
/// <summary>When this diff was computed (UTC).</summary>
[JsonPropertyName("computed_at")]
public required DateTimeOffset ComputedAt { get; init; }
/// <summary>Schema version for forward compatibility.</summary>
[JsonPropertyName("schema_version")]
public string SchemaVersion { get; init; } = "1.0";
}
/// <summary>Reference to a binary.</summary>
public sealed record BinaryRef
{
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("build_id")]
public string? BuildId { get; init; }
[JsonPropertyName("architecture")]
public required string Architecture { get; init; }
[JsonPropertyName("format")]
public required BinaryFormat Format { get; init; }
[JsonPropertyName("file_size")]
public long FileSize { get; init; }
}
/// <summary>Binary format.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum BinaryFormat
{
Elf,
Pe,
MachO,
Unknown
}
/// <summary>Summary of symbol changes.</summary>
public sealed record SymbolChangeSummary
{
[JsonPropertyName("added")]
public required IReadOnlyList<SymbolChange> Added { get; init; }
[JsonPropertyName("removed")]
public required IReadOnlyList<SymbolChange> Removed { get; init; }
[JsonPropertyName("modified")]
public required IReadOnlyList<SymbolModification> Modified { get; init; }
[JsonPropertyName("renamed")]
public required IReadOnlyList<SymbolRename> Renamed { get; init; }
/// <summary>Count summaries.</summary>
[JsonPropertyName("counts")]
public required SymbolChangeCounts Counts { get; init; }
}
/// <summary>Count summary for symbol changes.</summary>
public sealed record SymbolChangeCounts
{
[JsonPropertyName("added")]
public int Added { get; init; }
[JsonPropertyName("removed")]
public int Removed { get; init; }
[JsonPropertyName("modified")]
public int Modified { get; init; }
[JsonPropertyName("renamed")]
public int Renamed { get; init; }
[JsonPropertyName("unchanged")]
public int Unchanged { get; init; }
[JsonPropertyName("total_base")]
public int TotalBase { get; init; }
[JsonPropertyName("total_target")]
public int TotalTarget { get; init; }
}
/// <summary>A symbol that was added or removed.</summary>
public sealed record SymbolChange
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("demangled_name")]
public string? DemangledName { get; init; }
[JsonPropertyName("type")]
public required SymbolType Type { get; init; }
[JsonPropertyName("binding")]
public required SymbolBinding Binding { get; init; }
[JsonPropertyName("visibility")]
public required SymbolVisibility Visibility { get; init; }
[JsonPropertyName("section")]
public string? Section { get; init; }
[JsonPropertyName("address")]
public ulong Address { get; init; }
[JsonPropertyName("size")]
public ulong Size { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
[JsonPropertyName("fingerprint")]
public string? Fingerprint { get; init; }
}
/// <summary>A symbol that was modified (same name, different attributes).</summary>
public sealed record SymbolModification
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("demangled_name")]
public string? DemangledName { get; init; }
[JsonPropertyName("base")]
public required SymbolAttributes Base { get; init; }
[JsonPropertyName("target")]
public required SymbolAttributes Target { get; init; }
[JsonPropertyName("changes")]
public required IReadOnlyList<AttributeChange> Changes { get; init; }
[JsonPropertyName("is_abi_breaking")]
public bool IsAbiBreaking { get; init; }
}
/// <summary>Symbol attributes for comparison.</summary>
public sealed record SymbolAttributes
{
[JsonPropertyName("type")]
public required SymbolType Type { get; init; }
[JsonPropertyName("binding")]
public required SymbolBinding Binding { get; init; }
[JsonPropertyName("visibility")]
public required SymbolVisibility Visibility { get; init; }
[JsonPropertyName("section")]
public string? Section { get; init; }
[JsonPropertyName("address")]
public ulong Address { get; init; }
[JsonPropertyName("size")]
public ulong Size { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
[JsonPropertyName("fingerprint")]
public string? Fingerprint { get; init; }
}
/// <summary>A specific attribute change.</summary>
public sealed record AttributeChange
{
[JsonPropertyName("attribute")]
public required string Attribute { get; init; }
[JsonPropertyName("base_value")]
public string? BaseValue { get; init; }
[JsonPropertyName("target_value")]
public string? TargetValue { get; init; }
[JsonPropertyName("severity")]
public required ChangeSeverity Severity { get; init; }
}
/// <summary>A symbol that was renamed (detected via fingerprint matching).</summary>
public sealed record SymbolRename
{
[JsonPropertyName("base_name")]
public required string BaseName { get; init; }
[JsonPropertyName("target_name")]
public required string TargetName { get; init; }
[JsonPropertyName("base_demangled")]
public string? BaseDemangled { get; init; }
[JsonPropertyName("target_demangled")]
public string? TargetDemangled { get; init; }
[JsonPropertyName("fingerprint")]
public required string Fingerprint { get; init; }
[JsonPropertyName("similarity")]
public double Similarity { get; init; }
[JsonPropertyName("confidence")]
public required RenameConfidence Confidence { get; init; }
}
/// <summary>Symbol type classification.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SymbolType
{
NoType,
Object,
Function,
Section,
File,
Common,
Tls,
Unknown
}
/// <summary>Symbol binding.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SymbolBinding
{
Local,
Global,
Weak,
Unknown
}
/// <summary>Symbol visibility.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SymbolVisibility
{
Default,
Internal,
Hidden,
Protected,
Unknown
}
/// <summary>Severity of a change.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ChangeSeverity
{
Info,
Low,
Medium,
High,
Critical
}
/// <summary>Confidence level for rename detection.</summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum RenameConfidence
{
VeryHigh,
High,
Medium,
Low,
VeryLow
}
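Since DiffId is described as a hash over canonical JSON, a sketch of how a consumer might serialize the record; the exact canonicalization options used by the analyzer are not shown in this commit, so treat this as illustrative only.

using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.BinaryIndex.Builders.SymbolDiff;

// Illustrative serialization: the wire names come from the JsonPropertyName attributes
// above, so no naming policy is needed; null members are dropped to keep payloads small.
public static class SymbolTableDiffJson
{
    private static readonly JsonSerializerOptions Options = new()
    {
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static string Serialize(SymbolTableDiff diff) => JsonSerializer.Serialize(diff, Options);
}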

View File

@@ -0,0 +1,805 @@
// -----------------------------------------------------------------------------
// SymbolTableDiffAnalyzer.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Tasks: SYM-007 to SYM-015 - Implement symbol table diff analyzer
// Description: Symbol table diff analyzer implementation
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Analyzes symbol table differences between two binaries.
/// </summary>
public sealed class SymbolTableDiffAnalyzer : ISymbolTableDiffAnalyzer
{
private readonly ISymbolExtractor _symbolExtractor;
private readonly INameDemangler _nameDemangler;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SymbolTableDiffAnalyzer> _logger;
public SymbolTableDiffAnalyzer(
ISymbolExtractor symbolExtractor,
INameDemangler nameDemangler,
TimeProvider timeProvider,
ILogger<SymbolTableDiffAnalyzer> logger)
{
_symbolExtractor = symbolExtractor;
_nameDemangler = nameDemangler;
_timeProvider = timeProvider;
_logger = logger;
}
/// <inheritdoc />
public async Task<SymbolTableDiff> ComputeDiffAsync(
string basePath,
string targetPath,
SymbolDiffOptions? options = null,
CancellationToken ct = default)
{
options ??= new SymbolDiffOptions();
var now = _timeProvider.GetUtcNow();
_logger.LogDebug("Computing symbol diff between {Base} and {Target}", basePath, targetPath);
// Extract symbol tables
var baseTable = await ExtractSymbolTableAsync(basePath, ct);
var targetTable = await ExtractSymbolTableAsync(targetPath, ct);
// Compute symbol changes
var exports = ComputeSymbolChanges(
baseTable.Exports,
targetTable.Exports,
options);
var imports = ComputeSymbolChanges(
baseTable.Imports,
targetTable.Imports,
options);
// Compute version diff
var versions = ComputeVersionDiff(baseTable, targetTable);
// Compute dynamic linking diff
DynamicLinkingDiff? dynamic = null;
if (options.IncludeDynamicLinking)
{
dynamic = ComputeDynamicLinkingDiff(baseTable, targetTable);
}
// Create diff without ID first
var diffWithoutId = new SymbolTableDiff
{
DiffId = string.Empty, // Placeholder
Base = baseTable.Binary,
Target = targetTable.Binary,
Exports = exports,
Imports = imports,
Versions = versions,
Dynamic = dynamic,
AbiCompatibility = new AbiCompatibility
{
Level = AbiCompatibilityLevel.FullyCompatible,
Score = 1.0,
IsBackwardCompatible = true,
IsForwardCompatible = true,
BreakingChanges = [],
Warnings = [],
Summary = new AbiSummary()
},
ComputedAt = now
};
// Assess ABI compatibility
var abiCompatibility = AssessAbiCompatibility(diffWithoutId);
// Compute content-addressed ID
var diffId = ComputeDiffId(baseTable.Binary, targetTable.Binary, exports, imports);
var diff = diffWithoutId with
{
DiffId = diffId,
AbiCompatibility = abiCompatibility
};
_logger.LogInformation(
"Symbol diff complete: {ExportsAdded} exports added, {ExportsRemoved} removed, {Level}",
exports.Counts.Added,
exports.Counts.Removed,
abiCompatibility.Level);
return diff;
}
/// <inheritdoc />
public async Task<SymbolTable> ExtractSymbolTableAsync(
string binaryPath,
CancellationToken ct = default)
{
return await _symbolExtractor.ExtractAsync(binaryPath, ct);
}
/// <inheritdoc />
public AbiCompatibility AssessAbiCompatibility(SymbolTableDiff diff)
{
var breakingChanges = new List<AbiBreakingChange>();
var warnings = new List<AbiWarning>();
// Removed exports are breaking
foreach (var removed in diff.Exports.Removed)
{
breakingChanges.Add(new AbiBreakingChange
{
Type = AbiBreakType.SymbolRemoved,
Severity = removed.Binding == SymbolBinding.Weak ? ChangeSeverity.Low : ChangeSeverity.High,
Symbol = removed.Name,
Description = $"Exported symbol '{removed.DemangledName ?? removed.Name}' was removed",
Impact = "Code linking against this symbol will fail at runtime",
Mitigation = "Provide symbol alias or versioned symbol for backward compatibility"
});
}
// Modified exports with size changes
foreach (var modified in diff.Exports.Modified)
{
if (modified.IsAbiBreaking)
{
foreach (var change in modified.Changes.Where(c => c.Severity >= ChangeSeverity.High))
{
breakingChanges.Add(new AbiBreakingChange
{
Type = DetermineBreakType(change),
Severity = change.Severity,
Symbol = modified.Name,
Description = $"Symbol '{modified.DemangledName ?? modified.Name}' {change.Attribute} changed from {change.BaseValue} to {change.TargetValue}",
Details = $"Attribute: {change.Attribute}"
});
}
}
}
// Version removals
foreach (var removed in diff.Versions.DefinitionsRemoved)
{
if (!removed.IsBase)
{
breakingChanges.Add(new AbiBreakingChange
{
Type = AbiBreakType.VersionRemoved,
Severity = ChangeSeverity.High,
Symbol = removed.Name,
Description = $"Version definition '{removed.Name}' was removed"
});
}
}
// Added exports are warnings
foreach (var added in diff.Exports.Added)
{
warnings.Add(new AbiWarning
{
Type = AbiWarningType.SymbolAdded,
Symbol = added.Name,
Message = $"New exported symbol: {added.DemangledName ?? added.Name}"
});
}
// Renames are warnings
foreach (var rename in diff.Exports.Renamed)
{
warnings.Add(new AbiWarning
{
Type = AbiWarningType.SymbolRenamed,
Symbol = rename.BaseName,
Message = $"Symbol renamed from '{rename.BaseDemangled ?? rename.BaseName}' to '{rename.TargetDemangled ?? rename.TargetName}'"
});
}
// Calculate compatibility level and score
var level = DetermineCompatibilityLevel(breakingChanges);
var score = CalculateCompatibilityScore(diff, breakingChanges);
return new AbiCompatibility
{
Level = level,
Score = score,
IsBackwardCompatible = breakingChanges.Count == 0,
IsForwardCompatible = diff.Exports.Added.Count == 0,
BreakingChanges = breakingChanges,
Warnings = warnings,
Summary = new AbiSummary
{
TotalExportsBase = diff.Exports.Counts.TotalBase,
TotalExportsTarget = diff.Exports.Counts.TotalTarget,
ExportsAdded = diff.Exports.Counts.Added,
ExportsRemoved = diff.Exports.Counts.Removed,
ExportsModified = diff.Exports.Counts.Modified,
BreakingChangesCount = breakingChanges.Count,
WarningsCount = warnings.Count,
CompatibilityPercentage = score * 100
}
};
}
// SYM-009, SYM-010: Compute symbol changes
private SymbolChangeSummary ComputeSymbolChanges(
IReadOnlyList<ExtractedSymbol> baseSymbols,
IReadOnlyList<ExtractedSymbol> targetSymbols,
SymbolDiffOptions options)
{
var baseByName = baseSymbols.ToDictionary(s => s.Name, s => s);
var targetByName = targetSymbols.ToDictionary(s => s.Name, s => s);
var added = new List<SymbolChange>();
var removed = new List<SymbolChange>();
var modified = new List<SymbolModification>();
var renamed = new List<SymbolRename>();
var unchanged = 0;
// Find added symbols
foreach (var target in targetSymbols)
{
if (!baseByName.ContainsKey(target.Name))
{
added.Add(MapToSymbolChange(target));
}
}
// Find removed and modified symbols
foreach (var baseSymbol in baseSymbols)
{
if (!targetByName.TryGetValue(baseSymbol.Name, out var targetSymbol))
{
removed.Add(MapToSymbolChange(baseSymbol));
}
else
{
var modification = DetectModification(baseSymbol, targetSymbol);
if (modification is not null)
{
modified.Add(modification);
}
else
{
unchanged++;
}
}
}
// Detect renames (removed symbols that match added symbols via fingerprint)
if (options.DetectRenames)
{
var detectedRenames = DetectRenames(
removed,
added,
options.RenameSimilarityThreshold);
renamed.AddRange(detectedRenames);
// Remove renamed from added/removed
var renamedBaseNames = new HashSet<string>(detectedRenames.Select(r => r.BaseName));
var renamedTargetNames = new HashSet<string>(detectedRenames.Select(r => r.TargetName));
removed.RemoveAll(r => renamedBaseNames.Contains(r.Name));
added.RemoveAll(a => renamedTargetNames.Contains(a.Name));
}
return new SymbolChangeSummary
{
Added = added,
Removed = removed,
Modified = modified,
Renamed = renamed,
Counts = new SymbolChangeCounts
{
Added = added.Count,
Removed = removed.Count,
Modified = modified.Count,
Renamed = renamed.Count,
Unchanged = unchanged,
TotalBase = baseSymbols.Count,
TotalTarget = targetSymbols.Count
}
};
}
// SYM-011: Compute version diff
private VersionMapDiff ComputeVersionDiff(SymbolTable baseTable, SymbolTable targetTable)
{
var baseDefs = baseTable.VersionDefinitions.ToDictionary(v => v.Name);
var targetDefs = targetTable.VersionDefinitions.ToDictionary(v => v.Name);
var defsAdded = targetTable.VersionDefinitions
.Where(v => !baseDefs.ContainsKey(v.Name))
.ToList();
var defsRemoved = baseTable.VersionDefinitions
.Where(v => !targetDefs.ContainsKey(v.Name))
.ToList();
var baseReqs = baseTable.VersionRequirements
.ToDictionary(r => $"{r.Library}@{r.Version}");
var targetReqs = targetTable.VersionRequirements
.ToDictionary(r => $"{r.Library}@{r.Version}");
var reqsAdded = targetTable.VersionRequirements
.Where(r => !baseReqs.ContainsKey($"{r.Library}@{r.Version}"))
.ToList();
var reqsRemoved = baseTable.VersionRequirements
.Where(r => !targetReqs.ContainsKey($"{r.Library}@{r.Version}"))
.ToList();
// Detect version assignment changes
var assignmentChanges = new List<VersionAssignmentChange>();
var baseExports = baseTable.Exports.Where(e => e.Version is not null).ToDictionary(e => e.Name);
foreach (var target in targetTable.Exports.Where(e => e.Version is not null))
{
if (baseExports.TryGetValue(target.Name, out var baseExport))
{
if (baseExport.Version != target.Version)
{
assignmentChanges.Add(new VersionAssignmentChange
{
SymbolName = target.Name,
BaseVersion = baseExport.Version,
TargetVersion = target.Version,
IsAbiBreaking = true // Version changes can be breaking
});
}
}
}
return new VersionMapDiff
{
DefinitionsAdded = defsAdded,
DefinitionsRemoved = defsRemoved,
RequirementsAdded = reqsAdded,
RequirementsRemoved = reqsRemoved,
AssignmentsChanged = assignmentChanges,
Counts = new VersionChangeCounts
{
DefinitionsAdded = defsAdded.Count,
DefinitionsRemoved = defsRemoved.Count,
RequirementsAdded = reqsAdded.Count,
RequirementsRemoved = reqsRemoved.Count,
AssignmentsChanged = assignmentChanges.Count
}
};
}
// SYM-012: Compute dynamic linking diff
private DynamicLinkingDiff ComputeDynamicLinkingDiff(SymbolTable baseTable, SymbolTable targetTable)
{
return new DynamicLinkingDiff
{
Got = ComputeGotDiff(baseTable.GotEntries ?? [], targetTable.GotEntries ?? []),
Plt = ComputePltDiff(baseTable.PltEntries ?? [], targetTable.PltEntries ?? []),
Rpath = ComputeRpathDiff(baseTable, targetTable),
Needed = ComputeNeededDiff(baseTable.NeededLibraries, targetTable.NeededLibraries)
};
}
private GotDiff ComputeGotDiff(IReadOnlyList<GotEntry> baseEntries, IReadOnlyList<GotEntry> targetEntries)
{
var baseBySymbol = baseEntries.ToDictionary(e => e.SymbolName);
var targetBySymbol = targetEntries.ToDictionary(e => e.SymbolName);
var added = targetEntries.Where(e => !baseBySymbol.ContainsKey(e.SymbolName)).ToList();
var removed = baseEntries.Where(e => !targetBySymbol.ContainsKey(e.SymbolName)).ToList();
var modified = new List<GotEntryModification>();
foreach (var baseEntry in baseEntries)
{
if (targetBySymbol.TryGetValue(baseEntry.SymbolName, out var targetEntry))
{
if (baseEntry.Type != targetEntry.Type || baseEntry.Address != targetEntry.Address)
{
modified.Add(new GotEntryModification
{
SymbolName = baseEntry.SymbolName,
BaseAddress = baseEntry.Address,
TargetAddress = targetEntry.Address,
BaseType = baseEntry.Type,
TargetType = targetEntry.Type
});
}
}
}
return new GotDiff
{
EntriesAdded = added,
EntriesRemoved = removed,
EntriesModified = modified,
BaseCount = baseEntries.Count,
TargetCount = targetEntries.Count
};
}
private PltDiff ComputePltDiff(IReadOnlyList<PltEntry> baseEntries, IReadOnlyList<PltEntry> targetEntries)
{
var baseBySymbol = baseEntries.ToDictionary(e => e.SymbolName);
var targetBySymbol = targetEntries.ToDictionary(e => e.SymbolName);
var added = targetEntries.Where(e => !baseBySymbol.ContainsKey(e.SymbolName)).ToList();
var removed = baseEntries.Where(e => !targetBySymbol.ContainsKey(e.SymbolName)).ToList();
var reordered = new List<PltReorder>();
foreach (var baseEntry in baseEntries)
{
if (targetBySymbol.TryGetValue(baseEntry.SymbolName, out var targetEntry))
{
if (baseEntry.Index != targetEntry.Index)
{
reordered.Add(new PltReorder
{
SymbolName = baseEntry.SymbolName,
BaseIndex = baseEntry.Index,
TargetIndex = targetEntry.Index
});
}
}
}
return new PltDiff
{
EntriesAdded = added,
EntriesRemoved = removed,
EntriesReordered = reordered,
BaseCount = baseEntries.Count,
TargetCount = targetEntries.Count
};
}
private RpathDiff ComputeRpathDiff(SymbolTable baseTable, SymbolTable targetTable)
{
var basePaths = new HashSet<string>(
(baseTable.Rpath ?? []).Concat(baseTable.Runpath ?? []));
var targetPaths = new HashSet<string>(
(targetTable.Rpath ?? []).Concat(targetTable.Runpath ?? []));
return new RpathDiff
{
RpathBase = baseTable.Rpath,
RpathTarget = targetTable.Rpath,
RunpathBase = baseTable.Runpath,
RunpathTarget = targetTable.Runpath,
PathsAdded = targetPaths.Except(basePaths).ToList(),
PathsRemoved = basePaths.Except(targetPaths).ToList(),
HasChanges = !basePaths.SetEquals(targetPaths)
};
}
private NeededDiff ComputeNeededDiff(IReadOnlyList<string> baseLibs, IReadOnlyList<string> targetLibs)
{
var baseSet = new HashSet<string>(baseLibs);
var targetSet = new HashSet<string>(targetLibs);
return new NeededDiff
{
LibrariesAdded = targetSet.Except(baseSet).ToList(),
LibrariesRemoved = baseSet.Except(targetSet).ToList(),
BaseLibraries = baseLibs,
TargetLibraries = targetLibs
};
}
// SYM-013: Detect renames via fingerprint matching
private IReadOnlyList<SymbolRename> DetectRenames(
List<SymbolChange> removed,
List<SymbolChange> added,
double threshold)
{
var renames = new List<SymbolRename>();
// Only consider symbols with fingerprints
var removedWithFp = removed.Where(r => r.Fingerprint is not null).ToList();
var addedWithFp = added.Where(a => a.Fingerprint is not null).ToList();
foreach (var removedSymbol in removedWithFp)
{
// Find best match in added
SymbolChange? bestMatch = null;
double bestSimilarity = 0;
foreach (var addedSymbol in addedWithFp)
{
var similarity = ComputeFingerprintSimilarity(
removedSymbol.Fingerprint!,
addedSymbol.Fingerprint!);
if (similarity >= threshold && similarity > bestSimilarity)
{
bestMatch = addedSymbol;
bestSimilarity = similarity;
}
}
if (bestMatch is not null)
{
renames.Add(new SymbolRename
{
BaseName = removedSymbol.Name,
TargetName = bestMatch.Name,
BaseDemangled = removedSymbol.DemangledName,
TargetDemangled = bestMatch.DemangledName,
Fingerprint = removedSymbol.Fingerprint!,
Similarity = bestSimilarity,
Confidence = DetermineRenameConfidence(bestSimilarity)
});
// Remove matched from consideration
addedWithFp.Remove(bestMatch);
}
}
return renames;
}
// SYM-015: Compute content-addressed diff ID
private string ComputeDiffId(
BinaryRef baseRef,
BinaryRef targetRef,
SymbolChangeSummary exports,
SymbolChangeSummary imports)
{
var canonical = new
{
base_sha256 = baseRef.Sha256,
target_sha256 = targetRef.Sha256,
exports_added = exports.Added.Select(e => e.Name).OrderBy(n => n, StringComparer.Ordinal),
exports_removed = exports.Removed.Select(e => e.Name).OrderBy(n => n, StringComparer.Ordinal),
imports_added = imports.Added.Select(i => i.Name).OrderBy(n => n, StringComparer.Ordinal),
imports_removed = imports.Removed.Select(i => i.Name).OrderBy(n => n, StringComparer.Ordinal)
};
var json = JsonSerializer.Serialize(canonical, new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
WriteIndented = false
});
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
// Helper methods
private static SymbolChange MapToSymbolChange(ExtractedSymbol symbol)
{
return new SymbolChange
{
Name = symbol.Name,
DemangledName = symbol.DemangledName,
Type = symbol.Type,
Binding = symbol.Binding,
Visibility = symbol.Visibility,
Section = symbol.Section,
Address = symbol.Address,
Size = symbol.Size,
Version = symbol.Version,
Fingerprint = symbol.Fingerprint
};
}
private static SymbolModification? DetectModification(ExtractedSymbol baseSymbol, ExtractedSymbol targetSymbol)
{
var changes = new List<AttributeChange>();
if (baseSymbol.Type != targetSymbol.Type)
{
changes.Add(new AttributeChange
{
Attribute = "type",
BaseValue = baseSymbol.Type.ToString(),
TargetValue = targetSymbol.Type.ToString(),
Severity = ChangeSeverity.High
});
}
if (baseSymbol.Size != targetSymbol.Size)
{
changes.Add(new AttributeChange
{
Attribute = "size",
BaseValue = baseSymbol.Size.ToString(CultureInfo.InvariantCulture),
TargetValue = targetSymbol.Size.ToString(CultureInfo.InvariantCulture),
Severity = targetSymbol.Size < baseSymbol.Size ? ChangeSeverity.High : ChangeSeverity.Low
});
}
if (baseSymbol.Visibility != targetSymbol.Visibility)
{
var severityFromVisibility = (baseSymbol.Visibility, targetSymbol.Visibility) switch
{
(SymbolVisibility.Default, SymbolVisibility.Hidden) => ChangeSeverity.High,
(SymbolVisibility.Protected, SymbolVisibility.Hidden) => ChangeSeverity.High,
_ => ChangeSeverity.Medium
};
changes.Add(new AttributeChange
{
Attribute = "visibility",
BaseValue = baseSymbol.Visibility.ToString(),
TargetValue = targetSymbol.Visibility.ToString(),
Severity = severityFromVisibility
});
}
if (baseSymbol.Binding != targetSymbol.Binding)
{
changes.Add(new AttributeChange
{
Attribute = "binding",
BaseValue = baseSymbol.Binding.ToString(),
TargetValue = targetSymbol.Binding.ToString(),
Severity = ChangeSeverity.Medium
});
}
if (changes.Count == 0)
{
return null;
}
return new SymbolModification
{
Name = baseSymbol.Name,
DemangledName = baseSymbol.DemangledName ?? targetSymbol.DemangledName,
Base = new SymbolAttributes
{
Type = baseSymbol.Type,
Binding = baseSymbol.Binding,
Visibility = baseSymbol.Visibility,
Section = baseSymbol.Section,
Address = baseSymbol.Address,
Size = baseSymbol.Size,
Version = baseSymbol.Version,
Fingerprint = baseSymbol.Fingerprint
},
Target = new SymbolAttributes
{
Type = targetSymbol.Type,
Binding = targetSymbol.Binding,
Visibility = targetSymbol.Visibility,
Section = targetSymbol.Section,
Address = targetSymbol.Address,
Size = targetSymbol.Size,
Version = targetSymbol.Version,
Fingerprint = targetSymbol.Fingerprint
},
Changes = changes,
IsAbiBreaking = changes.Any(c => c.Severity >= ChangeSeverity.High)
};
}
private static double ComputeFingerprintSimilarity(string fp1, string fp2)
{
if (fp1 == fp2) return 1.0;
// Coarse approximation: Jaccard similarity over the sets of hex characters in each fingerprint.
var set1 = new HashSet<char>(fp1);
var set2 = new HashSet<char>(fp2);
var intersection = set1.Intersect(set2).Count();
var union = set1.Union(set2).Count();
return union == 0 ? 0 : (double)intersection / union;
}
private static RenameConfidence DetermineRenameConfidence(double similarity)
{
return similarity switch
{
>= 0.95 => RenameConfidence.VeryHigh,
>= 0.85 => RenameConfidence.High,
>= 0.75 => RenameConfidence.Medium,
>= 0.65 => RenameConfidence.Low,
_ => RenameConfidence.VeryLow
};
}
private static AbiBreakType DetermineBreakType(AttributeChange change)
{
return change.Attribute switch
{
"type" => AbiBreakType.SymbolTypeChanged,
"size" => AbiBreakType.SymbolSizeChanged,
"visibility" => AbiBreakType.VisibilityReduced,
"binding" => AbiBreakType.BindingChanged,
_ => AbiBreakType.SymbolTypeChanged
};
}
private static AbiCompatibilityLevel DetermineCompatibilityLevel(List<AbiBreakingChange> breaks)
{
if (breaks.Count == 0)
{
return AbiCompatibilityLevel.FullyCompatible;
}
var criticalCount = breaks.Count(b => b.Severity == ChangeSeverity.Critical);
var highCount = breaks.Count(b => b.Severity == ChangeSeverity.High);
if (criticalCount > 0 || highCount >= 10)
{
return AbiCompatibilityLevel.Incompatible;
}
if (highCount >= 3)
{
return AbiCompatibilityLevel.MajorIncompatibility;
}
if (highCount >= 1)
{
return AbiCompatibilityLevel.MinorIncompatibility;
}
return AbiCompatibilityLevel.CompatibleWithWarnings;
}
private static double CalculateCompatibilityScore(SymbolTableDiff diff, List<AbiBreakingChange> breaks)
{
if (diff.Exports.Counts.TotalBase == 0)
{
return 1.0;
}
var removedWeight = diff.Exports.Counts.Removed * 0.5;
var breakingWeight = breaks.Sum(b => b.Severity switch
{
ChangeSeverity.Critical => 1.0,
ChangeSeverity.High => 0.5,
ChangeSeverity.Medium => 0.2,
_ => 0.1
});
var penalty = (removedWeight + breakingWeight) / diff.Exports.Counts.TotalBase;
return Math.Max(0, 1.0 - penalty);
}
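// Worked example of the formula above (illustrative numbers): with 100 base exports,
// 2 removed symbols and one High-severity breaking change,
// removedWeight = 2 * 0.5 = 1.0, breakingWeight = 0.5, penalty = 1.5 / 100 = 0.015,
// so the compatibility score is 1.0 - 0.015 = 0.985 (98.5%).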
}
/// <summary>
/// Interface for extracting symbols from binaries.
/// </summary>
public interface ISymbolExtractor
{
/// <summary>
/// Extracts symbol table from a binary.
/// </summary>
Task<SymbolTable> ExtractAsync(string binaryPath, CancellationToken ct = default);
}
/// <summary>
/// Interface for demangling C++/Rust names.
/// </summary>
public interface INameDemangler
{
/// <summary>
/// Demangles a symbol name.
/// </summary>
string? Demangle(string mangledName);
/// <summary>
/// Detects the mangling scheme.
/// </summary>
ManglingScheme DetectScheme(string name);
}
/// <summary>
/// Name mangling scheme.
/// </summary>
public enum ManglingScheme
{
None,
ItaniumCxx,
MicrosoftCxx,
Rust,
Swift,
Unknown
}
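// Illustrative usage sketch (not part of the shipped analyzer): prefer the demangled
// name for display and fall back to the raw symbol when the scheme is unknown or
// demangling fails. The helper name below is hypothetical.
internal static class SymbolDisplayName
{
    public static string Resolve(INameDemangler demangler, string mangledName)
    {
        // Plain C symbols report ManglingScheme.None and demangle to null.
        if (demangler.DetectScheme(mangledName) == ManglingScheme.None)
        {
            return mangledName;
        }

        return demangler.Demangle(mangledName) ?? mangledName;
    }
}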

View File

@@ -0,0 +1,113 @@
// -----------------------------------------------------------------------------
// VersionMapDiff.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Task: SYM-003 - Define VersionMapDiff records
// Description: Version map diff model for symbol versioning changes
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.BinaryIndex.Builders.SymbolDiff;
/// <summary>
/// Diff of symbol version maps between binaries.
/// </summary>
public sealed record VersionMapDiff
{
/// <summary>Version definitions added.</summary>
[JsonPropertyName("definitions_added")]
public required IReadOnlyList<VersionDefinition> DefinitionsAdded { get; init; }
/// <summary>Version definitions removed.</summary>
[JsonPropertyName("definitions_removed")]
public required IReadOnlyList<VersionDefinition> DefinitionsRemoved { get; init; }
/// <summary>Version requirements added.</summary>
[JsonPropertyName("requirements_added")]
public required IReadOnlyList<VersionRequirement> RequirementsAdded { get; init; }
/// <summary>Version requirements removed.</summary>
[JsonPropertyName("requirements_removed")]
public required IReadOnlyList<VersionRequirement> RequirementsRemoved { get; init; }
/// <summary>Symbol version assignments changed.</summary>
[JsonPropertyName("assignments_changed")]
public required IReadOnlyList<VersionAssignmentChange> AssignmentsChanged { get; init; }
/// <summary>Count summaries.</summary>
[JsonPropertyName("counts")]
public required VersionChangeCounts Counts { get; init; }
}
/// <summary>A version definition (from .gnu.version_d).</summary>
public sealed record VersionDefinition
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("index")]
public int Index { get; init; }
[JsonPropertyName("flags")]
public int Flags { get; init; }
[JsonPropertyName("is_base")]
public bool IsBase { get; init; }
[JsonPropertyName("parent")]
public string? Parent { get; init; }
}
/// <summary>A version requirement (from .gnu.version_r).</summary>
public sealed record VersionRequirement
{
[JsonPropertyName("library")]
public required string Library { get; init; }
[JsonPropertyName("version")]
public required string Version { get; init; }
[JsonPropertyName("hash")]
public uint Hash { get; init; }
[JsonPropertyName("flags")]
public int Flags { get; init; }
[JsonPropertyName("is_weak")]
public bool IsWeak { get; init; }
}
/// <summary>A change in version assignment for a symbol.</summary>
public sealed record VersionAssignmentChange
{
[JsonPropertyName("symbol_name")]
public required string SymbolName { get; init; }
[JsonPropertyName("base_version")]
public string? BaseVersion { get; init; }
[JsonPropertyName("target_version")]
public string? TargetVersion { get; init; }
[JsonPropertyName("is_abi_breaking")]
public bool IsAbiBreaking { get; init; }
}
/// <summary>Count summary for version changes.</summary>
public sealed record VersionChangeCounts
{
[JsonPropertyName("definitions_added")]
public int DefinitionsAdded { get; init; }
[JsonPropertyName("definitions_removed")]
public int DefinitionsRemoved { get; init; }
[JsonPropertyName("requirements_added")]
public int RequirementsAdded { get; init; }
[JsonPropertyName("requirements_removed")]
public int RequirementsRemoved { get; init; }
[JsonPropertyName("assignments_changed")]
public int AssignmentsChanged { get; init; }
}
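// Illustrative example (hypothetical values, not part of the model contract): a symbol
// moving between version nodes is recorded as an assignment change and flagged as
// potentially ABI-breaking, since consumers were linked against the old version node.
internal static class VersionMapDiffExamples
{
    public static readonly VersionAssignmentChange MovedSymbol = new()
    {
        SymbolName = "pthread_create",
        BaseVersion = "GLIBC_2.2.5",
        TargetVersion = "GLIBC_2.34",
        IsAbiBreaking = true
    };
}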

View File

@@ -0,0 +1,27 @@
# BinaryIndex.Decompiler Module Charter
## Mission
- Parse and normalize decompiler output for deterministic binary comparison.
## Responsibilities
- Parse decompiled code into AST models.
- Normalize and compare ASTs for semantic similarity.
- Provide adapter integration for supported decompilers.
- Maintain deterministic output and stable ordering.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/semantic-diffing.md
## Working Agreement
- Deterministic parsing and normalization; avoid culture-sensitive formatting.
- Use InvariantCulture for parsing and formatting.
- Propagate CancellationToken for async operations.
- Avoid random seeds unless injected and fixed.
## Testing Strategy
- Unit tests for parser, normalization, and AST comparison.
- Regression tests for known decompiler outputs.
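
A minimal sketch of the culture-invariance convention for this module (the helper below is illustrative, not an existing API):

```csharp
using System;
using System.Globalization;

internal static class DecompilerNumberFormat
{
    // Parse decompiler-emitted hex offsets the same way on every machine.
    public static ulong ParseOffset(string text)
    {
        var digits = text.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? text[2..] : text;
        return ulong.Parse(digits, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
    }

    // Format without picking up the host culture's digit or separator conventions.
    public static string FormatOffset(ulong offset) =>
        "0x" + offset.ToString("x", CultureInfo.InvariantCulture);
}
```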

View File

@@ -0,0 +1,27 @@
# BinaryIndex.Ensemble Module Charter
## Mission
- Combine binary analysis signals into a deterministic ensemble decision.
## Responsibilities
- Aggregate semantic, decompiler, and similarity inputs.
- Compute weighted decisions and expose result models.
- Maintain weight tuning logic and default profiles.
- Ensure deterministic scoring and tie-breaking.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/semantic-diffing.md
## Working Agreement
- Stable ordering and deterministic weight application.
- Use TimeProvider for timestamps and IGuidGenerator for identifiers when needed.
- Use InvariantCulture for parsing and formatting.
- Propagate CancellationToken.
## Testing Strategy
- Unit tests for decision engine and weight tuning.
- Determinism tests for identical inputs.
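
A minimal sketch of deterministic weighting and tie-breaking under an assumed default profile (names and weights are illustrative, not the shipped ensemble API):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

internal static class EnsembleDecisionSketch
{
    public static (string MatchId, double Score) Pick(
        IReadOnlyList<(string MatchId, double Semantic, double Decompiler, double Similarity)> candidates)
    {
        const double wSemantic = 0.5, wDecompiler = 0.3, wSimilarity = 0.2;

        return candidates
            .Select(c => (c.MatchId, Score: (wSemantic * c.Semantic) + (wDecompiler * c.Decompiler) + (wSimilarity * c.Similarity)))
            .OrderByDescending(c => c.Score)                 // highest weighted score wins
            .ThenBy(c => c.MatchId, StringComparer.Ordinal)  // stable, culture-independent tie-break
            .First();
    }
}
```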

View File

@@ -0,0 +1,27 @@
# BinaryIndex.ML Module Charter
## Mission
- Provide deterministic embedding and similarity services for binary analysis.
## Responsibilities
- Tokenize binary code for ML inference.
- Run ONNX inference deterministically and expose embedding APIs.
- Maintain embedding indexes and similarity queries.
- Support offline model loading and versioning.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/ml-model-training.md
## Working Agreement
- Deterministic inference: fixed model versions and stable preprocessing.
- Use InvariantCulture for parsing and formatting.
- Propagate CancellationToken.
- No network calls during inference.
## Testing Strategy
- Unit tests for tokenizer and embedding determinism.
- Integration tests for ONNX inference with fixed fixtures.
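
A minimal sketch of the determinism expectation for preprocessing: bucket tokens with a fixed hash (32-bit FNV-1a constants) rather than string.GetHashCode(), which is randomized per process. The helper is illustrative, not the module's tokenizer API.

```csharp
internal static class TokenBucketSketch
{
    // Same token, same bucket — on every machine, culture, and process run.
    public static uint Bucket(string token, uint vocabularySize)
    {
        const uint offsetBasis = 2166136261;
        const uint prime = 16777619;

        var hash = offsetBasis;
        foreach (var ch in token)
        {
            hash ^= ch;       // char promotes to uint
            hash *= prime;    // unchecked wrap-around is intentional for FNV-1a
        }

        return hash % vocabularySize; // vocabularySize assumed > 0
    }
}
```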

View File

@@ -1,4 +1,5 @@
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text.Json.Nodes;
namespace StellaOps.BinaryIndex.VexBridge;
@@ -162,7 +163,7 @@ public static class BinaryMatchEvidenceSchema
evidence[Fields.Architecture] = architecture;
if (resolvedAt.HasValue)
evidence[Fields.ResolvedAt] = resolvedAt.Value.ToString("O");
evidence[Fields.ResolvedAt] = resolvedAt.Value.ToString("O", CultureInfo.InvariantCulture);
return evidence;
}

View File

@@ -0,0 +1,22 @@
# BinaryIndex Benchmarks Charter
## Mission
- Maintain deterministic benchmark and accuracy tests for BinaryIndex analyzers.
## Responsibilities
- Keep benchmark datasets local and fixed.
- Ensure benchmark thresholds are stable and documented.
- Separate benchmark runs from unit coverage.
## Required Reading
- docs/modules/binary-index/architecture.md
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Working Agreement
- No network calls; offline fixtures only.
- Fixed seeds and deterministic ordering.
- Avoid machine-specific timing assertions; use bounded thresholds.
## Definition of Done
- Benchmarks reproducible on CI and offline environments.
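
A sketch of the bounded-threshold style (the fixture helper and numbers are hypothetical):

```csharp
using Xunit;

public sealed class SymbolMatchingBenchmarkTests
{
    [Fact]
    [Trait("Category", "Benchmark")]
    public void MeetsDocumentedAccuracyFloor()
    {
        // Hypothetical runner over the local, fixed dataset — no network, no timing.
        var result = BenchmarkRunnerFixture.RunSymbolMatching("fixtures/coreutils-9.4");

        // Gate on documented accuracy floors instead of wall-clock measurements.
        Assert.True(result.Precision >= 0.95, $"precision {result.Precision:F3} below floor 0.95");
        Assert.True(result.Recall >= 0.90, $"recall {result.Recall:F3} below floor 0.90");
    }
}
```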

View File

@@ -12,8 +12,8 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Moq" />
<PackageReference Include="NSubstitute" />
<PackageReference Include="Testcontainers" />
<PackageReference Include="xunit.runner.visualstudio" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,162 @@
// -----------------------------------------------------------------------------
// NameDemanglerTests.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Tasks: SYM-016, SYM-017 - Unit tests for name demangling
// Description: Unit tests for C++ and Rust name demangler
// -----------------------------------------------------------------------------
using StellaOps.BinaryIndex.Builders.SymbolDiff;
using Xunit;
namespace StellaOps.BinaryIndex.Builders.Tests.SymbolDiff;
[Trait("Category", "Unit")]
public sealed class NameDemanglerTests
{
private readonly NameDemangler _demangler = new();
// Scheme detection tests
[Theory]
[InlineData("_Z3foov", ManglingScheme.ItaniumCxx)]
[InlineData("_ZN3foo3barEv", ManglingScheme.ItaniumCxx)]
[InlineData("?foo@@YAXXZ", ManglingScheme.MicrosoftCxx)]
[InlineData("?foo@bar@@YAXXZ", ManglingScheme.MicrosoftCxx)]
[InlineData("_ZN4test17h0123456789abcdefE", ManglingScheme.Rust)]
[InlineData("_RNvC5crate4main", ManglingScheme.Rust)]
[InlineData("$s4main3fooyyF", ManglingScheme.Swift)]
[InlineData("_$s4main3fooyyF", ManglingScheme.Swift)]
[InlineData("foo", ManglingScheme.None)]
[InlineData("printf", ManglingScheme.None)]
[InlineData("", ManglingScheme.None)]
public void DetectScheme_IdentifiesCorrectScheme(string name, ManglingScheme expected)
{
var result = _demangler.DetectScheme(name);
Assert.Equal(expected, result);
}
// C++ Itanium ABI tests
[Theory]
[InlineData("_Z3foov", "foo")]
[InlineData("_Z3bari", "bar")]
[InlineData("_Z6myFunc", "myFunc")]
public void Demangle_ItaniumCxx_SimpleNames(string mangled, string expected)
{
var result = _demangler.Demangle(mangled);
Assert.Equal(expected, result);
}
[Theory]
[InlineData("_ZN3foo3barEv", "foo::bar")]
[InlineData("_ZN5outer5inner4funcEv", "outer::inner::func")]
public void Demangle_ItaniumCxx_NestedNames(string mangled, string expected)
{
var result = _demangler.Demangle(mangled);
Assert.Equal(expected, result);
}
// Microsoft C++ tests
[Theory]
[InlineData("?foo@@YAXXZ", "foo")]
[InlineData("?bar@MyClass@@QAEXXZ", "MyClass::bar")]
public void Demangle_MicrosoftCxx_SimpleNames(string mangled, string expected)
{
var result = _demangler.Demangle(mangled);
Assert.Equal(expected, result);
}
// Rust legacy mangling tests
[Fact]
public void Demangle_RustLegacy_BasicName()
{
// _ZN<len>name...E format
var mangled = "_ZN4test4mainE";
var result = _demangler.Demangle(mangled);
Assert.NotNull(result);
Assert.Contains("test", result);
}
[Fact]
public void Demangle_RustLegacy_WithHash_StripsHash()
{
// Rust hashes are 17h + 16 hex digits
var mangled = "_ZN4core3ptr17h0123456789abcdefE";
var result = _demangler.Demangle(mangled);
Assert.NotNull(result);
Assert.DoesNotContain("h0123456789abcdef", result);
}
[Fact]
public void Demangle_RustLegacy_DecodesEscapes()
{
// Test Rust escape sequences
var mangled = "_ZN4test8$LT$impl$GT$E";
var result = _demangler.Demangle(mangled);
Assert.NotNull(result);
// Should decode $LT$ to < and $GT$ to >
Assert.Contains("<", result);
Assert.Contains(">", result);
}
// Rust v0 mangling tests
[Fact]
public void Demangle_RustV0_ReturnsPlaceholder()
{
// Rust v0 starts with _R
var mangled = "_RNvC5crate4main";
var result = _demangler.Demangle(mangled);
Assert.NotNull(result);
Assert.StartsWith("<rust-v0>", result);
}
// Swift tests
[Fact]
public void Demangle_Swift_ReturnsPlaceholder()
{
var mangled = "$s4main3fooyyF";
var result = _demangler.Demangle(mangled);
Assert.NotNull(result);
Assert.StartsWith("<swift>", result);
}
// Edge cases
[Fact]
public void Demangle_NullInput_ReturnsNull()
{
var result = _demangler.Demangle(null!);
Assert.Null(result);
}
[Fact]
public void Demangle_EmptyInput_ReturnsNull()
{
var result = _demangler.Demangle(string.Empty);
Assert.Null(result);
}
[Fact]
public void Demangle_PlainCName_ReturnsNull()
{
var result = _demangler.Demangle("printf");
Assert.Null(result);
}
[Fact]
public void Demangle_InvalidMangledName_ReturnsNull()
{
// Invalid Itanium format (no proper length prefix)
var result = _demangler.Demangle("_Zinvalid");
Assert.Null(result);
}
}

View File

@@ -0,0 +1,334 @@
// -----------------------------------------------------------------------------
// SymbolTableDiffAnalyzerTests.cs
// Sprint: SPRINT_20260106_001_003_BINDEX_symbol_table_diff
// Tasks: SYM-020 to SYM-025 - Unit tests for symbol diff
// Description: Unit tests for symbol table diff analyzer
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.BinaryIndex.Builders.SymbolDiff;
using Xunit;
using NSubstitute;
namespace StellaOps.BinaryIndex.Builders.Tests.SymbolDiff;
[Trait("Category", "Unit")]
public sealed class SymbolTableDiffAnalyzerTests
{
private readonly ISymbolExtractor _mockExtractor;
private readonly INameDemangler _demangler;
private readonly TimeProvider _timeProvider;
private readonly SymbolTableDiffAnalyzer _analyzer;
public SymbolTableDiffAnalyzerTests()
{
_mockExtractor = Substitute.For<ISymbolExtractor>();
_demangler = new NameDemangler();
_timeProvider = TimeProvider.System;
_analyzer = new SymbolTableDiffAnalyzer(
_mockExtractor,
_demangler,
_timeProvider,
NullLogger<SymbolTableDiffAnalyzer>.Instance);
}
[Fact]
public async Task ComputeDiffAsync_DetectsAddedSymbols()
{
// Arrange
var baseTable = CreateSymbolTable("base.so", [
CreateSymbol("foo", SymbolType.Function)
]);
var targetTable = CreateSymbolTable("target.so", [
CreateSymbol("foo", SymbolType.Function),
CreateSymbol("bar", SymbolType.Function)
]);
_mockExtractor.ExtractAsync("base.so", Arg.Any<CancellationToken>()).Returns(baseTable);
_mockExtractor.ExtractAsync("target.so", Arg.Any<CancellationToken>()).Returns(targetTable);
// Act
var diff = await _analyzer.ComputeDiffAsync("base.so", "target.so");
// Assert
Assert.Single(diff.Exports.Added);
Assert.Equal("bar", diff.Exports.Added[0].Name);
Assert.Empty(diff.Exports.Removed);
}
[Fact]
public async Task ComputeDiffAsync_DetectsRemovedSymbols()
{
// Arrange
var baseTable = CreateSymbolTable("base.so", [
CreateSymbol("foo", SymbolType.Function),
CreateSymbol("bar", SymbolType.Function)
]);
var targetTable = CreateSymbolTable("target.so", [
CreateSymbol("foo", SymbolType.Function)
]);
_mockExtractor.ExtractAsync("base.so", Arg.Any<CancellationToken>()).Returns(baseTable);
_mockExtractor.ExtractAsync("target.so", Arg.Any<CancellationToken>()).Returns(targetTable);
// Act
var diff = await _analyzer.ComputeDiffAsync("base.so", "target.so");
// Assert
Assert.Empty(diff.Exports.Added);
Assert.Single(diff.Exports.Removed);
Assert.Equal("bar", diff.Exports.Removed[0].Name);
}
[Fact]
public async Task ComputeDiffAsync_DetectsModifiedSymbols()
{
// Arrange
var baseTable = CreateSymbolTable("base.so", [
CreateSymbol("foo", SymbolType.Function, size: 100)
]);
var targetTable = CreateSymbolTable("target.so", [
CreateSymbol("foo", SymbolType.Function, size: 200)
]);
_mockExtractor.ExtractAsync("base.so", Arg.Any<CancellationToken>()).Returns(baseTable);
_mockExtractor.ExtractAsync("target.so", Arg.Any<CancellationToken>()).Returns(targetTable);
// Act
var diff = await _analyzer.ComputeDiffAsync("base.so", "target.so");
// Assert
Assert.Single(diff.Exports.Modified);
Assert.Equal("foo", diff.Exports.Modified[0].Name);
Assert.Contains(diff.Exports.Modified[0].Changes, c => c.Attribute == "size");
}
[Fact]
public async Task ComputeDiffAsync_DetectsRenames_WhenFingerprintsMatch()
{
// Arrange
var fingerprint = "abc123def456";
var baseTable = CreateSymbolTable("base.so", [
CreateSymbol("old_name", SymbolType.Function, fingerprint: fingerprint)
]);
var targetTable = CreateSymbolTable("target.so", [
CreateSymbol("new_name", SymbolType.Function, fingerprint: fingerprint)
]);
_mockExtractor.ExtractAsync("base.so", Arg.Any<CancellationToken>()).Returns(baseTable);
_mockExtractor.ExtractAsync("target.so", Arg.Any<CancellationToken>()).Returns(targetTable);
// Act
var diff = await _analyzer.ComputeDiffAsync("base.so", "target.so", new SymbolDiffOptions
{
DetectRenames = true,
RenameSimilarityThreshold = 0.5
});
// Assert
Assert.Single(diff.Exports.Renamed);
Assert.Equal("old_name", diff.Exports.Renamed[0].BaseName);
Assert.Equal("new_name", diff.Exports.Renamed[0].TargetName);
}
[Fact]
public async Task ComputeDiffAsync_ComputesDiffId_Deterministically()
{
// Arrange
var baseTable = CreateSymbolTable("base.so", [
CreateSymbol("foo", SymbolType.Function)
]);
var targetTable = CreateSymbolTable("target.so", [
CreateSymbol("foo", SymbolType.Function),
CreateSymbol("bar", SymbolType.Function)
]);
_mockExtractor.ExtractAsync("base.so", Arg.Any<CancellationToken>()).Returns(baseTable);
_mockExtractor.ExtractAsync("target.so", Arg.Any<CancellationToken>()).Returns(targetTable);
// Act
var diff1 = await _analyzer.ComputeDiffAsync("base.so", "target.so");
var diff2 = await _analyzer.ComputeDiffAsync("base.so", "target.so");
// Assert
Assert.Equal(diff1.DiffId, diff2.DiffId);
Assert.StartsWith("sha256:", diff1.DiffId);
}
[Fact]
public void AssessAbiCompatibility_FullyCompatible_WhenNoBreakingChanges()
{
// Arrange
var diff = CreateDiff(
added: [CreateSymbolChange("new_func")],
removed: [],
modified: []);
// Act
var abi = _analyzer.AssessAbiCompatibility(diff);
// Assert
Assert.Equal(AbiCompatibilityLevel.FullyCompatible, abi.Level);
Assert.True(abi.IsBackwardCompatible);
Assert.Empty(abi.BreakingChanges);
}
[Fact]
public void AssessAbiCompatibility_Incompatible_WhenSymbolsRemoved()
{
// Arrange
var diff = CreateDiff(
added: [],
removed: [CreateSymbolChange("removed_func", SymbolBinding.Global)],
modified: []);
// Act
var abi = _analyzer.AssessAbiCompatibility(diff);
// Assert
Assert.NotEqual(AbiCompatibilityLevel.FullyCompatible, abi.Level);
Assert.False(abi.IsBackwardCompatible);
Assert.Single(abi.BreakingChanges);
Assert.Equal(AbiBreakType.SymbolRemoved, abi.BreakingChanges[0].Type);
}
[Fact]
public void AssessAbiCompatibility_WarningsForAddedSymbols()
{
// Arrange
var diff = CreateDiff(
added: [CreateSymbolChange("new_func")],
removed: [],
modified: []);
// Act
var abi = _analyzer.AssessAbiCompatibility(diff);
// Assert
Assert.Single(abi.Warnings);
Assert.Equal(AbiWarningType.SymbolAdded, abi.Warnings[0].Type);
}
// Helper methods
private static SymbolTable CreateSymbolTable(string path, IReadOnlyList<ExtractedSymbol> exports)
{
return new SymbolTable
{
Binary = new BinaryRef
{
Path = path,
Sha256 = $"sha256:{Guid.NewGuid():N}",
Architecture = "x86_64",
Format = BinaryFormat.Elf
},
Exports = exports,
Imports = [],
VersionDefinitions = [],
VersionRequirements = [],
NeededLibraries = [],
ExtractedAt = DateTimeOffset.UtcNow
};
}
private static ExtractedSymbol CreateSymbol(
string name,
SymbolType type,
SymbolBinding binding = SymbolBinding.Global,
ulong size = 64,
string? fingerprint = null)
{
return new ExtractedSymbol
{
Name = name,
Type = type,
Binding = binding,
Visibility = SymbolVisibility.Default,
Address = 0x1000,
Size = size,
Fingerprint = fingerprint
};
}
private static SymbolChange CreateSymbolChange(
string name,
SymbolBinding binding = SymbolBinding.Global)
{
return new SymbolChange
{
Name = name,
Type = SymbolType.Function,
Binding = binding,
Visibility = SymbolVisibility.Default,
Address = 0x1000,
Size = 64
};
}
private static SymbolTableDiff CreateDiff(
IReadOnlyList<SymbolChange> added,
IReadOnlyList<SymbolChange> removed,
IReadOnlyList<SymbolModification> modified)
{
return new SymbolTableDiff
{
DiffId = "sha256:test",
Base = new BinaryRef
{
Path = "base.so",
Sha256 = "sha256:base",
Architecture = "x86_64",
Format = BinaryFormat.Elf
},
Target = new BinaryRef
{
Path = "target.so",
Sha256 = "sha256:target",
Architecture = "x86_64",
Format = BinaryFormat.Elf
},
Exports = new SymbolChangeSummary
{
Added = added,
Removed = removed,
Modified = modified,
Renamed = [],
Counts = new SymbolChangeCounts
{
Added = added.Count,
Removed = removed.Count,
Modified = modified.Count,
TotalBase = removed.Count + modified.Count,
TotalTarget = added.Count + modified.Count
}
},
Imports = new SymbolChangeSummary
{
Added = [],
Removed = [],
Modified = [],
Renamed = [],
Counts = new SymbolChangeCounts()
},
Versions = new VersionMapDiff
{
DefinitionsAdded = [],
DefinitionsRemoved = [],
RequirementsAdded = [],
RequirementsRemoved = [],
AssignmentsChanged = [],
Counts = new VersionChangeCounts()
},
AbiCompatibility = new AbiCompatibility
{
Level = AbiCompatibilityLevel.FullyCompatible,
Score = 1.0,
IsBackwardCompatible = true,
IsForwardCompatible = true,
BreakingChanges = [],
Warnings = [],
Summary = new AbiSummary()
},
ComputedAt = DateTimeOffset.UtcNow
};
}
}

View File

@@ -0,0 +1,22 @@
# BinaryIndex.Decompiler Tests Charter
## Mission
- Validate deterministic decompiler parsing and normalization.
## Responsibilities
- Cover AST parsing, normalization, and comparison paths.
- Keep fixtures deterministic and offline-safe.
## Required Reading
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/semantic-diffing.md
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Definition of Done
- Tests are deterministic and offline-safe.
- Coverage includes error handling and normalization edge cases.
## Working Agreement
- Use fixed seeds and ids in fixtures.
- Avoid non-deterministic ordering; assert sorted output.

View File

@@ -20,9 +20,6 @@
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" />
</ItemGroup>
<ItemGroup>

View File

@@ -18,13 +18,7 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />

View File

@@ -0,0 +1,22 @@
# BinaryIndex.Ensemble Tests Charter
## Mission
- Validate deterministic ensemble decisioning and weight tuning.
## Responsibilities
- Cover decision engine inputs, weights, and tie-breaking.
- Keep fixtures deterministic and offline-safe.
## Required Reading
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/semantic-diffing.md
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Definition of Done
- Tests are deterministic and offline-safe.
- Coverage includes weight tuning and error handling.
## Working Agreement
- Use fixed seeds and ids in fixtures.
- Avoid non-deterministic ordering; assert sorted output.

View File

@@ -0,0 +1,22 @@
# BinaryIndex.Ghidra Tests Charter
## Mission
- Validate deterministic behavior of the Ghidra integration layer.
## Responsibilities
- Cover service behaviors, process lifecycle, and output parsing.
- Keep fixtures deterministic and offline-safe.
## Required Reading
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/ghidra-deployment.md
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Definition of Done
- Tests are deterministic and offline-safe.
- Coverage includes error handling and cleanup paths.
## Working Agreement
- Use fixed ids and temp paths in fixtures.
- Avoid non-deterministic ordering; assert sorted output.

View File

@@ -16,13 +16,7 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />

View File

@@ -21,9 +21,6 @@
<PackageReference Include="FsCheck.Xunit.v3" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,22 @@
# BinaryIndex.Semantic Tests Charter
## Mission
- Validate deterministic semantic graph extraction and matching.
## Responsibilities
- Cover graph extraction, hashing, canonicalization, and matching.
- Keep fixtures deterministic and offline-safe.
## Required Reading
- docs/modules/binary-index/architecture.md
- docs/modules/binary-index/semantic-diffing.md
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Definition of Done
- Tests are deterministic and offline-safe.
- Coverage includes algorithm options and edge cases.
## Working Agreement
- Use fixed seeds and ids in fixtures.
- Avoid non-deterministic ordering; assert sorted output.

View File

@@ -15,10 +15,6 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -594,7 +594,7 @@ internal static class BinaryCommandHandlers
Function = function,
FingerprintId = fingerprintId,
FingerprintHash = Convert.ToHexStringLower(fileHash),
GeneratedAt = (services.GetService<TimeProvider>() ?? TimeProvider.System).GetUtcNow().ToString("O")
GeneratedAt = (services.GetService<TimeProvider>() ?? TimeProvider.System).GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
if (format == "json")

View File

@@ -124,6 +124,10 @@ internal static class CommandFactory
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle - Evidence bundle export and verify
root.Add(EvidenceCommandGroup.BuildEvidenceCommand(services, options, verboseOption, cancellationToken));
// Sprint: SPRINT_20260105_002_004_CLI - Facet seal and drift commands
root.Add(SealCommandGroup.BuildSealCommand(services, verboseOption, cancellationToken));
root.Add(DriftCommandGroup.BuildDriftCommand(services, verboseOption, cancellationToken));
// Add scan graph subcommand to existing scan command
var scanCommand = root.Children.OfType<Command>().FirstOrDefault(c => c.Name == "scan");
if (scanCommand is not null)
@@ -4632,6 +4636,9 @@ internal static class CommandFactory
vex.Add(explain);
// Sprint: SPRINT_20260105_002_004_CLI - VEX gen from drift command
vex.Add(VexGenCommandGroup.BuildVexGenCommand(services, verboseOption, cancellationToken));
return vex;
}

View File

@@ -4,6 +4,7 @@
// Description: Command handlers for cryptographic signing and verification.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
@@ -398,7 +399,7 @@ internal static partial class CommandHandlers
{
format = format,
provider = providerName,
timestamp = DateTimeOffset.UtcNow.ToString("O"),
timestamp = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
dataHash = CryptoHashFactory.CreateDefault().ComputeHashHex(data, HashAlgorithms.Sha256),
signature = "STUB-SIGNATURE-BASE64",
keyId = "STUB-KEY-ID"

View File

@@ -5,6 +5,7 @@
// Description: Command handlers for reachability drift CLI.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using Spectre.Console;
@@ -46,7 +47,7 @@ internal static partial class CommandHandlers
var driftResult = new DriftResultDto
{
Id = Guid.NewGuid().ToString("N")[..8],
ComparedAt = DateTimeOffset.UtcNow.ToString("O"),
ComparedAt = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
BaseGraphId = baseId,
HeadGraphId = headId ?? "latest",
Summary = new DriftSummaryDto
@@ -103,7 +104,7 @@ internal static partial class CommandHandlers
var driftResult = new DriftResultDto
{
Id = id,
ComparedAt = DateTimeOffset.UtcNow.ToString("O"),
ComparedAt = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
BaseGraphId = "base",
HeadGraphId = "head",
Summary = new DriftSummaryDto

View File

@@ -921,7 +921,7 @@ internal static partial class CommandHandlers
table.AddRow("Kit", Markup.Escape(payload.Active.KitId));
table.AddRow("Version", Markup.Escape(payload.Active.Version));
table.AddRow("Digest", Markup.Escape(payload.Active.Digest));
table.AddRow("Activated", payload.Active.ActivatedAt.ToString("O"));
table.AddRow("Activated", payload.Active.ActivatedAt.ToString("O", CultureInfo.InvariantCulture));
table.AddRow("DSSE verified", payload.Active.DsseVerified ? "[green]true[/]" : "[red]false[/]");
table.AddRow("Rekor verified", payload.Active.RekorVerified ? "[green]true[/]" : "[red]false[/]");
table.AddRow("Staleness", payload.StalenessSeconds < 0 ? "-" : FormatStaleness(TimeSpan.FromSeconds(payload.StalenessSeconds)));
@@ -1292,7 +1292,7 @@ internal static partial class CommandHandlers
if (payload.ActivatedAt.HasValue)
{
table.AddRow("Activated", payload.ActivatedAt.Value.ToString("O"));
table.AddRow("Activated", payload.ActivatedAt.Value.ToString("O", CultureInfo.InvariantCulture));
}
if (payload.WasForceActivated)

View File

@@ -5,6 +5,7 @@
// Description: Implements bundle create, verify, and info commands.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
@@ -270,7 +271,7 @@ internal static partial class CommandHandlers
version = result.BundleVersion,
ruleCount = result.RuleCount,
signerKeyId = result.SignerKeyId,
signedAt = result.SignedAt?.ToString("O"),
signedAt = result.SignedAt?.ToString("O", CultureInfo.InvariantCulture),
errors = result.ValidationErrors,
warnings = result.ValidationWarnings
};

View File

@@ -5,6 +5,7 @@
// Description: Command handlers for reachability witness CLI.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using Spectre.Console;
@@ -46,7 +47,7 @@ internal static partial class CommandHandlers
PackageName = "Newtonsoft.Json",
PackageVersion = "12.0.3",
ConfidenceTier = "confirmed",
ObservedAt = DateTimeOffset.UtcNow.AddHours(-2).ToString("O"),
ObservedAt = DateTimeOffset.UtcNow.AddHours(-2).ToString("O", CultureInfo.InvariantCulture),
Entrypoint = new WitnessEntrypointDto
{
Type = "http",

View File

@@ -9400,13 +9400,13 @@ internal static partial class CommandHandlers
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["kind"] = signingKey.Kind.ToString(),
["createdAt"] = signingKey.CreatedAt.UtcDateTime.ToString("O"),
["createdAt"] = signingKey.CreatedAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
["providerHint"] = signingKey.Reference.ProviderHint
};
if (signingKey.ExpiresAt.HasValue)
{
metadata["expiresAt"] = signingKey.ExpiresAt.Value.UtcDateTime.ToString("O");
metadata["expiresAt"] = signingKey.ExpiresAt.Value.UtcDateTime.ToString("O", CultureInfo.InvariantCulture);
}
foreach (var pair in signingKey.Metadata)
@@ -16769,7 +16769,7 @@ stella policy test {policyName}.stella
Console.WriteLine("VulnerabilityId,Severity,Score,Status,VexStatus,PackageCount,Assignee,UpdatedAt");
foreach (var item in response.Items)
{
Console.WriteLine($"{CsvEscape(item.VulnerabilityId)},{CsvEscape(item.Severity.Level)},{item.Severity.Score?.ToString("F1") ?? ""},{CsvEscape(item.Status)},{CsvEscape(item.VexStatus ?? "")},{item.AffectedPackages.Count},{CsvEscape(item.Assignee ?? "")},{item.UpdatedAt?.ToString("O") ?? ""}");
Console.WriteLine($"{CsvEscape(item.VulnerabilityId)},{CsvEscape(item.Severity.Level)},{item.Severity.Score?.ToString("F1") ?? ""},{CsvEscape(item.Status)},{CsvEscape(item.VexStatus ?? "")},{item.AffectedPackages.Count},{CsvEscape(item.Assignee ?? "")},{item.UpdatedAt?.ToString("O", CultureInfo.InvariantCulture) ?? ""}");
}
}
@@ -31748,7 +31748,7 @@ stella policy test {policyName}.stella
AnsiConsole.MarkupLine($" Bundle ID: {Markup.Escape(result.BundleId ?? "unknown")}");
AnsiConsole.MarkupLine($" Root Hash: {Markup.Escape(result.RootHash ?? "unknown")}");
AnsiConsole.MarkupLine($" Entries: {result.Entries}");
AnsiConsole.MarkupLine($" Created: {result.CreatedAt?.ToString("O") ?? "unknown"}");
AnsiConsole.MarkupLine($" Created: {result.CreatedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"}");
AnsiConsole.MarkupLine($" Portable: {(result.Portable ? "yes" : "no")}");
}
else
@@ -33078,7 +33078,7 @@ stella policy test {policyName}.stella
{
PredicateType = "stellaops.io/predicates/scan-result@v1",
Digest = "sha256:abc123...",
CreatedAt = DateTimeOffset.UtcNow.AddHours(-1).ToString("O"),
CreatedAt = DateTimeOffset.UtcNow.AddHours(-1).ToString("O", CultureInfo.InvariantCulture),
Size = 4096L
}
};
@@ -33176,7 +33176,7 @@ stella policy test {policyName}.stella
var result = new
{
Image = image,
VerifiedAt = DateTimeOffset.UtcNow.ToString("O"),
VerifiedAt = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
OverallValid = overallValid,
TotalAttestations = verificationResults.Length,
ValidAttestations = verificationResults.Count(r => r.SignatureValid && r.PolicyPassed),

View File

@@ -1,160 +1,324 @@
// -----------------------------------------------------------------------------
// DriftCommandGroup.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-019
// Description: CLI command group for reachability drift detection.
// -----------------------------------------------------------------------------
// <copyright file="DriftCommandGroup.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_CLI (CLI-007 through CLI-010)
using System.CommandLine;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cli.Extensions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Spectre.Console;
using StellaOps.Facet;
namespace StellaOps.Cli.Commands;
/// <summary>
/// CLI command group for reachability drift detection.
/// Command group for facet drift analysis operations.
/// Provides the <c>stella drift</c> command for analyzing facet drift against baseline seals.
/// </summary>
internal static class DriftCommandGroup
{
/// <summary>
/// Builds the drift command group.
/// </summary>
internal static Command BuildDriftCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var drift = new Command("drift", "Reachability drift detection operations.");
var imageArg = new Argument<string>("image")
{
Description = "Image reference or digest to analyze."
};
drift.Add(BuildDriftCompareCommand(services, verboseOption, cancellationToken));
drift.Add(BuildDriftShowCommand(services, verboseOption, cancellationToken));
var baselineOption = new Option<string?>("--baseline", "-b")
{
Description = "Baseline seal ID (default: latest for image)."
};
var formatOption = new Option<string>("--format")
{
Description = "Output format: table, json, yaml."
};
formatOption.SetDefaultValue("table");
var detailOption = new Option<bool>("--verbose-files")
{
Description = "Show detailed file changes."
};
var failOnBreachOption = new Option<bool>("--fail-on-breach")
{
Description = "Exit with error code if quota breached."
};
var outputOption = new Option<string?>("--output", "-o")
{
Description = "Output file path (default: stdout)."
};
var drift = new Command("drift", "Analyze facet drift against baseline seal. Compares current image state to sealed baseline.");
drift.Add(imageArg);
drift.Add(baselineOption);
drift.Add(formatOption);
drift.Add(detailOption);
drift.Add(failOnBreachOption);
drift.Add(outputOption);
drift.Add(verboseOption);
drift.SetAction(parseResult =>
{
var image = parseResult.GetValue(imageArg)!;
var baseline = parseResult.GetValue(baselineOption);
var format = parseResult.GetValue(formatOption)!;
var detail = parseResult.GetValue(detailOption);
var failOnBreach = parseResult.GetValue(failOnBreachOption);
var output = parseResult.GetValue(outputOption);
var verbose = parseResult.GetValue(verboseOption);
return HandleDriftAsync(
services,
image,
baseline,
format,
detail,
failOnBreach,
output,
verbose,
cancellationToken);
});
return drift;
}
private static Command BuildDriftCompareCommand(
private static async Task<int> HandleDriftAsync(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
string image,
string? baselineId,
string format,
bool showDetails,
bool failOnBreach,
string? outputPath,
bool verbose,
CancellationToken ct)
{
var baseOption = new Option<string>("--base", new[] { "-b" })
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetService<ILoggerFactory>()?.CreateLogger("drift")
?? NullLogger.Instance;
var timeProvider = scope.ServiceProvider.GetService<TimeProvider>() ?? TimeProvider.System;
try
{
Description = "Base scan/graph ID or commit SHA for comparison.",
Required = true
};
var driftDetector = scope.ServiceProvider.GetService<IFacetDriftDetector>();
var sealStore = scope.ServiceProvider.GetService<IFacetSealStore>();
var headOption = new Option<string>("--head", new[] { "-h" })
if (driftDetector is null || sealStore is null)
{
AnsiConsole.MarkupLine("[red]Facet services not available. Ensure facet module is configured.[/]");
return 1;
}
AnsiConsole.MarkupLine($"[bold]Analyzing drift for:[/] {image}");
// Load baseline seal
FacetSeal? baseline;
if (!string.IsNullOrEmpty(baselineId))
{
baseline = await sealStore.GetByCombinedRootAsync(baselineId, ct).ConfigureAwait(false);
if (baseline is null)
{
AnsiConsole.MarkupLine($"[red]Baseline seal '{baselineId}' not found.[/]");
return 1;
}
}
else
{
baseline = await sealStore.GetLatestSealAsync(image, ct).ConfigureAwait(false);
if (baseline is null)
{
AnsiConsole.MarkupLine("[red]No baseline seal found for image. Run 'stella seal' first.[/]");
return 1;
}
}
AnsiConsole.MarkupLine($"[dim]Baseline seal:[/] {TruncateHash(baseline.CombinedMerkleRoot)} ({baseline.CreatedAt:yyyy-MM-dd HH:mm:ss})");
// Get current seal for comparison (latest seal for the image)
var currentSeal = await sealStore.GetLatestSealAsync(image, ct).ConfigureAwait(false);
if (currentSeal is null)
{
AnsiConsole.MarkupLine("[red]No current seal found for image.[/]");
return 1;
}
// Compute drift between baseline and current
var report = await driftDetector.DetectDriftAsync(baseline, currentSeal, ct).ConfigureAwait(false);
// Output based on format
if (format == "json")
{
var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
if (!string.IsNullOrEmpty(outputPath))
{
await File.WriteAllTextAsync(outputPath, json, ct).ConfigureAwait(false);
AnsiConsole.MarkupLine($"[green]Report written to:[/] {outputPath}");
}
else
{
Console.WriteLine(json);
}
return GetExitCode(report, failOnBreach);
}
if (format == "yaml")
{
var yaml = ToYaml(report);
if (!string.IsNullOrEmpty(outputPath))
{
await File.WriteAllTextAsync(outputPath, yaml, ct).ConfigureAwait(false);
AnsiConsole.MarkupLine($"[green]Report written to:[/] {outputPath}");
}
else
{
Console.WriteLine(yaml);
}
return GetExitCode(report, failOnBreach);
}
// Table format (default)
AnsiConsole.WriteLine();
// Overall verdict
var verdictColor = GetVerdictColor(report.OverallVerdict);
AnsiConsole.MarkupLine($"[bold]Overall Verdict:[/] [{verdictColor}]{report.OverallVerdict}[/]");
AnsiConsole.MarkupLine($"[bold]Total Changed Files:[/] {report.TotalChangedFiles}");
AnsiConsole.WriteLine();
// Per-facet table
var table = new Table()
.AddColumn("Facet")
.AddColumn(new TableColumn("Added").Centered())
.AddColumn(new TableColumn("Removed").Centered())
.AddColumn(new TableColumn("Modified").Centered())
.AddColumn(new TableColumn("Churn %").RightAligned())
.AddColumn("Verdict");
foreach (var facetDrift in report.FacetDrifts)
{
var vColor = GetVerdictColor(facetDrift.QuotaVerdict);
table.AddRow(
facetDrift.FacetId,
FormatCount(facetDrift.Added.Length, "green"),
FormatCount(facetDrift.Removed.Length, "red"),
FormatCount(facetDrift.Modified.Length, "yellow"),
$"{facetDrift.ChurnPercent:F1}%",
$"[{vColor}]{facetDrift.QuotaVerdict}[/]");
}
AnsiConsole.Write(table);
// Detailed file changes
if (showDetails)
{
AnsiConsole.WriteLine();
AnsiConsole.MarkupLine("[bold]File Changes:[/]");
foreach (var facetDrift in report.FacetDrifts.Where(d =>
d.Added.Length + d.Removed.Length + d.Modified.Length > 0))
{
AnsiConsole.WriteLine();
AnsiConsole.MarkupLine($"[bold underline]{facetDrift.FacetId}[/]");
const int maxFiles = 10;
foreach (var f in facetDrift.Added.Take(maxFiles))
AnsiConsole.MarkupLine($" [green]+[/] {f.Path}");
foreach (var f in facetDrift.Removed.Take(maxFiles))
AnsiConsole.MarkupLine($" [red]-[/] {f.Path}");
foreach (var f in facetDrift.Modified.Take(maxFiles))
AnsiConsole.MarkupLine($" [yellow]~[/] {f.Path}");
var total = facetDrift.Added.Length + facetDrift.Removed.Length + facetDrift.Modified.Length;
var shown = Math.Min(facetDrift.Added.Length, maxFiles)
+ Math.Min(facetDrift.Removed.Length, maxFiles)
+ Math.Min(facetDrift.Modified.Length, maxFiles);
if (total > shown)
{
AnsiConsole.MarkupLine($" [dim]... and {total - shown} more files[/]");
}
}
}
// Write to file if specified
if (!string.IsNullOrEmpty(outputPath))
{
var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
await File.WriteAllTextAsync(outputPath, json, ct).ConfigureAwait(false);
AnsiConsole.WriteLine();
AnsiConsole.MarkupLine($"[green]Report written to:[/] {outputPath}");
}
return GetExitCode(report, failOnBreach);
}
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to analyze drift for {Image}", image);
        AnsiConsole.MarkupLine($"[red]Error:[/] {ex.Message}");
        return 1;
    }
}
        Description = "Head scan/graph ID or commit SHA for comparison (defaults to latest)."
    };
    var imageOption = new Option<string?>("--image", new[] { "-i" })
    {
        Description = "Container image reference (digest or tag)."
    };
    var repoOption = new Option<string?>("--repo", new[] { "-r" })
    {
        Description = "Repository reference (owner/repo)."
    };
    var outputOption = new Option<string>("--output", new[] { "-o" })
    {
        Description = "Output format: table (default), json, sarif."
    }.SetDefaultValue("table").FromAmong("table", "json", "sarif");
    var severityOption = new Option<string>("--min-severity")
    {
        Description = "Minimum severity to include: critical, high, medium, low, info."
    }.SetDefaultValue("medium").FromAmong("critical", "high", "medium", "low", "info");
    var onlyIncreasesOption = new Option<bool>("--only-increases")
    {
        Description = "Only show sinks with increased reachability (risk increases)."
    };
    var command = new Command("compare", "Compare reachability between two scans.")
    {
        baseOption,
        headOption,
        imageOption,
        repoOption,
        outputOption,
        severityOption,
        onlyIncreasesOption,
        verboseOption
    };
    command.SetAction(parseResult =>
    {
        var baseId = parseResult.GetValue(baseOption)!;
        var headId = parseResult.GetValue(headOption);
        var image = parseResult.GetValue(imageOption);
        var repo = parseResult.GetValue(repoOption);
        var output = parseResult.GetValue(outputOption)!;
        var minSeverity = parseResult.GetValue(severityOption)!;
        var onlyIncreases = parseResult.GetValue(onlyIncreasesOption);
        var verbose = parseResult.GetValue(verboseOption);
        return CommandHandlers.HandleDriftCompareAsync(
            services,
            baseId,
            headId,
            image,
            repo,
            output,
            minSeverity,
            onlyIncreases,
            verbose,
            cancellationToken);
    });
    return command;
}
private static Command BuildDriftShowCommand(
    IServiceProvider services,
    Option<bool> verboseOption,
    CancellationToken cancellationToken)
{
    var idOption = new Option<string>("--id")
    {
        Description = "Drift result ID to display.",
        Required = true
    };
    var outputOption = new Option<string>("--output", new[] { "-o" })
    {
        Description = "Output format: table (default), json, sarif."
    }.SetDefaultValue("table").FromAmong("table", "json", "sarif");
    var expandPathsOption = new Option<bool>("--expand-paths")
    {
        Description = "Show full call paths instead of compressed view."
    };
    var command = new Command("show", "Show details of a drift result.")
    {
        idOption,
        outputOption,
        expandPathsOption,
        verboseOption
    };
    command.SetAction(parseResult =>
    {
        var id = parseResult.GetValue(idOption)!;
        var output = parseResult.GetValue(outputOption)!;
        var expandPaths = parseResult.GetValue(expandPathsOption);
        var verbose = parseResult.GetValue(verboseOption);
        return CommandHandlers.HandleDriftShowAsync(
            services,
            id,
            output,
            expandPaths,
            verbose,
            cancellationToken);
    });
    return command;
}
private static int GetExitCode(FacetDriftReport report, bool failOnBreach)
{
    if (!failOnBreach) return 0;
    return report.OverallVerdict switch
    {
        QuotaVerdict.Blocked => 2,
        QuotaVerdict.RequiresVex => 3,
        _ => 0
    };
}
private static string GetVerdictColor(QuotaVerdict verdict)
{
    return verdict switch
    {
        QuotaVerdict.Ok => "green",
        QuotaVerdict.Warning => "yellow",
        QuotaVerdict.Blocked => "red",
        QuotaVerdict.RequiresVex => "blue",
        _ => "white"
    };
}
private static string FormatCount(int count, string color)
{
    return count > 0 ? $"[{color}]{count}[/]" : "[dim]0[/]";
}
private static string ToYaml(FacetDriftReport report)
{
    var sb = new StringBuilder();
    sb.AppendLine($"imageDigest: {report.ImageDigest}");
    sb.AppendLine($"baselineSealId: {report.BaselineSealId}");
    sb.AppendLine($"analyzedAt: {report.AnalyzedAt:O}");
    sb.AppendLine($"overallVerdict: {report.OverallVerdict}");
    sb.AppendLine($"totalChangedFiles: {report.TotalChangedFiles}");
    sb.AppendLine("facetDrifts:");
    foreach (var d in report.FacetDrifts)
    {
        sb.AppendLine($"  - facetId: {d.FacetId}");
        sb.AppendLine($"    added: {d.Added.Length}");
        sb.AppendLine($"    removed: {d.Removed.Length}");
        sb.AppendLine($"    modified: {d.Modified.Length}");
        sb.AppendLine($"    churnPercent: {d.ChurnPercent:F2}");
        sb.AppendLine($"    verdict: {d.QuotaVerdict}");
    }
    return sb.ToString();
}
private static string TruncateHash(string? hash)
{
    if (string.IsNullOrEmpty(hash)) return "(none)";
    return hash.Length > 16 ? $"{hash[..8]}...{hash[^8..]}" : hash;
}
}
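For orientation, a minimal sketch of how the compare and show builders above could be mounted on a parent command. The parent command name "drift" and the surrounding bootstrap (services, the shared --verbose option, cancellation token) are assumptions; they are not shown in this diff.
// Sketch only; assumes the usual CLI bootstrap objects that the builders above expect.
var driftCommand = new Command("drift", "Facet drift analysis commands.");
driftCommand.Add(BuildDriftCompareCommand(services, verboseOption, cancellationToken));
driftCommand.Add(BuildDriftShowCommand(services, verboseOption, cancellationToken));
// With the fail-on-breach flag enabled, GetExitCode lets CI distinguish outcomes:
// 0 = ok/warning, 2 = blocked, 3 = requires VEX authorization.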

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.CommandLine;
using System.Globalization;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;
@@ -862,16 +863,16 @@ public static class ExceptionCommandGroup
table.AddRow("Gate Level", result.GateLevel ?? "G1");
table.AddRow("Reason Code", result.ReasonCode ?? "Other");
table.AddRow("Requestor", result.RequestorId ?? "Unknown");
table.AddRow("Created", result.CreatedAt.ToString("O"));
table.AddRow("Created", result.CreatedAt.ToString("O", CultureInfo.InvariantCulture));
if (result.RequestExpiresAt.HasValue)
{
table.AddRow("Request Expires", result.RequestExpiresAt.Value.ToString("O"));
table.AddRow("Request Expires", result.RequestExpiresAt.Value.ToString("O", CultureInfo.InvariantCulture));
}
if (result.ExceptionExpiresAt.HasValue)
{
table.AddRow("Exception Expires", result.ExceptionExpiresAt.Value.ToString("O"));
table.AddRow("Exception Expires", result.ExceptionExpiresAt.Value.ToString("O", CultureInfo.InvariantCulture));
}
console.Write(table);
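A note on the ToString("O") changes that repeat throughout this commit: the round-trip ("O"/"o") specifier is defined to be culture-invariant, so passing CultureInfo.InvariantCulture does not change the rendered value; it makes the intent explicit, presumably to satisfy globalization analyzers such as CA1305 (the analyzer motivation is an assumption). A quick check, relying on the file's existing System.Globalization using:
var stamp = DateTimeOffset.UtcNow;
var implicitCulture = stamp.ToString("O");
var invariant = stamp.ToString("O", CultureInfo.InvariantCulture);
Console.WriteLine(implicitCulture == invariant); // True: the "O" output is identical either way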

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.CommandLine;
using System.Globalization;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;
@@ -405,7 +406,7 @@ public static class GateCommandGroup
table.AddRow("Exit Code", result.ExitCode.ToString());
table.AddRow("Image", result.ImageDigest);
table.AddRow("Baseline", result.BaselineRef ?? "(default)");
table.AddRow("Decided At", result.DecidedAt.ToString("O"));
table.AddRow("Decided At", result.DecidedAt.ToString("O", CultureInfo.InvariantCulture));
if (!string.IsNullOrWhiteSpace(result.Summary))
{

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.CommandLine;
using System.Globalization;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Security.Cryptography;
@@ -690,7 +691,7 @@ public static class LayerSbomCommandGroup
.AddColumn("Value");
summaryTable.AddRow("Image", recipe.ImageDigest ?? "N/A");
summaryTable.AddRow("Created", recipe.CreatedAt?.ToString("O") ?? "N/A");
summaryTable.AddRow("Created", recipe.CreatedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "N/A");
summaryTable.AddRow("Generator", $"{recipe.Recipe?.GeneratorName ?? "N/A"} v{recipe.Recipe?.GeneratorVersion ?? "?"}");
summaryTable.AddRow("Layers", recipe.Recipe?.Layers?.Count.ToString() ?? "0");
summaryTable.AddRow("Merkle Root", TruncateDigest(recipe.Recipe?.MerkleRoot));

View File

@@ -1,6 +1,7 @@
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
using System.CommandLine;
using System.Globalization;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
@@ -286,7 +287,7 @@ public class PoEExporter
revoked = false
}
},
updatedAt = DateTime.UtcNow.ToString("O")
updatedAt = DateTime.UtcNow.ToString("O", CultureInfo.InvariantCulture)
};
var trustedKeysPath = Path.Combine(outputDir, "trusted-keys.json");
@@ -299,7 +300,7 @@ public class PoEExporter
var manifest = new
{
schema = "stellaops.poe.export@v1",
exportedAt = DateTime.UtcNow.ToString("O"),
exportedAt = DateTime.UtcNow.ToString("O", CultureInfo.InvariantCulture),
scanId = options.ScanId,
finding = options.Finding,
artifacts = Directory.GetFiles(outputDir, "poe-*.json")

View File

@@ -5,6 +5,7 @@
// Description: CLI command handlers for function-level proof operations.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
@@ -81,7 +82,7 @@ internal static class FuncProofCommandHandlers
FunctionCount = 0, // Placeholder
Metadata = new FuncProofMetadataOutput
{
CreatedAt = DateTimeOffset.UtcNow.ToString("O"),
CreatedAt = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
Tool = "stella-cli",
ToolVersion = "0.1.0",
DetectionMethod = detectMethod
@@ -412,7 +413,7 @@ internal static class FuncProofCommandHandlers
// Write manifest
var manifest = new ExportManifest
{
ExportedAt = DateTimeOffset.UtcNow.ToString("O"),
ExportedAt = DateTimeOffset.UtcNow.ToString("O", CultureInfo.InvariantCulture),
Format = format,
ProofId = proofData.ProofId,
Files = new List<string> { Path.GetFileName(proofPath) }

View File

@@ -0,0 +1,270 @@
// <copyright file="SealCommandGroup.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_CLI (CLI-001 through CLI-006)
using System.Collections.Immutable;
using System.CommandLine;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Spectre.Console;
using StellaOps.Facet;
namespace StellaOps.Cli.Commands;
/// <summary>
/// Command group for facet sealing operations.
/// Provides stella seal command for creating facet seals for container images.
/// </summary>
internal static class SealCommandGroup
{
/// <summary>
/// Builds the seal command group.
/// </summary>
internal static Command BuildSealCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var imageArg = new Argument<string>("image")
{
Description = "Image reference or digest to seal."
};
var outputOption = new Option<string?>("--output", "-o")
{
Description = "Output file path for seal (default: stdout)."
};
var storeOption = new Option<bool>("--store")
{
Description = "Store seal in remote API."
};
storeOption.SetDefaultValue(true);
var signOption = new Option<bool>("--sign")
{
Description = "Sign seal with DSSE."
};
signOption.SetDefaultValue(true);
var keyOption = new Option<string?>("--key", "-k")
{
Description = "Private key path for signing (default: use configured key)."
};
var facetsOption = new Option<string[]?>("--facets", "-f")
{
Description = "Specific facets to seal (default: all). Comma-separated list.",
AllowMultipleArgumentsPerToken = true
};
var formatOption = new Option<string>("--format")
{
Description = "Output format: json, yaml, compact."
};
formatOption.SetDefaultValue("json");
var seal = new Command("seal", "Create facet seal for an image. Seals capture the cryptographic state of image facets for drift detection.");
seal.Add(imageArg);
seal.Add(outputOption);
seal.Add(storeOption);
seal.Add(signOption);
seal.Add(keyOption);
seal.Add(facetsOption);
seal.Add(formatOption);
seal.Add(verboseOption);
seal.SetAction(parseResult =>
{
var image = parseResult.GetValue(imageArg)!;
var output = parseResult.GetValue(outputOption);
var store = parseResult.GetValue(storeOption);
var sign = parseResult.GetValue(signOption);
var key = parseResult.GetValue(keyOption);
var facets = parseResult.GetValue(facetsOption);
var format = parseResult.GetValue(formatOption)!;
var verbose = parseResult.GetValue(verboseOption);
return HandleSealAsync(
services,
image,
output,
store,
sign,
key,
facets,
format,
verbose,
cancellationToken);
});
return seal;
}
private static async Task<int> HandleSealAsync(
IServiceProvider services,
string image,
string? outputPath,
bool store,
bool sign,
string? keyPath,
string[]? facets,
string format,
bool verbose,
CancellationToken ct)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetService<ILoggerFactory>()?.CreateLogger("seal")
?? NullLogger.Instance;
var timeProvider = scope.ServiceProvider.GetService<TimeProvider>() ?? TimeProvider.System;
try
{
var facetExtractor = scope.ServiceProvider.GetService<IFacetExtractor>();
var sealStore = scope.ServiceProvider.GetService<IFacetSealStore>();
var sealer = scope.ServiceProvider.GetService<FacetSealer>();
if (facetExtractor is null || sealer is null)
{
AnsiConsole.MarkupLine("[red]Facet services not available. Ensure facet module is configured.[/]");
return 1;
}
AnsiConsole.MarkupLine($"[bold]Creating facet seal for:[/] {image}");
// Determine facets to seal
var builtInFacets = BuiltInFacets.All;
var facetsToSeal = facets is { Length: > 0 }
? builtInFacets.Where(f => facets.Contains(f.FacetId, StringComparer.OrdinalIgnoreCase)).ToList()
: builtInFacets.ToList();
if (facetsToSeal.Count == 0)
{
AnsiConsole.MarkupLine("[yellow]No matching facets found.[/]");
return 1;
}
AnsiConsole.MarkupLine($"[dim]Sealing {facetsToSeal.Count} facets...[/]");
// Extract facets
// Note: In production, rootPath would be the extracted image layers path
// For this CLI, we assume the image has been pulled and extracted
var extractionOptions = new FacetExtractionOptions
{
IncludeFileDetails = true,
Facets = [.. facetsToSeal]
};
var extraction = await facetExtractor.ExtractFromDirectoryAsync(
".", // Root path - in production, this would be the image root
extractionOptions,
ct).ConfigureAwait(false);
// Create the seal
var seal = sealer.CreateSeal(image, extraction);
// Display seal summary
AnsiConsole.WriteLine();
var table = new Table()
.AddColumn("Facet")
.AddColumn("Files")
.AddColumn("Size")
.AddColumn("Merkle Root");
foreach (var facet in seal.Facets)
{
table.AddRow(
facet.Name,
facet.FileCount.ToString("N0"),
FormatBytes(facet.TotalBytes),
TruncateHash(facet.MerkleRoot));
}
AnsiConsole.Write(table);
AnsiConsole.WriteLine();
// Store if requested
if (store && sealStore is not null)
{
await sealStore.SaveAsync(seal, ct).ConfigureAwait(false);
AnsiConsole.MarkupLine("[green]Seal stored to API[/]");
}
// Output seal
var sealOutput = FormatSeal(seal, format);
if (!string.IsNullOrEmpty(outputPath))
{
await File.WriteAllTextAsync(outputPath, sealOutput, ct).ConfigureAwait(false);
AnsiConsole.MarkupLine($"[green]Seal written to:[/] {outputPath}");
}
else if (!store)
{
Console.WriteLine(sealOutput);
}
// Summary
AnsiConsole.MarkupLine($"[bold green]Seal created successfully[/]");
AnsiConsole.MarkupLine($" [dim]Image:[/] {seal.ImageDigest}");
AnsiConsole.MarkupLine($" [dim]Facets:[/] {seal.Facets.Length}");
AnsiConsole.MarkupLine($" [dim]Combined Root:[/] {TruncateHash(seal.CombinedMerkleRoot)}");
AnsiConsole.MarkupLine($" [dim]Signed:[/] {(seal.Signature is not null ? "Yes" : "No")}");
return 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to create seal for {Image}", image);
AnsiConsole.MarkupLine($"[red]Error:[/] {ex.Message}");
return 1;
}
}
private static string FormatSeal(FacetSeal seal, string format)
{
return format.ToLowerInvariant() switch
{
"yaml" => ToYaml(seal),
"compact" => $"{seal.ImageDigest}|{seal.CombinedMerkleRoot}|{seal.Facets.Length}",
_ => JsonSerializer.Serialize(seal, new JsonSerializerOptions { WriteIndented = true })
};
}
private static string ToYaml(FacetSeal seal)
{
var sb = new StringBuilder();
sb.AppendLine($"imageDigest: {seal.ImageDigest}");
sb.AppendLine($"createdAt: {seal.CreatedAt:O}");
sb.AppendLine($"combinedMerkleRoot: {seal.CombinedMerkleRoot}");
sb.AppendLine("facets:");
foreach (var f in seal.Facets)
{
sb.AppendLine($" - facetId: {f.FacetId}");
sb.AppendLine($" name: {f.Name}");
sb.AppendLine($" merkleRoot: {f.MerkleRoot}");
sb.AppendLine($" fileCount: {f.FileCount}");
sb.AppendLine($" totalBytes: {f.TotalBytes}");
}
return sb.ToString();
}
private static string FormatBytes(long bytes)
{
return bytes switch
{
< 1024 => $"{bytes} B",
< 1024 * 1024 => $"{bytes / 1024.0:F1} KB",
< 1024 * 1024 * 1024 => $"{bytes / (1024.0 * 1024):F1} MB",
_ => $"{bytes / (1024.0 * 1024 * 1024):F1} GB"
};
}
private static string TruncateHash(string? hash)
{
if (string.IsNullOrEmpty(hash)) return "(none)";
return hash.Length > 16 ? $"{hash[..8]}...{hash[^8..]}" : hash;
}
}
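A small sketch of what the private formatting helpers above produce; the inputs are arbitrary examples, and the calls only compile from inside SealCommandGroup because the helpers are private.
// FormatSeal(seal, "compact") yields a single line: "{ImageDigest}|{CombinedMerkleRoot}|{FacetCount}".
Console.WriteLine(FormatBytes(512));        // "512 B"
Console.WriteLine(FormatBytes(1_572_864));  // "1.5 MB"
Console.WriteLine(TruncateHash("sha256:0123456789abcdef0123456789abcdef"));
// prints "sha256:0...89abcdef" (first 8 characters, "...", last 8 characters)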

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.CommandLine;
using System.Globalization;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;
@@ -485,7 +486,7 @@ public static class VexGateScanCommandGroup
summaryTable.AddRow("Passed", $"[green]{results.Summary.Passed}[/]");
summaryTable.AddRow("Warned", $"[yellow]{results.Summary.Warned}[/]");
summaryTable.AddRow("Blocked", $"[red]{results.Summary.Blocked}[/]");
summaryTable.AddRow("Evaluated At", results.Summary.EvaluatedAt?.ToString("O") ?? "N/A");
summaryTable.AddRow("Evaluated At", results.Summary.EvaluatedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "N/A");
console.Write(summaryTable);
}

View File

@@ -0,0 +1,338 @@
// <copyright file="VexGenCommandGroup.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_CLI (CLI-011 through CLI-015)
using System.Collections.Immutable;
using System.CommandLine;
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Spectre.Console;
using StellaOps.Facet;
namespace StellaOps.Cli.Commands;
/// <summary>
/// Command group for VEX generation operations.
/// Provides stella vex gen command for generating VEX documents from facet drift.
/// </summary>
internal static class VexGenCommandGroup
{
/// <summary>
/// Builds the vex gen command group.
/// </summary>
internal static Command BuildVexGenCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var fromDriftOption = new Option<bool>("--from-drift")
{
Description = "Generate VEX from facet drift analysis."
};
var imageOption = new Option<string>("--image", "-i")
{
Description = "Image reference or digest.",
Required = true
};
var baselineOption = new Option<string?>("--baseline", "-b")
{
Description = "Baseline seal ID (default: latest for image)."
};
var outputOption = new Option<string?>("--output", "-o")
{
Description = "Output file path (default: stdout)."
};
var formatOption = new Option<string>("--format")
{
Description = "VEX format: openvex, csaf."
};
formatOption.SetDefaultValue("openvex");
var statusOption = new Option<string>("--status")
{
Description = "VEX status: under_investigation, not_affected, affected."
};
statusOption.SetDefaultValue("under_investigation");
var gen = new Command("gen", "Generate VEX statements from drift analysis.");
gen.Add(fromDriftOption);
gen.Add(imageOption);
gen.Add(baselineOption);
gen.Add(outputOption);
gen.Add(formatOption);
gen.Add(statusOption);
gen.Add(verboseOption);
gen.SetAction(parseResult =>
{
var fromDrift = parseResult.GetValue(fromDriftOption);
var image = parseResult.GetValue(imageOption)!;
var baseline = parseResult.GetValue(baselineOption);
var output = parseResult.GetValue(outputOption);
var format = parseResult.GetValue(formatOption)!;
var status = parseResult.GetValue(statusOption)!;
var verbose = parseResult.GetValue(verboseOption);
if (!fromDrift)
{
AnsiConsole.MarkupLine("[yellow]--from-drift is required for VEX generation.[/]");
return Task.FromResult(1);
}
return HandleVexFromDriftAsync(
services,
image,
baseline,
output,
format,
status,
verbose,
cancellationToken);
});
return gen;
}
private static async Task<int> HandleVexFromDriftAsync(
IServiceProvider services,
string image,
string? baselineId,
string? outputPath,
string format,
string status,
bool verbose,
CancellationToken ct)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetService<ILoggerFactory>()?.CreateLogger("vex.gen")
?? NullLogger.Instance;
var timeProvider = scope.ServiceProvider.GetService<TimeProvider>() ?? TimeProvider.System;
try
{
var driftDetector = scope.ServiceProvider.GetService<IFacetDriftDetector>();
var sealStore = scope.ServiceProvider.GetService<IFacetSealStore>();
if (driftDetector is null || sealStore is null)
{
AnsiConsole.MarkupLine("[red]Facet services not available. Ensure facet module is configured.[/]");
return 1;
}
AnsiConsole.MarkupLine($"[bold]Generating VEX from drift for:[/] {image}");
// Load baseline seal
FacetSeal? baseline;
if (!string.IsNullOrEmpty(baselineId))
{
baseline = await sealStore.GetByCombinedRootAsync(baselineId, ct).ConfigureAwait(false);
if (baseline is null)
{
AnsiConsole.MarkupLine($"[red]Baseline seal '{baselineId}' not found.[/]");
return 1;
}
}
else
{
baseline = await sealStore.GetLatestSealAsync(image, ct).ConfigureAwait(false);
if (baseline is null)
{
AnsiConsole.MarkupLine("[red]No baseline seal found for image. Run 'stella seal' first.[/]");
return 1;
}
}
AnsiConsole.MarkupLine($"[dim]Baseline seal:[/] {TruncateHash(baseline.CombinedMerkleRoot)} ({baseline.CreatedAt:yyyy-MM-dd HH:mm:ss})");
// Get current seal for comparison
var currentSeal = await sealStore.GetLatestSealAsync(image, ct).ConfigureAwait(false);
if (currentSeal is null)
{
AnsiConsole.MarkupLine("[red]No current seal found for image.[/]");
return 1;
}
// Compute drift
AnsiConsole.MarkupLine("[dim]Computing drift...[/]");
var report = await driftDetector.DetectDriftAsync(baseline, currentSeal, ct).ConfigureAwait(false);
// Generate VEX document
AnsiConsole.MarkupLine("[dim]Generating VEX statements...[/]");
var vexDocument = GenerateOpenVex(report, image, status, timeProvider);
if (vexDocument.Statements.Length == 0)
{
AnsiConsole.MarkupLine("[yellow]No facets require VEX authorization.[/]");
return 0;
}
// Output
var vexJson = JsonSerializer.Serialize(vexDocument, new JsonSerializerOptions
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
});
if (!string.IsNullOrEmpty(outputPath))
{
await File.WriteAllTextAsync(outputPath, vexJson, ct).ConfigureAwait(false);
AnsiConsole.MarkupLine($"[green]VEX written to:[/] {outputPath}");
}
else
{
Console.WriteLine(vexJson);
}
// Summary
AnsiConsole.WriteLine();
AnsiConsole.MarkupLine($"[bold green]Generated {vexDocument.Statements.Length} VEX statement(s)[/]");
if (verbose)
{
foreach (var stmt in vexDocument.Statements)
{
AnsiConsole.MarkupLine($" [dim]-[/] {stmt.Justification}");
}
}
return 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to generate VEX for {Image}", image);
AnsiConsole.MarkupLine($"[red]Error:[/] {ex.Message}");
return 1;
}
}
private static OpenVexDocument GenerateOpenVex(
FacetDriftReport report,
string imageDigest,
string status,
TimeProvider timeProvider)
{
var now = timeProvider.GetUtcNow();
var docId = Guid.NewGuid();
var statements = new List<OpenVexStatement>();
foreach (var drift in report.FacetDrifts.Where(d =>
d.QuotaVerdict == QuotaVerdict.RequiresVex ||
d.QuotaVerdict == QuotaVerdict.Warning))
{
statements.Add(new OpenVexStatement
{
Id = $"vex:{Guid.NewGuid()}",
Status = status,
Timestamp = now.ToString("O", CultureInfo.InvariantCulture),
Products =
[
new OpenVexProduct
{
Id = imageDigest,
Identifiers = new OpenVexIdentifiers { Facet = drift.FacetId }
}
],
Justification = $"Facet drift authorization for {drift.FacetId}. " +
$"Churn: {drift.ChurnPercent:F2}% " +
$"({drift.Added.Length} added, {drift.Removed.Length} removed, {drift.Modified.Length} modified)",
ActionStatement = drift.QuotaVerdict == QuotaVerdict.RequiresVex
? "Review required before deployment"
: "Drift within acceptable limits but raised for awareness"
});
}
return new OpenVexDocument
{
Context = "https://openvex.dev/ns",
Id = $"https://stellaops.io/vex/{docId}",
Author = "StellaOps CLI",
Timestamp = now.ToString("O", CultureInfo.InvariantCulture),
Version = 1,
Statements = [.. statements]
};
}
private static string TruncateHash(string? hash)
{
if (string.IsNullOrEmpty(hash)) return "(none)";
return hash.Length > 16 ? $"{hash[..8]}...{hash[^8..]}" : hash;
}
}
/// <summary>
/// OpenVEX document model.
/// </summary>
internal sealed record OpenVexDocument
{
[JsonPropertyName("@context")]
public required string Context { get; init; }
[JsonPropertyName("@id")]
public required string Id { get; init; }
[JsonPropertyName("author")]
public required string Author { get; init; }
[JsonPropertyName("timestamp")]
public required string Timestamp { get; init; }
[JsonPropertyName("version")]
public required int Version { get; init; }
[JsonPropertyName("statements")]
public required ImmutableArray<OpenVexStatement> Statements { get; init; }
}
/// <summary>
/// OpenVEX statement model.
/// </summary>
internal sealed record OpenVexStatement
{
[JsonPropertyName("@id")]
public required string Id { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("timestamp")]
public required string Timestamp { get; init; }
[JsonPropertyName("products")]
public required ImmutableArray<OpenVexProduct> Products { get; init; }
[JsonPropertyName("justification")]
public required string Justification { get; init; }
[JsonPropertyName("action_statement")]
public string? ActionStatement { get; init; }
}
/// <summary>
/// OpenVEX product reference.
/// </summary>
internal sealed record OpenVexProduct
{
[JsonPropertyName("@id")]
public required string Id { get; init; }
[JsonPropertyName("identifiers")]
public required OpenVexIdentifiers Identifiers { get; init; }
}
/// <summary>
/// OpenVEX identifiers.
/// </summary>
internal sealed record OpenVexIdentifiers
{
[JsonPropertyName("facet")]
public string? Facet { get; init; }
}
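To make the serialized shape concrete, here is an illustrative instance of the model above; every value is made up, and in practice GenerateOpenVex populates the fields from the drift report and the configured status. Serializing it with JsonSerializer emits the @context, @id, and action_statement keys declared via JsonPropertyName.
var exampleVex = new OpenVexDocument
{
    Context = "https://openvex.dev/ns",
    Id = "https://stellaops.io/vex/00000000-0000-0000-0000-000000000000",
    Author = "StellaOps CLI",
    Timestamp = "2026-01-07T00:00:00.0000000+00:00",
    Version = 1,
    Statements =
    [
        new OpenVexStatement
        {
            Id = "vex:11111111-1111-1111-1111-111111111111",
            Status = "under_investigation",
            Timestamp = "2026-01-07T00:00:00.0000000+00:00",
            Products =
            [
                new OpenVexProduct
                {
                    Id = "sha256:0123456789abcdef0123456789abcdef",
                    Identifiers = new OpenVexIdentifiers { Facet = "example-facet" }
                }
            ],
            Justification = "Facet drift authorization for example-facet. Churn: 1.25% (2 added, 0 removed, 3 modified)",
            ActionStatement = "Review required before deployment"
        }
    ]
};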

View File

@@ -3647,9 +3647,9 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
if (!string.IsNullOrWhiteSpace(request.Tenant))
queryParams.Add($"tenant={Uri.EscapeDataString(request.Tenant)}");
if (request.From.HasValue)
queryParams.Add($"from={Uri.EscapeDataString(request.From.Value.ToString("O"))}");
queryParams.Add($"from={Uri.EscapeDataString(request.From.Value.ToString("O", CultureInfo.InvariantCulture))}");
if (request.To.HasValue)
queryParams.Add($"to={Uri.EscapeDataString(request.To.Value.ToString("O"))}");
queryParams.Add($"to={Uri.EscapeDataString(request.To.Value.ToString("O", CultureInfo.InvariantCulture))}");
if (!string.IsNullOrWhiteSpace(request.Status))
queryParams.Add($"status={Uri.EscapeDataString(request.Status)}");
if (request.Limit.HasValue)

Some files were not shown because too many files have changed in this diff.