Merge remote changes (theirs)
@@ -7,7 +7,7 @@

## Working Directory
- Primary: `src/AdvisoryAI/**` (WebService, Worker, Hosting, plugins, tests).
- Docs: `docs/advisory-ai/**`, `docs/policy/assistant-parameters.md`, `docs/sbom/*` when explicitly touched by sprint tasks.
- Docs: `docs/advisory-ai/**`, `docs/policy/assistant-parameters.md`, `docs/modules/sbom-service/*` when explicitly touched by sprint tasks.
- Shared libraries allowed only if referenced by Advisory AI projects; otherwise stay in-module.

## Required Reading (treat as read before DOING)

@@ -689,7 +689,7 @@ static Task<IResult> HandlePolicyCompile(
BundleName = request.BundleName,
Version = "1.0.0",
RuleCount = request.RuleIds.Count,
CompiledAt = now.ToString("O"),
CompiledAt = now.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ContentHash = $"sha256:{contentHash}",
SignatureId = null // Would be signed in production
};
@@ -751,10 +751,10 @@ static async Task<IResult> HandleGetConsent(
return Results.Ok(new AiConsentStatusResponse
{
Consented = record.Consented,
ConsentedAt = record.ConsentedAt?.ToString("O"),
ConsentedAt = record.ConsentedAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ConsentedBy = record.UserId,
Scope = record.Scope,
ExpiresAt = record.ExpiresAt?.ToString("O"),
ExpiresAt = record.ExpiresAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
SessionLevel = record.SessionLevel
});
}
@@ -763,6 +763,7 @@ static async Task<IResult> HandleGrantConsent(
HttpContext httpContext,
AiConsentGrantRequest request,
IAiConsentStore consentStore,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
if (!request.DataShareAcknowledged)
@@ -786,8 +787,8 @@ static async Task<IResult> HandleGrantConsent(
return Results.Ok(new AiConsentGrantResponse
{
Consented = record.Consented,
ConsentedAt = record.ConsentedAt?.ToString("O") ?? DateTimeOffset.UtcNow.ToString("O"),
ExpiresAt = record.ExpiresAt?.ToString("O")
ConsentedAt = record.ConsentedAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture) ?? timeProvider.GetUtcNow().ToString("O", System.Globalization.CultureInfo.InvariantCulture),
ExpiresAt = record.ExpiresAt?.ToString("O", System.Globalization.CultureInfo.InvariantCulture)
});
}

@@ -863,7 +864,7 @@ static async Task<IResult> HandleJustify(
ConfidenceScore = result.ConfidenceScore,
EvidenceSuggestions = result.EvidenceSuggestions,
ModelVersion = result.ModelVersion,
GeneratedAt = result.GeneratedAt.ToString("O"),
GeneratedAt = result.GeneratedAt.ToString("O", System.Globalization.CultureInfo.InvariantCulture),
TraceId = result.TraceId
});
}
@@ -919,7 +920,7 @@ static Task<IResult> HandleGetRateLimits(
Feature = l.Feature,
Limit = l.Limit,
Remaining = l.Remaining,
ResetsAt = l.ResetsAt.ToString("O")
ResetsAt = l.ResetsAt.ToString("O", System.Globalization.CultureInfo.InvariantCulture)
}).ToList();

return Task.FromResult(Results.Ok(response));

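The handler changes above all follow one pattern: round-trip ("O") timestamps formatted with an explicit provider. A minimal standalone sketch of the round-trip contract (illustrative only, not code from this commit):

using System.Globalization;

var now = DateTimeOffset.UtcNow;
// "O" yields an ISO 8601 round-trip string; the explicit provider makes the intent unambiguous.
var text = now.ToString("O", CultureInfo.InvariantCulture);
// Parsing back with RoundtripKind recovers the exact instant, including the offset.
var parsed = DateTimeOffset.Parse(text, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
// parsed == now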
@@ -22,6 +22,7 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
private readonly AdvisoryPipelineMetrics _metrics;
private readonly IAdvisoryPipelineExecutor _executor;
private readonly TimeProvider _timeProvider;
private readonly Func<double> _jitterSource;
private readonly ILogger<AdvisoryTaskWorker> _logger;
private int _consecutiveErrors;

@@ -32,7 +33,8 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
AdvisoryPipelineMetrics metrics,
IAdvisoryPipelineExecutor executor,
TimeProvider timeProvider,
ILogger<AdvisoryTaskWorker> logger)
ILogger<AdvisoryTaskWorker> logger,
Func<double>? jitterSource = null)
{
_queue = queue ?? throw new ArgumentNullException(nameof(queue));
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
@@ -40,6 +42,7 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_executor = executor ?? throw new ArgumentNullException(nameof(executor));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_jitterSource = jitterSource ?? Random.Shared.NextDouble;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}

@@ -146,8 +149,8 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
// Exponential backoff: base * 2^(errorCount-1), capped at max
var backoff = Math.Min(BaseRetryDelaySeconds * Math.Pow(2, errorCount - 1), MaxRetryDelaySeconds);

// Add jitter (+/- JitterFactor percent)
var jitter = backoff * JitterFactor * (2 * Random.Shared.NextDouble() - 1);
// Add jitter (+/- JitterFactor percent) using injectable source for testability
var jitter = backoff * JitterFactor * (2 * _jitterSource() - 1);

return Math.Max(BaseRetryDelaySeconds, backoff + jitter);
}

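The injectable jitter source above is what makes the retry delay testable. A standalone sketch of the same formula (illustrative only; the constant values are assumptions, not the worker's actual settings):

// Hypothetical standalone version of the backoff computation, for illustration.
static double ComputeBackoffSeconds(
    int errorCount,
    Func<double> jitterSource,
    double baseDelaySeconds = 5,
    double maxDelaySeconds = 300,
    double jitterFactor = 0.2)
{
    var backoff = Math.Min(baseDelaySeconds * Math.Pow(2, errorCount - 1), maxDelaySeconds);
    var jitter = backoff * jitterFactor * (2 * jitterSource() - 1); // +/- jitterFactor
    return Math.Max(baseDelaySeconds, backoff + jitter);
}

// In a test, a fixed source such as () => 0.5 removes the jitter entirely,
// so a third consecutive failure yields exactly 20 seconds with the assumed defaults.
var delay = ComputeBackoffSeconds(3, () => 0.5);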
@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -16,6 +17,7 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
private readonly IExplanationInferenceClient _inferenceClient;
private readonly ICitationExtractor _citationExtractor;
private readonly IExplanationStore _store;
private readonly TimeProvider _timeProvider;

private const double EvidenceBackedThreshold = 0.8;

@@ -24,13 +26,15 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
IExplanationPromptService promptService,
IExplanationInferenceClient inferenceClient,
ICitationExtractor citationExtractor,
IExplanationStore store)
IExplanationStore store,
TimeProvider? timeProvider = null)
{
_evidenceService = evidenceService;
_promptService = promptService;
_inferenceClient = inferenceClient;
_citationExtractor = citationExtractor;
_store = store;
_timeProvider = timeProvider ?? TimeProvider.System;
}

public async Task<ExplanationResult> GenerateAsync(ExplanationRequest request, CancellationToken cancellationToken = default)
@@ -91,7 +95,7 @@ public sealed class EvidenceAnchoredExplanationGenerator : IExplanationGenerator
ModelId = inferenceResult.ModelId,
PromptTemplateVersion = prompt.TemplateVersion,
InputHashes = inputHashes,
GeneratedAt = DateTime.UtcNow.ToString("O"),
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
OutputHash = outputHash
};


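The TimeProvider-first pattern introduced here (and repeated in the classes below) is what makes the generated timestamps reproducible. A minimal fixed clock for tests could look like this (hypothetical helper, not part of this commit):

sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _utcNow;
    public FixedTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow;
    public override DateTimeOffset GetUtcNow() => _utcNow;
}

// Any component that accepts a TimeProvider now emits a stable value:
var clock = new FixedTimeProvider(DateTimeOffset.UnixEpoch);
var stamp = clock.GetUtcNow().ToString("O", System.Globalization.CultureInfo.InvariantCulture);
// stamp == "1970-01-01T00:00:00.0000000+00:00"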
@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -14,6 +15,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
private readonly IPolicyPromptService _promptService;
private readonly IPolicyInferenceClient _inferenceClient;
private readonly IPolicyIntentStore _intentStore;
private readonly TimeProvider _timeProvider;

private static readonly string[] FewShotExamples = new[]
{
@@ -27,11 +29,13 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
public AiPolicyIntentParser(
IPolicyPromptService promptService,
IPolicyInferenceClient inferenceClient,
IPolicyIntentStore intentStore)
IPolicyIntentStore intentStore,
TimeProvider? timeProvider = null)
{
_promptService = promptService;
_inferenceClient = inferenceClient;
_intentStore = intentStore;
_timeProvider = timeProvider ?? TimeProvider.System;
}

public async Task<PolicyParseResult> ParseAsync(
@@ -61,7 +65,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
Success = intent.Confidence >= 0.7,
ErrorMessage = intent.Confidence < 0.7 ? "Ambiguous input - clarification needed" : null,
ModelId = inferenceResult.ModelId,
ParsedAt = DateTime.UtcNow.ToString("O")
ParsedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}

@@ -93,7 +97,7 @@ public sealed class AiPolicyIntentParser : IPolicyIntentParser
Intent = clarifiedIntent,
Success = clarifiedIntent.Confidence >= 0.8,
ModelId = inferenceResult.ModelId,
ParsedAt = DateTime.UtcNow.ToString("O")
ParsedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}


@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;

@@ -10,6 +11,13 @@ namespace StellaOps.AdvisoryAI.PolicyStudio;
/// </summary>
public sealed class LatticeRuleGenerator : IPolicyRuleGenerator
{
private readonly TimeProvider _timeProvider;

public LatticeRuleGenerator(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}

public Task<RuleGenerationResult> GenerateAsync(
PolicyIntent intent,
CancellationToken cancellationToken = default)
@@ -58,7 +66,7 @@ public sealed class LatticeRuleGenerator : IPolicyRuleGenerator
Success = true,
Warnings = warnings,
IntentId = intent.IntentId,
GeneratedAt = DateTime.UtcNow.ToString("O")
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
});
}


@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -317,15 +318,18 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
private readonly IPolicyRuleGenerator _ruleGenerator;
private readonly IPolicyBundleSigner? _signer;
private readonly ILogger<PolicyBundleCompiler> _logger;
private readonly TimeProvider _timeProvider;

public PolicyBundleCompiler(
IPolicyRuleGenerator ruleGenerator,
IPolicyBundleSigner? signer,
ILogger<PolicyBundleCompiler> logger)
ILogger<PolicyBundleCompiler> logger,
TimeProvider? timeProvider = null)
{
_ruleGenerator = ruleGenerator ?? throw new ArgumentNullException(nameof(ruleGenerator));
_signer = signer;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}

public async Task<PolicyCompilationResult> CompileAsync(
@@ -388,7 +392,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Warnings = warnings,
ValidationReport = validationReport,
TestReport = testReport,
CompiledAt = DateTime.UtcNow.ToString("O")
CompiledAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}

@@ -409,7 +413,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Warnings = warnings,
ValidationReport = validationReport,
TestReport = testReport,
CompiledAt = DateTime.UtcNow.ToString("O"),
CompiledAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
BundleDigest = bundleDigest
};
}
@@ -425,7 +429,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
// Validate trust roots
foreach (var root in bundle.TrustRoots)
{
if (root.ExpiresAt.HasValue && root.ExpiresAt.Value < DateTimeOffset.UtcNow)
if (root.ExpiresAt.HasValue && root.ExpiresAt.Value < _timeProvider.GetUtcNow())
{
semanticWarnings.Add($"Trust root '{root.Principal.Id}' has expired");
}
@@ -489,7 +493,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
ContentDigest = contentDigest,
Signature = string.Empty,
Algorithm = "none",
SignedAt = DateTime.UtcNow.ToString("O")
SignedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
};
}

@@ -506,7 +510,7 @@ public sealed class PolicyBundleCompiler : IPolicyBundleCompiler
Algorithm = signature.Algorithm,
KeyId = options.KeyId,
SignerIdentity = options.SignerIdentity,
SignedAt = DateTime.UtcNow.ToString("O"),
SignedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
CertificateChain = signature.CertificateChain
};
}

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;

@@ -10,6 +11,13 @@ namespace StellaOps.AdvisoryAI.PolicyStudio;
/// </summary>
public sealed class PropertyBasedTestSynthesizer : ITestCaseSynthesizer
{
private readonly TimeProvider _timeProvider;

public PropertyBasedTestSynthesizer(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}

public Task<IReadOnlyList<PolicyTestCase>> SynthesizeAsync(
IReadOnlyList<LatticeRule> rules,
CancellationToken cancellationToken = default)
@@ -53,7 +61,7 @@ public sealed class PropertyBasedTestSynthesizer : ITestCaseSynthesizer
Passed = results.Count(r => r.Passed),
Failed = results.Count(r => !r.Passed),
Results = results,
RunAt = DateTime.UtcNow.ToString("O")
RunAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
});
}


@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Globalization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
@@ -106,7 +107,7 @@ internal sealed class ConcelierAdvisoryDocumentProvider : IAdvisoryDocumentProvi
["vendor"] = record.Document.Source.Vendor,
["connector"] = record.Document.Source.Connector,
["content_hash"] = record.Document.Upstream.ContentHash,
["ingested_at"] = record.IngestedAt.UtcDateTime.ToString("O"),
["ingested_at"] = record.IngestedAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
};

if (!string.IsNullOrWhiteSpace(record.Document.Source.Stream))

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -15,17 +16,20 @@ public sealed class AiRemediationPlanner : IRemediationPlanner
private readonly IRemediationPromptService _promptService;
private readonly IRemediationInferenceClient _inferenceClient;
private readonly IRemediationPlanStore _planStore;
private readonly TimeProvider _timeProvider;

public AiRemediationPlanner(
IPackageVersionResolver versionResolver,
IRemediationPromptService promptService,
IRemediationInferenceClient inferenceClient,
IRemediationPlanStore planStore)
IRemediationPlanStore planStore,
TimeProvider? timeProvider = null)
{
_versionResolver = versionResolver;
_promptService = promptService;
_inferenceClient = inferenceClient;
_planStore = planStore;
_timeProvider = timeProvider ?? TimeProvider.System;
}

public async Task<RemediationPlan> GeneratePlanAsync(
@@ -85,7 +89,7 @@ public sealed class AiRemediationPlanner : IRemediationPlanner
NotReadyReason = notReadyReason,
ConfidenceScore = inferenceResult.Confidence,
ModelId = inferenceResult.ModelId,
GeneratedAt = DateTime.UtcNow.ToString("O"),
GeneratedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
InputHashes = inputHashes,
EvidenceRefs = new List<string> { versionResult.CurrentVersion, versionResult.RecommendedVersion }
};

@@ -1,3 +1,5 @@
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Remediation;
|
||||
|
||||
/// <summary>
|
||||
@@ -7,42 +9,56 @@ namespace StellaOps.AdvisoryAI.Remediation;
|
||||
/// </summary>
|
||||
public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
|
||||
{
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly Func<Guid> _guidFactory;
|
||||
private readonly Func<int, int, int> _randomFactory;
|
||||
|
||||
public AzureDevOpsPullRequestGenerator(
|
||||
TimeProvider? timeProvider = null,
|
||||
Func<Guid>? guidFactory = null,
|
||||
Func<int, int, int>? randomFactory = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidFactory = guidFactory ?? Guid.NewGuid;
|
||||
_randomFactory = randomFactory ?? Random.Shared.Next;
|
||||
}
|
||||
|
||||
public string ScmType => "azure-devops";
|
||||
|
||||
public Task<PullRequestResult> CreatePullRequestAsync(
|
||||
RemediationPlan plan,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
if (!plan.PrReady)
|
||||
{
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = $"ado-pr-{Guid.NewGuid():N}",
|
||||
PrId = $"ado-pr-{_guidFactory():N}",
|
||||
PrNumber = 0,
|
||||
Url = string.Empty,
|
||||
BranchName = string.Empty,
|
||||
Status = PullRequestStatus.Failed,
|
||||
StatusMessage = plan.NotReadyReason ?? "Plan is not PR-ready",
|
||||
CreatedAt = DateTime.UtcNow.ToString("O"),
|
||||
UpdatedAt = DateTime.UtcNow.ToString("O")
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
});
|
||||
}
|
||||
|
||||
var branchName = GenerateBranchName(plan);
|
||||
var prId = $"ado-pr-{Guid.NewGuid():N}";
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var branchName = GenerateBranchName(plan, _timeProvider);
|
||||
var prId = $"ado-pr-{_guidFactory():N}";
|
||||
|
||||
// In a real implementation, this would use Azure DevOps REST API
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = prId,
|
||||
PrNumber = new Random().Next(1000, 9999),
|
||||
PrNumber = _randomFactory(1000, 9999),
|
||||
Url = $"https://dev.azure.com/{ExtractOrgProject(plan.Request.RepositoryUrl)}/_git/{ExtractRepoName(plan.Request.RepositoryUrl)}/pullrequest/{prId}",
|
||||
BranchName = branchName,
|
||||
Status = PullRequestStatus.Creating,
|
||||
StatusMessage = "Pull request is being created",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
});
|
||||
}
|
||||
|
||||
@@ -50,7 +66,7 @@ public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
|
||||
string prId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = prId,
|
||||
@@ -80,10 +96,10 @@ public sealed class AzureDevOpsPullRequestGenerator : IPullRequestGenerator
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static string GenerateBranchName(RemediationPlan plan)
|
||||
private static string GenerateBranchName(RemediationPlan plan, TimeProvider timeProvider)
|
||||
{
|
||||
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
|
||||
var timestamp = DateTime.UtcNow.ToString("yyyyMMdd");
|
||||
var timestamp = timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
|
||||
return $"stellaops/fix-{vulnId}-{timestamp}";
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Remediation;
|
||||
|
||||
/// <summary>
|
||||
@@ -8,10 +10,20 @@ namespace StellaOps.AdvisoryAI.Remediation;
|
||||
public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
|
||||
{
|
||||
private readonly IRemediationPlanStore _planStore;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly Func<Guid> _guidFactory;
|
||||
private readonly Func<int, int, int> _randomFactory;
|
||||
|
||||
public GitHubPullRequestGenerator(IRemediationPlanStore planStore)
|
||||
public GitHubPullRequestGenerator(
|
||||
IRemediationPlanStore planStore,
|
||||
TimeProvider? timeProvider = null,
|
||||
Func<Guid>? guidFactory = null,
|
||||
Func<int, int, int>? randomFactory = null)
|
||||
{
|
||||
_planStore = planStore;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidFactory = guidFactory ?? Guid.NewGuid;
|
||||
_randomFactory = randomFactory ?? Random.Shared.Next;
|
||||
}
|
||||
|
||||
public string ScmType => "github";
|
||||
@@ -20,19 +32,20 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
|
||||
RemediationPlan plan,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
// Validate plan is PR-ready
|
||||
if (!plan.PrReady)
|
||||
{
|
||||
return new PullRequestResult
|
||||
{
|
||||
PrId = $"pr-{Guid.NewGuid():N}",
|
||||
PrId = $"pr-{_guidFactory():N}",
|
||||
PrNumber = 0,
|
||||
Url = string.Empty,
|
||||
BranchName = string.Empty,
|
||||
Status = PullRequestStatus.Failed,
|
||||
StatusMessage = plan.NotReadyReason ?? "Plan is not PR-ready",
|
||||
CreatedAt = DateTime.UtcNow.ToString("O"),
|
||||
UpdatedAt = DateTime.UtcNow.ToString("O")
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
};
|
||||
}
|
||||
|
||||
@@ -45,19 +58,18 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
|
||||
// 3. Commit changes
|
||||
// 4. Create PR via GitHub API
|
||||
|
||||
var prId = $"gh-pr-{Guid.NewGuid():N}";
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var prId = $"gh-pr-{_guidFactory():N}";
|
||||
|
||||
return new PullRequestResult
|
||||
{
|
||||
PrId = prId,
|
||||
PrNumber = new Random().Next(1000, 9999), // Placeholder
|
||||
PrNumber = _randomFactory(1000, 9999), // Placeholder
|
||||
Url = $"https://github.com/{ExtractOwnerRepo(plan.Request.RepositoryUrl)}/pull/{prId}",
|
||||
BranchName = branchName,
|
||||
Status = PullRequestStatus.Creating,
|
||||
StatusMessage = "Pull request is being created",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
};
|
||||
}
|
||||
|
||||
@@ -66,7 +78,7 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// In a real implementation, this would query GitHub API
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
@@ -99,10 +111,10 @@ public sealed class GitHubPullRequestGenerator : IPullRequestGenerator
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static string GenerateBranchName(RemediationPlan plan)
|
||||
private string GenerateBranchName(RemediationPlan plan)
|
||||
{
|
||||
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
|
||||
var timestamp = DateTime.UtcNow.ToString("yyyyMMdd");
|
||||
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
|
||||
return $"stellaops/fix-{vulnId}-{timestamp}";
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Remediation;
|
||||
|
||||
/// <summary>
|
||||
@@ -7,42 +9,56 @@ namespace StellaOps.AdvisoryAI.Remediation;
|
||||
/// </summary>
|
||||
public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
|
||||
{
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly Func<Guid> _guidFactory;
|
||||
private readonly Func<int, int, int> _randomFactory;
|
||||
|
||||
public GitLabMergeRequestGenerator(
|
||||
TimeProvider? timeProvider = null,
|
||||
Func<Guid>? guidFactory = null,
|
||||
Func<int, int, int>? randomFactory = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidFactory = guidFactory ?? Guid.NewGuid;
|
||||
_randomFactory = randomFactory ?? Random.Shared.Next;
|
||||
}
|
||||
|
||||
public string ScmType => "gitlab";
|
||||
|
||||
public Task<PullRequestResult> CreatePullRequestAsync(
|
||||
RemediationPlan plan,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var nowStr = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
if (!plan.PrReady)
|
||||
{
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = $"mr-{Guid.NewGuid():N}",
|
||||
PrId = $"mr-{_guidFactory():N}",
|
||||
PrNumber = 0,
|
||||
Url = string.Empty,
|
||||
BranchName = string.Empty,
|
||||
Status = PullRequestStatus.Failed,
|
||||
StatusMessage = plan.NotReadyReason ?? "Plan is not MR-ready",
|
||||
CreatedAt = DateTime.UtcNow.ToString("O"),
|
||||
UpdatedAt = DateTime.UtcNow.ToString("O")
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
});
|
||||
}
|
||||
|
||||
var branchName = GenerateBranchName(plan);
|
||||
var mrId = $"gl-mr-{Guid.NewGuid():N}";
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var mrId = $"gl-mr-{_guidFactory():N}";
|
||||
|
||||
// In a real implementation, this would use GitLab API
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = mrId,
|
||||
PrNumber = new Random().Next(1000, 9999),
|
||||
PrNumber = _randomFactory(1000, 9999),
|
||||
Url = $"https://gitlab.com/{ExtractProjectPath(plan.Request.RepositoryUrl)}/-/merge_requests/{mrId}",
|
||||
BranchName = branchName,
|
||||
Status = PullRequestStatus.Creating,
|
||||
StatusMessage = "Merge request is being created",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
CreatedAt = nowStr,
|
||||
UpdatedAt = nowStr
|
||||
});
|
||||
}
|
||||
|
||||
@@ -50,7 +66,7 @@ public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
|
||||
string prId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var now = DateTime.UtcNow.ToString("O");
|
||||
var now = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture);
|
||||
return Task.FromResult(new PullRequestResult
|
||||
{
|
||||
PrId = prId,
|
||||
@@ -80,10 +96,10 @@ public sealed class GitLabMergeRequestGenerator : IPullRequestGenerator
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static string GenerateBranchName(RemediationPlan plan)
|
||||
private string GenerateBranchName(RemediationPlan plan)
|
||||
{
|
||||
var vulnId = plan.Request.VulnerabilityId.Replace(":", "-").ToLowerInvariant();
|
||||
var timestamp = DateTime.UtcNow.ToString("yyyyMMdd");
|
||||
var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd", CultureInfo.InvariantCulture);
|
||||
return $"stellaops/fix-{vulnId}-{timestamp}";
|
||||
}
|
||||
|
||||
|
||||
@@ -12,6 +12,8 @@ namespace StellaOps.AdvisoryAI.Tests;
|
||||
/// Sprint: SPRINT_20251226_015_AI_zastava_companion
|
||||
/// Task: ZASTAVA-19
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
[Trait("BlastRadius", TestCategories.BlastRadius.Advisories)]
|
||||
public sealed class ExplanationGeneratorIntegrationTests
|
||||
{
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
|
||||
@@ -30,7 +30,7 @@ public sealed class SemanticVersionTests
|
||||
[InlineData("1.0.0-")]
|
||||
[InlineData("")]
|
||||
[InlineData(null)]
|
||||
public void Parse_InvalidInputs_Throws(string value)
|
||||
public void Parse_InvalidInputs_Throws(string? value)
|
||||
{
|
||||
var act = () => SemanticVersion.Parse(value!);
|
||||
act.Should().Throw<FormatException>();
|
||||
|
||||
@@ -36,7 +36,9 @@ public class SignedModelBundleManagerTests
|
||||
|
||||
var envelopePath = Path.Combine(tempRoot, "signature.dsse");
|
||||
var envelopeJson = await File.ReadAllTextAsync(envelopePath, CancellationToken.None);
|
||||
var envelope = JsonSerializer.Deserialize<ModelBundleSignatureEnvelope>(envelopeJson);
|
||||
var envelope = JsonSerializer.Deserialize<ModelBundleSignatureEnvelope>(
|
||||
envelopeJson,
|
||||
new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower });
|
||||
Assert.NotNull(envelope);
|
||||
|
||||
var payloadJson = Encoding.UTF8.GetString(Convert.FromBase64String(envelope!.Payload));
|
||||
|
||||
@@ -12,12 +12,6 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration" />
|
||||
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
|
||||
|
||||
@@ -72,6 +72,11 @@ public sealed class HeaderScopeAuthenticationHandler : AuthenticationHandler<Aut
|
||||
|
||||
foreach (var value in values)
|
||||
{
|
||||
if (string.IsNullOrEmpty(value))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var scope in value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
|
||||
{
|
||||
scopes.Add(scope);
|
||||
|
||||
@@ -26,6 +26,7 @@ public sealed class AirGapTelemetry
|
||||
private readonly Queue<(string Tenant, long Sequence)> _evictionQueue = new();
|
||||
private readonly object _cacheLock = new();
|
||||
private readonly int _maxTenantEntries;
|
||||
private readonly int _maxEvictionQueueSize;
|
||||
private long _sequence;
|
||||
|
||||
private readonly ObservableGauge<long> _anchorAgeGauge;
|
||||
@@ -36,6 +37,8 @@ public sealed class AirGapTelemetry
|
||||
{
|
||||
var maxEntries = options.Value.MaxTenantEntries;
|
||||
_maxTenantEntries = maxEntries > 0 ? maxEntries : 1000;
|
||||
// Bound eviction queue to 3x tenant entries to prevent unbounded memory growth
|
||||
_maxEvictionQueueSize = _maxTenantEntries * 3;
|
||||
_logger = logger;
|
||||
_anchorAgeGauge = Meter.CreateObservableGauge("airgap_time_anchor_age_seconds", ObserveAges);
|
||||
_budgetGauge = Meter.CreateObservableGauge("airgap_staleness_budget_seconds", ObserveBudgets);
|
||||
@@ -146,6 +149,7 @@ public sealed class AirGapTelemetry
|
||||
|
||||
private void TrimCache()
|
||||
{
|
||||
// Evict stale tenant entries when cache is over limit
|
||||
while (_latestByTenant.Count > _maxTenantEntries && _evictionQueue.Count > 0)
|
||||
{
|
||||
var (tenant, sequence) = _evictionQueue.Dequeue();
|
||||
@@ -154,6 +158,19 @@ public sealed class AirGapTelemetry
|
||||
_latestByTenant.TryRemove(tenant, out _);
|
||||
}
|
||||
}
|
||||
|
||||
// Trim eviction queue to prevent unbounded memory growth
|
||||
// Discard stale entries that no longer match current tenant state
|
||||
while (_evictionQueue.Count > _maxEvictionQueueSize)
|
||||
{
|
||||
var (tenant, sequence) = _evictionQueue.Dequeue();
|
||||
// Only actually evict if this is still the current entry for the tenant
|
||||
if (_latestByTenant.TryGetValue(tenant, out var entry) && entry.Sequence == sequence)
|
||||
{
|
||||
_latestByTenant.TryRemove(tenant, out _);
|
||||
}
|
||||
// Otherwise the queue entry is stale and can be discarded
|
||||
}
|
||||
}
|
||||
|
||||
private readonly record struct TelemetryEntry(long Age, long Budget, long Sequence);
|
||||
|
||||
@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0024-M | DONE | Maintainability audit for StellaOps.AirGap.Controller. |
| AUDIT-0024-T | DONE | Test coverage audit for StellaOps.AirGap.Controller. |
| AUDIT-0024-A | DONE | Applied auth/tenant validation, request validation, telemetry cap, and tests. |
| AUDIT-0024-M | DONE | Revalidated 2026-01-06 (maintainability audit). |
| AUDIT-0024-T | DONE | Revalidated 2026-01-06 (test coverage audit). |
| AUDIT-0024-A | TODO | Revalidated 2026-01-06; open findings pending apply. |

@@ -4,6 +4,7 @@ using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AirGap.Importer.Telemetry;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Quarantine;
|
||||
|
||||
@@ -17,15 +18,18 @@ public sealed class FileSystemQuarantineService : IQuarantineService
|
||||
private readonly QuarantineOptions _options;
|
||||
private readonly ILogger<FileSystemQuarantineService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public FileSystemQuarantineService(
|
||||
IOptions<QuarantineOptions> options,
|
||||
ILogger<FileSystemQuarantineService> logger,
|
||||
TimeProvider timeProvider)
|
||||
TimeProvider timeProvider,
|
||||
IGuidProvider? guidProvider = null)
|
||||
{
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
|
||||
}
|
||||
|
||||
public async Task<QuarantineResult> QuarantineAsync(
|
||||
@@ -74,7 +78,7 @@ public sealed class FileSystemQuarantineService : IQuarantineService
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var timestamp = now.ToString("yyyyMMdd-HHmmss", CultureInfo.InvariantCulture);
|
||||
var sanitizedReason = SanitizeForPathSegment(request.ReasonCode);
|
||||
var quarantineId = $"{timestamp}-{sanitizedReason}-{Guid.NewGuid():N}";
|
||||
var quarantineId = $"{timestamp}-{sanitizedReason}-{_guidProvider.NewGuid():N}";
|
||||
|
||||
var quarantinePath = Path.Combine(tenantRoot, quarantineId);
|
||||
|
||||
@@ -250,7 +254,7 @@ public sealed class FileSystemQuarantineService : IQuarantineService
|
||||
var removedPath = Path.Combine(removedRoot, quarantineId);
|
||||
if (Directory.Exists(removedPath))
|
||||
{
|
||||
removedPath = Path.Combine(removedRoot, $"{quarantineId}-{Guid.NewGuid():N}");
|
||||
removedPath = Path.Combine(removedRoot, $"{quarantineId}-{_guidProvider.NewGuid():N}");
|
||||
}
|
||||
|
||||
Directory.Move(entryPath, removedPath);
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
// Part of Step 5: Graph Emission
|
||||
// =============================================================================
|
||||
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
@@ -27,7 +28,7 @@ public sealed class EvidenceGraph
|
||||
/// Generation timestamp in ISO 8601 UTC format.
|
||||
/// </summary>
|
||||
[JsonPropertyName("generatedAt")]
|
||||
public string GeneratedAt { get; init; } = DateTimeOffset.UnixEpoch.ToString("O");
|
||||
public string GeneratedAt { get; init; } = DateTimeOffset.UnixEpoch.ToString("O", CultureInfo.InvariantCulture);
|
||||
|
||||
/// <summary>
|
||||
/// Generator tool identifier.
|
||||
@@ -209,20 +210,19 @@ public sealed record EvidenceGraphMetadata
|
||||
/// </summary>
|
||||
public sealed class EvidenceGraphSerializer
|
||||
{
|
||||
// Use default escaping for deterministic output (no UnsafeRelaxedJsonEscaping)
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
private static readonly JsonSerializerOptions PrettySerializerOptions = new()
|
||||
{
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System.Globalization;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
using StellaOps.AirGap.Importer.Reconciliation.Signing;
|
||||
@@ -229,7 +230,7 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
|
||||
|
||||
return new EvidenceGraph
|
||||
{
|
||||
GeneratedAt = generatedAtUtc.ToString("O"),
|
||||
GeneratedAt = generatedAtUtc.ToString("O", CultureInfo.InvariantCulture),
|
||||
Nodes = nodes,
|
||||
Edges = edges,
|
||||
Metadata = new EvidenceGraphMetadata
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
// Part of Step 3: Normalization
|
||||
// =============================================================================
|
||||
|
||||
using System.Globalization;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
@@ -225,7 +226,9 @@ public static class JsonNormalizer
|
||||
char.IsDigit(value[3]) &&
|
||||
value[4] == '-')
|
||||
{
|
||||
return DateTimeOffset.TryParse(value, out _);
|
||||
// Use InvariantCulture for deterministic parsing
|
||||
return DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture,
|
||||
DateTimeStyles.RoundtripKind, out _);
|
||||
}
|
||||
|
||||
return false;
|
||||
|
||||
@@ -16,11 +16,10 @@ namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
/// </summary>
|
||||
public sealed class CycloneDxParser : ISbomParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
private static readonly JsonDocumentOptions DocumentOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
CommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public SbomFormat DetectFormat(string filePath)
|
||||
@@ -87,7 +86,7 @@ public sealed class CycloneDxParser : ISbomParser
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
using var document = await JsonDocument.ParseAsync(stream, DocumentOptions, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Validate bomFormat
|
||||
|
||||
@@ -14,11 +14,10 @@ namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
/// </summary>
|
||||
public sealed class DsseAttestationParser : IAttestationParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
private static readonly JsonDocumentOptions DocumentOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
CommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public bool IsAttestation(string filePath)
|
||||
@@ -92,7 +91,7 @@ public sealed class DsseAttestationParser : IAttestationParser
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
using var document = await JsonDocument.ParseAsync(stream, DocumentOptions, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Parse DSSE envelope
|
||||
|
||||
@@ -11,7 +11,7 @@ namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Transforms SBOMs into a canonical form for deterministic hashing and comparison.
|
||||
/// Applies normalization rules per advisory §5 step 3.
|
||||
/// Applies normalization rules per advisory section 5 step 3.
|
||||
/// </summary>
|
||||
public sealed class SbomNormalizer
|
||||
{
|
||||
|
||||
@@ -15,11 +15,10 @@ namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
/// </summary>
|
||||
public sealed class SpdxParser : ISbomParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
private static readonly JsonDocumentOptions DocumentOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
CommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public SbomFormat DetectFormat(string filePath)
|
||||
@@ -84,7 +83,7 @@ public sealed class SpdxParser : ISbomParser
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
using var document = await JsonDocument.ParseAsync(stream, DocumentOptions, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Validate spdxVersion
|
||||
|
||||
@@ -18,5 +18,6 @@
|
||||
<ProjectReference Include="..\\..\\Attestor\\StellaOps.Attestor.Envelope\\StellaOps.Attestor.Envelope.csproj" />
|
||||
<ProjectReference Include="..\\..\\__Libraries\\StellaOps.Cryptography\\StellaOps.Cryptography.csproj" />
|
||||
<ProjectReference Include="..\\..\\__Libraries\\StellaOps.Cryptography.Plugin.OfflineVerification\\StellaOps.Cryptography.Plugin.OfflineVerification.csproj" />
|
||||
<ProjectReference Include="..\\..\\__Libraries\\StellaOps.Determinism.Abstractions\\StellaOps.Determinism.Abstractions.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -5,7 +5,7 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
| AUDIT-0026-M | DONE | Maintainability audit for StellaOps.AirGap.Importer. |
|
||||
| AUDIT-0026-T | DONE | Test coverage audit for StellaOps.AirGap.Importer. |
|
||||
| AUDIT-0026-A | DONE | Applied VEX merge, monotonicity guard, and DSSE PAE alignment. |
|
||||
| AUDIT-0026-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
|
||||
| AUDIT-0026-T | DONE | Revalidated 2026-01-06; test gaps recorded in audit report. |
|
||||
| AUDIT-0026-A | TODO | DSSE PAE helper + invariant formatting, EvidenceGraph canonical JSON, RuleBundleValidator path validation, JsonNormalizer culture, parser JsonOptions, SbomNormalizer ASCII. |
|
||||
| VAL-SMOKE-001 | DONE | Resolved DSSE signer ambiguity; smoke build now proceeds. |
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System.Globalization;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Validation;
|
||||
@@ -14,7 +15,9 @@ internal static class DssePreAuthenticationEncoding
|
||||
}
|
||||
|
||||
var payloadTypeByteCount = Encoding.UTF8.GetByteCount(payloadType);
|
||||
var header = $"{Prefix} {payloadTypeByteCount} {payloadType} {payload.Length} ";
|
||||
// Use InvariantCulture to ensure ASCII decimal digits per DSSE spec
|
||||
var header = string.Create(CultureInfo.InvariantCulture,
|
||||
$"{Prefix} {payloadTypeByteCount} {payloadType} {payload.Length} ");
|
||||
var headerBytes = Encoding.UTF8.GetBytes(header);
|
||||
|
||||
var buffer = new byte[headerBytes.Length + payload.Length];
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System.Globalization;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
@@ -274,7 +275,7 @@ public sealed class ImportValidator
|
||||
["bundleType"] = request.BundleType,
|
||||
["bundleDigest"] = request.BundleDigest,
|
||||
["manifestVersion"] = request.ManifestVersion,
|
||||
["manifestCreatedAt"] = request.ManifestCreatedAt.ToString("O"),
|
||||
["manifestCreatedAt"] = request.ManifestCreatedAt.ToString("O", CultureInfo.InvariantCulture),
|
||||
["forceActivate"] = request.ForceActivate.ToString()
|
||||
};
|
||||
|
||||
|
||||
@@ -23,15 +23,18 @@ public sealed class RuleBundleValidator
|
||||
private readonly DsseVerifier _dsseVerifier;
|
||||
private readonly IVersionMonotonicityChecker _monotonicityChecker;
|
||||
private readonly ILogger<RuleBundleValidator> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public RuleBundleValidator(
|
||||
DsseVerifier dsseVerifier,
|
||||
IVersionMonotonicityChecker monotonicityChecker,
|
||||
ILogger<RuleBundleValidator> logger)
|
||||
ILogger<RuleBundleValidator> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_dsseVerifier = dsseVerifier ?? throw new ArgumentNullException(nameof(dsseVerifier));
|
||||
_monotonicityChecker = monotonicityChecker ?? throw new ArgumentNullException(nameof(monotonicityChecker));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -125,7 +128,14 @@ public sealed class RuleBundleValidator
|
||||
var digestErrors = new List<string>();
|
||||
foreach (var file in manifest.Files)
|
||||
{
|
||||
var filePath = Path.Combine(request.BundleDirectory, file.Name);
|
||||
// Validate path to prevent traversal attacks
|
||||
if (!PathValidation.IsSafeRelativePath(file.Name))
|
||||
{
|
||||
digestErrors.Add($"unsafe-path:{file.Name}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var filePath = PathValidation.SafeCombine(request.BundleDirectory, file.Name);
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
digestErrors.Add($"file-missing:{file.Name}");
|
||||
@@ -157,7 +167,7 @@ public sealed class RuleBundleValidator
|
||||
BundleVersion incomingVersion;
|
||||
try
|
||||
{
|
||||
incomingVersion = BundleVersion.Parse(request.Version, request.CreatedAt ?? DateTimeOffset.UtcNow);
|
||||
incomingVersion = BundleVersion.Parse(request.Version, request.CreatedAt ?? _timeProvider.GetUtcNow());
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -342,3 +352,81 @@ internal sealed class RuleBundleFileEntry
|
||||
public string Digest { get; set; } = string.Empty;
|
||||
public long SizeBytes { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Utility methods for path validation and security.
|
||||
/// </summary>
|
||||
internal static class PathValidation
|
||||
{
|
||||
/// <summary>
|
||||
/// Validates that a relative path does not escape the bundle root.
|
||||
/// </summary>
|
||||
public static bool IsSafeRelativePath(string? relativePath)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(relativePath))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for absolute paths
|
||||
if (Path.IsPathRooted(relativePath))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for path traversal sequences
|
||||
var normalized = relativePath.Replace('\\', '/');
|
||||
var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries);
|
||||
|
||||
var depth = 0;
|
||||
foreach (var segment in segments)
|
||||
{
|
||||
if (segment == "..")
|
||||
{
|
||||
depth--;
|
||||
if (depth < 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (segment != ".")
|
||||
{
|
||||
depth++;
|
||||
}
|
||||
}
|
||||
|
||||
// Also check for null bytes
|
||||
if (relativePath.Contains('\0'))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Combines a root path with a relative path, validating that the result does not escape the root.
|
||||
/// </summary>
|
||||
public static string SafeCombine(string rootPath, string relativePath)
|
||||
{
|
||||
if (!IsSafeRelativePath(relativePath))
|
||||
{
|
||||
throw new ArgumentException(
|
||||
$"Invalid relative path: path traversal or absolute path detected in '{relativePath}'",
|
||||
nameof(relativePath));
|
||||
}
|
||||
|
||||
var combined = Path.GetFullPath(Path.Combine(rootPath, relativePath));
|
||||
var normalizedRoot = Path.GetFullPath(rootPath);
|
||||
|
||||
// Ensure the combined path starts with the root path
|
||||
if (!combined.StartsWith(normalizedRoot, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
throw new ArgumentException(
|
||||
$"Path '{relativePath}' escapes root directory",
|
||||
nameof(relativePath));
|
||||
}
|
||||
|
||||
return combined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -83,80 +83,6 @@ public sealed class HttpClientUsageAnalyzerTests
|
||||
Assert.DoesNotContain(diagnostics, d => d.Id == HttpClientUsageAnalyzer.DiagnosticId);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_RewritesToFactoryCall()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = new HttpClient();
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
const string expected = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = global::StellaOps.AirGap.Policy.EgressHttpClientFactory.Create(egressPolicy: default(global::StellaOps.AirGap.Policy.IEgressPolicy) /* TODO: provide IEgressPolicy instance */, request: new global::StellaOps.AirGap.Policy.EgressRequest(component: "REPLACE_COMPONENT", destination: new global::System.Uri("https://replace-with-endpoint"), intent: "REPLACE_INTENT"));
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var updated = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
Assert.Equal(expected.ReplaceLineEndings(), updated.ReplaceLineEndings());
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_PreservesHttpClientArguments()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var handler = new HttpClientHandler();
|
||||
var client = new HttpClient(handler, disposeHandler: false);
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
const string expected = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var handler = new HttpClientHandler();
|
||||
var client = global::StellaOps.AirGap.Policy.EgressHttpClientFactory.Create(egressPolicy: default(global::StellaOps.AirGap.Policy.IEgressPolicy) /* TODO: provide IEgressPolicy instance */, request: new global::StellaOps.AirGap.Policy.EgressRequest(component: "REPLACE_COMPONENT", destination: new global::System.Uri("https://replace-with-endpoint"), intent: "REPLACE_INTENT"), clientFactory: () => new global::System.Net.Http.HttpClient(handler, disposeHandler: false));
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var updated = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
Assert.Equal(expected.ReplaceLineEndings(), updated.ReplaceLineEndings());
|
||||
}
|
||||
|
||||
private static async Task<ImmutableArray<Diagnostic>> AnalyzeAsync(string source, string assemblyName)
|
||||
{
|
||||
var compilation = CSharpCompilation.Create(
|
||||
@@ -174,53 +100,6 @@ public sealed class HttpClientUsageAnalyzerTests
|
||||
return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync();
|
||||
}
|
||||
|
||||
private static async Task<string> ApplyCodeFixAsync(string source, string assemblyName)
|
||||
{
|
||||
using var workspace = new AdhocWorkspace();
|
||||
|
||||
var projectId = ProjectId.CreateNewId();
|
||||
var documentId = DocumentId.CreateNewId(projectId);
|
||||
var stubDocumentId = DocumentId.CreateNewId(projectId);
|
||||
|
||||
var solution = workspace.CurrentSolution
|
||||
.AddProject(projectId, "TestProject", "TestProject", LanguageNames.CSharp)
|
||||
.WithProjectCompilationOptions(projectId, new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary))
|
||||
.WithProjectAssemblyName(projectId, assemblyName)
|
||||
.AddMetadataReferences(projectId, CreateMetadataReferences())
|
||||
.AddDocument(documentId, "Test.cs", SourceText.From(source))
|
||||
.AddDocument(stubDocumentId, "PolicyStubs.cs", SourceText.From(PolicyStubSource));
|
||||
|
||||
var project = solution.GetProject(projectId)!;
|
||||
var document = solution.GetDocument(documentId)!;
|
||||
|
||||
var compilation = await project.GetCompilationAsync();
|
||||
var analyzer = new HttpClientUsageAnalyzer();
|
||||
var diagnostics = await compilation!.WithAnalyzers(ImmutableArray.Create<DiagnosticAnalyzer>(analyzer))
|
||||
.GetAnalyzerDiagnosticsAsync();
|
||||
|
||||
var diagnostic = Assert.Single(diagnostics);
|
||||
|
||||
var codeFixProvider = new HttpClientUsageCodeFixProvider();
|
||||
var actions = new List<CodeAction>();
|
||||
var context = new CodeFixContext(
|
||||
document,
|
||||
diagnostic,
|
||||
(action, _) => actions.Add(action),
|
||||
CancellationToken.None);
|
||||
|
||||
await codeFixProvider.RegisterCodeFixesAsync(context);
|
||||
var action = Assert.Single(actions);
|
||||
var operations = await action.GetOperationsAsync(CancellationToken.None);
|
||||
|
||||
foreach (var operation in operations)
|
||||
{
|
||||
operation.Apply(workspace, CancellationToken.None);
|
||||
}
|
||||
var updatedDocument = workspace.CurrentSolution.GetDocument(documentId)!;
|
||||
var updatedText = await updatedDocument.GetTextAsync();
|
||||
return updatedText.ToString();
|
||||
}
|
||||
|
||||
private static IEnumerable<MetadataReference> CreateMetadataReferences()
|
||||
{
|
||||
yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location);
|
||||
|
||||
@@ -276,165 +276,6 @@ public sealed class PolicyAnalyzerRoslynTests
|
||||
|
||||
#region AIRGAP-5100-006: Golden Generated Code Tests
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_GeneratesExpectedFactoryCall()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = new HttpClient();
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
const string expectedGolden = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = global::StellaOps.AirGap.Policy.EgressHttpClientFactory.Create(egressPolicy: default(global::StellaOps.AirGap.Policy.IEgressPolicy) /* TODO: provide IEgressPolicy instance */, request: new global::StellaOps.AirGap.Policy.EgressRequest(component: "REPLACE_COMPONENT", destination: new global::System.Uri("https://replace-with-endpoint"), intent: "REPLACE_INTENT"));
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var fixedCode = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
fixedCode.ReplaceLineEndings().Should().Be(expectedGolden.ReplaceLineEndings(),
|
||||
"Code fix should match golden output exactly");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_PreservesTrivia()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
// Important: this client handles external requests
|
||||
var client = new HttpClient(); // end of line comment
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var fixedCode = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
|
||||
// The code fix preserves the trivia from the original node
|
||||
fixedCode.Should().Contain("// Important: this client handles external requests",
|
||||
"Leading comment should be preserved");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_DeterministicOutput()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Determinism;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = new HttpClient();
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
// Apply code fix multiple times
|
||||
var result1 = await ApplyCodeFixAsync(source, assemblyName: "Sample.Determinism");
|
||||
var result2 = await ApplyCodeFixAsync(source, assemblyName: "Sample.Determinism");
|
||||
var result3 = await ApplyCodeFixAsync(source, assemblyName: "Sample.Determinism");
|
||||
|
||||
result1.Should().Be(result2, "Code fix should be deterministic");
|
||||
result2.Should().Be(result3, "Code fix should be deterministic");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_ContainsRequiredPlaceholders()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = new HttpClient();
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var fixedCode = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
|
||||
// Verify all required placeholders are present for developer to fill in
|
||||
fixedCode.Should().Contain("EgressHttpClientFactory.Create");
|
||||
fixedCode.Should().Contain("egressPolicy:");
|
||||
fixedCode.Should().Contain("IEgressPolicy");
|
||||
fixedCode.Should().Contain("EgressRequest");
|
||||
fixedCode.Should().Contain("component:");
|
||||
fixedCode.Should().Contain("REPLACE_COMPONENT");
|
||||
fixedCode.Should().Contain("destination:");
|
||||
fixedCode.Should().Contain("intent:");
|
||||
fixedCode.Should().Contain("REPLACE_INTENT");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFix_UsesFullyQualifiedNames()
|
||||
{
|
||||
const string source = """
|
||||
using System.Net.Http;
|
||||
|
||||
namespace Sample.Service;
|
||||
|
||||
public sealed class Demo
|
||||
{
|
||||
public void Run()
|
||||
{
|
||||
var client = new HttpClient();
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var fixedCode = await ApplyCodeFixAsync(source, assemblyName: "Sample.Service");
|
||||
|
||||
// Verify fully qualified names are used to avoid namespace conflicts
|
||||
fixedCode.Should().Contain("global::StellaOps.AirGap.Policy.EgressHttpClientFactory");
|
||||
fixedCode.Should().Contain("global::StellaOps.AirGap.Policy.EgressRequest");
|
||||
fixedCode.Should().Contain("global::System.Uri");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task FixAllProvider_IsWellKnownBatchFixer()
|
||||
{
|
||||
var provider = new HttpClientUsageCodeFixProvider();
|
||||
var fixAllProvider = provider.GetFixAllProvider();
|
||||
|
||||
fixAllProvider.Should().Be(WellKnownFixAllProviders.BatchFixer,
|
||||
"Should use batch fixer for efficient multi-fix application");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task Analyzer_SupportedDiagnostics_ContainsExpectedId()
|
||||
@@ -446,20 +287,6 @@ public sealed class PolicyAnalyzerRoslynTests
|
||||
supportedDiagnostics[0].Id.Should().Be("AIRGAP001");
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task CodeFixProvider_FixableDiagnosticIds_MatchesAnalyzer()
|
||||
{
|
||||
var analyzer = new HttpClientUsageAnalyzer();
|
||||
var codeFixProvider = new HttpClientUsageCodeFixProvider();
|
||||
|
||||
var analyzerIds = analyzer.SupportedDiagnostics.Select(d => d.Id).ToHashSet();
|
||||
var fixableIds = codeFixProvider.FixableDiagnosticIds.ToHashSet();
|
||||
|
||||
fixableIds.Should().BeSubsetOf(analyzerIds,
|
||||
"Code fix provider should only fix diagnostics reported by the analyzer");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Helpers
|
||||
@@ -481,53 +308,6 @@ public sealed class PolicyAnalyzerRoslynTests
|
||||
return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync();
|
||||
}
|
||||
|
||||
private static async Task<string> ApplyCodeFixAsync(string source, string assemblyName)
|
||||
{
|
||||
using var workspace = new AdhocWorkspace();
|
||||
|
||||
var projectId = ProjectId.CreateNewId();
|
||||
var documentId = DocumentId.CreateNewId(projectId);
|
||||
var stubDocumentId = DocumentId.CreateNewId(projectId);
|
||||
|
||||
var solution = workspace.CurrentSolution
|
||||
.AddProject(projectId, "TestProject", "TestProject", LanguageNames.CSharp)
|
||||
.WithProjectCompilationOptions(projectId, new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary))
|
||||
.WithProjectAssemblyName(projectId, assemblyName)
|
||||
.AddMetadataReferences(projectId, CreateMetadataReferences())
|
||||
.AddDocument(documentId, "Test.cs", SourceText.From(source))
|
||||
.AddDocument(stubDocumentId, "PolicyStubs.cs", SourceText.From(PolicyStubSource));
|
||||
|
||||
var project = solution.GetProject(projectId)!;
|
||||
var document = solution.GetDocument(documentId)!;
|
||||
|
||||
var compilation = await project.GetCompilationAsync();
|
||||
var analyzer = new HttpClientUsageAnalyzer();
|
||||
var diagnostics = await compilation!.WithAnalyzers(ImmutableArray.Create<DiagnosticAnalyzer>(analyzer))
|
||||
.GetAnalyzerDiagnosticsAsync();
|
||||
|
||||
var diagnostic = diagnostics.Single(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId);
|
||||
|
||||
var codeFixProvider = new HttpClientUsageCodeFixProvider();
|
||||
var actions = new List<CodeAction>();
|
||||
var context = new CodeFixContext(
|
||||
document,
|
||||
diagnostic,
|
||||
(action, _) => actions.Add(action),
|
||||
CancellationToken.None);
|
||||
|
||||
await codeFixProvider.RegisterCodeFixesAsync(context);
|
||||
var action = actions.Single();
|
||||
var operations = await action.GetOperationsAsync(CancellationToken.None);
|
||||
|
||||
foreach (var operation in operations)
|
||||
{
|
||||
operation.Apply(workspace, CancellationToken.None);
|
||||
}
|
||||
var updatedDocument = workspace.CurrentSolution.GetDocument(documentId)!;
|
||||
var updatedText = await updatedDocument.GetTextAsync();
|
||||
return updatedText.ToString();
|
||||
}
|
||||
|
||||
private static IEnumerable<MetadataReference> CreateMetadataReferences()
|
||||
{
|
||||
// Core runtime references
|
||||
|
||||
@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0032-M | DONE | Maintainability audit for StellaOps.AirGap.Policy.Analyzers.Tests. |
| AUDIT-0032-T | DONE | Test coverage audit for StellaOps.AirGap.Policy.Analyzers.Tests. |
| AUDIT-0032-A | TODO | Pending approval for changes. |
| AUDIT-0032-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0032-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0032-A | DONE | Waived (test project; revalidated 2026-01-06). |

@@ -1,125 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Composition;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.CodeAnalysis;
|
||||
using Microsoft.CodeAnalysis.CodeActions;
|
||||
using Microsoft.CodeAnalysis.CodeFixes;
|
||||
using Microsoft.CodeAnalysis.CSharp;
|
||||
using Microsoft.CodeAnalysis.CSharp.Syntax;
|
||||
|
||||
namespace StellaOps.AirGap.Policy.Analyzers;
|
||||
|
||||
/// <summary>
|
||||
/// Offers a remediation template that routes HttpClient creation through the shared EgressPolicy factory.
|
||||
/// </summary>
|
||||
[ExportCodeFixProvider(LanguageNames.CSharp, Name = nameof(HttpClientUsageCodeFixProvider))]
|
||||
[Shared]
|
||||
public sealed class HttpClientUsageCodeFixProvider : CodeFixProvider
|
||||
{
|
||||
private const string Title = "Use EgressHttpClientFactory.Create(...)";
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override ImmutableArray<string> FixableDiagnosticIds
|
||||
=> ImmutableArray.Create(HttpClientUsageAnalyzer.DiagnosticId);
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override FixAllProvider GetFixAllProvider()
|
||||
=> WellKnownFixAllProviders.BatchFixer;
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override async Task RegisterCodeFixesAsync(CodeFixContext context)
|
||||
{
|
||||
if (context.Document is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var root = await context.Document.GetSyntaxRootAsync(context.CancellationToken).ConfigureAwait(false);
|
||||
if (root is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var diagnostic = context.Diagnostics[0];
|
||||
var node = root.FindNode(diagnostic.Location.SourceSpan);
|
||||
if (node is not ObjectCreationExpressionSyntax objectCreation)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
context.RegisterCodeFix(
|
||||
CodeAction.Create(
|
||||
Title,
|
||||
cancellationToken => ReplaceWithFactoryCallAsync(context.Document, objectCreation, cancellationToken),
|
||||
equivalenceKey: Title),
|
||||
diagnostic);
|
||||
}
|
||||
|
||||
private static async Task<Document> ReplaceWithFactoryCallAsync(Document document, ObjectCreationExpressionSyntax creation, CancellationToken cancellationToken)
|
||||
{
|
||||
var replacementExpression = BuildReplacementExpression(creation);
|
||||
|
||||
var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (root is null)
|
||||
{
|
||||
return document;
|
||||
}
|
||||
|
||||
var updatedRoot = root.ReplaceNode(creation, replacementExpression.WithTriviaFrom(creation));
|
||||
return document.WithSyntaxRoot(updatedRoot);
|
||||
}
|
||||
|
||||
private static ExpressionSyntax BuildReplacementExpression(ObjectCreationExpressionSyntax creation)
|
||||
{
|
||||
var requestExpression = SyntaxFactory.ParseExpression(
|
||||
"new global::StellaOps.AirGap.Policy.EgressRequest(" +
|
||||
"component: \"REPLACE_COMPONENT\", " +
|
||||
"destination: new global::System.Uri(\"https://replace-with-endpoint\"), " +
|
||||
"intent: \"REPLACE_INTENT\")");
|
||||
|
||||
var egressPolicyExpression = SyntaxFactory.ParseExpression(
|
||||
"default(global::StellaOps.AirGap.Policy.IEgressPolicy)");
|
||||
|
||||
var arguments = new List<ArgumentSyntax>
|
||||
{
|
||||
SyntaxFactory.Argument(egressPolicyExpression)
|
||||
.WithNameColon(SyntaxFactory.NameColon("egressPolicy"))
|
||||
.WithTrailingTrivia(
|
||||
SyntaxFactory.Space,
|
||||
SyntaxFactory.Comment("/* TODO: provide IEgressPolicy instance */")),
|
||||
SyntaxFactory.Argument(requestExpression)
|
||||
.WithNameColon(SyntaxFactory.NameColon("request"))
|
||||
};
|
||||
|
||||
if (ShouldUseClientFactory(creation))
|
||||
{
|
||||
var clientFactoryLambda = SyntaxFactory.ParenthesizedLambdaExpression(
|
||||
SyntaxFactory.ParameterList(),
|
||||
CreateHttpClientExpression(creation));
|
||||
|
||||
arguments.Add(
|
||||
SyntaxFactory.Argument(clientFactoryLambda)
|
||||
.WithNameColon(SyntaxFactory.NameColon("clientFactory")));
|
||||
}
|
||||
|
||||
return SyntaxFactory.InvocationExpression(
|
||||
SyntaxFactory.ParseExpression("global::StellaOps.AirGap.Policy.EgressHttpClientFactory.Create"))
|
||||
.WithArgumentList(SyntaxFactory.ArgumentList(SyntaxFactory.SeparatedList(arguments)));
|
||||
}
|
||||
|
||||
private static bool ShouldUseClientFactory(ObjectCreationExpressionSyntax creation)
|
||||
=> (creation.ArgumentList?.Arguments.Count ?? 0) > 0 || creation.Initializer is not null;
|
||||
|
||||
private static ObjectCreationExpressionSyntax CreateHttpClientExpression(ObjectCreationExpressionSyntax creation)
|
||||
{
|
||||
var httpClientType = SyntaxFactory.ParseTypeName("global::System.Net.Http.HttpClient");
|
||||
var arguments = creation.ArgumentList ?? SyntaxFactory.ArgumentList();
|
||||
|
||||
return SyntaxFactory.ObjectCreationExpression(httpClientType)
|
||||
.WithArgumentList(arguments)
|
||||
.WithInitializer(creation.Initializer);
|
||||
}
|
||||
}
@@ -9,11 +9,13 @@
    <IncludeBuildOutput>false</IncludeBuildOutput>
    <GenerateDocumentationFile>true</GenerateDocumentationFile>
    <LangVersion>latest</LangVersion>
    <!-- RS1038: Workspaces reference needed for code fix support; analyzer still works without it -->
    <NoWarn>$(NoWarn);RS1038</NoWarn>
    <WarningsNotAsErrors>$(WarningsNotAsErrors);RS1038</WarningsNotAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.CodeAnalysis.CSharp" PrivateAssets="all" />
    <PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" PrivateAssets="all" />
  </ItemGroup>

</Project>

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0031-M | DONE | Maintainability audit for StellaOps.AirGap.Policy.Analyzers. |
| AUDIT-0031-T | DONE | Test coverage audit for StellaOps.AirGap.Policy.Analyzers. |
| AUDIT-0031-M | DONE | Revalidated 2026-01-06; no new findings. |
| AUDIT-0031-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0032. |
| AUDIT-0031-A | DONE | Applied analyzer symbol match, test assembly exemptions, and code-fix preservation. |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0033-M | DONE | Maintainability audit for StellaOps.AirGap.Policy.Tests. |
| AUDIT-0033-T | DONE | Test coverage audit for StellaOps.AirGap.Policy.Tests. |
| AUDIT-0033-A | TODO | Pending approval for changes. |
| AUDIT-0033-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0033-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0033-A | DONE | Waived (test project; revalidated 2026-01-06). |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0030-M | DONE | Maintainability audit for StellaOps.AirGap.Policy. |
| AUDIT-0030-T | DONE | Test coverage audit for StellaOps.AirGap.Policy. |
| AUDIT-0030-A | DONE | Applied reloadable policy, allowlist de-dup, request guards, and client factory overload. |
| AUDIT-0030-M | DONE | Revalidated 2026-01-06; new findings recorded in audit report. |
| AUDIT-0030-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0033. |
| AUDIT-0030-A | TODO | Replace direct new HttpClient usage in EgressHttpClientFactory. |

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Serialization;

@@ -20,7 +21,7 @@ public sealed record TimeStatusDto(
    public static TimeStatusDto FromStatus(TimeStatus status)
    {
        return new TimeStatusDto(
            status.Anchor.AnchorTime.ToUniversalTime().ToString("O"),
            status.Anchor.AnchorTime.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture),
            status.Anchor.Format,
            status.Anchor.Source,
            status.Anchor.SignatureFingerprint,
@@ -31,7 +32,7 @@ public sealed record TimeStatusDto(
            status.Staleness.IsWarning,
            status.Staleness.IsBreach,
            status.ContentStaleness,
            status.EvaluatedAtUtc.ToUniversalTime().ToString("O"));
            status.EvaluatedAtUtc.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture));
    }

    public string ToJson()
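
The `ToString("O", CultureInfo.InvariantCulture)` changes here are defensive rather than behavioral: the round-trip ("O") specifier already emits a culture-invariant ISO-8601 string, so supplying the provider mainly documents the intent and keeps IFormatProvider analyzers (CA1305-style) quiet. A minimal sketch, outside the diff:

```csharp
using System;
using System.Globalization;

// The "O" (round-trip) specifier ignores the format provider, so both calls emit the same text;
// passing InvariantCulture simply makes the intent explicit and satisfies CA1305-style analyzers.
var now = DateTimeOffset.UtcNow;
var implicitProvider = now.ToString("O");
var explicitProvider = now.ToString("O", CultureInfo.InvariantCulture);
Console.WriteLine(implicitProvider == explicitProvider); // True
```
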
@@ -8,6 +8,8 @@ public sealed class TimeTelemetry
{
    private static readonly Meter Meter = new("StellaOps.AirGap.Time", "1.0.0");
    private const int MaxEntries = 1024;
    // Bound the eviction queue to 3x the max entries to prevent unbounded memory growth
    private const int MaxEvictionQueueSize = MaxEntries * 3;

    private readonly ConcurrentDictionary<string, Snapshot> _latest = new(StringComparer.OrdinalIgnoreCase);
    private readonly ConcurrentQueue<string> _evictionQueue = new();
@@ -71,10 +73,20 @@ public sealed class TimeTelemetry

    private void TrimCache()
    {
        // Evict tenant entries while the cache is over its limit
        while (_latest.Count > MaxEntries && _evictionQueue.TryDequeue(out var candidate))
        {
            _latest.TryRemove(candidate, out _);
        }

        // Trim the eviction queue itself to prevent unbounded memory growth.
        // Dequeued keys may be stale duplicates; removing any that are still cached keeps the
        // queue and the cache roughly in sync when the same tenant is updated repeatedly.
        while (_evictionQueue.Count > MaxEvictionQueueSize && _evictionQueue.TryDequeue(out var stale))
        {
            _latest.TryRemove(stale, out _);
        }
    }

    public sealed record Snapshot(long AgeSeconds, bool IsWarning, bool IsBreach);

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0034-M | DONE | Maintainability audit for StellaOps.AirGap.Time. |
| AUDIT-0034-T | DONE | Test coverage audit for StellaOps.AirGap.Time. |
| AUDIT-0034-A | DONE | Applied time provider, options reload, and trust-root/roughtime hardening. |
| AUDIT-0034-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0034-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0035. |
| AUDIT-0034-A | TODO | Address TimeTelemetry queue growth, TimeTokenParser endianness, and default store wiring. |

@@ -12,6 +12,7 @@ using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Concelier.Core.Raw;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
@@ -134,12 +135,22 @@ public sealed class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository
|
||||
{
|
||||
private readonly Dictionary<string, AdvisoryRawRecord> _records = new();
|
||||
private readonly object _lock = new();
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public InMemoryAdvisoryRawRepository(
|
||||
TimeProvider? timeProvider = null,
|
||||
IGuidProvider? guidProvider = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
|
||||
}
|
||||
|
||||
public Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
|
||||
{
|
||||
var contentHash = ComputeHash(document);
|
||||
var key = $"{document.Tenant}:{contentHash}";
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
@@ -149,7 +160,7 @@ public sealed class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository
|
||||
}
|
||||
|
||||
var record = new AdvisoryRawRecord(
|
||||
Id: Guid.NewGuid().ToString(),
|
||||
Id: _guidProvider.NewGuid().ToString(),
|
||||
Document: document,
|
||||
IngestedAt: now,
|
||||
CreatedAt: now);
|
||||
|
||||
@@ -6,10 +6,12 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.Excititor.Core;
|
||||
using StellaOps.Excititor.Core.Storage;
|
||||
|
||||
@@ -92,7 +94,7 @@ public sealed class ExcititorVexImportTarget : IVexImportTarget
|
||||
Content: contentBytes,
|
||||
Metadata: ImmutableDictionary<string, string>.Empty
|
||||
.Add("importSource", "airgap-snapshot")
|
||||
.Add("snapshotAt", data.SnapshotAt.ToString("O")));
|
||||
.Add("snapshotAt", data.SnapshotAt.ToString("O", CultureInfo.InvariantCulture)));
|
||||
|
||||
await _sink.StoreAsync(document, cancellationToken);
|
||||
created++;
|
||||
@@ -161,10 +163,14 @@ public sealed class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawSto
|
||||
private readonly Dictionary<string, VexRawRecord> _records = new();
|
||||
private readonly string _tenant;
|
||||
private readonly object _lock = new();
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public InMemoryVexRawDocumentSink(string tenant = "default")
|
||||
public InMemoryVexRawDocumentSink(
|
||||
string tenant = "default",
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_tenant = tenant;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
|
||||
@@ -183,7 +189,7 @@ public sealed class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawSto
|
||||
Metadata: document.Metadata,
|
||||
Content: document.Content,
|
||||
InlineContent: true,
|
||||
RecordedAt: DateTimeOffset.UtcNow);
|
||||
RecordedAt: _timeProvider.GetUtcNow());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ using System.IO.Compression;
|
||||
using System.Formats.Tar;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
@@ -25,15 +26,21 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
private readonly IAdvisoryImportTarget? _advisoryTarget;
|
||||
private readonly IVexImportTarget? _vexTarget;
|
||||
private readonly IPolicyImportTarget? _policyTarget;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public KnowledgeSnapshotImporter(
|
||||
IAdvisoryImportTarget? advisoryTarget = null,
|
||||
IVexImportTarget? vexTarget = null,
|
||||
IPolicyImportTarget? policyTarget = null)
|
||||
IPolicyImportTarget? policyTarget = null,
|
||||
TimeProvider? timeProvider = null,
|
||||
IGuidProvider? guidProvider = null)
|
||||
{
|
||||
_advisoryTarget = advisoryTarget;
|
||||
_vexTarget = vexTarget;
|
||||
_policyTarget = policyTarget;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -48,10 +55,10 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
|
||||
if (!File.Exists(request.BundlePath))
|
||||
{
|
||||
return SnapshotImportResult.Failed("Bundle file not found");
|
||||
return SnapshotImportResult.Failed("Bundle file not found", _timeProvider);
|
||||
}
|
||||
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"import-{Guid.NewGuid():N}");
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"import-{_guidProvider.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
@@ -63,21 +70,21 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
var manifestPath = Path.Combine(tempDir, "manifest.json");
|
||||
if (!File.Exists(manifestPath))
|
||||
{
|
||||
return SnapshotImportResult.Failed("Manifest not found in bundle");
|
||||
return SnapshotImportResult.Failed("Manifest not found in bundle", _timeProvider);
|
||||
}
|
||||
|
||||
var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
|
||||
var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestBytes, JsonOptions);
|
||||
if (manifest is null)
|
||||
{
|
||||
return SnapshotImportResult.Failed("Failed to parse manifest");
|
||||
return SnapshotImportResult.Failed("Failed to parse manifest", _timeProvider);
|
||||
}
|
||||
|
||||
var result = new SnapshotImportResult
|
||||
{
|
||||
Success = true,
|
||||
BundleId = manifest.BundleId,
|
||||
StartedAt = DateTimeOffset.UtcNow
|
||||
StartedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
var errors = new List<string>();
|
||||
@@ -148,7 +155,7 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
|
||||
result = result with
|
||||
{
|
||||
CompletedAt = DateTimeOffset.UtcNow,
|
||||
CompletedAt = _timeProvider.GetUtcNow(),
|
||||
Statistics = stats,
|
||||
Errors = errors.Count > 0 ? [.. errors] : null,
|
||||
Success = errors.Count == 0 || !request.FailOnAnyError
|
||||
@@ -158,7 +165,7 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return SnapshotImportResult.Failed($"Import failed: {ex.Message}");
|
||||
return SnapshotImportResult.Failed($"Import failed: {ex.Message}", _timeProvider);
|
||||
}
|
||||
finally
|
||||
{
|
||||
@@ -188,7 +195,15 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
// Validate path to prevent traversal attacks
|
||||
if (!PathValidation.IsSafeRelativePath(entry.RelativePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Unsafe path detected: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath);
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
@@ -243,7 +258,15 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
// Validate path to prevent traversal attacks
|
||||
if (!PathValidation.IsSafeRelativePath(entry.RelativePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Unsafe path detected: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath);
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
@@ -298,7 +321,15 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
// Validate path to prevent traversal attacks
|
||||
if (!PathValidation.IsSafeRelativePath(entry.RelativePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Unsafe path detected: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath);
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
@@ -342,9 +373,52 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
|
||||
private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
|
||||
{
|
||||
var normalizedTargetDir = Path.GetFullPath(targetDir);
|
||||
|
||||
await using var fileStream = File.OpenRead(bundlePath);
|
||||
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
|
||||
await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
|
||||
await using var tarReader = new TarReader(gzipStream, leaveOpen: false);
|
||||
|
||||
while (await tarReader.GetNextEntryAsync(copyData: true, ct) is { } entry)
|
||||
{
|
||||
if (string.IsNullOrEmpty(entry.Name))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Validate entry path to prevent traversal attacks
|
||||
if (!PathValidation.IsSafeRelativePath(entry.Name))
|
||||
{
|
||||
throw new InvalidOperationException($"Unsafe tar entry path detected: {entry.Name}");
|
||||
}
|
||||
|
||||
var destinationPath = Path.GetFullPath(Path.Combine(normalizedTargetDir, entry.Name));
|
||||
|
||||
// Verify the path is within the target directory
|
||||
if (!destinationPath.StartsWith(normalizedTargetDir, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
throw new InvalidOperationException($"Tar entry path escapes target directory: {entry.Name}");
|
||||
}
|
||||
|
||||
// Create directory if needed
|
||||
var entryDir = Path.GetDirectoryName(destinationPath);
|
||||
if (!string.IsNullOrEmpty(entryDir))
|
||||
{
|
||||
Directory.CreateDirectory(entryDir);
|
||||
}
|
||||
|
||||
// Extract based on entry type
|
||||
if (entry.EntryType == TarEntryType.Directory)
|
||||
{
|
||||
Directory.CreateDirectory(destinationPath);
|
||||
}
|
||||
else if (entry.EntryType == TarEntryType.RegularFile ||
|
||||
entry.EntryType == TarEntryType.V7RegularFile)
|
||||
{
|
||||
await entry.ExtractToFileAsync(destinationPath, overwrite: true, ct);
|
||||
}
|
||||
// Skip symbolic links and other special entry types for security
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class ModuleImportResult
|
||||
@@ -422,13 +496,17 @@ public sealed record SnapshotImportResult
|
||||
public IReadOnlyList<string>? Errors { get; init; }
|
||||
public string? Error { get; init; }
|
||||
|
||||
public static SnapshotImportResult Failed(string error) => new()
|
||||
public static SnapshotImportResult Failed(string error, TimeProvider? timeProvider = null)
|
||||
{
|
||||
Success = false,
|
||||
Error = error,
|
||||
StartedAt = DateTimeOffset.UtcNow,
|
||||
CompletedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
var now = (timeProvider ?? TimeProvider.System).GetUtcNow();
|
||||
return new()
|
||||
{
|
||||
Success = false,
|
||||
Error = error,
|
||||
StartedAt = now,
|
||||
CompletedAt = now
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record ImportStatistics
|
||||
|
||||
@@ -9,6 +9,7 @@ using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
@@ -26,13 +27,16 @@ public sealed class PolicyRegistryImportTarget : IPolicyImportTarget
|
||||
|
||||
private readonly IPolicyPackImportStore _store;
|
||||
private readonly string _tenantId;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public PolicyRegistryImportTarget(
|
||||
IPolicyPackImportStore store,
|
||||
string tenantId = "default")
|
||||
string tenantId = "default",
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_store = store ?? throw new ArgumentNullException(nameof(store));
|
||||
_tenantId = tenantId;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
@@ -83,7 +87,7 @@ public sealed class PolicyRegistryImportTarget : IPolicyImportTarget
|
||||
Version: data.Version ?? "1.0.0",
|
||||
Content: data.Content,
|
||||
Metadata: bundle.Metadata,
|
||||
ImportedAt: DateTimeOffset.UtcNow);
|
||||
ImportedAt: _timeProvider.GetUtcNow());
|
||||
|
||||
await _store.SaveAsync(pack, cancellationToken);
|
||||
created++;
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
// Description: Signs snapshot manifests using DSSE format for integrity verification.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
@@ -196,8 +197,9 @@ public sealed class SnapshotManifestSigner : ISnapshotManifestSigner
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var prefixBytes = Encoding.UTF8.GetBytes(PreAuthenticationEncodingPrefix);
        var typeLenStr = typeBytes.Length.ToString();
        var payloadLenStr = payload.Length.ToString();
        // Use InvariantCulture to ensure ASCII decimal digits per DSSE spec
        var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture);
        var payloadLenStr = payload.Length.ToString(CultureInfo.InvariantCulture);

        var totalLen = prefixBytes.Length + 1 +
            typeLenStr.Length + 1 +
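
For reference, DSSE pre-authentication encoding concatenates the prefix, the payload type, and the payload, each length rendered as ASCII decimal digits, which is why the length strings above must use the invariant culture. A compact sketch, assuming the prefix constant is `"DSSEv1"` as in the DSSE specification:

```csharp
using System.Globalization;
using System.Text;

// PAE(type, body) = "DSSEv1" SP len(type) SP type SP len(body) SP body
static byte[] Pae(string payloadType, byte[] payload)
{
    var typeBytes = Encoding.UTF8.GetBytes(payloadType);
    var header = "DSSEv1 "
        + typeBytes.Length.ToString(CultureInfo.InvariantCulture) + " "
        + payloadType + " "
        + payload.Length.ToString(CultureInfo.InvariantCulture) + " ";

    var headerBytes = Encoding.UTF8.GetBytes(header);
    var result = new byte[headerBytes.Length + payload.Length];
    headerBytes.CopyTo(result, 0);
    payload.CopyTo(result, headerBytes.Length);
    return result;
}
```
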
@@ -178,39 +178,15 @@ public sealed class TimeAnchorService : ITimeAnchorService
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Roughtime is a cryptographic time synchronization protocol
|
||||
// This is a placeholder implementation - full implementation would use a Roughtime client
|
||||
// Full implementation requires a Roughtime client library
|
||||
var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003";
|
||||
|
||||
// For now, fallback to local with indication of intended source
|
||||
var anchorTime = _timeProvider.GetUtcNow();
|
||||
var anchorData = new RoughtimeAnchorData
|
||||
{
|
||||
Timestamp = anchorTime,
|
||||
Server = serverUrl,
|
||||
Midpoint = anchorTime.ToUnixTimeSeconds(),
|
||||
Radius = 1000000, // 1 second radius in microseconds
|
||||
Nonce = _guidProvider.NewGuid().ToString("N"),
|
||||
MerkleRoot = request.MerkleRoot
|
||||
};
|
||||
|
||||
var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
|
||||
var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
|
||||
var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
return new TimeAnchorResult
|
||||
{
|
||||
Success = true,
|
||||
Content = new TimeAnchorContent
|
||||
{
|
||||
AnchorTime = anchorTime,
|
||||
Source = $"roughtime:{serverUrl}",
|
||||
TokenDigest = tokenDigest
|
||||
},
|
||||
TokenBytes = anchorBytes,
|
||||
Warning = "Roughtime client not implemented; using simulated response"
|
||||
};
|
||||
// Per no-silent-stubs rule: unimplemented paths must fail explicitly
|
||||
return TimeAnchorResult.Failed(
|
||||
$"Roughtime time anchor source '{serverUrl}' is not implemented. " +
|
||||
"Use 'local' source or implement Roughtime client integration.");
|
||||
}
|
||||
|
||||
private async Task<TimeAnchorResult> CreateRfc3161AnchorAsync(
|
||||
@@ -218,37 +194,15 @@ public sealed class TimeAnchorService : ITimeAnchorService
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// RFC 3161 is the Internet X.509 PKI Time-Stamp Protocol (TSP)
|
||||
// This is a placeholder implementation - full implementation would use a TSA client
|
||||
// Full implementation requires a TSA client library
|
||||
var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com";
|
||||
|
||||
var anchorTime = _timeProvider.GetUtcNow();
|
||||
var anchorData = new Rfc3161AnchorData
|
||||
{
|
||||
Timestamp = anchorTime,
|
||||
TsaUrl = tsaUrl,
|
||||
SerialNumber = _guidProvider.NewGuid().ToString("N"),
|
||||
PolicyOid = "2.16.840.1.114412.2.1", // DigiCert timestamp policy
|
||||
MerkleRoot = request.MerkleRoot
|
||||
};
|
||||
|
||||
var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
|
||||
var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
|
||||
var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
return new TimeAnchorResult
|
||||
{
|
||||
Success = true,
|
||||
Content = new TimeAnchorContent
|
||||
{
|
||||
AnchorTime = anchorTime,
|
||||
Source = $"rfc3161:{tsaUrl}",
|
||||
TokenDigest = tokenDigest
|
||||
},
|
||||
TokenBytes = anchorBytes,
|
||||
Warning = "RFC 3161 TSA client not implemented; using simulated response"
|
||||
};
|
||||
// Per no-silent-stubs rule: unimplemented paths must fail explicitly
|
||||
return TimeAnchorResult.Failed(
|
||||
$"RFC 3161 time anchor source '{tsaUrl}' is not implemented. " +
|
||||
"Use 'local' source or implement RFC 3161 TSA client integration.");
|
||||
}
|
||||
|
||||
private sealed record LocalAnchorData
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
|
||||
@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0028-M | DONE | Maintainability audit for StellaOps.AirGap.Persistence. |
| AUDIT-0028-T | DONE | Test coverage audit for StellaOps.AirGap.Persistence. |
| AUDIT-0028-M | DONE | Revalidated 2026-01-06; no new maintainability findings. |
| AUDIT-0028-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0029. |
| AUDIT-0028-A | DONE | Applied schema + determinism fixes and migration host wiring. |

@@ -0,0 +1,153 @@
|
||||
// <copyright file="AirGapSyncServiceCollectionExtensions.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.AirGap.Sync.Services;
|
||||
using StellaOps.AirGap.Sync.Stores;
|
||||
using StellaOps.AirGap.Sync.Transport;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.HybridLogicalClock;
|
||||
|
||||
namespace StellaOps.AirGap.Sync;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering air-gap sync services.
|
||||
/// </summary>
|
||||
public static class AirGapSyncServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds air-gap sync services to the service collection.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="nodeId">The node identifier for this instance.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddAirGapSyncServices(
|
||||
this IServiceCollection services,
|
||||
string nodeId)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);
|
||||
|
||||
// Core services
|
||||
services.TryAddSingleton<IConflictResolver, ConflictResolver>();
|
||||
services.TryAddSingleton<IHlcMergeService, HlcMergeService>();
|
||||
services.TryAddSingleton<IAirGapBundleImporter, AirGapBundleImporter>();
|
||||
|
||||
// Register in-memory HLC state store for offline operation
|
||||
services.TryAddSingleton<IHlcStateStore, InMemoryHlcStateStore>();
|
||||
|
||||
// Register HLC clock with node ID
|
||||
services.TryAddSingleton<IHybridLogicalClock>(sp =>
|
||||
{
|
||||
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
|
||||
var stateStore = sp.GetRequiredService<IHlcStateStore>();
|
||||
var logger = sp.GetRequiredService<ILogger<HybridLogicalClock.HybridLogicalClock>>();
|
||||
return new HybridLogicalClock.HybridLogicalClock(timeProvider, nodeId, stateStore, logger);
|
||||
});
|
||||
|
||||
// Register deterministic GUID provider
|
||||
services.TryAddSingleton<IGuidProvider>(SystemGuidProvider.Instance);
|
||||
|
||||
// File-based store (can be overridden)
|
||||
services.TryAddSingleton<IOfflineJobLogStore, FileBasedOfflineJobLogStore>();
|
||||
|
||||
// Offline HLC manager
|
||||
services.TryAddSingleton<IOfflineHlcManager, OfflineHlcManager>();
|
||||
|
||||
// Bundle exporter
|
||||
services.TryAddSingleton<IAirGapBundleExporter, AirGapBundleExporter>();
|
||||
|
||||
// Bundle DSSE signer (OMP-010)
|
||||
services.TryAddSingleton<IAirGapBundleDsseSigner, AirGapBundleDsseSigner>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds air-gap sync services with custom options.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="nodeId">The node identifier for this instance.</param>
|
||||
/// <param name="configureOptions">Action to configure file-based store options.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddAirGapSyncServices(
|
||||
this IServiceCollection services,
|
||||
string nodeId,
|
||||
Action<FileBasedOfflineJobLogStoreOptions> configureOptions)
|
||||
{
|
||||
// Configure file-based store options
|
||||
services.Configure(configureOptions);
|
||||
|
||||
return services.AddAirGapSyncServices(nodeId);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the air-gap sync service for importing bundles to the central scheduler.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
/// <remarks>
|
||||
/// This requires ISyncSchedulerLogRepository to be registered separately,
|
||||
/// as it depends on the Scheduler.Persistence module.
|
||||
/// </remarks>
|
||||
public static IServiceCollection AddAirGapSyncImportService(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddScoped<IAirGapSyncService, AirGapSyncService>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds file-based transport for job sync bundles.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddFileBasedJobSyncTransport(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<IJobSyncTransport, FileBasedJobSyncTransport>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds file-based transport for job sync bundles with custom options.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="configureOptions">Action to configure transport options.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddFileBasedJobSyncTransport(
|
||||
this IServiceCollection services,
|
||||
Action<FileBasedJobSyncTransportOptions> configureOptions)
|
||||
{
|
||||
services.Configure(configureOptions);
|
||||
return services.AddFileBasedJobSyncTransport();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds Router-based transport for job sync bundles.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
/// <remarks>
|
||||
/// Requires IRouterJobSyncClient to be registered separately.
|
||||
/// </remarks>
|
||||
public static IServiceCollection AddRouterJobSyncTransport(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<IJobSyncTransport, RouterJobSyncTransport>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds Router-based transport for job sync bundles with custom options.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="configureOptions">Action to configure transport options.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddRouterJobSyncTransport(
|
||||
this IServiceCollection services,
|
||||
Action<RouterJobSyncTransportOptions> configureOptions)
|
||||
{
|
||||
services.Configure(configureOptions);
|
||||
return services.AddRouterJobSyncTransport();
|
||||
}
|
||||
}
|
||||
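
A wiring sketch for the extensions above; the node id and the option property name are placeholders, not values taken from this repository:

```csharp
using Microsoft.Extensions.DependencyInjection;
using StellaOps.AirGap.Sync;

var services = new ServiceCollection();
services.AddLogging();                       // the HLC registration above resolves ILogger
services.AddSingleton(TimeProvider.System);  // optional; the factory falls back to TimeProvider.System

// Register offline-node services; RootPath is a hypothetical option name used for illustration.
services.AddAirGapSyncServices(
    nodeId: "edge-node-01",
    configureOptions: options => options.RootPath = "/var/lib/stellaops/airgap/joblog");

// Pick a bundle transport for moving job logs across the air gap.
services.AddFileBasedJobSyncTransport();
```
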
@@ -0,0 +1,51 @@
|
||||
// <copyright file="AirGapBundle.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Represents an air-gap bundle containing job logs from one or more offline nodes.
|
||||
/// </summary>
|
||||
public sealed record AirGapBundle
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the unique bundle identifier.
|
||||
/// </summary>
|
||||
public required Guid BundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the tenant ID for this bundle.
|
||||
/// </summary>
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets when the bundle was created.
|
||||
/// </summary>
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the node ID that created this bundle.
|
||||
/// </summary>
|
||||
public required string CreatedByNodeId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the job logs from each offline node.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<NodeJobLog> JobLogs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the bundle manifest digest for integrity verification.
|
||||
/// </summary>
|
||||
public required string ManifestDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the optional DSSE signature over the manifest.
|
||||
/// </summary>
|
||||
public string? Signature { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the key ID used for signing (if signed).
|
||||
/// </summary>
|
||||
public string? SignedBy { get; init; }
|
||||
}
|
||||
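
Construction sketch for the record above, with placeholder values: every member is `required`, so a bundle cannot be materialized without its identity, provenance, and integrity fields.

```csharp
using System;
using StellaOps.AirGap.Sync.Models;

var bundle = new AirGapBundle
{
    BundleId = Guid.NewGuid(),
    TenantId = "tenant-a",
    CreatedAt = DateTimeOffset.UtcNow,
    CreatedByNodeId = "edge-node-01",
    JobLogs = Array.Empty<NodeJobLog>(),      // empty log set for illustration
    ManifestDigest = "sha256:<placeholder>",  // computed over the manifest in real use
};
```
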
@@ -0,0 +1,68 @@
|
||||
// <copyright file="ConflictResolution.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Result of conflict resolution for a job ID.
|
||||
/// </summary>
|
||||
public sealed record ConflictResolution
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the type of conflict detected.
|
||||
/// </summary>
|
||||
public required ConflictType Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the resolution strategy applied.
|
||||
/// </summary>
|
||||
public required ResolutionStrategy Resolution { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the selected entry (when resolution is not Error).
|
||||
/// </summary>
|
||||
public OfflineJobLogEntry? SelectedEntry { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the entries that were dropped.
|
||||
/// </summary>
|
||||
public IReadOnlyList<OfflineJobLogEntry>? DroppedEntries { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the error message (when resolution is Error).
|
||||
/// </summary>
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Types of conflicts that can occur during merge.
|
||||
/// </summary>
|
||||
public enum ConflictType
|
||||
{
|
||||
/// <summary>
|
||||
/// Same JobId with different HLC timestamps but identical payload.
|
||||
/// </summary>
|
||||
DuplicateTimestamp,
|
||||
|
||||
/// <summary>
|
||||
/// Same JobId with different payloads - indicates a bug.
|
||||
/// </summary>
|
||||
PayloadMismatch
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Strategies for resolving conflicts.
|
||||
/// </summary>
|
||||
public enum ResolutionStrategy
|
||||
{
|
||||
/// <summary>
|
||||
/// Take the entry with the earliest HLC timestamp.
|
||||
/// </summary>
|
||||
TakeEarliest,
|
||||
|
||||
/// <summary>
|
||||
/// Fail the merge - conflict cannot be resolved.
|
||||
/// </summary>
|
||||
Error
|
||||
}
|
||||
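
An illustrative sketch of how these types compose (the repository's `ConflictResolver` may differ): entries sharing a `JobId` with identical payload hashes keep the earliest HLC entry, while diverging payloads surface as an unresolvable error. `HlcTimestamp` is assumed to be comparable here.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.AirGap.Sync.Models;

static ConflictResolution ResolveSketch(IReadOnlyList<OfflineJobLogEntry> entries)
{
    var distinctPayloads = entries
        .Select(e => Convert.ToHexString(e.PayloadHash))
        .Distinct()
        .Count();

    if (distinctPayloads > 1)
    {
        // Same JobId with different payloads indicates a bug upstream.
        return new ConflictResolution
        {
            Type = ConflictType.PayloadMismatch,
            Resolution = ResolutionStrategy.Error,
            Error = $"JobId {entries[0].JobId} has diverging payloads across nodes.",
        };
    }

    // Identical payloads: keep the earliest HLC entry, drop the rest.
    var ordered = entries.OrderBy(e => e.THlc).ToList();
    return new ConflictResolution
    {
        Type = ConflictType.DuplicateTimestamp,
        Resolution = ResolutionStrategy.TakeEarliest,
        SelectedEntry = ordered[0],
        DroppedEntries = ordered.Skip(1).ToList(),
    };
}
```
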
@@ -0,0 +1,87 @@
|
||||
// <copyright file="MergeResult.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.HybridLogicalClock;
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Result of merging job logs from multiple offline nodes.
|
||||
/// </summary>
|
||||
public sealed record MergeResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the merged entries in HLC total order.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<MergedJobEntry> MergedEntries { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets duplicate entries that were dropped during merge.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<DuplicateEntry> Duplicates { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the merged chain head (final link after merge).
|
||||
/// </summary>
|
||||
public byte[]? MergedChainHead { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the source node IDs that contributed to this merge.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<string> SourceNodes { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A job entry after merge with unified chain link.
|
||||
/// </summary>
|
||||
public sealed class MergedJobEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets or sets the source node ID that created this entry.
|
||||
/// </summary>
|
||||
public required string SourceNodeId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the HLC timestamp.
|
||||
/// </summary>
|
||||
public required HlcTimestamp THlc { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the job ID.
|
||||
/// </summary>
|
||||
public required Guid JobId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the partition key.
|
||||
/// </summary>
|
||||
public string? PartitionKey { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the serialized payload.
|
||||
/// </summary>
|
||||
public required string Payload { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the payload hash.
|
||||
/// </summary>
|
||||
public required byte[] PayloadHash { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the original chain link from the source node.
|
||||
/// </summary>
|
||||
public required byte[] OriginalLink { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the merged chain link (computed during merge).
|
||||
/// </summary>
|
||||
public byte[]? MergedLink { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a duplicate entry dropped during merge.
|
||||
/// </summary>
|
||||
public sealed record DuplicateEntry(
|
||||
Guid JobId,
|
||||
string NodeId,
|
||||
HlcTimestamp THlc);
|
||||
@@ -0,0 +1,33 @@
|
||||
// <copyright file="NodeJobLog.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.HybridLogicalClock;
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Represents the job log from a single offline node.
|
||||
/// </summary>
|
||||
public sealed record NodeJobLog
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the node identifier.
|
||||
/// </summary>
|
||||
public required string NodeId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the last HLC timestamp in this log.
|
||||
/// </summary>
|
||||
public required HlcTimestamp LastHlc { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the chain head (last link) in this log.
|
||||
/// </summary>
|
||||
public required byte[] ChainHead { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the job log entries in HLC order.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<OfflineJobLogEntry> Entries { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
// <copyright file="OfflineJobLogEntry.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.HybridLogicalClock;
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Represents a job log entry created while operating offline.
|
||||
/// </summary>
|
||||
public sealed record OfflineJobLogEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the node ID that created this entry.
|
||||
/// </summary>
|
||||
public required string NodeId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the HLC timestamp when the job was enqueued.
|
||||
/// </summary>
|
||||
public required HlcTimestamp THlc { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the deterministic job ID.
|
||||
/// </summary>
|
||||
public required Guid JobId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the partition key (if any).
|
||||
/// </summary>
|
||||
public string? PartitionKey { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the serialized job payload.
|
||||
/// </summary>
|
||||
public required string Payload { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the SHA-256 hash of the canonical payload.
|
||||
/// </summary>
|
||||
public required byte[] PayloadHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the previous chain link (null for first entry).
|
||||
/// </summary>
|
||||
public byte[]? PrevLink { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the chain link: Hash(prev_link || job_id || t_hlc || payload_hash).
|
||||
/// </summary>
|
||||
public required byte[] Link { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the wall-clock time when the entry was created (informational only).
|
||||
/// </summary>
|
||||
public DateTimeOffset EnqueuedAt { get; init; }
|
||||
}
|
||||
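
A sketch of the hash-chain recurrence documented for `Link`; the exact byte encodings of the job id and the HLC timestamp are repository details and are assumed here.

```csharp
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

// link = SHA-256(prev_link || job_id || t_hlc || payload_hash)
static byte[] ComputeLinkSketch(byte[]? prevLink, Guid jobId, string tHlc, byte[] payloadHash)
{
    var buffer = new List<byte>();
    buffer.AddRange(prevLink ?? Array.Empty<byte>());
    buffer.AddRange(jobId.ToByteArray());
    buffer.AddRange(Encoding.UTF8.GetBytes(tHlc));
    buffer.AddRange(payloadHash);
    return SHA256.HashData(buffer.ToArray());
}
```
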
@@ -0,0 +1,72 @@
|
||||
// <copyright file="SyncResult.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Result of syncing an air-gap bundle to the central scheduler.
|
||||
/// </summary>
|
||||
public sealed record SyncResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the bundle ID that was synced.
|
||||
/// </summary>
|
||||
public required Guid BundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the total number of entries in the bundle.
|
||||
/// </summary>
|
||||
public required int TotalInBundle { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the number of entries appended to the scheduler log.
|
||||
/// </summary>
|
||||
public required int Appended { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the number of duplicate entries skipped.
|
||||
/// </summary>
|
||||
public required int Duplicates { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the number of entries that already existed (idempotency).
|
||||
/// </summary>
|
||||
public int AlreadyExisted { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the new chain head after sync.
|
||||
/// </summary>
|
||||
public byte[]? NewChainHead { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets any warnings generated during sync.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string>? Warnings { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of an offline enqueue operation.
|
||||
/// </summary>
|
||||
public sealed record OfflineEnqueueResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the HLC timestamp assigned.
|
||||
/// </summary>
|
||||
public required StellaOps.HybridLogicalClock.HlcTimestamp THlc { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the deterministic job ID.
|
||||
/// </summary>
|
||||
public required Guid JobId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the chain link computed.
|
||||
/// </summary>
|
||||
public required byte[] Link { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the node ID that created this entry.
|
||||
/// </summary>
|
||||
public required string NodeId { get; init; }
|
||||
}
@@ -0,0 +1,275 @@
// <copyright file="AirGapBundleDsseSigner.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.Canonical.Json;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Options for air-gap bundle DSSE signing.
/// </summary>
public sealed class AirGapBundleDsseOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "AirGap:BundleSigning";

    /// <summary>
    /// Gets or sets the signing mode: "hmac" for HMAC-SHA256, "none" to disable.
    /// </summary>
    public string Mode { get; set; } = "none";

    /// <summary>
    /// Gets or sets the HMAC secret key as Base64.
    /// Required when Mode is "hmac".
    /// </summary>
    public string? SecretBase64 { get; set; }

    /// <summary>
    /// Gets or sets the key identifier for the signature.
    /// </summary>
    public string KeyId { get; set; } = "airgap-bundle-signer";

    /// <summary>
    /// Gets or sets the payload type for the DSSE envelope.
    /// </summary>
    public string PayloadType { get; set; } = "application/vnd.stellaops.airgap.bundle+json";
}

/// <summary>
/// Result of a bundle signature operation.
/// </summary>
/// <param name="KeyId">The key ID used for signing.</param>
/// <param name="Signature">The signature bytes.</param>
/// <param name="SignatureBase64">The signature as Base64 string.</param>
public sealed record AirGapBundleSignatureResult(
    string KeyId,
    byte[] Signature,
    string SignatureBase64);

/// <summary>
/// Interface for air-gap bundle DSSE signing.
/// </summary>
public interface IAirGapBundleDsseSigner
{
    /// <summary>
    /// Signs an air-gap bundle manifest and returns the signature result.
    /// </summary>
    /// <param name="bundle">The bundle to sign.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Signature result with key ID and signature.</returns>
    Task<AirGapBundleSignatureResult?> SignAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies an air-gap bundle signature.
    /// </summary>
    /// <param name="bundle">The bundle to verify.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>A valid result if the signature verifies or signing is disabled; an invalid result otherwise.</returns>
    Task<AirGapBundleVerificationResult> VerifyAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets whether signing is enabled.
    /// </summary>
    bool IsEnabled { get; }
}

/// <summary>
/// Result of bundle signature verification.
/// </summary>
/// <param name="IsValid">Whether the signature is valid.</param>
/// <param name="Reason">The reason for the result.</param>
public sealed record AirGapBundleVerificationResult(bool IsValid, string Reason)
{
    /// <summary>
    /// Verification succeeded.
    /// </summary>
    public static AirGapBundleVerificationResult Valid { get; } = new(true, "Signature verified");

    /// <summary>
    /// Signing is disabled, so verification is skipped.
    /// </summary>
    public static AirGapBundleVerificationResult SigningDisabled { get; } = new(true, "Signing disabled");

    /// <summary>
    /// Bundle has no signature but signing is enabled.
    /// </summary>
    public static AirGapBundleVerificationResult MissingSignature { get; } = new(false, "Bundle is not signed");

    /// <summary>
    /// Signature verification failed.
    /// </summary>
    public static AirGapBundleVerificationResult InvalidSignature { get; } = new(false, "Signature verification failed");
}

/// <summary>
/// DSSE signer for air-gap bundles using HMAC-SHA256.
/// </summary>
public sealed class AirGapBundleDsseSigner : IAirGapBundleDsseSigner
{
    private const string DssePrefix = "DSSEv1 ";

    private readonly IOptions<AirGapBundleDsseOptions> _options;
    private readonly ILogger<AirGapBundleDsseSigner> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="AirGapBundleDsseSigner"/> class.
    /// </summary>
    public AirGapBundleDsseSigner(
        IOptions<AirGapBundleDsseOptions> options,
        ILogger<AirGapBundleDsseSigner> logger)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public bool IsEnabled => string.Equals(_options.Value.Mode, "hmac", StringComparison.OrdinalIgnoreCase);

    /// <inheritdoc/>
    public Task<AirGapBundleSignatureResult?> SignAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        cancellationToken.ThrowIfCancellationRequested();

        var opts = _options.Value;

        if (!IsEnabled)
        {
            _logger.LogDebug("Air-gap bundle DSSE signing is disabled");
            return Task.FromResult<AirGapBundleSignatureResult?>(null);
        }

        if (string.IsNullOrWhiteSpace(opts.SecretBase64))
        {
            throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured");
        }

        byte[] secret;
        try
        {
            secret = Convert.FromBase64String(opts.SecretBase64);
        }
        catch (FormatException ex)
        {
            throw new InvalidOperationException("SecretBase64 is not valid Base64", ex);
        }

        // Compute PAE (Pre-Authentication Encoding) per DSSE spec
        var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest);
        var signature = ComputeHmacSha256(secret, pae);
        var signatureBase64 = Convert.ToBase64String(signature);

        _logger.LogInformation(
            "Signed air-gap bundle {BundleId} with key {KeyId}",
            bundle.BundleId,
            opts.KeyId);

        return Task.FromResult<AirGapBundleSignatureResult?>(
            new AirGapBundleSignatureResult(opts.KeyId, signature, signatureBase64));
    }

    /// <inheritdoc/>
    public Task<AirGapBundleVerificationResult> VerifyAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        cancellationToken.ThrowIfCancellationRequested();

        var opts = _options.Value;

        if (!IsEnabled)
        {
            _logger.LogDebug("Air-gap bundle DSSE signing is disabled, skipping verification");
            return Task.FromResult(AirGapBundleVerificationResult.SigningDisabled);
        }

        if (string.IsNullOrWhiteSpace(bundle.Signature))
        {
            _logger.LogWarning("Air-gap bundle {BundleId} has no signature", bundle.BundleId);
            return Task.FromResult(AirGapBundleVerificationResult.MissingSignature);
        }

        if (string.IsNullOrWhiteSpace(opts.SecretBase64))
        {
            throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured");
        }

        byte[] secret;
        try
        {
            secret = Convert.FromBase64String(opts.SecretBase64);
        }
        catch (FormatException ex)
        {
            throw new InvalidOperationException("SecretBase64 is not valid Base64", ex);
        }

        byte[] expectedSignature;
        try
        {
            expectedSignature = Convert.FromBase64String(bundle.Signature);
        }
        catch (FormatException)
        {
            _logger.LogWarning("Air-gap bundle {BundleId} has invalid Base64 signature", bundle.BundleId);
            return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature);
        }

        // Compute PAE and expected signature
        var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest);
        var computedSignature = ComputeHmacSha256(secret, pae);

        if (!CryptographicOperations.FixedTimeEquals(expectedSignature, computedSignature))
        {
            _logger.LogWarning(
                "Air-gap bundle {BundleId} signature verification failed",
                bundle.BundleId);
            return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature);
        }

        _logger.LogDebug(
            "Air-gap bundle {BundleId} signature verified successfully",
            bundle.BundleId);
        return Task.FromResult(AirGapBundleVerificationResult.Valid);
    }

    /// <summary>
    /// Computes DSSE Pre-Authentication Encoding (PAE).
    /// PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload,
    /// where len() is the ASCII decimal byte length and SP is a space character.
    /// </summary>
    private static byte[] ComputePreAuthenticationEncoding(string payloadType, string manifestDigest)
    {
        var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);
        var manifestDigestBytes = Encoding.UTF8.GetBytes(manifestDigest);

        // Format: "DSSEv1 {payloadType.Length} {payloadType} {payload.Length} {payload}"
        var paeString = string.Create(
            CultureInfo.InvariantCulture,
            $"{DssePrefix}{payloadTypeBytes.Length} {payloadType} {manifestDigestBytes.Length} {manifestDigest}");

        return Encoding.UTF8.GetBytes(paeString);
    }

    private static byte[] ComputeHmacSha256(byte[] key, byte[] data)
    {
        using var hmac = new HMACSHA256(key);
        return hmac.ComputeHash(data);
    }
}
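
// Usage sketch (illustrative only): one way the signer could be called around export/import.
// It assumes the options section shown above is bound via standard Options/DI registration and
// that the signed values are persisted on the bundle's Signature/SignedBy fields; the real
// wiring elsewhere in the module may differ.
file static class AirGapBundleDsseSignerUsageSketch
{
    public static async Task<AirGapBundleVerificationResult> SignAndVerifyAsync(
        IAirGapBundleDsseSigner signer,
        AirGapBundle bundle,
        CancellationToken cancellationToken)
    {
        // SignAsync returns null when Mode is "none" (signing disabled).
        var signature = await signer.SignAsync(bundle, cancellationToken).ConfigureAwait(false);
        if (signature is not null)
        {
            // signature.SignatureBase64 and signature.KeyId would be attached to the bundle here.
        }

        // VerifyAsync recomputes the HMAC over the same PAE and compares in constant time.
        return await signer.VerifyAsync(bundle, cancellationToken).ConfigureAwait(false);
    }
}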
@@ -0,0 +1,270 @@
// <copyright file="AirGapBundleExporter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Stores;
using StellaOps.Canonical.Json;
using StellaOps.Determinism;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for air-gap bundle export operations.
/// </summary>
public interface IAirGapBundleExporter
{
    /// <summary>
    /// Exports an air-gap bundle containing offline job logs.
    /// </summary>
    /// <param name="tenantId">The tenant ID.</param>
    /// <param name="nodeIds">The node IDs to include (null for current node only).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The exported bundle.</returns>
    Task<AirGapBundle> ExportAsync(
        string tenantId,
        IReadOnlyList<string>? nodeIds = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports an air-gap bundle to a file.
    /// </summary>
    /// <param name="bundle">The bundle to export.</param>
    /// <param name="outputPath">The output file path.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task ExportToFileAsync(
        AirGapBundle bundle,
        string outputPath,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports an air-gap bundle to a JSON string.
    /// </summary>
    /// <param name="bundle">The bundle to export.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The JSON string representation.</returns>
    Task<string> ExportToStringAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Service for exporting air-gap bundles.
/// </summary>
public sealed class AirGapBundleExporter : IAirGapBundleExporter
{
    private readonly IOfflineJobLogStore _jobLogStore;
    private readonly IOfflineHlcManager _hlcManager;
    private readonly IGuidProvider _guidProvider;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<AirGapBundleExporter> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="AirGapBundleExporter"/> class.
    /// </summary>
    public AirGapBundleExporter(
        IOfflineJobLogStore jobLogStore,
        IOfflineHlcManager hlcManager,
        IGuidProvider guidProvider,
        TimeProvider timeProvider,
        ILogger<AirGapBundleExporter> logger)
    {
        _jobLogStore = jobLogStore ?? throw new ArgumentNullException(nameof(jobLogStore));
        _hlcManager = hlcManager ?? throw new ArgumentNullException(nameof(hlcManager));
        _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public async Task<AirGapBundle> ExportAsync(
        string tenantId,
        IReadOnlyList<string>? nodeIds = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        var effectiveNodeIds = nodeIds ?? new[] { _hlcManager.NodeId };

        _logger.LogInformation(
            "Exporting air-gap bundle for tenant {TenantId} with {NodeCount} nodes",
            tenantId, effectiveNodeIds.Count);

        var jobLogs = new List<NodeJobLog>();

        foreach (var nodeId in effectiveNodeIds)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var nodeLog = await _jobLogStore.GetNodeJobLogAsync(nodeId, cancellationToken)
                .ConfigureAwait(false);

            if (nodeLog is not null && nodeLog.Entries.Count > 0)
            {
                jobLogs.Add(nodeLog);
                _logger.LogDebug(
                    "Added node {NodeId} with {EntryCount} entries to bundle",
                    nodeId, nodeLog.Entries.Count);
            }
        }

        if (jobLogs.Count == 0)
        {
            _logger.LogWarning("No offline job logs found for export");
        }

        var bundle = new AirGapBundle
        {
            BundleId = _guidProvider.NewGuid(),
            TenantId = tenantId,
            CreatedAt = _timeProvider.GetUtcNow(),
            CreatedByNodeId = _hlcManager.NodeId,
            JobLogs = jobLogs,
            ManifestDigest = ComputeManifestDigest(jobLogs)
        };

        _logger.LogInformation(
            "Created bundle {BundleId} with {LogCount} node logs, {TotalEntries} total entries",
            bundle.BundleId, jobLogs.Count, jobLogs.Sum(l => l.Entries.Count));

        return bundle;
    }

    /// <inheritdoc/>
    public async Task ExportToFileAsync(
        AirGapBundle bundle,
        string outputPath,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);

        var dto = ToExportDto(bundle);
        var json = JsonSerializer.Serialize(dto, JsonOptions);

        var directory = Path.GetDirectoryName(outputPath);
        if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
        {
            Directory.CreateDirectory(directory);
        }

        await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false);

        _logger.LogInformation(
            "Exported bundle {BundleId} to {OutputPath}",
            bundle.BundleId, outputPath);
    }

    /// <inheritdoc/>
    public Task<string> ExportToStringAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        cancellationToken.ThrowIfCancellationRequested();

        var dto = ToExportDto(bundle);
        var json = JsonSerializer.Serialize(dto, JsonOptions);

        _logger.LogDebug(
            "Exported bundle {BundleId} to string ({Length} chars)",
            bundle.BundleId, json.Length);

        return Task.FromResult(json);
    }

    private static string ComputeManifestDigest(IReadOnlyList<NodeJobLog> jobLogs)
    {
        // Create manifest of all chain heads for integrity
        var manifest = jobLogs
            .OrderBy(l => l.NodeId, StringComparer.Ordinal)
            .Select(l => new
            {
                l.NodeId,
                LastHlc = l.LastHlc.ToSortableString(),
                ChainHead = Convert.ToHexString(l.ChainHead)
            })
            .ToList();

        var json = CanonJson.Serialize(manifest);
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static AirGapBundleExportDto ToExportDto(AirGapBundle bundle) => new()
    {
        BundleId = bundle.BundleId,
        TenantId = bundle.TenantId,
        CreatedAt = bundle.CreatedAt,
        CreatedByNodeId = bundle.CreatedByNodeId,
        ManifestDigest = bundle.ManifestDigest,
        Signature = bundle.Signature,
        SignedBy = bundle.SignedBy,
        JobLogs = bundle.JobLogs.Select(ToNodeJobLogDto).ToList()
    };

    private static NodeJobLogExportDto ToNodeJobLogDto(NodeJobLog log) => new()
    {
        NodeId = log.NodeId,
        LastHlc = log.LastHlc.ToSortableString(),
        ChainHead = Convert.ToBase64String(log.ChainHead),
        Entries = log.Entries.Select(ToEntryDto).ToList()
    };

    private static OfflineJobLogEntryExportDto ToEntryDto(OfflineJobLogEntry entry) => new()
    {
        NodeId = entry.NodeId,
        THlc = entry.THlc.ToSortableString(),
        JobId = entry.JobId,
        PartitionKey = entry.PartitionKey,
        Payload = entry.Payload,
        PayloadHash = Convert.ToBase64String(entry.PayloadHash),
        PrevLink = entry.PrevLink is not null ? Convert.ToBase64String(entry.PrevLink) : null,
        Link = Convert.ToBase64String(entry.Link),
        EnqueuedAt = entry.EnqueuedAt
    };

    // Export DTOs
    private sealed record AirGapBundleExportDto
    {
        public required Guid BundleId { get; init; }
        public required string TenantId { get; init; }
        public required DateTimeOffset CreatedAt { get; init; }
        public required string CreatedByNodeId { get; init; }
        public required string ManifestDigest { get; init; }
        public string? Signature { get; init; }
        public string? SignedBy { get; init; }
        public required IReadOnlyList<NodeJobLogExportDto> JobLogs { get; init; }
    }

    private sealed record NodeJobLogExportDto
    {
        public required string NodeId { get; init; }
        public required string LastHlc { get; init; }
        public required string ChainHead { get; init; }
        public required IReadOnlyList<OfflineJobLogEntryExportDto> Entries { get; init; }
    }

    private sealed record OfflineJobLogEntryExportDto
    {
        public required string NodeId { get; init; }
        public required string THlc { get; init; }
        public required Guid JobId { get; init; }
        public string? PartitionKey { get; init; }
        public required string Payload { get; init; }
        public required string PayloadHash { get; init; }
        public string? PrevLink { get; init; }
        public required string Link { get; init; }
        public DateTimeOffset EnqueuedAt { get; init; }
    }
}
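
// Usage sketch (illustrative only): exporting the local node's offline log to a portable file.
// The output path and tenant id are placeholders; real callers would take them from CLI or config.
file static class AirGapBundleExporterUsageSketch
{
    public static async Task ExportCurrentNodeAsync(
        IAirGapBundleExporter exporter,
        string tenantId,
        CancellationToken cancellationToken)
    {
        // Null nodeIds means "current node only", per ExportAsync's contract above.
        var bundle = await exporter.ExportAsync(tenantId, nodeIds: null, cancellationToken)
            .ConfigureAwait(false);

        // The bundle is serialized as indented camelCase JSON with Base64-encoded hashes and links.
        await exporter.ExportToFileAsync(bundle, "./export/airgap-bundle.json", cancellationToken)
            .ConfigureAwait(false);
    }
}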
@@ -0,0 +1,316 @@
// <copyright file="AirGapBundleImporter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;
using StellaOps.Canonical.Json;
using StellaOps.HybridLogicalClock;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for air-gap bundle import operations.
/// </summary>
public interface IAirGapBundleImporter
{
    /// <summary>
    /// Imports an air-gap bundle from a file.
    /// </summary>
    /// <param name="inputPath">The input file path.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The imported bundle.</returns>
    Task<AirGapBundle> ImportFromFileAsync(
        string inputPath,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates a bundle's integrity.
    /// </summary>
    /// <param name="bundle">The bundle to validate.</param>
    /// <returns>Validation result with any issues found.</returns>
    BundleValidationResult Validate(AirGapBundle bundle);

    /// <summary>
    /// Imports an air-gap bundle from a JSON string.
    /// </summary>
    /// <param name="json">The JSON string representation.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The imported bundle.</returns>
    Task<AirGapBundle> ImportFromStringAsync(
        string json,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of bundle validation.
/// </summary>
public sealed record BundleValidationResult
{
    /// <summary>
    /// Gets whether the bundle is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Gets validation issues found.
    /// </summary>
    public required IReadOnlyList<string> Issues { get; init; }
}

/// <summary>
/// Service for importing air-gap bundles.
/// </summary>
public sealed class AirGapBundleImporter : IAirGapBundleImporter
{
    private readonly ILogger<AirGapBundleImporter> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="AirGapBundleImporter"/> class.
    /// </summary>
    public AirGapBundleImporter(ILogger<AirGapBundleImporter> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public async Task<AirGapBundle> ImportFromFileAsync(
        string inputPath,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(inputPath);

        if (!File.Exists(inputPath))
        {
            throw new FileNotFoundException($"Bundle file not found: {inputPath}", inputPath);
        }

        _logger.LogInformation("Importing air-gap bundle from {InputPath}", inputPath);

        var json = await File.ReadAllTextAsync(inputPath, cancellationToken).ConfigureAwait(false);
        var dto = JsonSerializer.Deserialize<AirGapBundleImportDto>(json, JsonOptions);

        if (dto is null)
        {
            throw new InvalidOperationException("Failed to deserialize bundle file");
        }

        var bundle = FromImportDto(dto);

        _logger.LogInformation(
            "Imported bundle {BundleId} from {InputPath}: {LogCount} node logs, {TotalEntries} total entries",
            bundle.BundleId, inputPath, bundle.JobLogs.Count, bundle.JobLogs.Sum(l => l.Entries.Count));

        return bundle;
    }

    /// <inheritdoc/>
    public Task<AirGapBundle> ImportFromStringAsync(
        string json,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(json);
        cancellationToken.ThrowIfCancellationRequested();

        _logger.LogDebug("Importing air-gap bundle from string ({Length} chars)", json.Length);

        var dto = JsonSerializer.Deserialize<AirGapBundleImportDto>(json, JsonOptions);

        if (dto is null)
        {
            throw new InvalidOperationException("Failed to deserialize bundle JSON");
        }

        var bundle = FromImportDto(dto);

        _logger.LogInformation(
            "Imported bundle {BundleId} from string: {LogCount} node logs, {TotalEntries} total entries",
            bundle.BundleId, bundle.JobLogs.Count, bundle.JobLogs.Sum(l => l.Entries.Count));

        return Task.FromResult(bundle);
    }

    /// <inheritdoc/>
    public BundleValidationResult Validate(AirGapBundle bundle)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        var issues = new List<string>();

        // 1. Validate manifest digest
        var computedDigest = ComputeManifestDigest(bundle.JobLogs);
        if (!string.Equals(computedDigest, bundle.ManifestDigest, StringComparison.Ordinal))
        {
            issues.Add($"Manifest digest mismatch: expected {bundle.ManifestDigest}, computed {computedDigest}");
        }

        // 2. Validate each node log's chain integrity
        foreach (var nodeLog in bundle.JobLogs)
        {
            var nodeIssues = ValidateNodeLog(nodeLog);
            issues.AddRange(nodeIssues);
        }

        // 3. Validate chain heads match last entry links
        foreach (var nodeLog in bundle.JobLogs)
        {
            if (nodeLog.Entries.Count > 0)
            {
                var lastEntry = nodeLog.Entries[^1];
                if (!ByteArrayEquals(nodeLog.ChainHead, lastEntry.Link))
                {
                    issues.Add($"Node {nodeLog.NodeId}: chain head doesn't match last entry link");
                }
            }
        }

        var isValid = issues.Count == 0;

        if (!isValid)
        {
            _logger.LogWarning(
                "Bundle {BundleId} validation failed with {IssueCount} issues",
                bundle.BundleId, issues.Count);
        }
        else
        {
            _logger.LogDebug("Bundle {BundleId} validation passed", bundle.BundleId);
        }

        return new BundleValidationResult
        {
            IsValid = isValid,
            Issues = issues
        };
    }

    private static IEnumerable<string> ValidateNodeLog(NodeJobLog nodeLog)
    {
        byte[]? expectedPrevLink = null;

        for (var i = 0; i < nodeLog.Entries.Count; i++)
        {
            var entry = nodeLog.Entries[i];

            // Verify prev_link matches expected
            if (!ByteArrayEquals(entry.PrevLink, expectedPrevLink))
            {
                yield return $"Node {nodeLog.NodeId}, entry {i}: prev_link mismatch";
            }

            // Recompute and verify link
            var computedLink = OfflineHlcManager.ComputeLink(
                entry.PrevLink,
                entry.JobId,
                entry.THlc,
                entry.PayloadHash);

            if (!ByteArrayEquals(entry.Link, computedLink))
            {
                yield return $"Node {nodeLog.NodeId}, entry {i} (JobId {entry.JobId}): link mismatch";
            }

            expectedPrevLink = entry.Link;
        }
    }

    private static string ComputeManifestDigest(IReadOnlyList<NodeJobLog> jobLogs)
    {
        var manifest = jobLogs
            .OrderBy(l => l.NodeId, StringComparer.Ordinal)
            .Select(l => new
            {
                l.NodeId,
                LastHlc = l.LastHlc.ToSortableString(),
                ChainHead = Convert.ToHexString(l.ChainHead)
            })
            .ToList();

        var json = CanonJson.Serialize(manifest);
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static bool ByteArrayEquals(byte[]? a, byte[]? b)
    {
        if (a is null && b is null) return true;
        if (a is null || b is null) return false;
        return a.AsSpan().SequenceEqual(b);
    }

    private static AirGapBundle FromImportDto(AirGapBundleImportDto dto) => new()
    {
        BundleId = dto.BundleId,
        TenantId = dto.TenantId,
        CreatedAt = dto.CreatedAt,
        CreatedByNodeId = dto.CreatedByNodeId,
        ManifestDigest = dto.ManifestDigest,
        Signature = dto.Signature,
        SignedBy = dto.SignedBy,
        JobLogs = dto.JobLogs.Select(FromNodeJobLogDto).ToList()
    };

    private static NodeJobLog FromNodeJobLogDto(NodeJobLogImportDto dto) => new()
    {
        NodeId = dto.NodeId,
        LastHlc = HlcTimestamp.Parse(dto.LastHlc),
        ChainHead = Convert.FromBase64String(dto.ChainHead),
        Entries = dto.Entries.Select(FromEntryDto).ToList()
    };

    private static OfflineJobLogEntry FromEntryDto(OfflineJobLogEntryImportDto dto) => new()
    {
        NodeId = dto.NodeId,
        THlc = HlcTimestamp.Parse(dto.THlc),
        JobId = dto.JobId,
        PartitionKey = dto.PartitionKey,
        Payload = dto.Payload,
        PayloadHash = Convert.FromBase64String(dto.PayloadHash),
        PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null,
        Link = Convert.FromBase64String(dto.Link),
        EnqueuedAt = dto.EnqueuedAt
    };

    // Import DTOs
    private sealed record AirGapBundleImportDto
    {
        public required Guid BundleId { get; init; }
        public required string TenantId { get; init; }
        public required DateTimeOffset CreatedAt { get; init; }
        public required string CreatedByNodeId { get; init; }
        public required string ManifestDigest { get; init; }
        public string? Signature { get; init; }
        public string? SignedBy { get; init; }
        public required IReadOnlyList<NodeJobLogImportDto> JobLogs { get; init; }
    }

    private sealed record NodeJobLogImportDto
    {
        public required string NodeId { get; init; }
        public required string LastHlc { get; init; }
        public required string ChainHead { get; init; }
        public required IReadOnlyList<OfflineJobLogEntryImportDto> Entries { get; init; }
    }

    private sealed record OfflineJobLogEntryImportDto
    {
        public required string NodeId { get; init; }
        public required string THlc { get; init; }
        public required Guid JobId { get; init; }
        public string? PartitionKey { get; init; }
        public required string Payload { get; init; }
        public required string PayloadHash { get; init; }
        public string? PrevLink { get; init; }
        public required string Link { get; init; }
        public DateTimeOffset EnqueuedAt { get; init; }
    }
}
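
// Usage sketch (illustrative only): importing a bundle file and rejecting it when the manifest
// digest or any per-node hash chain fails validation. The error handling shown is an assumption.
file static class AirGapBundleImporterUsageSketch
{
    public static async Task<AirGapBundle> ImportValidatedAsync(
        IAirGapBundleImporter importer,
        string inputPath,
        CancellationToken cancellationToken)
    {
        var bundle = await importer.ImportFromFileAsync(inputPath, cancellationToken)
            .ConfigureAwait(false);

        // Validate() recomputes the manifest digest and replays each node's
        // prev_link/link chain before the bundle is trusted.
        var validation = importer.Validate(bundle);
        if (!validation.IsValid)
        {
            throw new InvalidOperationException(
                "Bundle failed validation: " + string.Join("; ", validation.Issues));
        }

        return bundle;
    }
}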
@@ -0,0 +1,198 @@
// <copyright file="AirGapSyncService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;
using StellaOps.HybridLogicalClock;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for the scheduler log repository used by sync.
/// </summary>
/// <remarks>
/// This is a subset of the full ISchedulerLogRepository to avoid circular dependencies.
/// Implementations should delegate to the actual repository.
/// </remarks>
public interface ISyncSchedulerLogRepository
{
    /// <summary>
    /// Gets the chain head for a tenant/partition.
    /// </summary>
    Task<(byte[]? Link, string? THlc)> GetChainHeadAsync(
        string tenantId,
        string? partitionKey = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks whether an entry with the given job ID already exists.
    /// </summary>
    Task<bool> ExistsByJobIdAsync(
        string tenantId,
        Guid jobId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Inserts a synced entry.
    /// </summary>
    Task InsertSyncedEntryAsync(
        string tenantId,
        string tHlc,
        string? partitionKey,
        Guid jobId,
        byte[] payloadHash,
        byte[]? prevLink,
        byte[] link,
        string sourceNodeId,
        Guid syncedFromBundle,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for air-gap sync operations.
/// </summary>
public interface IAirGapSyncService
{
    /// <summary>
    /// Syncs offline jobs from an air-gap bundle to the central scheduler.
    /// </summary>
    /// <param name="bundle">The bundle to sync.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The sync result.</returns>
    Task<SyncResult> SyncFromBundleAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Service for syncing air-gap bundles to the central scheduler.
/// </summary>
public sealed class AirGapSyncService : IAirGapSyncService
{
    private readonly IHlcMergeService _mergeService;
    private readonly ISyncSchedulerLogRepository _schedulerLogRepo;
    private readonly IHybridLogicalClock _hlc;
    private readonly ILogger<AirGapSyncService> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="AirGapSyncService"/> class.
    /// </summary>
    public AirGapSyncService(
        IHlcMergeService mergeService,
        ISyncSchedulerLogRepository schedulerLogRepo,
        IHybridLogicalClock hlc,
        ILogger<AirGapSyncService> logger)
    {
        _mergeService = mergeService ?? throw new ArgumentNullException(nameof(mergeService));
        _schedulerLogRepo = schedulerLogRepo ?? throw new ArgumentNullException(nameof(schedulerLogRepo));
        _hlc = hlc ?? throw new ArgumentNullException(nameof(hlc));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public async Task<SyncResult> SyncFromBundleAsync(
        AirGapBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        _logger.LogInformation(
            "Starting sync from bundle {BundleId} with {LogCount} node logs for tenant {TenantId}",
            bundle.BundleId, bundle.JobLogs.Count, bundle.TenantId);

        // 1. Merge all offline logs
        var merged = await _mergeService.MergeAsync(bundle.JobLogs, cancellationToken)
            .ConfigureAwait(false);

        if (merged.MergedEntries.Count == 0)
        {
            _logger.LogInformation("Bundle {BundleId} has no entries to sync", bundle.BundleId);
            return new SyncResult
            {
                BundleId = bundle.BundleId,
                TotalInBundle = 0,
                Appended = 0,
                Duplicates = 0,
                AlreadyExisted = 0
            };
        }

        // 2. Get current scheduler chain head
        var (currentLink, _) = await _schedulerLogRepo.GetChainHeadAsync(
            bundle.TenantId,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        // 3. For each merged entry, update HLC clock (receive)
        // This ensures central clock advances past all offline timestamps
        foreach (var entry in merged.MergedEntries)
        {
            _hlc.Receive(entry.THlc);
        }

        // 4. Append merged entries to scheduler log
        // Chain links recomputed to extend from current head
        byte[]? prevLink = currentLink;
        var appended = 0;
        var alreadyExisted = 0;
        var warnings = new List<string>();

        foreach (var entry in merged.MergedEntries)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Check if job already exists (idempotency)
            var exists = await _schedulerLogRepo.ExistsByJobIdAsync(
                bundle.TenantId,
                entry.JobId,
                cancellationToken).ConfigureAwait(false);

            if (exists)
            {
                _logger.LogDebug(
                    "Job {JobId} already exists in scheduler log, skipping",
                    entry.JobId);
                alreadyExisted++;
                continue;
            }

            // Compute new chain link extending from current chain
            var newLink = OfflineHlcManager.ComputeLink(
                prevLink,
                entry.JobId,
                entry.THlc,
                entry.PayloadHash);

            // Insert the entry
            await _schedulerLogRepo.InsertSyncedEntryAsync(
                bundle.TenantId,
                entry.THlc.ToSortableString(),
                entry.PartitionKey,
                entry.JobId,
                entry.PayloadHash,
                prevLink,
                newLink,
                entry.SourceNodeId,
                bundle.BundleId,
                cancellationToken).ConfigureAwait(false);

            prevLink = newLink;
            appended++;
        }

        _logger.LogInformation(
            "Sync complete for bundle {BundleId}: {Appended} appended, {Duplicates} duplicates, {AlreadyExisted} already existed",
            bundle.BundleId, appended, merged.Duplicates.Count, alreadyExisted);

        return new SyncResult
        {
            BundleId = bundle.BundleId,
            TotalInBundle = merged.MergedEntries.Count,
            Appended = appended,
            Duplicates = merged.Duplicates.Count,
            AlreadyExisted = alreadyExisted,
            NewChainHead = prevLink,
            Warnings = warnings.Count > 0 ? warnings : null
        };
    }
}
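
// Flow sketch (illustrative only): one plausible import-side pipeline composed from the
// interfaces above. Verify the DSSE signature, validate chain integrity, then append into
// the scheduler log. The ordering and error handling are assumptions, not mandated here.
file static class AirGapSyncFlowSketch
{
    public static async Task<SyncResult> ImportVerifyAndSyncAsync(
        IAirGapBundleImporter importer,
        IAirGapBundleDsseSigner signer,
        IAirGapSyncService syncService,
        string bundlePath,
        CancellationToken cancellationToken)
    {
        var bundle = await importer.ImportFromFileAsync(bundlePath, cancellationToken).ConfigureAwait(false);

        var verification = await signer.VerifyAsync(bundle, cancellationToken).ConfigureAwait(false);
        if (!verification.IsValid)
        {
            throw new InvalidOperationException($"Bundle signature rejected: {verification.Reason}");
        }

        var validation = importer.Validate(bundle);
        if (!validation.IsValid)
        {
            throw new InvalidOperationException("Bundle chain validation failed");
        }

        // SyncFromBundleAsync merges node logs, advances the central HLC via Receive,
        // and appends idempotently (existing JobIds are counted, not re-inserted).
        return await syncService.SyncFromBundleAsync(bundle, cancellationToken).ConfigureAwait(false);
    }
}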
@@ -0,0 +1,114 @@
// <copyright file="ConflictResolver.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for conflict resolution during merge.
/// </summary>
public interface IConflictResolver
{
    /// <summary>
    /// Resolves conflicts when the same JobId appears in multiple entries.
    /// </summary>
    /// <param name="jobId">The conflicting job ID.</param>
    /// <param name="conflicting">The conflicting entries with their source nodes.</param>
    /// <returns>The resolution result.</returns>
    ConflictResolution Resolve(
        Guid jobId,
        IReadOnlyList<(string NodeId, OfflineJobLogEntry Entry)> conflicting);
}

/// <summary>
/// Resolves conflicts during HLC merge operations.
/// </summary>
public sealed class ConflictResolver : IConflictResolver
{
    private readonly ILogger<ConflictResolver> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="ConflictResolver"/> class.
    /// </summary>
    public ConflictResolver(ILogger<ConflictResolver> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public ConflictResolution Resolve(
        Guid jobId,
        IReadOnlyList<(string NodeId, OfflineJobLogEntry Entry)> conflicting)
    {
        ArgumentNullException.ThrowIfNull(conflicting);

        if (conflicting.Count == 0)
        {
            throw new ArgumentException("Conflicting list cannot be empty", nameof(conflicting));
        }

        if (conflicting.Count == 1)
        {
            // No conflict
            return new ConflictResolution
            {
                Type = ConflictType.DuplicateTimestamp,
                Resolution = ResolutionStrategy.TakeEarliest,
                SelectedEntry = conflicting[0].Entry,
                DroppedEntries = Array.Empty<OfflineJobLogEntry>()
            };
        }

        // Verify payloads are actually different
        var uniquePayloads = conflicting
            .Select(c => Convert.ToHexString(c.Entry.PayloadHash))
            .Distinct()
            .ToList();

        if (uniquePayloads.Count == 1)
        {
            // Same payload, different HLC timestamps - not a real conflict
            // Take the earliest HLC (preserves causality)
            var sorted = conflicting
                .OrderBy(c => c.Entry.THlc.PhysicalTime)
                .ThenBy(c => c.Entry.THlc.LogicalCounter)
                .ThenBy(c => c.Entry.THlc.NodeId, StringComparer.Ordinal)
                .ToList();

            var earliest = sorted[0];
            var dropped = sorted.Skip(1).Select(s => s.Entry).ToList();

            _logger.LogDebug(
                "Resolved duplicate timestamp conflict for JobId {JobId}: selected entry from node {NodeId} at {THlc}, dropped {DroppedCount} duplicates",
                jobId, earliest.NodeId, earliest.Entry.THlc, dropped.Count);

            return new ConflictResolution
            {
                Type = ConflictType.DuplicateTimestamp,
                Resolution = ResolutionStrategy.TakeEarliest,
                SelectedEntry = earliest.Entry,
                DroppedEntries = dropped
            };
        }

        // Actual conflict: same JobId, different payloads
        // This indicates a bug in deterministic ID computation
        var nodeIds = string.Join(", ", conflicting.Select(c => c.NodeId));
        var payloadHashes = string.Join(", ", conflicting.Select(c => Convert.ToHexString(c.Entry.PayloadHash)[..16] + "..."));

        _logger.LogError(
            "Payload mismatch conflict for JobId {JobId}: different payloads from nodes [{NodeIds}] with hashes [{PayloadHashes}]",
            jobId, nodeIds, payloadHashes);

        return new ConflictResolution
        {
            Type = ConflictType.PayloadMismatch,
            Resolution = ResolutionStrategy.Error,
            Error = $"JobId {jobId} has conflicting payloads from nodes: {nodeIds}. " +
                "This indicates a bug in deterministic job ID computation or payload tampering."
        };
    }
}
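
// Behaviour sketch (illustrative only): how a caller might react to the two outcomes above.
// Entries are expected to come from the merge path; none are constructed here, and the
// throw-on-error policy mirrors HlcMergeService but is an assumption for other callers.
file static class ConflictResolutionHandlingSketch
{
    public static OfflineJobLogEntry? PickOrThrow(
        IConflictResolver resolver,
        Guid jobId,
        IReadOnlyList<(string NodeId, OfflineJobLogEntry Entry)> conflicting)
    {
        var resolution = resolver.Resolve(jobId, conflicting);

        // Same JobId + same payload hash  => benign duplicate, earliest HLC wins.
        // Same JobId + different payloads => ResolutionStrategy.Error; the merge must stop.
        return resolution.Resolution == ResolutionStrategy.Error
            ? throw new InvalidOperationException(resolution.Error)
            : resolution.SelectedEntry;
    }
}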
@@ -0,0 +1,169 @@
// <copyright file="HlcMergeService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for HLC-based merge operations.
/// </summary>
public interface IHlcMergeService
{
    /// <summary>
    /// Merges job logs from multiple offline nodes into a unified, HLC-ordered stream.
    /// </summary>
    /// <param name="nodeLogs">The node logs to merge.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The merge result.</returns>
    Task<MergeResult> MergeAsync(
        IReadOnlyList<NodeJobLog> nodeLogs,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Service for merging job logs from multiple offline nodes using HLC total ordering.
/// </summary>
public sealed class HlcMergeService : IHlcMergeService
{
    private readonly IConflictResolver _conflictResolver;
    private readonly ILogger<HlcMergeService> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="HlcMergeService"/> class.
    /// </summary>
    public HlcMergeService(
        IConflictResolver conflictResolver,
        ILogger<HlcMergeService> logger)
    {
        _conflictResolver = conflictResolver ?? throw new ArgumentNullException(nameof(conflictResolver));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public Task<MergeResult> MergeAsync(
        IReadOnlyList<NodeJobLog> nodeLogs,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(nodeLogs);
        cancellationToken.ThrowIfCancellationRequested();

        if (nodeLogs.Count == 0)
        {
            return Task.FromResult(new MergeResult
            {
                MergedEntries = Array.Empty<MergedJobEntry>(),
                Duplicates = Array.Empty<DuplicateEntry>(),
                SourceNodes = Array.Empty<string>()
            });
        }

        _logger.LogInformation(
            "Starting merge of {NodeCount} node logs with {TotalEntries} total entries",
            nodeLogs.Count,
            nodeLogs.Sum(l => l.Entries.Count));

        // 1. Collect all entries from all nodes
        var allEntries = nodeLogs
            .SelectMany(log => log.Entries.Select(e => (log.NodeId, Entry: e)))
            .ToList();

        // 2. Sort by HLC total order: (PhysicalTime, LogicalCounter, NodeId, JobId)
        var sorted = allEntries
            .OrderBy(x => x.Entry.THlc.PhysicalTime)
            .ThenBy(x => x.Entry.THlc.LogicalCounter)
            .ThenBy(x => x.Entry.THlc.NodeId, StringComparer.Ordinal)
            .ThenBy(x => x.Entry.JobId)
            .ToList();

        // 3. Group by JobId to detect duplicates
        var groupedByJobId = sorted.GroupBy(x => x.Entry.JobId).ToList();

        var deduplicated = new List<MergedJobEntry>();
        var duplicates = new List<DuplicateEntry>();

        foreach (var group in groupedByJobId)
        {
            var entries = group.ToList();

            if (entries.Count == 1)
            {
                // No conflict - add directly
                var (nodeId, entry) = entries[0];
                deduplicated.Add(CreateMergedEntry(nodeId, entry));
            }
            else
            {
                // Multiple entries with same JobId - resolve conflict
                var resolution = _conflictResolver.Resolve(group.Key, entries);

                if (resolution.Resolution == ResolutionStrategy.Error)
                {
                    _logger.LogError(
                        "Conflict resolution failed for JobId {JobId}: {Error}",
                        group.Key, resolution.Error);
                    throw new InvalidOperationException(resolution.Error);
                }

                // Add the selected entry
                if (resolution.SelectedEntry is not null)
                {
                    var sourceEntry = entries.First(e => e.Entry == resolution.SelectedEntry);
                    deduplicated.Add(CreateMergedEntry(sourceEntry.NodeId, resolution.SelectedEntry));
                }

                // Record duplicates
                foreach (var dropped in resolution.DroppedEntries ?? Array.Empty<OfflineJobLogEntry>())
                {
                    var sourceEntry = entries.First(e => e.Entry == dropped);
                    duplicates.Add(new DuplicateEntry(dropped.JobId, sourceEntry.NodeId, dropped.THlc));
                }
            }
        }

        // 4. Sort deduplicated entries by HLC order
        deduplicated = deduplicated
            .OrderBy(x => x.THlc.PhysicalTime)
            .ThenBy(x => x.THlc.LogicalCounter)
            .ThenBy(x => x.THlc.NodeId, StringComparer.Ordinal)
            .ThenBy(x => x.JobId)
            .ToList();

        // 5. Recompute unified chain
        byte[]? prevLink = null;
        foreach (var entry in deduplicated)
        {
            entry.MergedLink = OfflineHlcManager.ComputeLink(
                prevLink,
                entry.JobId,
                entry.THlc,
                entry.PayloadHash);
            prevLink = entry.MergedLink;
        }

        _logger.LogInformation(
            "Merge complete: {MergedCount} entries, {DuplicateCount} duplicates dropped",
            deduplicated.Count, duplicates.Count);

        return Task.FromResult(new MergeResult
        {
            MergedEntries = deduplicated,
            Duplicates = duplicates,
            MergedChainHead = prevLink,
            SourceNodes = nodeLogs.Select(l => l.NodeId).ToList()
        });
    }

    private static MergedJobEntry CreateMergedEntry(string nodeId, OfflineJobLogEntry entry) => new()
    {
        SourceNodeId = nodeId,
        THlc = entry.THlc,
        JobId = entry.JobId,
        PartitionKey = entry.PartitionKey,
        Payload = entry.Payload,
        PayloadHash = entry.PayloadHash,
        OriginalLink = entry.Link
    };
}
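
// Ordering sketch (illustrative only): the HLC total order used twice above, expressed as a
// single comparison. Assumes PhysicalTime/LogicalCounter are comparable values, as their use
// in OrderBy/ThenBy above implies.
file static class HlcTotalOrderSketch
{
    // Example intuition: (1000, 0, "node-a") < (1000, 0, "node-b") < (1000, 1, "node-a") < (1001, 0, "node-a"),
    // with JobId as the final tie-breaker so the merged order is fully deterministic.
    public static int Compare(MergedJobEntry x, MergedJobEntry y)
    {
        var byPhysical = x.THlc.PhysicalTime.CompareTo(y.THlc.PhysicalTime);
        if (byPhysical != 0) return byPhysical;

        var byLogical = x.THlc.LogicalCounter.CompareTo(y.THlc.LogicalCounter);
        if (byLogical != 0) return byLogical;

        var byNode = string.CompareOrdinal(x.THlc.NodeId, y.THlc.NodeId);
        if (byNode != 0) return byNode;

        return x.JobId.CompareTo(y.JobId);
    }
}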
@@ -0,0 +1,172 @@
// <copyright file="OfflineHlcManager.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Stores;
using StellaOps.Canonical.Json;
using StellaOps.Determinism;
using StellaOps.HybridLogicalClock;

namespace StellaOps.AirGap.Sync.Services;

/// <summary>
/// Interface for offline HLC management.
/// </summary>
public interface IOfflineHlcManager
{
    /// <summary>
    /// Enqueues a job locally while offline, maintaining the local chain.
    /// </summary>
    /// <typeparam name="T">The payload type.</typeparam>
    /// <param name="payload">The job payload.</param>
    /// <param name="idempotencyKey">The idempotency key for deterministic job ID.</param>
    /// <param name="partitionKey">Optional partition key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The enqueue result.</returns>
    Task<OfflineEnqueueResult> EnqueueOfflineAsync<T>(
        T payload,
        string idempotencyKey,
        string? partitionKey = null,
        CancellationToken cancellationToken = default) where T : notnull;

    /// <summary>
    /// Gets the current node's job log for export.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The node job log, or null if empty.</returns>
    Task<NodeJobLog?> GetNodeJobLogAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the node ID.
    /// </summary>
    string NodeId { get; }
}

/// <summary>
/// Manages HLC operations for offline/air-gap scenarios.
/// </summary>
public sealed class OfflineHlcManager : IOfflineHlcManager
{
    private readonly IHybridLogicalClock _hlc;
    private readonly IOfflineJobLogStore _jobLogStore;
    private readonly IGuidProvider _guidProvider;
    private readonly ILogger<OfflineHlcManager> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="OfflineHlcManager"/> class.
    /// </summary>
    public OfflineHlcManager(
        IHybridLogicalClock hlc,
        IOfflineJobLogStore jobLogStore,
        IGuidProvider guidProvider,
        ILogger<OfflineHlcManager> logger)
    {
        _hlc = hlc ?? throw new ArgumentNullException(nameof(hlc));
        _jobLogStore = jobLogStore ?? throw new ArgumentNullException(nameof(jobLogStore));
        _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public string NodeId => _hlc.NodeId;

    /// <inheritdoc/>
    public async Task<OfflineEnqueueResult> EnqueueOfflineAsync<T>(
        T payload,
        string idempotencyKey,
        string? partitionKey = null,
        CancellationToken cancellationToken = default) where T : notnull
    {
        ArgumentNullException.ThrowIfNull(payload);
        ArgumentException.ThrowIfNullOrWhiteSpace(idempotencyKey);

        // 1. Generate HLC timestamp
        var tHlc = _hlc.Tick();

        // 2. Compute deterministic job ID from idempotency key
        var jobId = ComputeDeterministicJobId(idempotencyKey);

        // 3. Serialize and hash payload
        var payloadJson = CanonJson.Serialize(payload);
        var payloadHash = SHA256.HashData(Encoding.UTF8.GetBytes(payloadJson));

        // 4. Get previous chain link
        var prevLink = await _jobLogStore.GetLastLinkAsync(NodeId, cancellationToken)
            .ConfigureAwait(false);

        // 5. Compute chain link
        var link = ComputeLink(prevLink, jobId, tHlc, payloadHash);

        // 6. Create and store entry
        var entry = new OfflineJobLogEntry
        {
            NodeId = NodeId,
            THlc = tHlc,
            JobId = jobId,
            PartitionKey = partitionKey,
            Payload = payloadJson,
            PayloadHash = payloadHash,
            PrevLink = prevLink,
            Link = link,
            EnqueuedAt = DateTimeOffset.UtcNow
        };

        await _jobLogStore.AppendAsync(entry, cancellationToken).ConfigureAwait(false);

        _logger.LogInformation(
            "Enqueued offline job {JobId} with HLC {THlc} on node {NodeId}",
            jobId, tHlc, NodeId);

        return new OfflineEnqueueResult
        {
            THlc = tHlc,
            JobId = jobId,
            Link = link,
            NodeId = NodeId
        };
    }

    /// <inheritdoc/>
    public Task<NodeJobLog?> GetNodeJobLogAsync(CancellationToken cancellationToken = default)
        => _jobLogStore.GetNodeJobLogAsync(NodeId, cancellationToken);

    /// <summary>
    /// Computes deterministic job ID from idempotency key.
    /// </summary>
    private Guid ComputeDeterministicJobId(string idempotencyKey)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(idempotencyKey));
        // Use first 16 bytes of SHA-256 as deterministic GUID
        return new Guid(hash.AsSpan(0, 16));
    }

    /// <summary>
    /// Computes chain link: Hash(prev_link || job_id || t_hlc || payload_hash).
    /// </summary>
    internal static byte[] ComputeLink(
        byte[]? prevLink,
        Guid jobId,
        HlcTimestamp tHlc,
        byte[] payloadHash)
    {
        using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);

        // Previous link (or 32 zero bytes for first entry)
        hasher.AppendData(prevLink ?? new byte[32]);

        // Job ID as bytes
        hasher.AppendData(jobId.ToByteArray());

        // HLC timestamp as UTF-8 bytes
        hasher.AppendData(Encoding.UTF8.GetBytes(tHlc.ToSortableString()));

        // Payload hash
        hasher.AppendData(payloadHash);

        return hasher.GetHashAndReset();
    }
}
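
// Determinism sketch (illustrative only): the same idempotency key always yields the same
// JobId (first 16 bytes of SHA-256), so retried offline enqueues collapse to one entry, and
// any holder of the log can replay the chain exactly as AirGapBundleImporter.ValidateNodeLog does.
file static class OfflineChainSketch
{
    public static bool ReplayChain(IReadOnlyList<OfflineJobLogEntry> entries)
    {
        byte[]? prev = null;
        foreach (var entry in entries)
        {
            // Recompute Hash(prev_link || job_id || t_hlc || payload_hash) and compare to the stored link.
            var recomputed = OfflineHlcManager.ComputeLink(prev, entry.JobId, entry.THlc, entry.PayloadHash);
            if (!recomputed.AsSpan().SequenceEqual(entry.Link))
            {
                return false; // chain broken or payload tampered
            }

            prev = entry.Link;
        }

        return true;
    }
}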
@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
    <ProjectReference Include="..\..\..\Scheduler\__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" />
  </ItemGroup>
</Project>
|
||||
@@ -0,0 +1,246 @@
// <copyright file="FileBasedOfflineJobLogStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.Canonical.Json;
using StellaOps.HybridLogicalClock;

namespace StellaOps.AirGap.Sync.Stores;

/// <summary>
/// Options for the file-based offline job log store.
/// </summary>
public sealed class FileBasedOfflineJobLogStoreOptions
{
    /// <summary>
    /// Gets or sets the directory for storing offline job logs.
    /// </summary>
    public string DataDirectory { get; set; } = "./offline-job-logs";
}

/// <summary>
/// File-based implementation of <see cref="IOfflineJobLogStore"/> for air-gap scenarios.
/// </summary>
public sealed class FileBasedOfflineJobLogStore : IOfflineJobLogStore
{
    private readonly IOptions<FileBasedOfflineJobLogStoreOptions> _options;
    private readonly ILogger<FileBasedOfflineJobLogStore> _logger;
    private readonly SemaphoreSlim _lock = new(1, 1);

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="FileBasedOfflineJobLogStore"/> class.
    /// </summary>
    public FileBasedOfflineJobLogStore(
        IOptions<FileBasedOfflineJobLogStoreOptions> options,
        ILogger<FileBasedOfflineJobLogStore> logger)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        EnsureDirectoryExists();
    }

    /// <inheritdoc/>
    public async Task AppendAsync(OfflineJobLogEntry entry, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entry);

        await _lock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var filePath = GetNodeLogFilePath(entry.NodeId);
            var dto = ToDto(entry);
            var line = JsonSerializer.Serialize(dto, JsonOptions);

            await File.AppendAllTextAsync(filePath, line + Environment.NewLine, cancellationToken)
                .ConfigureAwait(false);

            _logger.LogDebug(
                "Appended offline job entry {JobId} for node {NodeId}",
                entry.JobId, entry.NodeId);
        }
        finally
        {
            _lock.Release();
        }
    }
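
    // Each append writes one camelCase JSON object per line (NDJSON), so the on-disk
    // log stays append-only and can be inspected or merged with line-oriented tooling.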

    /// <inheritdoc/>
    public async Task<IReadOnlyList<OfflineJobLogEntry>> GetEntriesAsync(
        string nodeId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        var filePath = GetNodeLogFilePath(nodeId);
        if (!File.Exists(filePath))
        {
            return Array.Empty<OfflineJobLogEntry>();
        }

        await _lock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var lines = await File.ReadAllLinesAsync(filePath, cancellationToken).ConfigureAwait(false);
            var entries = new List<OfflineJobLogEntry>(lines.Length);

            foreach (var line in lines)
            {
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue;
                }

                var dto = JsonSerializer.Deserialize<OfflineJobLogEntryDto>(line, JsonOptions);
                if (dto is not null)
                {
                    entries.Add(FromDto(dto));
                }
            }

            // Return in HLC order
            return entries.OrderBy(e => e.THlc).ToList();
        }
        finally
        {
            _lock.Release();
        }
    }

    /// <inheritdoc/>
    public async Task<byte[]?> GetLastLinkAsync(string nodeId, CancellationToken cancellationToken = default)
    {
        var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false);
        return entries.Count > 0 ? entries[^1].Link : null;
    }

    /// <inheritdoc/>
    public async Task<NodeJobLog?> GetNodeJobLogAsync(string nodeId, CancellationToken cancellationToken = default)
    {
        var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false);
        if (entries.Count == 0)
        {
            return null;
        }

        var lastEntry = entries[^1];
        return new NodeJobLog
        {
            NodeId = nodeId,
            LastHlc = lastEntry.THlc,
            ChainHead = lastEntry.Link,
            Entries = entries
        };
    }

    /// <inheritdoc/>
    public async Task<int> ClearEntriesAsync(
        string nodeId,
        string upToHlc,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        // Read outside the lock: GetEntriesAsync acquires the same non-reentrant
        // semaphore, so calling it while holding _lock would self-deadlock.
        var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false);

        await _lock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var remaining = entries
                .Where(e => string.CompareOrdinal(e.THlc.ToSortableString(), upToHlc) > 0)
                .ToList();

            var cleared = entries.Count - remaining.Count;

            if (remaining.Count == 0)
            {
                var filePath = GetNodeLogFilePath(nodeId);
                if (File.Exists(filePath))
                {
                    File.Delete(filePath);
                }
            }
            else
            {
                // Rewrite with remaining entries
                var filePath = GetNodeLogFilePath(nodeId);
                var lines = remaining.Select(e => JsonSerializer.Serialize(ToDto(e), JsonOptions));
                await File.WriteAllLinesAsync(filePath, lines, cancellationToken).ConfigureAwait(false);
            }

            _logger.LogInformation(
                "Cleared {Count} offline job entries for node {NodeId} up to HLC {UpToHlc}",
                cleared, nodeId, upToHlc);

            return cleared;
        }
        finally
        {
            _lock.Release();
        }
    }
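
    // Illustrative wiring sketch (assumption; DI registration is not part of this change):
    //   services.Configure<FileBasedOfflineJobLogStoreOptions>(
    //       o => o.DataDirectory = "/var/lib/stellaops/offline-job-logs");
    //   services.AddSingleton<IOfflineJobLogStore, FileBasedOfflineJobLogStore>();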

    private string GetNodeLogFilePath(string nodeId)
    {
        var safeNodeId = nodeId.Replace('/', '_').Replace('\\', '_').Replace(':', '_');
        return Path.Combine(_options.Value.DataDirectory, $"offline-jobs-{safeNodeId}.ndjson");
    }

    private void EnsureDirectoryExists()
    {
        var dir = _options.Value.DataDirectory;
        if (!Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
            _logger.LogInformation("Created offline job log directory: {Directory}", dir);
        }
    }

    private static OfflineJobLogEntryDto ToDto(OfflineJobLogEntry entry) => new()
    {
        NodeId = entry.NodeId,
        THlc = entry.THlc.ToSortableString(),
        JobId = entry.JobId,
        PartitionKey = entry.PartitionKey,
        Payload = entry.Payload,
        PayloadHash = Convert.ToBase64String(entry.PayloadHash),
        PrevLink = entry.PrevLink is not null ? Convert.ToBase64String(entry.PrevLink) : null,
        Link = Convert.ToBase64String(entry.Link),
        EnqueuedAt = entry.EnqueuedAt
    };

    private static OfflineJobLogEntry FromDto(OfflineJobLogEntryDto dto) => new()
    {
        NodeId = dto.NodeId,
        THlc = HlcTimestamp.Parse(dto.THlc),
        JobId = dto.JobId,
        PartitionKey = dto.PartitionKey,
        Payload = dto.Payload,
        PayloadHash = Convert.FromBase64String(dto.PayloadHash),
        PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null,
        Link = Convert.FromBase64String(dto.Link),
        EnqueuedAt = dto.EnqueuedAt
    };

    private sealed record OfflineJobLogEntryDto
    {
        public required string NodeId { get; init; }
        public required string THlc { get; init; }
        public required Guid JobId { get; init; }
        public string? PartitionKey { get; init; }
        public required string Payload { get; init; }
        public required string PayloadHash { get; init; }
        public string? PrevLink { get; init; }
        public required string Link { get; init; }
        public DateTimeOffset EnqueuedAt { get; init; }
    }
}
@@ -0,0 +1,58 @@
// <copyright file="IOfflineJobLogStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using StellaOps.AirGap.Sync.Models;

namespace StellaOps.AirGap.Sync.Stores;

/// <summary>
/// Interface for storing offline job log entries.
/// </summary>
public interface IOfflineJobLogStore
{
    /// <summary>
    /// Appends an entry to the offline job log.
    /// </summary>
    /// <param name="entry">The entry to append.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task AppendAsync(OfflineJobLogEntry entry, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets all entries for a node.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>All entries in HLC order.</returns>
    Task<IReadOnlyList<OfflineJobLogEntry>> GetEntriesAsync(
        string nodeId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the last chain link for a node.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The last link, or null if no entries exist.</returns>
    Task<byte[]?> GetLastLinkAsync(string nodeId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the node job log for export.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The complete node job log.</returns>
    Task<NodeJobLog?> GetNodeJobLogAsync(string nodeId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Clears entries for a node after successful sync.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="upToHlc">Clear entries up to and including this HLC timestamp.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries cleared.</returns>
    Task<int> ClearEntriesAsync(
        string nodeId,
        string upToHlc,
        CancellationToken cancellationToken = default);
}
@@ -0,0 +1,161 @@
// <copyright file="AirGapSyncMetrics.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Diagnostics.Metrics;
using StellaOps.AirGap.Sync.Models;

namespace StellaOps.AirGap.Sync.Telemetry;

/// <summary>
/// Metrics for air-gap sync operations.
/// </summary>
public static class AirGapSyncMetrics
{
    private const string NodeIdTag = "node_id";
    private const string TenantIdTag = "tenant_id";
    private const string ConflictTypeTag = "conflict_type";

    private static readonly Meter Meter = new("StellaOps.AirGap.Sync");

    // Counters
    private static readonly Counter<long> BundlesExportedCounter = Meter.CreateCounter<long>(
        "airgap_bundles_exported_total",
        unit: "{bundle}",
        description: "Total number of air-gap bundles exported");

    private static readonly Counter<long> BundlesImportedCounter = Meter.CreateCounter<long>(
        "airgap_bundles_imported_total",
        unit: "{bundle}",
        description: "Total number of air-gap bundles imported");

    private static readonly Counter<long> JobsSyncedCounter = Meter.CreateCounter<long>(
        "airgap_jobs_synced_total",
        unit: "{job}",
        description: "Total number of jobs synced from air-gap bundles");

    private static readonly Counter<long> DuplicatesDroppedCounter = Meter.CreateCounter<long>(
        "airgap_duplicates_dropped_total",
        unit: "{duplicate}",
        description: "Total number of duplicate entries dropped during merge");

    private static readonly Counter<long> MergeConflictsCounter = Meter.CreateCounter<long>(
        "airgap_merge_conflicts_total",
        unit: "{conflict}",
        description: "Total number of merge conflicts by type");

    private static readonly Counter<long> OfflineEnqueuesCounter = Meter.CreateCounter<long>(
        "airgap_offline_enqueues_total",
        unit: "{enqueue}",
        description: "Total number of offline enqueue operations");

    // Histograms
    private static readonly Histogram<double> BundleSizeHistogram = Meter.CreateHistogram<double>(
        "airgap_bundle_size_bytes",
        unit: "By",
        description: "Size of air-gap bundles in bytes");

    private static readonly Histogram<double> SyncDurationHistogram = Meter.CreateHistogram<double>(
        "airgap_sync_duration_seconds",
        unit: "s",
        description: "Duration of air-gap sync operations");

    private static readonly Histogram<int> MergeEntriesHistogram = Meter.CreateHistogram<int>(
        "airgap_merge_entries_count",
        unit: "{entry}",
        description: "Number of entries in merge operations");

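    // Illustrative consumption sketch (not part of this change): any
    // System.Diagnostics.Metrics listener can subscribe to this meter by name, e.g.
    //   using var listener = new MeterListener();
    //   listener.InstrumentPublished = (instrument, l) =>
    //   {
    //       if (instrument.Meter.Name == "StellaOps.AirGap.Sync")
    //       {
    //           l.EnableMeasurementEvents(instrument);
    //       }
    //   };
    //   listener.Start();
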
    /// <summary>
    /// Records a bundle export.
    /// </summary>
    /// <param name="nodeId">The node ID that exported.</param>
    /// <param name="tenantId">The tenant ID.</param>
    /// <param name="entryCount">Number of entries in the bundle.</param>
    public static void RecordBundleExported(string nodeId, string tenantId, int entryCount)
    {
        BundlesExportedCounter.Add(1,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId),
            new KeyValuePair<string, object?>(TenantIdTag, tenantId));
        MergeEntriesHistogram.Record(entryCount,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId));
    }

    /// <summary>
    /// Records a bundle import.
    /// </summary>
    /// <param name="nodeId">The node ID that imported.</param>
    /// <param name="tenantId">The tenant ID.</param>
    public static void RecordBundleImported(string nodeId, string tenantId)
    {
        BundlesImportedCounter.Add(1,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId),
            new KeyValuePair<string, object?>(TenantIdTag, tenantId));
    }

    /// <summary>
    /// Records jobs synced from a bundle.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="count">Number of jobs synced.</param>
    public static void RecordJobsSynced(string nodeId, int count)
    {
        JobsSyncedCounter.Add(count,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId));
    }

    /// <summary>
    /// Records duplicates dropped during merge.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="count">Number of duplicates dropped.</param>
    public static void RecordDuplicatesDropped(string nodeId, int count)
    {
        if (count > 0)
        {
            DuplicatesDroppedCounter.Add(count,
                new KeyValuePair<string, object?>(NodeIdTag, nodeId));
        }
    }

    /// <summary>
    /// Records a merge conflict.
    /// </summary>
    /// <param name="conflictType">The type of conflict.</param>
    public static void RecordMergeConflict(ConflictType conflictType)
    {
        MergeConflictsCounter.Add(1,
            new KeyValuePair<string, object?>(ConflictTypeTag, conflictType.ToString()));
    }

    /// <summary>
    /// Records an offline enqueue operation.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    public static void RecordOfflineEnqueue(string nodeId)
    {
        OfflineEnqueuesCounter.Add(1,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId));
    }

    /// <summary>
    /// Records bundle size.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="sizeBytes">Size in bytes.</param>
    public static void RecordBundleSize(string nodeId, long sizeBytes)
    {
        BundleSizeHistogram.Record(sizeBytes,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId));
    }

    /// <summary>
    /// Records sync duration.
    /// </summary>
    /// <param name="nodeId">The node ID.</param>
    /// <param name="durationSeconds">Duration in seconds.</param>
    public static void RecordSyncDuration(string nodeId, double durationSeconds)
    {
        SyncDurationHistogram.Record(durationSeconds,
            new KeyValuePair<string, object?>(NodeIdTag, nodeId));
    }
}
@@ -0,0 +1,221 @@
// <copyright file="FileBasedJobSyncTransport.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.AirGap.Sync.Telemetry;

namespace StellaOps.AirGap.Sync.Transport;

/// <summary>
/// File-based transport for job sync bundles in air-gapped scenarios.
/// </summary>
public sealed class FileBasedJobSyncTransport : IJobSyncTransport
{
    private readonly IAirGapBundleExporter _exporter;
    private readonly IAirGapBundleImporter _importer;
    private readonly FileBasedJobSyncTransportOptions _options;
    private readonly ILogger<FileBasedJobSyncTransport> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="FileBasedJobSyncTransport"/> class.
    /// </summary>
    public FileBasedJobSyncTransport(
        IAirGapBundleExporter exporter,
        IAirGapBundleImporter importer,
        IOptions<FileBasedJobSyncTransportOptions> options,
        ILogger<FileBasedJobSyncTransport> logger)
    {
        _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter));
        _importer = importer ?? throw new ArgumentNullException(nameof(importer));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public string TransportId => "file";

    /// <inheritdoc/>
    public async Task<JobSyncSendResult> SendBundleAsync(
        AirGapBundle bundle,
        string destination,
        CancellationToken cancellationToken = default)
    {
        var startTime = DateTimeOffset.UtcNow;

        try
        {
            // Ensure destination directory exists
            var destPath = Path.IsPathRooted(destination)
                ? destination
                : Path.Combine(_options.OutputDirectory, destination);

            Directory.CreateDirectory(destPath);

            // Export to file
            var filePath = Path.Combine(destPath, $"job-sync-{bundle.BundleId:N}.json");
            await _exporter.ExportToFileAsync(bundle, filePath, cancellationToken)
                .ConfigureAwait(false);

            var fileInfo = new FileInfo(filePath);
            var sizeBytes = fileInfo.Exists ? fileInfo.Length : 0;

            _logger.LogInformation(
                "Exported job sync bundle {BundleId} to {Path} ({Size} bytes)",
                bundle.BundleId,
                filePath,
                sizeBytes);

            AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, sizeBytes);

            return new JobSyncSendResult
            {
                Success = true,
                BundleId = bundle.BundleId,
                Destination = filePath,
                TransmittedAt = startTime,
                SizeBytes = sizeBytes
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to export job sync bundle {BundleId}", bundle.BundleId);

            return new JobSyncSendResult
            {
                Success = false,
                BundleId = bundle.BundleId,
                Destination = destination,
                Error = ex.Message,
                TransmittedAt = startTime
            };
        }
    }

    /// <inheritdoc/>
    public async Task<AirGapBundle?> ReceiveBundleAsync(
        string source,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var sourcePath = Path.IsPathRooted(source)
                ? source
                : Path.Combine(_options.InputDirectory, source);

            if (!File.Exists(sourcePath))
            {
                _logger.LogWarning("Job sync bundle file not found: {Path}", sourcePath);
                return null;
            }

            var bundle = await _importer.ImportFromFileAsync(sourcePath, cancellationToken)
                .ConfigureAwait(false);

            _logger.LogInformation(
                "Imported job sync bundle {BundleId} from {Path}",
                bundle.BundleId,
                sourcePath);

            return bundle;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to import job sync bundle from {Source}", source);
            return null;
        }
    }

    /// <inheritdoc/>
    public Task<IReadOnlyList<BundleInfo>> ListAvailableBundlesAsync(
        string source,
        CancellationToken cancellationToken = default)
    {
        var sourcePath = Path.IsPathRooted(source)
            ? source
            : Path.Combine(_options.InputDirectory, source);

        var bundles = new List<BundleInfo>();

        if (!Directory.Exists(sourcePath))
        {
            return Task.FromResult<IReadOnlyList<BundleInfo>>(bundles);
        }

        var files = Directory.GetFiles(sourcePath, "job-sync-*.json");

        foreach (var file in files)
        {
            try
            {
                // Quick parse to extract bundle metadata
                var json = File.ReadAllText(file);
                using var doc = JsonDocument.Parse(json);
                var root = doc.RootElement;

                if (root.TryGetProperty("bundleId", out var bundleIdProp) &&
                    root.TryGetProperty("tenantId", out var tenantIdProp) &&
                    root.TryGetProperty("createdByNodeId", out var nodeIdProp) &&
                    root.TryGetProperty("createdAt", out var createdAtProp))
                {
                    var entryCount = 0;
                    if (root.TryGetProperty("jobLogs", out var jobLogs))
                    {
                        foreach (var log in jobLogs.EnumerateArray())
                        {
                            if (log.TryGetProperty("entries", out var entries))
                            {
                                entryCount += entries.GetArrayLength();
                            }
                        }
                    }

                    bundles.Add(new BundleInfo
                    {
                        BundleId = Guid.Parse(bundleIdProp.GetString()!),
                        TenantId = tenantIdProp.GetString()!,
                        SourceNodeId = nodeIdProp.GetString()!,
                        CreatedAt = DateTimeOffset.Parse(createdAtProp.GetString()!),
                        EntryCount = entryCount,
                        SizeBytes = new FileInfo(file).Length
                    });
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to parse bundle metadata from {File}", file);
            }
        }

        return Task.FromResult<IReadOnlyList<BundleInfo>>(
            bundles.OrderByDescending(b => b.CreatedAt).ToList());
    }
}

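// Illustrative round trip on a single machine (not part of this change; the exporter
// and importer registrations are assumed):
//   var send = await transport.SendBundleAsync(bundle, "outbox", ct);
//   var received = await transport.ReceiveBundleAsync(send.Destination, ct);
// In real air-gap use the exported file is carried to the offline side and read from
// that side's InputDirectory instead.
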
/// <summary>
/// Options for file-based job sync transport.
/// </summary>
public sealed class FileBasedJobSyncTransportOptions
{
    /// <summary>
    /// Gets or sets the output directory for exporting bundles.
    /// </summary>
    public string OutputDirectory { get; set; } = Path.Combine(
        Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
        "stellaops",
        "airgap",
        "outbox");

    /// <summary>
    /// Gets or sets the input directory for importing bundles.
    /// </summary>
    public string InputDirectory { get; set; } = Path.Combine(
        Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
        "stellaops",
        "airgap",
        "inbox");
}
@@ -0,0 +1,123 @@
// <copyright file="IJobSyncTransport.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using StellaOps.AirGap.Sync.Models;

namespace StellaOps.AirGap.Sync.Transport;

/// <summary>
/// Transport abstraction for job sync bundles.
/// Enables bundle transfer over various transports (file, Router messaging, etc.).
/// </summary>
public interface IJobSyncTransport
{
    /// <summary>
    /// Gets the transport identifier.
    /// </summary>
    string TransportId { get; }

    /// <summary>
    /// Sends a job sync bundle to a destination.
    /// </summary>
    /// <param name="bundle">The bundle to send.</param>
    /// <param name="destination">The destination identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The send result.</returns>
    Task<JobSyncSendResult> SendBundleAsync(
        AirGapBundle bundle,
        string destination,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Receives a job sync bundle from a source.
    /// </summary>
    /// <param name="source">The source identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The received bundle, or null if not available.</returns>
    Task<AirGapBundle?> ReceiveBundleAsync(
        string source,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists available bundles from a source.
    /// </summary>
    /// <param name="source">The source identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of available bundle identifiers.</returns>
    Task<IReadOnlyList<BundleInfo>> ListAvailableBundlesAsync(
        string source,
        CancellationToken cancellationToken = default);
}

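// Note (illustrative, not included in this change): a further transport, for example a
// removable-media queue or an object-store drop, only needs to implement these three
// members and expose its own TransportId.
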
/// <summary>
/// Result of sending a job sync bundle.
/// </summary>
public sealed record JobSyncSendResult
{
    /// <summary>
    /// Gets a value indicating whether the send was successful.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Gets the bundle ID.
    /// </summary>
    public required Guid BundleId { get; init; }

    /// <summary>
    /// Gets the destination where the bundle was sent.
    /// </summary>
    public required string Destination { get; init; }

    /// <summary>
    /// Gets the error message if the send failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Gets the transmission timestamp.
    /// </summary>
    public DateTimeOffset TransmittedAt { get; init; }

    /// <summary>
    /// Gets the size of the transmitted data in bytes.
    /// </summary>
    public long SizeBytes { get; init; }
}

/// <summary>
/// Information about an available bundle.
/// </summary>
public sealed record BundleInfo
{
    /// <summary>
    /// Gets the bundle ID.
    /// </summary>
    public required Guid BundleId { get; init; }

    /// <summary>
    /// Gets the tenant ID.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Gets the source node ID.
    /// </summary>
    public required string SourceNodeId { get; init; }

    /// <summary>
    /// Gets the creation timestamp.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Gets the entry count in the bundle.
    /// </summary>
    public int EntryCount { get; init; }

    /// <summary>
    /// Gets the bundle size in bytes.
    /// </summary>
    public long SizeBytes { get; init; }
}
@@ -0,0 +1,272 @@
// <copyright file="RouterJobSyncTransport.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.AirGap.Sync.Telemetry;

namespace StellaOps.AirGap.Sync.Transport;

/// <summary>
/// Router-based transport for job sync bundles when network is available.
/// This transport uses the Router messaging infrastructure for real-time sync.
/// </summary>
public sealed class RouterJobSyncTransport : IJobSyncTransport
{
    private readonly IAirGapBundleExporter _exporter;
    private readonly IAirGapBundleImporter _importer;
    private readonly IRouterJobSyncClient _routerClient;
    private readonly RouterJobSyncTransportOptions _options;
    private readonly ILogger<RouterJobSyncTransport> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="RouterJobSyncTransport"/> class.
    /// </summary>
    public RouterJobSyncTransport(
        IAirGapBundleExporter exporter,
        IAirGapBundleImporter importer,
        IRouterJobSyncClient routerClient,
        IOptions<RouterJobSyncTransportOptions> options,
        ILogger<RouterJobSyncTransport> logger)
    {
        _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter));
        _importer = importer ?? throw new ArgumentNullException(nameof(importer));
        _routerClient = routerClient ?? throw new ArgumentNullException(nameof(routerClient));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public string TransportId => "router";

    /// <inheritdoc/>
    public async Task<JobSyncSendResult> SendBundleAsync(
        AirGapBundle bundle,
        string destination,
        CancellationToken cancellationToken = default)
    {
        var startTime = DateTimeOffset.UtcNow;

        try
        {
            // Serialize bundle
            var json = await _exporter.ExportToStringAsync(bundle, cancellationToken)
                .ConfigureAwait(false);
            var payload = Encoding.UTF8.GetBytes(json);

            _logger.LogDebug(
                "Sending job sync bundle {BundleId} to {Destination} ({Size} bytes)",
                bundle.BundleId,
                destination,
                payload.Length);

            // Send via Router
            var response = await _routerClient.SendJobSyncBundleAsync(
                destination,
                bundle.BundleId,
                bundle.TenantId,
                payload,
                _options.SendTimeout,
                cancellationToken).ConfigureAwait(false);

            if (response.Success)
            {
                AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, payload.Length);

                _logger.LogInformation(
                    "Sent job sync bundle {BundleId} to {Destination}",
                    bundle.BundleId,
                    destination);
            }
            else
            {
                _logger.LogWarning(
                    "Failed to send job sync bundle {BundleId} to {Destination}: {Error}",
                    bundle.BundleId,
                    destination,
                    response.Error);
            }

            return new JobSyncSendResult
            {
                Success = response.Success,
                BundleId = bundle.BundleId,
                Destination = destination,
                Error = response.Error,
                TransmittedAt = startTime,
                SizeBytes = payload.Length
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "Error sending job sync bundle {BundleId} to {Destination}",
                bundle.BundleId,
                destination);

            return new JobSyncSendResult
            {
                Success = false,
                BundleId = bundle.BundleId,
                Destination = destination,
                Error = ex.Message,
                TransmittedAt = startTime
            };
        }
    }

    /// <inheritdoc/>
    public async Task<AirGapBundle?> ReceiveBundleAsync(
        string source,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _routerClient.ReceiveJobSyncBundleAsync(
                source,
                _options.ReceiveTimeout,
                cancellationToken).ConfigureAwait(false);

            if (response.Payload is null || response.Payload.Length == 0)
            {
                _logger.LogDebug("No bundle available from {Source}", source);
                return null;
            }

            var json = Encoding.UTF8.GetString(response.Payload);
            var bundle = await _importer.ImportFromStringAsync(json, cancellationToken)
                .ConfigureAwait(false);

            _logger.LogInformation(
                "Received job sync bundle {BundleId} from {Source}",
                bundle.BundleId,
                source);

            return bundle;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error receiving job sync bundle from {Source}", source);
            return null;
        }
    }

    /// <inheritdoc/>
    public async Task<IReadOnlyList<BundleInfo>> ListAvailableBundlesAsync(
        string source,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _routerClient.ListAvailableBundlesAsync(
                source,
                _options.ListTimeout,
                cancellationToken).ConfigureAwait(false);

            return response.Bundles;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error listing available bundles from {Source}", source);
            return Array.Empty<BundleInfo>();
        }
    }
}

/// <summary>
/// Options for Router-based job sync transport.
/// </summary>
public sealed class RouterJobSyncTransportOptions
{
    /// <summary>
    /// Gets or sets the timeout for send operations.
    /// </summary>
    public TimeSpan SendTimeout { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Gets or sets the timeout for receive operations.
    /// </summary>
    public TimeSpan ReceiveTimeout { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Gets or sets the timeout for list operations.
    /// </summary>
    public TimeSpan ListTimeout { get; set; } = TimeSpan.FromSeconds(10);

    /// <summary>
    /// Gets or sets the service endpoint for job sync.
    /// </summary>
    public string ServiceEndpoint { get; set; } = "scheduler.job-sync";
}

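// The concrete IRouterJobSyncClient is expected to come from the Router messaging
// infrastructure; nothing in this file pins it to a specific wire format. (Descriptive
// note only; no implementation is included in this change.)
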
/// <summary>
/// Client interface for Router job sync operations.
/// </summary>
public interface IRouterJobSyncClient
{
    /// <summary>
    /// Sends a job sync bundle via the Router.
    /// </summary>
    Task<RouterSendResponse> SendJobSyncBundleAsync(
        string destination,
        Guid bundleId,
        string tenantId,
        byte[] payload,
        TimeSpan timeout,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Receives a job sync bundle via the Router.
    /// </summary>
    Task<RouterReceiveResponse> ReceiveJobSyncBundleAsync(
        string source,
        TimeSpan timeout,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists available bundles via the Router.
    /// </summary>
    Task<RouterListResponse> ListAvailableBundlesAsync(
        string source,
        TimeSpan timeout,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Response from a Router send operation.
/// </summary>
public sealed record RouterSendResponse
{
    /// <summary>Gets a value indicating whether the send was successful.</summary>
    public bool Success { get; init; }

    /// <summary>Gets the error message if failed.</summary>
    public string? Error { get; init; }
}

/// <summary>
/// Response from a Router receive operation.
/// </summary>
public sealed record RouterReceiveResponse
{
    /// <summary>Gets the received payload.</summary>
    public byte[]? Payload { get; init; }

    /// <summary>Gets the bundle ID.</summary>
    public Guid? BundleId { get; init; }
}

/// <summary>
/// Response from a Router list operation.
/// </summary>
public sealed record RouterListResponse
{
    /// <summary>Gets the available bundles.</summary>
    public IReadOnlyList<BundleInfo> Bundles { get; init; } = Array.Empty<BundleInfo>();
}
@@ -22,6 +22,9 @@ namespace StellaOps.AirGap.Bundle.Tests;
/// Task AIRGAP-5100-016: Export bundle (online env) → import bundle (offline env) → verify data integrity
/// Task AIRGAP-5100-017: Policy export → policy import → policy evaluation → verify identical verdict
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("BlastRadius", TestCategories.BlastRadius.Integrations)]
[Trait("BlastRadius", TestCategories.BlastRadius.Persistence)]
public sealed class AirGapIntegrationTests : IDisposable
{
    private readonly string _tempRoot;
@@ -72,7 +75,8 @@ public sealed class AirGapIntegrationTests : IDisposable
            null,
            new[] { new FeedBuildConfig("nvd-feed", "nvd", "2025-06-15", feedPath, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var bundleOutputPath = Path.Combine(_onlineEnvPath, "bundle");

@@ -120,7 +124,8 @@ public sealed class AirGapIntegrationTests : IDisposable
            DateTimeOffset.UtcNow.AddDays(30),
            new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedPath, "feeds/all-feeds.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) },
            new[] { new PolicyBuildConfig("policy-1", "default", "1.0", policyPath, "policies/default.rego", PolicyType.OpaRego) },
            new[] { new CryptoBuildConfig("crypto-1", "trust-root", certPath, "certs/root.pem", CryptoComponentType.TrustRoot, null) });
            new[] { new CryptoBuildConfig("crypto-1", "trust-root", certPath, "certs/root.pem", CryptoComponentType.TrustRoot, null) },
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath = Path.Combine(_onlineEnvPath, "multi-bundle");

@@ -161,7 +166,8 @@ public sealed class AirGapIntegrationTests : IDisposable
            null,
            new[] { new FeedBuildConfig("feed", "nvd", "v1", feedPath, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath = Path.Combine(_onlineEnvPath, "corrupt-source");
        var manifest = await builder.BuildAsync(request, bundlePath);
@@ -219,7 +225,8 @@ public sealed class AirGapIntegrationTests : IDisposable
            null,
            Array.Empty<FeedBuildConfig>(),
            new[] { new PolicyBuildConfig("security-policy", "security", "1.0", policyPath, "policies/security.rego", PolicyType.OpaRego) },
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath = Path.Combine(_onlineEnvPath, "policy-bundle");

@@ -273,7 +280,8 @@ public sealed class AirGapIntegrationTests : IDisposable
                new PolicyBuildConfig("policy-2", "policy2", "1.0", policy2Path, "policies/policy2.rego", PolicyType.OpaRego),
                new PolicyBuildConfig("policy-3", "policy3", "1.0", policy3Path, "policies/policy3.rego", PolicyType.OpaRego)
            },
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath = Path.Combine(_onlineEnvPath, "multi-policy");

@@ -315,7 +323,8 @@ public sealed class AirGapIntegrationTests : IDisposable
            null,
            Array.Empty<FeedBuildConfig>(),
            new[] { new PolicyBuildConfig("signed-policy", "signed", "1.0", policyPath, "policies/signed.rego", PolicyType.OpaRego) },
            new[] { new CryptoBuildConfig("signing-cert", "signing", certPath, "certs/signing.pem", CryptoComponentType.SigningKey, null) });
            new[] { new CryptoBuildConfig("signing-cert", "signing", certPath, "certs/signing.pem", CryptoComponentType.SigningKey, null) },
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath = Path.Combine(_onlineEnvPath, "signed-bundle");

@@ -142,7 +142,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                    new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act - First export
        var manifest1 = await builder.BuildAsync(request, outputPath1);
@@ -163,7 +164,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                    new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var manifest2 = await builder.BuildAsync(request2, outputPath2);

@@ -278,7 +280,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                new FeedBuildConfig("f3", "osv", "v1", feed3, "feeds/f3.json", DateTimeOffset.UtcNow, FeedFormat.OsvJson)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, Path.Combine(_tempRoot, "multi"));
@@ -332,7 +335,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                new FeedBuildConfig("f1", "binary", "v1", source1, "data/binary.bin", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var request2 = new BundleBuildRequest(
            "binary-test",
@@ -343,7 +347,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                new FeedBuildConfig("f1", "binary", "v1", source2, "data/binary.bin", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "bin1"));
@@ -407,7 +412,8 @@ public sealed class BundleDeterminismTests : IAsyncLifetime
                    new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());
    }

    private BundleManifest CreateDeterministicManifest(string name)

@@ -259,7 +259,8 @@ public sealed class BundleExportImportTests : IDisposable
            null,
            new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile1, "feeds/nvd.json", DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var request2 = new BundleBuildRequest(
            "determinism-test",
@@ -267,7 +268,8 @@ public sealed class BundleExportImportTests : IDisposable
            null,
            new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile2, "feeds/nvd.json", DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var outputPath1 = Path.Combine(_tempRoot, "determinism-output1");
        var outputPath2 = Path.Combine(_tempRoot, "determinism-output2");
@@ -363,7 +365,8 @@ public sealed class BundleExportImportTests : IDisposable
                imported.Feeds[0].SnapshotAt,
                imported.Feeds[0].Format) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var bundlePath2 = Path.Combine(_tempRoot, "roundtrip2");
        var manifest2 = await builder.BuildAsync(reexportRequest, bundlePath2);
@@ -409,7 +412,8 @@ public sealed class BundleExportImportTests : IDisposable
            null,
            new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedSourcePath, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());
    }

    private static BundleManifest CreateTestManifest()

@@ -49,7 +49,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            null,
            Array.Empty<FeedBuildConfig>(),
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -93,7 +94,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                    FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -139,7 +141,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                    "policies/default.rego",
                    PolicyType.OpaRego)
            },
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -182,7 +185,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                    "certs/root.pem",
                    CryptoComponentType.TrustRoot,
                    DateTimeOffset.UtcNow.AddYears(10))
            });
            },
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -225,7 +229,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            {
                new PolicyBuildConfig("p1", "default", "1.0", policy, "policies/default.rego", PolicyType.OpaRego)
            },
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -261,7 +266,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -288,7 +294,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -328,7 +335,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            new[]
            {
                new CryptoBuildConfig("c1", "root", certFile, "crypto/certs/ca/root.pem", CryptoComponentType.TrustRoot, null)
            });
            },
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -369,7 +377,8 @@ public sealed class BundleExportTests : IAsyncLifetime
                new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, format)
            },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -404,7 +413,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            {
                new PolicyBuildConfig("p1", "test", "1.0", policyFile, "policies/test", type)
            },
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -440,7 +450,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            new[]
            {
                new CryptoBuildConfig("c1", "test", certFile, "certs/test", type, null)
            });
            },
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -468,7 +479,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            expiresAt,
            Array.Empty<FeedBuildConfig>(),
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);
@@ -496,7 +508,8 @@ public sealed class BundleExportTests : IAsyncLifetime
            new[]
            {
                new CryptoBuildConfig("c1", "root", certFile, "certs/root.pem", CryptoComponentType.TrustRoot, componentExpiry)
            });
            },
            Array.Empty<RuleBundleBuildConfig>());

        // Act
        var manifest = await builder.BuildAsync(request, outputPath);

@@ -49,7 +49,8 @@ public class BundleManifestTests
            null,
            new[] { new FeedBuildConfig("feed-1", "nvd", "v1", sourceFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) },
            Array.Empty<PolicyBuildConfig>(),
            Array.Empty<CryptoBuildConfig>());
            Array.Empty<CryptoBuildConfig>(),
            Array.Empty<RuleBundleBuildConfig>());

        var outputPath = Path.Combine(tempRoot, "bundle");
        var manifest = await builder.BuildAsync(request, outputPath);

@@ -11,10 +11,6 @@
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="xunit.runner.visualstudio" >
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" >
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>

@@ -5,7 +5,7 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0027-M | DONE | Maintainability audit for StellaOps.AirGap.Importer.Tests. |
| AUDIT-0027-T | DONE | Test coverage audit for StellaOps.AirGap.Importer.Tests. |
| AUDIT-0027-A | TODO | Pending approval for changes. |
| AUDIT-0027-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0027-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0027-A | DONE | Waived (test project; revalidated 2026-01-06). |
| VAL-SMOKE-001 | DONE | Align DSSE PAE test data and manifest merkle root; unit tests pass. |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0029-M | DONE | Maintainability audit for StellaOps.AirGap.Persistence.Tests. |
| AUDIT-0029-T | DONE | Test coverage audit for StellaOps.AirGap.Persistence.Tests. |
| AUDIT-0029-A | TODO | Pending approval for changes. |
| AUDIT-0029-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0029-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. |
| AUDIT-0029-A | DONE | Waived (test project; revalidated 2026-01-06). |

@@ -0,0 +1,242 @@
// <copyright file="AirGapBundleDsseSignerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Security.Cryptography;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.AirGap.Sync.Tests;

/// <summary>
/// Unit tests for <see cref="AirGapBundleDsseSigner"/>.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class AirGapBundleDsseSignerTests
{
    private static readonly string TestSecretBase64 = Convert.ToBase64String(
        RandomNumberGenerator.GetBytes(32));

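    // The tests below exercise the two configured modes of AirGapBundleDsseOptions:
    // "none" (signing disabled) and "hmac" (symmetric signing keyed by the 32-byte,
    // base64-encoded secret above). The exact MAC construction lives in
    // AirGapBundleDsseSigner and is intentionally not restated here.
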
[Fact]
|
||||
public async Task SignAsync_WhenDisabled_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" });
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Act
|
||||
var result = await signer.SignAsync(bundle);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
signer.IsEnabled.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SignAsync_WhenEnabled_ReturnsValidSignature()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64,
|
||||
KeyId = "test-key"
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Act
|
||||
var result = await signer.SignAsync(bundle);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.KeyId.Should().Be("test-key");
|
||||
result.Signature.Should().NotBeEmpty();
|
||||
result.SignatureBase64.Should().NotBeNullOrWhiteSpace();
|
||||
signer.IsEnabled.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SignAsync_DeterministicForSameInput()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Act
|
||||
var result1 = await signer.SignAsync(bundle);
|
||||
var result2 = await signer.SignAsync(bundle);
|
||||
|
||||
// Assert
|
||||
result1!.SignatureBase64.Should().Be(result2!.SignatureBase64);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SignAsync_DifferentForDifferentManifest()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle1 = CreateTestBundle(manifestDigest: "sha256:aaa");
|
||||
var bundle2 = CreateTestBundle(manifestDigest: "sha256:bbb");
|
||||
|
||||
// Act
|
||||
var result1 = await signer.SignAsync(bundle1);
|
||||
var result2 = await signer.SignAsync(bundle2);
|
||||
|
||||
// Assert
|
||||
result1!.SignatureBase64.Should().NotBe(result2!.SignatureBase64);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WhenDisabled_ReturnsSigningDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" });
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Act
|
||||
var result = await signer.VerifyAsync(bundle);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(AirGapBundleVerificationResult.SigningDisabled);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WhenNoSignature_ReturnsMissingSignature()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle(signature: null);
|
||||
|
||||
// Act
|
||||
var result = await signer.VerifyAsync(bundle);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(AirGapBundleVerificationResult.MissingSignature);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithValidSignature_ReturnsValid()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Sign the bundle first
|
||||
var signResult = await signer.SignAsync(bundle);
|
||||
var signedBundle = bundle with { Signature = signResult!.SignatureBase64, SignedBy = signResult.KeyId };
|
||||
|
||||
// Act
|
||||
var verifyResult = await signer.VerifyAsync(signedBundle);
|
||||
|
||||
// Assert
|
||||
verifyResult.Should().Be(AirGapBundleVerificationResult.Valid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithTamperedSignature_ReturnsInvalid()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Sign and then tamper
|
||||
var signResult = await signer.SignAsync(bundle);
|
||||
var tamperedBundle = bundle with
|
||||
{
|
||||
Signature = signResult!.SignatureBase64,
|
||||
ManifestDigest = "sha256:tampered"
|
||||
};
|
||||
|
||||
// Act
|
||||
var verifyResult = await signer.VerifyAsync(tamperedBundle);
|
||||
|
||||
// Assert
|
||||
verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithInvalidBase64Signature_ReturnsInvalid()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = TestSecretBase64
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle(signature: "not-valid-base64!!!");
|
||||
|
||||
// Act
|
||||
var verifyResult = await signer.VerifyAsync(bundle);
|
||||
|
||||
// Assert
|
||||
verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SignAsync_WithMissingSecret_ThrowsInvalidOperation()
|
||||
{
|
||||
// Arrange
|
||||
var options = Options.Create(new AirGapBundleDsseOptions
|
||||
{
|
||||
Mode = "hmac",
|
||||
SecretBase64 = null
|
||||
});
|
||||
var signer = new AirGapBundleDsseSigner(options, NullLogger<AirGapBundleDsseSigner>.Instance);
|
||||
var bundle = CreateTestBundle();
|
||||
|
||||
// Act & Assert
|
||||
var act = async () => await signer.SignAsync(bundle);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage("*SecretBase64*");
|
||||
}
|
||||
|
||||
private static AirGapBundle CreateTestBundle(
|
||||
string? manifestDigest = null,
|
||||
string? signature = null)
|
||||
{
|
||||
return new AirGapBundle
|
||||
{
|
||||
BundleId = Guid.Parse("11111111-1111-1111-1111-111111111111"),
|
||||
TenantId = "test-tenant",
|
||||
CreatedAt = DateTimeOffset.Parse("2026-01-07T12:00:00Z", System.Globalization.CultureInfo.InvariantCulture),
|
||||
CreatedByNodeId = "test-node",
|
||||
JobLogs = new List<NodeJobLog>(),
|
||||
ManifestDigest = manifestDigest ?? "sha256:abc123def456",
|
||||
Signature = signature
|
||||
};
|
||||
}
|
||||
}
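// Illustrative sketch only: a minimal HMAC construction that satisfies the behaviour the tests
// above exercise (deterministic output for the same manifest digest, different output for a
// different digest, InvalidOperationException mentioning SecretBase64 when the secret is missing).
// The real AirGapBundleDsseSigner may differ; this type and its member names are assumptions.
internal static class HmacBundleSigningSketch
{
    public static string ComputeSignatureBase64(string manifestDigest, string? secretBase64)
    {
        if (string.IsNullOrWhiteSpace(secretBase64))
        {
            throw new InvalidOperationException("SecretBase64 must be configured for hmac mode.");
        }

        var key = Convert.FromBase64String(secretBase64);
        var payload = System.Text.Encoding.UTF8.GetBytes(manifestDigest);

        // HMAC-SHA256 over the manifest digest: same input => same signature (determinism),
        // different digest => different signature (tamper detection).
        using var hmac = new System.Security.Cryptography.HMACSHA256(key);
        return Convert.ToBase64String(hmac.ComputeHash(payload));
    }

    public static bool Verify(string manifestDigest, string signatureBase64, string? secretBase64)
    {
        byte[] presented;
        try
        {
            presented = Convert.FromBase64String(signatureBase64);
        }
        catch (FormatException)
        {
            // Corresponds to AirGapBundleVerificationResult.InvalidSignature in the tests above.
            return false;
        }

        var expected = Convert.FromBase64String(ComputeSignatureBase64(manifestDigest, secretBase64));
        return System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(presented, expected);
    }
}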
|
||||
@@ -0,0 +1,342 @@
|
||||
// <copyright file="ConflictResolverTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.AirGap.Sync.Models;
|
||||
using StellaOps.AirGap.Sync.Services;
|
||||
using StellaOps.HybridLogicalClock;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="ConflictResolver"/>.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
public sealed class ConflictResolverTests
|
||||
{
|
||||
private readonly ConflictResolver _sut;
|
||||
|
||||
public ConflictResolverTests()
|
||||
{
|
||||
_sut = new ConflictResolver(NullLogger<ConflictResolver>.Instance);
|
||||
}
|
||||
|
||||
#region Single Entry Tests
|
||||
|
||||
[Fact]
|
||||
public void Resolve_SingleEntry_ReturnsDuplicateTimestampWithTakeEarliest()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("11111111-1111-1111-1111-111111111111");
|
||||
var entry = CreateEntry("node-a", 100, 0, jobId);
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entry)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.Type.Should().Be(ConflictType.DuplicateTimestamp);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
|
||||
result.SelectedEntry.Should().Be(entry);
|
||||
result.DroppedEntries.Should().BeEmpty();
|
||||
result.Error.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Duplicate Timestamp Tests (Same Payload)
|
||||
|
||||
[Fact]
|
||||
public void Resolve_TwoEntriesSamePayload_TakesEarliest()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("22222222-2222-2222-2222-222222222222");
|
||||
var payloadHash = CreatePayloadHash(0xAA);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash);
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHash);
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.Type.Should().Be(ConflictType.DuplicateTimestamp);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
|
||||
result.SelectedEntry.Should().Be(entryA);
|
||||
result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_TwoEntriesSamePayload_TakesEarliest_WhenSecondComesFirst()
|
||||
{
|
||||
// Arrange - Earlier entry is second in list
|
||||
var jobId = Guid.Parse("33333333-3333-3333-3333-333333333333");
|
||||
var payloadHash = CreatePayloadHash(0xBB);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash);
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earlier
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert - Should take entryB (earlier)
|
||||
result.Type.Should().Be(ConflictType.DuplicateTimestamp);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
|
||||
result.SelectedEntry.Should().Be(entryB);
|
||||
result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_ThreeEntriesSamePayload_TakesEarliestDropsTwo()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("44444444-4444-4444-4444-444444444444");
|
||||
var payloadHash = CreatePayloadHash(0xCC);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 150, 0, jobId, payloadHash);
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earliest
|
||||
var entryC = CreateEntryWithPayloadHash("node-c", 200, 0, jobId, payloadHash);
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB),
|
||||
("node-c", entryC)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.Type.Should().Be(ConflictType.DuplicateTimestamp);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
|
||||
result.SelectedEntry.Should().Be(entryB);
|
||||
result.DroppedEntries.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_SamePhysicalTime_UsesLogicalCounter()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("55555555-5555-5555-5555-555555555555");
|
||||
var payloadHash = CreatePayloadHash(0xDD);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 100, 2, jobId, payloadHash); // Higher counter
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 100, 1, jobId, payloadHash); // Earlier
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.SelectedEntry.Should().Be(entryB); // Lower logical counter
|
||||
result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_SamePhysicalTimeAndCounter_UsesNodeId()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("66666666-6666-6666-6666-666666666666");
|
||||
var payloadHash = CreatePayloadHash(0xEE);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("alpha-node", 100, 0, jobId, payloadHash);
|
||||
var entryB = CreateEntryWithPayloadHash("beta-node", 100, 0, jobId, payloadHash);
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("beta-node", entryB),
|
||||
("alpha-node", entryA)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert - "alpha-node" < "beta-node" alphabetically
|
||||
result.SelectedEntry.Should().Be(entryA);
|
||||
result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Payload Mismatch Tests
|
||||
|
||||
[Fact]
|
||||
public void Resolve_DifferentPayloads_ReturnsError()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("77777777-7777-7777-7777-777777777777");
|
||||
|
||||
var payloadHashA = CreatePayloadHash(0x01);
|
||||
var payloadHashB = CreatePayloadHash(0x02);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA);
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHashB);
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.Type.Should().Be(ConflictType.PayloadMismatch);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.Error);
|
||||
result.Error.Should().NotBeNullOrEmpty();
|
||||
result.Error.Should().Contain(jobId.ToString());
|
||||
result.Error.Should().Contain("conflicting payloads");
|
||||
result.SelectedEntry.Should().BeNull();
|
||||
result.DroppedEntries.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_ThreeDifferentPayloads_ReturnsError()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.Parse("88888888-8888-8888-8888-888888888888");
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, CreatePayloadHash(0x01));
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, CreatePayloadHash(0x02));
|
||||
var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, CreatePayloadHash(0x03));
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB),
|
||||
("node-c", entryC)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert
|
||||
result.Type.Should().Be(ConflictType.PayloadMismatch);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.Error);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_TwoSameOneUnique_ReturnsError()
|
||||
{
|
||||
// Arrange - 2 entries with same payload, 1 with different
|
||||
var jobId = Guid.Parse("99999999-9999-9999-9999-999999999999");
|
||||
var sharedPayload = CreatePayloadHash(0xAA);
|
||||
var uniquePayload = CreatePayloadHash(0xBB);
|
||||
|
||||
var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, sharedPayload);
|
||||
var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, sharedPayload);
|
||||
var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, uniquePayload);
|
||||
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
|
||||
{
|
||||
("node-a", entryA),
|
||||
("node-b", entryB),
|
||||
("node-c", entryC)
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _sut.Resolve(jobId, conflicting);
|
||||
|
||||
// Assert - Should be error due to different payloads
|
||||
result.Type.Should().Be(ConflictType.PayloadMismatch);
|
||||
result.Resolution.Should().Be(ResolutionStrategy.Error);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases
|
||||
|
||||
[Fact]
|
||||
public void Resolve_NullConflicting_ThrowsArgumentNullException()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.NewGuid();
|
||||
|
||||
// Act & Assert
|
||||
var act = () => _sut.Resolve(jobId, null!);
|
||||
act.Should().Throw<ArgumentNullException>()
|
||||
.WithParameterName("conflicting");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_EmptyConflicting_ThrowsArgumentException()
|
||||
{
|
||||
// Arrange
|
||||
var jobId = Guid.NewGuid();
|
||||
var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>();
|
||||
|
||||
// Act & Assert
|
||||
var act = () => _sut.Resolve(jobId, conflicting);
|
||||
act.Should().Throw<ArgumentException>()
|
||||
.WithParameterName("conflicting");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static byte[] CreatePayloadHash(byte prefix)
|
||||
{
|
||||
var hash = new byte[32];
|
||||
hash[0] = prefix;
|
||||
return hash;
|
||||
}
|
||||
|
||||
private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId)
|
||||
{
|
||||
var payloadHash = new byte[32];
|
||||
jobId.ToByteArray().CopyTo(payloadHash, 0);
|
||||
|
||||
return CreateEntryWithPayloadHash(nodeId, physicalTime, logicalCounter, jobId, payloadHash);
|
||||
}
|
||||
|
||||
private static OfflineJobLogEntry CreateEntryWithPayloadHash(
|
||||
string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash)
|
||||
{
|
||||
var hlc = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = physicalTime,
|
||||
NodeId = nodeId,
|
||||
LogicalCounter = logicalCounter
|
||||
};
|
||||
|
||||
return new OfflineJobLogEntry
|
||||
{
|
||||
NodeId = nodeId,
|
||||
THlc = hlc,
|
||||
JobId = jobId,
|
||||
Payload = $"{{\"id\":\"{jobId}\"}}",
|
||||
PayloadHash = payloadHash,
|
||||
Link = new byte[32],
|
||||
EnqueuedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
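// Illustrative sketch only: the tie-break ordering the tests above encode for duplicate job
// entries. Selection follows HLC order (PhysicalTime, then LogicalCounter, then NodeId with
// ordinal comparison), and any disagreement in payload hashes is treated as an unresolvable
// conflict. This is a reading of the expected behaviour, not the ConflictResolver source.
internal static class ConflictResolutionSketch
{
    public static OfflineJobLogEntry? PickEarliest(
        IReadOnlyList<OfflineJobLogEntry> duplicates,
        out bool payloadMismatch)
    {
        // Different payload hashes for the same JobId indicate a producer bug, not a race.
        payloadMismatch = duplicates
            .Select(e => Convert.ToHexString(e.PayloadHash))
            .Distinct(StringComparer.Ordinal)
            .Count() > 1;

        if (payloadMismatch)
        {
            // Resolver reports ConflictType.PayloadMismatch / ResolutionStrategy.Error.
            return null;
        }

        // Otherwise keep the earliest entry in HLC total order; the rest are dropped.
        return duplicates
            .OrderBy(e => e.THlc.PhysicalTime)
            .ThenBy(e => e.THlc.LogicalCounter)
            .ThenBy(e => e.THlc.NodeId, StringComparer.Ordinal)
            .First();
    }
}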
|
||||
@@ -0,0 +1,451 @@
|
||||
// <copyright file="HlcMergeServiceTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.AirGap.Sync.Models;
|
||||
using StellaOps.AirGap.Sync.Services;
|
||||
using StellaOps.HybridLogicalClock;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Sync.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="HlcMergeService"/>.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
public sealed class HlcMergeServiceTests
|
||||
{
|
||||
private readonly HlcMergeService _sut;
|
||||
private readonly ConflictResolver _conflictResolver;
|
||||
|
||||
public HlcMergeServiceTests()
|
||||
{
|
||||
_conflictResolver = new ConflictResolver(NullLogger<ConflictResolver>.Instance);
|
||||
_sut = new HlcMergeService(_conflictResolver, NullLogger<HlcMergeService>.Instance);
|
||||
}
|
||||
|
||||
#region OMP-014: Merge Algorithm Correctness
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_EmptyInput_ReturnsEmptyResult()
|
||||
{
|
||||
// Arrange
|
||||
var nodeLogs = new List<NodeJobLog>();
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(nodeLogs);
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().BeEmpty();
|
||||
result.Duplicates.Should().BeEmpty();
|
||||
result.SourceNodes.Should().BeEmpty();
|
||||
result.MergedChainHead.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_SingleNode_PreservesOrder()
|
||||
{
|
||||
// Arrange
|
||||
var nodeLog = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")),
|
||||
CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")),
|
||||
CreateEntry("node-a", 300, 0, Guid.Parse("33333333-3333-3333-3333-333333333333"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeLog });
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().HaveCount(3);
|
||||
result.MergedEntries[0].JobId.Should().Be(Guid.Parse("11111111-1111-1111-1111-111111111111"));
|
||||
result.MergedEntries[1].JobId.Should().Be(Guid.Parse("22222222-2222-2222-2222-222222222222"));
|
||||
result.MergedEntries[2].JobId.Should().Be(Guid.Parse("33333333-3333-3333-3333-333333333333"));
|
||||
result.Duplicates.Should().BeEmpty();
|
||||
result.SourceNodes.Should().ContainSingle().Which.Should().Be("node-a");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_TwoNodes_MergesByHlcOrder()
|
||||
{
|
||||
// Arrange - Two nodes with interleaved HLC timestamps
|
||||
// Node A: T=100, T=102
|
||||
// Node B: T=101, T=103
|
||||
// Expected order: 100, 101, 102, 103
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
|
||||
CreateEntry("node-a", 102, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntry("node-b", 101, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
|
||||
CreateEntry("node-b", 103, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().HaveCount(4);
|
||||
result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
|
||||
result.MergedEntries[1].THlc.PhysicalTime.Should().Be(101);
|
||||
result.MergedEntries[2].THlc.PhysicalTime.Should().Be(102);
|
||||
result.MergedEntries[3].THlc.PhysicalTime.Should().Be(103);
|
||||
result.SourceNodes.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_SamePhysicalTime_OrdersByLogicalCounter()
|
||||
{
|
||||
// Arrange - Same physical time, different logical counters
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")),
|
||||
CreateEntry("node-a", 100, 2, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000003"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntry("node-b", 100, 1, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")),
|
||||
CreateEntry("node-b", 100, 3, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000004"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().HaveCount(4);
|
||||
result.MergedEntries[0].THlc.LogicalCounter.Should().Be(0);
|
||||
result.MergedEntries[1].THlc.LogicalCounter.Should().Be(1);
|
||||
result.MergedEntries[2].THlc.LogicalCounter.Should().Be(2);
|
||||
result.MergedEntries[3].THlc.LogicalCounter.Should().Be(3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_SameTimeAndCounter_OrdersByNodeId()
|
||||
{
|
||||
// Arrange - Same physical time and counter, different node IDs
|
||||
var nodeA = CreateNodeLog("alpha-node", new[]
|
||||
{
|
||||
CreateEntry("alpha-node", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("beta-node", new[]
|
||||
{
|
||||
CreateEntry("beta-node", 100, 0, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
|
||||
// Assert - "alpha-node" < "beta-node" alphabetically
|
||||
result.MergedEntries.Should().HaveCount(2);
|
||||
result.MergedEntries[0].SourceNodeId.Should().Be("alpha-node");
|
||||
result.MergedEntries[1].SourceNodeId.Should().Be("beta-node");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_RecomputesUnifiedChain()
|
||||
{
|
||||
// Arrange
|
||||
var nodeLog = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")),
|
||||
CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeLog });
|
||||
|
||||
// Assert - Chain should be recomputed
|
||||
result.MergedEntries.Should().HaveCount(2);
|
||||
result.MergedEntries[0].MergedLink.Should().NotBeNull();
|
||||
result.MergedEntries[1].MergedLink.Should().NotBeNull();
|
||||
result.MergedChainHead.Should().NotBeNull();
|
||||
|
||||
// First entry's link should be computed from null prev_link
|
||||
result.MergedEntries[0].MergedLink.Should().HaveCount(32);
|
||||
|
||||
// Chain head should equal last entry's merged link
|
||||
result.MergedChainHead.Should().BeEquivalentTo(result.MergedEntries[1].MergedLink);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region OMP-015: Duplicate Detection
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_DuplicateJobId_SamePayload_TakesEarliest()
|
||||
{
|
||||
// Arrange - Same job ID (same payload hash) from two nodes
|
||||
var jobId = Guid.Parse("dddddddd-dddd-dddd-dddd-dddddddddddd");
|
||||
var payloadHash = new byte[32];
|
||||
payloadHash[0] = 0xAA;
|
||||
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash)
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHash)
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
|
||||
// Assert - Should take earliest (T=100 from node-a)
|
||||
result.MergedEntries.Should().ContainSingle();
|
||||
result.MergedEntries[0].SourceNodeId.Should().Be("node-a");
|
||||
result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
|
||||
|
||||
// Should report duplicate
|
||||
result.Duplicates.Should().ContainSingle();
|
||||
result.Duplicates[0].JobId.Should().Be(jobId);
|
||||
result.Duplicates[0].NodeId.Should().Be("node-b");
|
||||
result.Duplicates[0].THlc.PhysicalTime.Should().Be(105);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_TriplicateJobId_SamePayload_TakesEarliest()
|
||||
{
|
||||
// Arrange - Same job ID from three nodes
|
||||
var jobId = Guid.Parse("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee");
|
||||
var payloadHash = new byte[32];
|
||||
payloadHash[0] = 0xBB;
|
||||
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash)
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash) // Earliest
|
||||
});
|
||||
var nodeC = CreateNodeLog("node-c", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-c", 150, 0, jobId, payloadHash)
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC });
|
||||
|
||||
// Assert - Should take earliest (T=100 from node-b)
|
||||
result.MergedEntries.Should().ContainSingle();
|
||||
result.MergedEntries[0].SourceNodeId.Should().Be("node-b");
|
||||
result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
|
||||
|
||||
// Should report two duplicates
|
||||
result.Duplicates.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_DuplicateJobId_DifferentPayload_ThrowsError()
|
||||
{
|
||||
// Arrange - Same job ID but different payload hashes (indicates bug)
|
||||
var jobId = Guid.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff");
|
||||
var payloadHashA = new byte[32];
|
||||
payloadHashA[0] = 0x01;
|
||||
var payloadHashB = new byte[32];
|
||||
payloadHashB[0] = 0x02;
|
||||
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA)
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHashB)
|
||||
});
|
||||
|
||||
// Act & Assert - Should throw because payloads differ
|
||||
var act = () => _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
await act.Should().ThrowAsync<InvalidOperationException>()
|
||||
.WithMessage("*conflicting payloads*");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region OMP-018: Multi-Node Merge
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_ThreeNodes_MergesCorrectly()
|
||||
{
|
||||
// Arrange - Three nodes with various timestamps
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
|
||||
CreateEntry("node-a", 400, 0, Guid.Parse("aaaaaaaa-0007-0000-0000-000000000000"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
|
||||
CreateEntry("node-b", 500, 0, Guid.Parse("bbbbbbbb-0008-0000-0000-000000000000"))
|
||||
});
|
||||
var nodeC = CreateNodeLog("node-c", new[]
|
||||
{
|
||||
CreateEntry("node-c", 300, 0, Guid.Parse("cccccccc-0003-0000-0000-000000000000")),
|
||||
CreateEntry("node-c", 600, 0, Guid.Parse("cccccccc-0009-0000-0000-000000000000"))
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC });
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().HaveCount(6);
|
||||
result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
|
||||
.BeInAscendingOrder();
|
||||
result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
|
||||
.ContainInOrder(100L, 200L, 300L, 400L, 500L, 600L);
|
||||
result.SourceNodes.Should().HaveCount(3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_ManyNodes_PreservesTotalOrder()
|
||||
{
|
||||
// Arrange - 5 nodes with 2 entries each
|
||||
var nodes = new List<NodeJobLog>();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
var nodeId = $"node-{i:D2}";
|
||||
nodes.Add(CreateNodeLog(nodeId, new[]
|
||||
{
|
||||
CreateEntry(nodeId, 100 + i * 10, 0, Guid.NewGuid()),
|
||||
CreateEntry(nodeId, 150 + i * 10, 0, Guid.NewGuid())
|
||||
}));
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await _sut.MergeAsync(nodes);
|
||||
|
||||
// Assert
|
||||
result.MergedEntries.Should().HaveCount(10);
|
||||
result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
|
||||
.BeInAscendingOrder();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region OMP-019: Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_SameInput_ProducesSameOutput()
|
||||
{
|
||||
// Arrange
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
|
||||
CreateEntry("node-a", 300, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
|
||||
CreateEntry("node-b", 400, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000"))
|
||||
});
|
||||
|
||||
// Act - Run merge twice
|
||||
var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
var result2 = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
|
||||
// Assert - Results should be identical
|
||||
result1.MergedEntries.Should().HaveCount(result2.MergedEntries.Count);
|
||||
for (int i = 0; i < result1.MergedEntries.Count; i++)
|
||||
{
|
||||
result1.MergedEntries[i].JobId.Should().Be(result2.MergedEntries[i].JobId);
|
||||
result1.MergedEntries[i].THlc.Should().Be(result2.MergedEntries[i].THlc);
|
||||
result1.MergedEntries[i].MergedLink.Should().BeEquivalentTo(result2.MergedEntries[i].MergedLink);
|
||||
}
|
||||
result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MergeAsync_InputOrderIndependent_ProducesSameOutput()
|
||||
{
|
||||
// Arrange
|
||||
var nodeA = CreateNodeLog("node-a", new[]
|
||||
{
|
||||
CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000"))
|
||||
});
|
||||
var nodeB = CreateNodeLog("node-b", new[]
|
||||
{
|
||||
CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000"))
|
||||
});
|
||||
|
||||
// Act - Merge in different orders
|
||||
var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB });
|
||||
var result2 = await _sut.MergeAsync(new[] { nodeB, nodeA });
|
||||
|
||||
// Assert - Results should be identical regardless of input order
|
||||
result1.MergedEntries.Select(e => e.JobId).Should()
|
||||
.BeEquivalentTo(result2.MergedEntries.Select(e => e.JobId));
|
||||
result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static NodeJobLog CreateNodeLog(string nodeId, IEnumerable<OfflineJobLogEntry> entries)
|
||||
{
|
||||
var entryList = entries.ToList();
|
||||
var lastEntry = entryList.LastOrDefault();
|
||||
|
||||
return new NodeJobLog
|
||||
{
|
||||
NodeId = nodeId,
|
||||
Entries = entryList,
|
||||
LastHlc = lastEntry?.THlc ?? new HlcTimestamp { PhysicalTime = 0, NodeId = nodeId, LogicalCounter = 0 },
|
||||
ChainHead = lastEntry?.Link ?? new byte[32]
|
||||
};
|
||||
}
|
||||
|
||||
private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId)
|
||||
{
|
||||
var payloadHash = new byte[32];
|
||||
jobId.ToByteArray().CopyTo(payloadHash, 0);
|
||||
|
||||
var hlc = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = physicalTime,
|
||||
NodeId = nodeId,
|
||||
LogicalCounter = logicalCounter
|
||||
};
|
||||
|
||||
return new OfflineJobLogEntry
|
||||
{
|
||||
NodeId = nodeId,
|
||||
THlc = hlc,
|
||||
JobId = jobId,
|
||||
Payload = $"{{\"id\":\"{jobId}\"}}",
|
||||
PayloadHash = payloadHash,
|
||||
Link = new byte[32],
|
||||
EnqueuedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static OfflineJobLogEntry CreateEntryWithPayloadHash(
|
||||
string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash)
|
||||
{
|
||||
var hlc = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = physicalTime,
|
||||
NodeId = nodeId,
|
||||
LogicalCounter = logicalCounter
|
||||
};
|
||||
|
||||
return new OfflineJobLogEntry
|
||||
{
|
||||
NodeId = nodeId,
|
||||
THlc = hlc,
|
||||
JobId = jobId,
|
||||
Payload = $"{{\"id\":\"{jobId}\"}}",
|
||||
PayloadHash = payloadHash,
|
||||
Link = new byte[32],
|
||||
EnqueuedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
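// Illustrative sketch only: one plausible hash-chain recomputation consistent with what
// MergeAsync_RecomputesUnifiedChain asserts (32-byte links, chain head equal to the last
// entry's link, deterministic output). The exact bytes the real HlcMergeService feeds into
// each link are not visible in this diff, so the inputs below (previous link concatenated
// with the entry's payload hash) are an assumption.
internal static class MergedChainSketch
{
    public static byte[] RecomputeHead(IEnumerable<OfflineJobLogEntry> mergedInHlcOrder)
    {
        var previousLink = new byte[32]; // genesis link: all zeroes

        foreach (var entry in mergedInHlcOrder)
        {
            var material = new byte[previousLink.Length + entry.PayloadHash.Length];
            previousLink.CopyTo(material, 0);
            entry.PayloadHash.CopyTo(material, previousLink.Length);

            // Each link commits to the previous link, so any reordering or tampering changes
            // every subsequent link and therefore the final chain head.
            previousLink = System.Security.Cryptography.SHA256.HashData(material);
        }

        return previousLink;
    }
}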
|
||||
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="coverlet.collector">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.AirGap.Sync\StellaOps.AirGap.Sync.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
</ItemGroup>

</Project>
@@ -95,6 +95,7 @@ public class Rfc3161VerifierTests

Assert.False(result.IsValid);
// Should report either a decode failure or a verification error code
Assert.True(result.Reason?.Contains("rfc3161-") ?? false);
Assert.NotNull(result.Reason);
Assert.Contains("rfc3161-", result.Reason);
}
}

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0035-M | DONE | Maintainability audit for StellaOps.AirGap.Time.Tests. |
| AUDIT-0035-T | DONE | Test coverage audit for StellaOps.AirGap.Time.Tests. |
| AUDIT-0035-A | TODO | Pending approval for changes. |
| AUDIT-0035-M | DONE | Revalidated maintainability for StellaOps.AirGap.Time.Tests (2026-01-06). |
| AUDIT-0035-T | DONE | Revalidated test coverage for StellaOps.AirGap.Time.Tests (2026-01-06). |
| AUDIT-0035-A | DONE | Waived (test project). |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0037-M | DONE | Maintainability audit for StellaOps.Aoc.Analyzers. |
| AUDIT-0037-T | DONE | Test coverage audit for StellaOps.Aoc.Analyzers. |
| AUDIT-0037-M | DONE | Revalidated maintainability for StellaOps.Aoc.Analyzers (2026-01-06). |
| AUDIT-0037-T | DONE | Revalidated test coverage for StellaOps.Aoc.Analyzers (2026-01-06). |
| AUDIT-0037-A | DONE | Applied ingestion markers, tighter DB detection, and guard-scope coverage. |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0039-M | DONE | Maintainability audit for StellaOps.Aoc.AspNetCore. |
| AUDIT-0039-T | DONE | Test coverage audit for StellaOps.Aoc.AspNetCore. |
| AUDIT-0039-M | DONE | Revalidated maintainability for StellaOps.Aoc.AspNetCore (2026-01-06). |
| AUDIT-0039-T | DONE | Revalidated test coverage for StellaOps.Aoc.AspNetCore (2026-01-06). |
| AUDIT-0039-A | DONE | Hardened guard filter error handling and added tests. |

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0036-M | DONE | Maintainability audit for StellaOps.Aoc. |
| AUDIT-0036-T | DONE | Test coverage audit for StellaOps.Aoc. |
| AUDIT-0036-M | DONE | Revalidated maintainability for StellaOps.Aoc (2026-01-06). |
| AUDIT-0036-T | DONE | Revalidated test coverage for StellaOps.Aoc (2026-01-06). |
| AUDIT-0036-A | DONE | Applied error code fixes, deterministic ordering, and guard validation hardening. |

@@ -11,10 +11,6 @@

<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" PrivateAssets="all" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0038-M | DONE | Maintainability audit for StellaOps.Aoc.Analyzers.Tests. |
| AUDIT-0038-T | DONE | Test coverage audit for StellaOps.Aoc.Analyzers.Tests. |
| AUDIT-0038-A | TODO | Pending approval for changes. |
| AUDIT-0038-M | DONE | Revalidated maintainability for StellaOps.Aoc.Analyzers.Tests (2026-01-06). |
| AUDIT-0038-T | DONE | Revalidated test coverage for StellaOps.Aoc.Analyzers.Tests (2026-01-06). |
| AUDIT-0038-A | DONE | Waived (test project). |

@@ -13,9 +13,6 @@
<IsTestProject>true</IsTestProject>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="xunit.v3" />
</ItemGroup>

<ItemGroup>
<Using Include="Xunit" />

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0040-M | DONE | Maintainability audit for StellaOps.Aoc.AspNetCore.Tests. |
| AUDIT-0040-T | DONE | Test coverage audit for StellaOps.Aoc.AspNetCore.Tests. |
| AUDIT-0040-A | TODO | Pending approval for changes. |
| AUDIT-0040-M | DONE | Revalidated maintainability for StellaOps.Aoc.AspNetCore.Tests (2026-01-06). |
| AUDIT-0040-T | DONE | Revalidated test coverage for StellaOps.Aoc.AspNetCore.Tests (2026-01-06). |
| AUDIT-0040-A | DONE | Waived (test project). |

@@ -21,9 +21,6 @@

</PropertyGroup>

<ItemGroup>
<PackageReference Include="xunit.v3" />
</ItemGroup>

<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />

@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0041-M | DONE | Maintainability audit for StellaOps.Aoc.Tests. |
| AUDIT-0041-T | DONE | Test coverage audit for StellaOps.Aoc.Tests. |
| AUDIT-0041-A | TODO | Pending approval for changes. |
| AUDIT-0041-M | DONE | Revalidated maintainability for StellaOps.Aoc.Tests (2026-01-06). |
| AUDIT-0041-T | DONE | Revalidated test coverage for StellaOps.Aoc.Tests (2026-01-06). |
| AUDIT-0041-A | DONE | Waived (test project). |

@@ -726,8 +726,8 @@ Status: VERIFIED
- **Sprint:** `docs/implplan/SPRINT_3500_0001_0001_proof_of_exposure_mvp.md`
- **Advisory:** `docs/product-advisories/23-Dec-2026 - Binary Mapping as Attestable Proof.md`
- **Subgraph Extraction:** `src/Scanner/__Libraries/StellaOps.Scanner.Reachability/SUBGRAPH_EXTRACTION.md`
- **Function-Level Evidence:** `docs/reachability/function-level-evidence.md`
- **Hybrid Attestation:** `docs/reachability/hybrid-attestation.md`
- **Function-Level Evidence:** `docs/modules/reach-graph/guides/function-level-evidence.md`
- **Hybrid Attestation:** `docs/modules/reach-graph/guides/hybrid-attestation.md`
- **DSSE Spec:** https://github.com/secure-systems-lab/dsse

---

295
src/Attestor/StellaOps.Attestation.Tests/DsseVerifierTests.cs
Normal file
@@ -0,0 +1,295 @@
|
||||
// <copyright file="DsseVerifierTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestation.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for DsseVerifier.
|
||||
/// Sprint: SPRINT_20260105_002_001_REPLAY, Tasks RPL-006 through RPL-010.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
public class DsseVerifierTests
|
||||
{
|
||||
private readonly DsseVerifier _verifier;
|
||||
|
||||
public DsseVerifierTests()
|
||||
{
|
||||
_verifier = new DsseVerifier(NullLogger<DsseVerifier>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithValidEcdsaSignature_ReturnsSuccess()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, publicKeyPem) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, publicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.ValidSignatureCount.Should().Be(1);
|
||||
result.TotalSignatureCount.Should().Be(1);
|
||||
result.PayloadType.Should().Be("https://in-toto.io/Statement/v1");
|
||||
result.Issues.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithInvalidSignature_ReturnsFail()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, _) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
// Use a different key for verification
|
||||
using var differentKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var differentPublicKeyPem = ExportPublicKeyPem(differentKey);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, differentPublicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.ValidSignatureCount.Should().Be(0);
|
||||
result.Issues.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithMalformedJson_ReturnsParseError()
|
||||
{
|
||||
// Arrange
|
||||
var malformedJson = "{ not valid json }";
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var publicKeyPem = ExportPublicKeyPem(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(malformedJson, publicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Issues.Should().Contain(i => i.Contains("envelope_parse_error"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithMissingPayload_ReturnsFail()
|
||||
{
|
||||
// Arrange
|
||||
var envelope = JsonSerializer.Serialize(new
|
||||
{
|
||||
payloadType = "https://in-toto.io/Statement/v1",
|
||||
signatures = new[] { new { keyId = "key-001", sig = "YWJj" } }
|
||||
});
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var publicKeyPem = ExportPublicKeyPem(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, publicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Issues.Should().Contain(i => i.Contains("envelope_missing_payload"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithMissingSignatures_ReturnsFail()
|
||||
{
|
||||
// Arrange
|
||||
var payload = Convert.ToBase64String(Encoding.UTF8.GetBytes("{}"));
|
||||
var envelope = JsonSerializer.Serialize(new
|
||||
{
|
||||
payloadType = "https://in-toto.io/Statement/v1",
|
||||
payload,
|
||||
signatures = Array.Empty<object>()
|
||||
});
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var publicKeyPem = ExportPublicKeyPem(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, publicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Issues.Should().Contain("envelope_missing_signatures");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithNoTrustedKeys_ReturnsFail()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, _) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, Array.Empty<string>(), TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Issues.Should().Contain("no_trusted_keys_provided");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithMultipleTrustedKeys_SucceedsWithMatchingKey()
|
||||
{
|
||||
// Arrange
|
||||
using var signingKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
using var otherKey1 = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
using var otherKey2 = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
|
||||
var (envelope, signingKeyPem) = CreateSignedEnvelope(signingKey);
|
||||
|
||||
var trustedKeys = new[]
|
||||
{
|
||||
ExportPublicKeyPem(otherKey1),
|
||||
signingKeyPem,
|
||||
ExportPublicKeyPem(otherKey2),
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, trustedKeys, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.ValidSignatureCount.Should().Be(1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithKeyResolver_UsesResolverForVerification()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, publicKeyPem) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
Task<string?> KeyResolver(string? keyId, CancellationToken ct)
|
||||
{
|
||||
return Task.FromResult<string?>(publicKeyPem);
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, KeyResolver, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_WithKeyResolverReturningNull_ReturnsFail()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, _) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
static Task<string?> KeyResolver(string? keyId, CancellationToken ct)
|
||||
{
|
||||
return Task.FromResult<string?>(null);
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, KeyResolver, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Issues.Should().Contain(i => i.Contains("key_not_found"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_ReturnsPayloadHash()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, publicKeyPem) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
// Act
|
||||
var result = await _verifier.VerifyAsync(envelope, publicKeyPem, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.PayloadHash.Should().StartWith("sha256:");
|
||||
result.PayloadHash.Should().HaveLength("sha256:".Length + 64);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_ThrowsOnNullEnvelope()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var publicKeyPem = ExportPublicKeyPem(ecdsa);
|
||||
|
||||
// Act & Assert - null envelope throws ArgumentNullException
|
||||
await Assert.ThrowsAsync<ArgumentNullException>(
|
||||
() => _verifier.VerifyAsync(null!, publicKeyPem, TestContext.Current.CancellationToken));
|
||||
|
||||
// Empty envelope throws ArgumentException (whitespace check)
|
||||
await Assert.ThrowsAsync<ArgumentException>(
|
||||
() => _verifier.VerifyAsync("", publicKeyPem, TestContext.Current.CancellationToken));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_ThrowsOnNullKeys()
|
||||
{
|
||||
// Arrange
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var (envelope, _) = CreateSignedEnvelope(ecdsa);
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<ArgumentNullException>(
|
||||
() => _verifier.VerifyAsync(envelope, (IEnumerable<string>)null!, TestContext.Current.CancellationToken));
|
||||
|
||||
await Assert.ThrowsAsync<ArgumentNullException>(
|
||||
() => _verifier.VerifyAsync(envelope, (Func<string?, CancellationToken, Task<string?>>)null!, TestContext.Current.CancellationToken));
|
||||
}
|
||||
|
||||
private static (string EnvelopeJson, string PublicKeyPem) CreateSignedEnvelope(ECDsa signingKey)
|
||||
{
|
||||
var payloadType = "https://in-toto.io/Statement/v1";
|
||||
var payloadContent = "{\"_type\":\"https://in-toto.io/Statement/v1\",\"subject\":[]}";
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payloadContent);
|
||||
var payloadBase64 = Convert.ToBase64String(payloadBytes);
|
||||
|
||||
// Compute PAE
|
||||
var pae = DsseHelper.PreAuthenticationEncoding(payloadType, payloadBytes);
|
||||
|
||||
// Sign
|
||||
var signatureBytes = signingKey.SignData(pae, HashAlgorithmName.SHA256);
|
||||
var signatureBase64 = Convert.ToBase64String(signatureBytes);
|
||||
|
||||
// Build envelope
|
||||
var envelope = JsonSerializer.Serialize(new
|
||||
{
|
||||
payloadType,
|
||||
payload = payloadBase64,
|
||||
signatures = new[]
|
||||
{
|
||||
new { keyId = "test-key-001", sig = signatureBase64 }
|
||||
}
|
||||
});
|
||||
|
||||
var publicKeyPem = ExportPublicKeyPem(signingKey);
|
||||
|
||||
return (envelope, publicKeyPem);
|
||||
}
|
||||
|
||||
private static string ExportPublicKeyPem(ECDsa key)
|
||||
{
|
||||
var publicKeyBytes = key.ExportSubjectPublicKeyInfo();
|
||||
var base64 = Convert.ToBase64String(publicKeyBytes);
|
||||
var builder = new StringBuilder();
|
||||
builder.AppendLine("-----BEGIN PUBLIC KEY-----");
|
||||
|
||||
for (var i = 0; i < base64.Length; i += 64)
|
||||
{
|
||||
builder.AppendLine(base64.Substring(i, Math.Min(64, base64.Length - i)));
|
||||
}
|
||||
|
||||
builder.AppendLine("-----END PUBLIC KEY-----");
|
||||
return builder.ToString();
|
||||
}
|
||||
}
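// Illustrative sketch only: the DSSE pre-authentication encoding (PAE) defined by the DSSE spec
// (https://github.com/secure-systems-lab/dsse) that DsseHelper.PreAuthenticationEncoding is
// expected to produce. The helper's actual implementation is not shown in this diff, so treat
// this as a reference rendering of the spec rather than the project's code.
internal static class PaeSketch
{
    public static byte[] Encode(string payloadType, byte[] payload)
    {
        // PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body,
        // where LEN is the ASCII decimal byte length and SP is a single space (0x20).
        var typeBytes = System.Text.Encoding.UTF8.GetBytes(payloadType);
        var header = System.Text.Encoding.UTF8.GetBytes(
            $"DSSEv1 {typeBytes.Length} {payloadType} {payload.Length} ");

        var pae = new byte[header.Length + payload.Length];
        header.CopyTo(pae, 0);
        payload.CopyTo(pae, header.Length);
        return pae;
    }
}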
|
||||
@@ -5,6 +5,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0044-M | DONE | Maintainability audit for StellaOps.Attestation.Tests. |
| AUDIT-0044-T | DONE | Test coverage audit for StellaOps.Attestation.Tests. |
| AUDIT-0044-A | TODO | Pending approval for changes. |
| AUDIT-0044-M | DONE | Revalidated maintainability for StellaOps.Attestation.Tests (2026-01-06). |
| AUDIT-0044-T | DONE | Revalidated test coverage for StellaOps.Attestation.Tests (2026-01-06). |
| AUDIT-0044-A | DONE | Waived (test project). |

301
src/Attestor/StellaOps.Attestation/DsseVerifier.cs
Normal file
@@ -0,0 +1,301 @@
|
||||
// <copyright file="DsseVerifier.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Attestation;
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of DSSE signature verification.
|
||||
/// Uses the existing DsseHelper for PAE computation.
|
||||
/// </summary>
|
||||
public sealed class DsseVerifier : IDsseVerifier
|
||||
{
|
||||
private readonly ILogger<DsseVerifier> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// JSON serializer options for parsing DSSE envelopes.
|
||||
/// </summary>
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
};
|
||||
|
||||
public DsseVerifier(ILogger<DsseVerifier> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<DsseVerificationResult> VerifyAsync(
|
||||
string envelopeJson,
|
||||
string publicKeyPem,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return VerifyAsync(envelopeJson, new[] { publicKeyPem }, cancellationToken);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DsseVerificationResult> VerifyAsync(
|
||||
string envelopeJson,
|
||||
IEnumerable<string> trustedKeysPem,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(envelopeJson);
|
||||
ArgumentNullException.ThrowIfNull(trustedKeysPem);
|
||||
|
||||
var trustedKeys = trustedKeysPem.ToList();
|
||||
if (trustedKeys.Count == 0)
|
||||
{
|
||||
return DsseVerificationResult.Failure(0, ImmutableArray.Create("no_trusted_keys_provided"));
|
||||
}
|
||||
|
||||
return await VerifyWithAllKeysAsync(envelopeJson, trustedKeys, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DsseVerificationResult> VerifyAsync(
|
||||
string envelopeJson,
|
||||
Func<string?, CancellationToken, Task<string?>> keyResolver,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(envelopeJson);
|
||||
ArgumentNullException.ThrowIfNull(keyResolver);
|
||||
|
||||
// Parse the envelope
|
||||
DsseEnvelopeDto? envelope;
|
||||
try
|
||||
{
|
||||
envelope = JsonSerializer.Deserialize<DsseEnvelopeDto>(envelopeJson, JsonOptions);
|
||||
if (envelope is null)
|
||||
{
|
||||
return DsseVerificationResult.ParseError("Failed to deserialize envelope");
|
||||
}
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to parse DSSE envelope JSON");
|
||||
return DsseVerificationResult.ParseError(ex.Message);
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(envelope.Payload))
|
||||
{
|
||||
return DsseVerificationResult.Failure(0, ImmutableArray.Create("envelope_missing_payload"));
|
||||
}
|
||||
|
||||
if (envelope.Signatures is null || envelope.Signatures.Count == 0)
|
||||
{
|
||||
return DsseVerificationResult.Failure(0, ImmutableArray.Create("envelope_missing_signatures"));
|
||||
}
|
||||
|
||||
// Decode payload
|
||||
byte[] payloadBytes;
|
||||
try
|
||||
{
|
||||
payloadBytes = Convert.FromBase64String(envelope.Payload);
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
return DsseVerificationResult.Failure(envelope.Signatures.Count, ImmutableArray.Create("payload_invalid_base64"));
|
||||
}
|
||||
|
||||
// Compute PAE for signature verification
|
||||
var payloadType = envelope.PayloadType ?? "https://in-toto.io/Statement/v1";
|
||||
var pae = DsseHelper.PreAuthenticationEncoding(payloadType, payloadBytes);

        // Verify each signature
        var verifiedKeyIds = new List<string>();
        var issues = new List<string>();

        foreach (var sig in envelope.Signatures)
        {
            if (string.IsNullOrWhiteSpace(sig.Sig))
            {
                issues.Add($"signature_{sig.KeyId ?? "unknown"}_empty");
                continue;
            }

            // Resolve the public key for this signature
            var publicKeyPem = await keyResolver(sig.KeyId, cancellationToken).ConfigureAwait(false);
            if (string.IsNullOrWhiteSpace(publicKeyPem))
            {
                issues.Add($"key_not_found_{sig.KeyId ?? "unknown"}");
                continue;
            }

            // Verify the signature
            try
            {
                var signatureBytes = Convert.FromBase64String(sig.Sig);
                if (VerifySignature(pae, signatureBytes, publicKeyPem))
                {
                    verifiedKeyIds.Add(sig.KeyId ?? "unknown");
                    _logger.LogDebug("DSSE signature verified for keyId: {KeyId}", sig.KeyId ?? "unknown");
                }
                else
                {
                    issues.Add($"signature_invalid_{sig.KeyId ?? "unknown"}");
                }
            }
            catch (FormatException)
            {
                issues.Add($"signature_invalid_base64_{sig.KeyId ?? "unknown"}");
            }
            catch (CryptographicException ex)
            {
                issues.Add($"signature_crypto_error_{sig.KeyId ?? "unknown"}: {ex.Message}");
            }
        }

        // Compute payload hash for result
        var payloadHash = $"sha256:{Convert.ToHexString(SHA256.HashData(payloadBytes)).ToLowerInvariant()}";

        if (verifiedKeyIds.Count > 0)
        {
            return DsseVerificationResult.Success(
                verifiedKeyIds.Count,
                envelope.Signatures.Count,
                verifiedKeyIds.ToImmutableArray(),
                payloadType,
                payloadHash);
        }

        return new DsseVerificationResult
        {
            IsValid = false,
            ValidSignatureCount = 0,
            TotalSignatureCount = envelope.Signatures.Count,
            VerifiedKeyIds = ImmutableArray<string>.Empty,
            PayloadType = payloadType,
            PayloadHash = payloadHash,
            Issues = issues.ToImmutableArray(),
        };
    }

    /// <summary>
    /// Verifies against all trusted keys, returning success if any key validates any signature.
    /// </summary>
    private async Task<DsseVerificationResult> VerifyWithAllKeysAsync(
        string envelopeJson,
        List<string> trustedKeys,
        CancellationToken cancellationToken)
    {
        // Parse envelope first to get signature keyIds
        DsseEnvelopeDto? envelope;
        try
        {
            envelope = JsonSerializer.Deserialize<DsseEnvelopeDto>(envelopeJson, JsonOptions);
            if (envelope is null)
            {
                return DsseVerificationResult.ParseError("Failed to deserialize envelope");
            }
        }
        catch (JsonException ex)
        {
            return DsseVerificationResult.ParseError(ex.Message);
        }

        if (envelope.Signatures is null || envelope.Signatures.Count == 0)
        {
            return DsseVerificationResult.Failure(0, ImmutableArray.Create("envelope_missing_signatures"));
        }

        // Try each trusted key
        var allIssues = new List<string>();
        foreach (var key in trustedKeys)
        {
            var keyIndex = trustedKeys.IndexOf(key);

            async Task<string?> SingleKeyResolver(string? keyId, CancellationToken ct)
            {
                await Task.CompletedTask.ConfigureAwait(false);
                return key;
            }
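
            // The local resolver above ignores the signature's keyId and always returns
            // the current trusted key, so the resolver-based overload ends up trying this
            // key against every signature in the envelope.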

            var result = await VerifyAsync(envelopeJson, SingleKeyResolver, cancellationToken).ConfigureAwait(false);
            if (result.IsValid)
            {
                return result;
            }

            // Collect issues for debugging
            foreach (var issue in result.Issues)
            {
                allIssues.Add($"key{keyIndex}: {issue}");
            }
        }

        return DsseVerificationResult.Failure(envelope.Signatures.Count, allIssues.ToImmutableArray());
    }

    /// <summary>
    /// Verifies a signature against PAE using the provided public key.
    /// Supports ECDSA P-256 and RSA keys.
    /// </summary>
    private bool VerifySignature(byte[] pae, byte[] signature, string publicKeyPem)
    {
        // Try ECDSA first (most common for Sigstore/Fulcio)
        try
        {
            using var ecdsa = ECDsa.Create();
            ecdsa.ImportFromPem(publicKeyPem);
            return ecdsa.VerifyData(pae, signature, HashAlgorithmName.SHA256);
        }
        catch (CryptographicException)
        {
            // Not an ECDSA key, try RSA
        }

        // Try RSA
        try
        {
            using var rsa = RSA.Create();
            rsa.ImportFromPem(publicKeyPem);
            return rsa.VerifyData(pae, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
        }
        catch (CryptographicException)
        {
            // Not an RSA key either
        }

        // Try Ed25519 if available (.NET 9+)
        try
        {
            // Ed25519 support via System.Security.Cryptography
            // Note: Ed25519 verification requires different handling
            // For now, we log and return false - can be extended later
            _logger.LogDebug("Ed25519 signature verification not yet implemented");
            return false;
        }
        catch
        {
            // Ed25519 not available
        }
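
        // Completing this branch would likely require a third-party Ed25519 provider
        // (e.g. BouncyCastle or NSec), since System.Security.Cryptography does not
        // expose Ed25519 here; that dependency is an assumption, not part of this change.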

        return false;
    }

    /// <summary>
    /// DTO for deserializing DSSE envelope JSON.
    /// </summary>
    private sealed class DsseEnvelopeDto
    {
        public string? PayloadType { get; set; }
        public string? Payload { get; set; }
        public List<DsseSignatureDto>? Signatures { get; set; }
    }

    /// <summary>
    /// DTO for DSSE signature.
    /// </summary>
    private sealed class DsseSignatureDto
    {
        public string? KeyId { get; set; }
        public string? Sig { get; set; }
    }
}
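
// Illustrative usage sketch (not part of this change): given an instance of this
// verifier, an envelope JSON string, and a PEM-encoded public key, the single-key
// overload delegates to the trusted-keys path shown above. `verifier`,
// `envelopeJson`, and `publicKeyPem` are placeholder names.
//
//     var result = await verifier.VerifyAsync(envelopeJson, publicKeyPem);
//     if (!result.IsValid)
//     {
//         foreach (var issue in result.Issues)
//         {
//             Console.WriteLine(issue);
//         }
//     }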