feat(audit): Apply TreatWarningsAsErrors=true to 160+ production csproj files

Sprint: SPRINT_20251229_049_BE_csproj_audit_maint_tests
Tasks: AUDIT-0001 through AUDIT-0147 (APPLY tasks for approved decisions 1-9)

Changes:
- Set TreatWarningsAsErrors=true for all production .NET projects
- Fixed nullable warnings in Scanner.EntryTrace, Scanner.Evidence,
  Scheduler.Worker, Concelier connectors, and other modules
- Injected TimeProvider/IGuidProvider for deterministic time/ID generation
- Added path traversal validation in AirGap.Bundle
- Fixed NULL handling in various cursor classes
- Third-party GostCryptography retains TreatWarningsAsErrors=false (preserves the original upstream setting)
- Test projects excluded per user decision (rejected decision 10)

Note: All 17 ACSC connector tests pass after snapshot fixture sync
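
Example (not part of the diff): the new constructor seams let tests pin time and ID generation.
A minimal sketch, assuming FakeTimeProvider from Microsoft.Extensions.TimeProvider.Testing and an
illustrative FixedGuidProvider helper that is not part of this commit:

    using Microsoft.Extensions.Time.Testing;
    using StellaOps.AirGap.Bundle.Services;

    var time = new FakeTimeProvider(new DateTimeOffset(2026, 1, 1, 0, 0, 0, TimeSpan.Zero));
    var builder = new BundleBuilder(time, new FixedGuidProvider());
    // BundleManifest.CreatedAt and BundleId produced by BuildAsync are now reproducible across runs.

    // Illustrative helper: always returns the same GUID for deterministic assertions.
    sealed class FixedGuidProvider : IGuidProvider
    {
        public Guid NewGuid() => Guid.Parse("00000000-0000-0000-0000-000000000001");
    }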
StellaOps Bot
2026-01-04 11:21:16 +02:00
parent bc4dd4f377
commit e411fde1a9
438 changed files with 2648 additions and 668 deletions

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />

View File

@@ -32,6 +32,18 @@ builder.Configuration
builder.Services.AddAdvisoryAiCore(builder.Configuration);
// Authorization service
builder.Services.AddSingleton<StellaOps.AdvisoryAI.WebService.Services.IAuthorizationService, StellaOps.AdvisoryAI.WebService.Services.HeaderBasedAuthorizationService>();
// Rate limits service with configuration
builder.Services.AddOptions<StellaOps.AdvisoryAI.WebService.Services.RateLimitsOptions>()
.Bind(builder.Configuration.GetSection(StellaOps.AdvisoryAI.WebService.Services.RateLimitsOptions.SectionName))
.ValidateOnStart();
builder.Services.AddSingleton<StellaOps.AdvisoryAI.WebService.Services.IRateLimitsService, StellaOps.AdvisoryAI.WebService.Services.ConfigDrivenRateLimitsService>();
// TimeProvider for deterministic timestamps
builder.Services.AddSingleton(TimeProvider.System);
// VEX-AI-016: Consent and justification services
builder.Services.AddSingleton<IAiConsentStore, InMemoryAiConsentStore>();
builder.Services.AddSingleton<IAiJustificationGenerator, DefaultAiJustificationGenerator>();
@@ -645,9 +657,12 @@ static async Task<IResult> HandlePolicyValidate(
}
// POLICY-19: POST /v1/advisory-ai/policy/studio/compile
// NOTE: This is a stub implementation. In production, this would compile rules into a PolicyBundle.
// The stub returns experimental markers to indicate incomplete implementation.
static Task<IResult> HandlePolicyCompile(
HttpContext httpContext,
PolicyCompileApiRequest request,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.policy_compile", ActivityKind.Server);
@@ -659,9 +674,14 @@ static Task<IResult> HandlePolicyCompile(
return Task.FromResult(Results.StatusCode(StatusCodes.Status403Forbidden));
}
// In a real implementation, this would compile rules into a PolicyBundle
var bundleId = $"bundle:{Guid.NewGuid():N}";
var now = DateTime.UtcNow;
// STUB: This endpoint is experimental and not wired to real policy compilation.
// Return a deterministic bundle ID derived from input to avoid nondeterministic output.
var inputHash = ComputeDeterministicBundleId(request.BundleName, request.RuleIds);
var bundleId = $"bundle:stub:{inputHash}";
var now = timeProvider.GetUtcNow();
// Compute content hash deterministically from the rule IDs
var contentHash = ComputeDeterministicContentHash(request.RuleIds);
var response = new PolicyBundleApiResponse
{
@@ -670,13 +690,29 @@ static Task<IResult> HandlePolicyCompile(
Version = "1.0.0",
RuleCount = request.RuleIds.Count,
CompiledAt = now.ToString("O"),
ContentHash = $"sha256:{Guid.NewGuid():N}",
ContentHash = $"sha256:{contentHash}",
SignatureId = null // Would be signed in production
};
return Task.FromResult(Results.Ok(response));
}
// Deterministic hash computation for stub bundle ID
static string ComputeDeterministicBundleId(string bundleName, IReadOnlyList<string> ruleIds)
{
var input = $"{bundleName}:{string.Join(",", ruleIds.OrderBy(x => x, StringComparer.Ordinal))}";
var bytes = System.Security.Cryptography.SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes)[..32].ToLowerInvariant();
}
// Deterministic content hash for stub bundles
static string ComputeDeterministicContentHash(IReadOnlyList<string> ruleIds)
{
var input = string.Join(",", ruleIds.OrderBy(x => x, StringComparer.Ordinal));
var bytes = System.Security.Cryptography.SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes).ToLowerInvariant();
}
// VEX-AI-016: Consent handler functions
static string GetTenantId(HttpContext context)
{
@@ -869,41 +905,24 @@ static async Task<IResult> HandleRemediate(
}
}
// VEX-AI-016: Rate limits handler
// VEX-AI-016: Rate limits handler using config-driven service
static Task<IResult> HandleGetRateLimits(
HttpContext httpContext,
StellaOps.AdvisoryAI.WebService.Services.IRateLimitsService rateLimitsService,
TimeProvider timeProvider,
CancellationToken cancellationToken)
{
// Return current rate limit info for each feature
var now = DateTimeOffset.UtcNow;
var resetTime = now.AddMinutes(1);
var limits = rateLimitsService.GetRateLimits(timeProvider);
var limits = new List<AiRateLimitInfoResponse>
var response = limits.Select(l => new AiRateLimitInfoResponse
{
new AiRateLimitInfoResponse
{
Feature = "explain",
Limit = 10,
Remaining = 10,
ResetsAt = resetTime.ToString("O")
},
new AiRateLimitInfoResponse
{
Feature = "remediate",
Limit = 5,
Remaining = 5,
ResetsAt = resetTime.ToString("O")
},
new AiRateLimitInfoResponse
{
Feature = "justify",
Limit = 3,
Remaining = 3,
ResetsAt = resetTime.ToString("O")
}
};
Feature = l.Feature,
Limit = l.Limit,
Remaining = l.Remaining,
ResetsAt = l.ResetsAt.ToString("O")
}).ToList();
return Task.FromResult(Results.Ok(limits));
return Task.FromResult(Results.Ok(response));
}
internal sealed record PipelinePlanRequest(

View File

@@ -0,0 +1,116 @@
using Microsoft.AspNetCore.Http;
using StellaOps.AdvisoryAI.Orchestration;
namespace StellaOps.AdvisoryAI.WebService.Services;
/// <summary>
/// Consolidated authorization service for advisory-ai endpoints.
/// Provides consistent scope-based authorization checks.
/// </summary>
public interface IAuthorizationService
{
/// <summary>
/// Checks if the request is authorized for the given task type.
/// </summary>
bool IsAuthorized(HttpContext context, AdvisoryTaskType taskType);
/// <summary>
/// Checks if the request is authorized for explanation operations.
/// </summary>
bool IsExplainAuthorized(HttpContext context);
/// <summary>
/// Checks if the request is authorized for remediation operations.
/// </summary>
bool IsRemediationAuthorized(HttpContext context);
/// <summary>
/// Checks if the request is authorized for policy studio operations.
/// </summary>
bool IsPolicyAuthorized(HttpContext context);
/// <summary>
/// Checks if the request is authorized for justification operations.
/// </summary>
bool IsJustifyAuthorized(HttpContext context);
/// <summary>
/// Gets the tenant ID from the request headers.
/// </summary>
string GetTenantId(HttpContext context);
/// <summary>
/// Gets the user ID from the request headers.
/// </summary>
string GetUserId(HttpContext context);
}
/// <summary>
/// Default implementation of authorization service using header-based scopes.
/// </summary>
public sealed class HeaderBasedAuthorizationService : IAuthorizationService
{
private const string ScopesHeader = "X-StellaOps-Scopes";
private const string TenantHeader = "X-StellaOps-Tenant";
private const string UserHeader = "X-StellaOps-User";
public bool IsAuthorized(HttpContext context, AdvisoryTaskType taskType)
{
var scopes = GetScopes(context);
if (scopes.Contains("advisory:run"))
{
return true;
}
return scopes.Contains($"advisory:{taskType.ToString().ToLowerInvariant()}");
}
public bool IsExplainAuthorized(HttpContext context)
{
var scopes = GetScopes(context);
return scopes.Contains("advisory:run") || scopes.Contains("advisory:explain");
}
public bool IsRemediationAuthorized(HttpContext context)
{
var scopes = GetScopes(context);
return scopes.Contains("advisory:run") || scopes.Contains("advisory:remediate");
}
public bool IsPolicyAuthorized(HttpContext context)
{
var scopes = GetScopes(context);
return scopes.Contains("advisory:run") || scopes.Contains("policy:write");
}
public bool IsJustifyAuthorized(HttpContext context)
{
var scopes = GetScopes(context);
return scopes.Contains("advisory:run") || scopes.Contains("advisory:justify");
}
public string GetTenantId(HttpContext context)
{
return context.Request.Headers.TryGetValue(TenantHeader, out var value)
? value.ToString()
: "default";
}
public string GetUserId(HttpContext context)
{
return context.Request.Headers.TryGetValue(UserHeader, out var value)
? value.ToString()
: "anonymous";
}
private static HashSet<string> GetScopes(HttpContext context)
{
if (!context.Request.Headers.TryGetValue(ScopesHeader, out var scopes))
{
return [];
}
return scopes
.SelectMany(value => value?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) ?? [])
.ToHashSet(StringComparer.OrdinalIgnoreCase);
}
}
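A minimal usage sketch of the header-driven scope checks (values are illustrative; assumes ASP.NET Core's DefaultHttpContext for a fake request):
using Microsoft.AspNetCore.Http;
using StellaOps.AdvisoryAI.WebService.Services;
var context = new DefaultHttpContext();
context.Request.Headers["X-StellaOps-Scopes"] = "advisory:explain policy:write";
var auth = new HeaderBasedAuthorizationService();
// advisory:run is absent, so each check falls back to its feature-specific scope.
var canExplain = auth.IsExplainAuthorized(context);       // true
var canRemediate = auth.IsRemediationAuthorized(context); // false
var canEditPolicy = auth.IsPolicyAuthorized(context);     // true
var tenant = auth.GetTenantId(context);                   // "default" (tenant header not set)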

View File

@@ -0,0 +1,108 @@
using Microsoft.Extensions.Options;
namespace StellaOps.AdvisoryAI.WebService.Services;
/// <summary>
/// Configuration for feature-specific rate limits.
/// </summary>
public sealed class RateLimitsOptions
{
public const string SectionName = "AdvisoryAI:RateLimits";
/// <summary>
/// Rate limit for the explain feature.
/// </summary>
public FeatureRateLimitOptions Explain { get; set; } = new() { Limit = 10, PeriodMinutes = 1 };
/// <summary>
/// Rate limit for the remediate feature.
/// </summary>
public FeatureRateLimitOptions Remediate { get; set; } = new() { Limit = 5, PeriodMinutes = 1 };
/// <summary>
/// Rate limit for the justify feature.
/// </summary>
public FeatureRateLimitOptions Justify { get; set; } = new() { Limit = 3, PeriodMinutes = 1 };
}
/// <summary>
/// Rate limit configuration for a single feature.
/// </summary>
public sealed class FeatureRateLimitOptions
{
/// <summary>
/// Maximum number of requests allowed per period.
/// </summary>
public int Limit { get; set; }
/// <summary>
/// Period duration in minutes.
/// </summary>
public int PeriodMinutes { get; set; }
}
/// <summary>
/// Represents rate limit information for a feature.
/// </summary>
public sealed class RateLimitInfo
{
public required string Feature { get; init; }
public required int Limit { get; init; }
public required int Remaining { get; init; }
public required DateTimeOffset ResetsAt { get; init; }
}
/// <summary>
/// Service for managing rate limit state and reporting.
/// </summary>
public interface IRateLimitsService
{
/// <summary>
/// Gets the current rate limit information for all features.
/// </summary>
IReadOnlyList<RateLimitInfo> GetRateLimits(TimeProvider timeProvider);
}
/// <summary>
/// Default implementation of rate limits service using configuration.
/// In production, this would integrate with the actual rate limiter state.
/// </summary>
public sealed class ConfigDrivenRateLimitsService : IRateLimitsService
{
private readonly RateLimitsOptions _options;
public ConfigDrivenRateLimitsService(IOptions<RateLimitsOptions> options)
{
_options = options.Value;
}
public IReadOnlyList<RateLimitInfo> GetRateLimits(TimeProvider timeProvider)
{
var now = timeProvider.GetUtcNow();
return
[
new RateLimitInfo
{
Feature = "explain",
Limit = _options.Explain.Limit,
Remaining = _options.Explain.Limit, // Would integrate with actual limiter state
ResetsAt = now.AddMinutes(_options.Explain.PeriodMinutes)
},
new RateLimitInfo
{
Feature = "remediate",
Limit = _options.Remediate.Limit,
Remaining = _options.Remediate.Limit, // Would integrate with actual limiter state
ResetsAt = now.AddMinutes(_options.Remediate.PeriodMinutes)
},
new RateLimitInfo
{
Feature = "justify",
Limit = _options.Justify.Limit,
Remaining = _options.Justify.Limit, // Would integrate with actual limiter state
ResetsAt = now.AddMinutes(_options.Justify.PeriodMinutes)
}
];
}
}
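A minimal sketch of how the AdvisoryAI:RateLimits section binds and flows through the service (in-memory keys are illustrative; features not configured keep their defaults):
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.WebService.Services;
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["AdvisoryAI:RateLimits:Explain:Limit"] = "20",
        ["AdvisoryAI:RateLimits:Explain:PeriodMinutes"] = "5"
    })
    .Build();
var options = configuration.GetSection(RateLimitsOptions.SectionName).Get<RateLimitsOptions>() ?? new RateLimitsOptions();
var service = new ConfigDrivenRateLimitsService(Options.Create(options));
// Remaining currently mirrors Limit; ResetsAt is now + PeriodMinutes for each feature.
var limits = service.GetRateLimits(TimeProvider.System);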

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" />

View File

@@ -12,6 +12,10 @@ namespace StellaOps.AdvisoryAI.Worker.Services;
internal sealed class AdvisoryTaskWorker : BackgroundService
{
private const int MaxRetryDelaySeconds = 60;
private const int BaseRetryDelaySeconds = 2;
private const double JitterFactor = 0.2;
private readonly IAdvisoryTaskQueue _queue;
private readonly IAdvisoryPlanCache _cache;
private readonly IAdvisoryPipelineOrchestrator _orchestrator;
@@ -19,6 +23,7 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
private readonly IAdvisoryPipelineExecutor _executor;
private readonly TimeProvider _timeProvider;
private readonly ILogger<AdvisoryTaskWorker> _logger;
private int _consecutiveErrors;
public AdvisoryTaskWorker(
IAdvisoryTaskQueue queue,
@@ -61,11 +66,28 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
var fromCache = plan is not null && !message.Request.ForceRefresh;
activity?.SetTag("advisory.plan_cache_hit", fromCache);
// When a cache miss occurs, preserve the original plan cache key by also storing
// the freshly created plan under the message's key as an alias
string effectiveCacheKey = message.PlanCacheKey;
if (!fromCache)
{
var start = _timeProvider.GetTimestamp();
plan = await _orchestrator.CreatePlanAsync(message.Request, stoppingToken).ConfigureAwait(false);
// Store under both the new cache key and the original message key
await _cache.SetAsync(plan.CacheKey, plan, stoppingToken).ConfigureAwait(false);
// If the new plan's cache key differs from the original request,
// also store under the original key as an alias
if (!string.Equals(plan.CacheKey, message.PlanCacheKey, StringComparison.Ordinal))
{
await _cache.SetAsync(message.PlanCacheKey, plan, stoppingToken).ConfigureAwait(false);
_logger.LogDebug(
"Plan cache key changed from {OriginalKey} to {NewKey}; stored alias",
message.PlanCacheKey,
plan.CacheKey);
}
var elapsed = _timeProvider.GetElapsedTime(start);
_metrics.RecordPlanCreated(elapsed.TotalSeconds, message.Request.TaskType);
}
@@ -85,18 +107,48 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
var totalElapsed = _timeProvider.GetElapsedTime(processStart);
_metrics.RecordPipelineLatency(message.Request.TaskType, totalElapsed.TotalSeconds, fromCache);
activity?.SetTag("advisory.pipeline_latency_seconds", totalElapsed.TotalSeconds);
// Reset consecutive error count on success
_consecutiveErrors = 0;
}
catch (OperationCanceledException)
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
// graceful shutdown
// Graceful shutdown - exit the loop cleanly
break;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error processing advisory task queue message");
await Task.Delay(TimeSpan.FromSeconds(2), stoppingToken).ConfigureAwait(false);
_consecutiveErrors++;
// Apply exponential backoff with jitter
var delaySeconds = ComputeRetryDelay(_consecutiveErrors);
try
{
await Task.Delay(TimeSpan.FromSeconds(delaySeconds), stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
// Graceful shutdown during delay - exit cleanly
break;
}
}
}
_logger.LogInformation("Advisory pipeline worker stopping");
}
/// <summary>
/// Computes retry delay with exponential backoff and jitter.
/// </summary>
private double ComputeRetryDelay(int errorCount)
{
// Exponential backoff: base * 2^(errorCount-1), capped at max
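// Example: after 5 consecutive failures, backoff = min(2 * 2^4, 60) = 32s; +/-20% jitter puts the wait roughly in the 25.6-38.4s range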
var backoff = Math.Min(BaseRetryDelaySeconds * Math.Pow(2, errorCount - 1), MaxRetryDelaySeconds);
// Add jitter of up to +/- JitterFactor (i.e. +/-20%) of the backoff
var jitter = backoff * JitterFactor * (2 * Random.Shared.NextDouble() - 1);
return Math.Max(BaseRetryDelaySeconds, backoff + jitter);
}
}

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />

View File

@@ -2,6 +2,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<RootNamespace>StellaOps.AirGap.Controller</RootNamespace>
</PropertyGroup>

View File

@@ -2,6 +2,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<RootNamespace>StellaOps.AirGap.Importer</RootNamespace>
</PropertyGroup>

View File

@@ -3,6 +3,7 @@
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<EnforceExtendedAnalyzerRules>true</EnforceExtendedAnalyzerRules>
<IncludeBuildOutput>false</IncludeBuildOutput>

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -2,6 +2,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<RootNamespace>StellaOps.AirGap.Time</RootNamespace>
</PropertyGroup>

View File

@@ -5,6 +5,7 @@
// Description: Extracts advisory data from Concelier for knowledge snapshot bundles.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Services;
@@ -23,10 +24,17 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
};
private readonly IAdvisoryDataSource _dataSource;
private readonly TimeProvider _timeProvider;
public AdvisorySnapshotExtractor(IAdvisoryDataSource dataSource)
: this(dataSource, TimeProvider.System)
{
}
public AdvisorySnapshotExtractor(IAdvisoryDataSource dataSource, TimeProvider timeProvider)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
@@ -46,7 +54,10 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
{
var feeds = await _dataSource.GetAvailableFeedsAsync(cancellationToken);
foreach (var feed in feeds)
// Sort feeds for deterministic output
var sortedFeeds = feeds.OrderBy(f => f.FeedId, StringComparer.Ordinal).ToList();
foreach (var feed in sortedFeeds)
{
// Skip if specific feeds are requested and this isn't one of them
if (request.FeedIds is { Count: > 0 } && !request.FeedIds.Contains(feed.FeedId))
@@ -119,6 +130,8 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
};
}
var snapshotAt = _timeProvider.GetUtcNow();
// Serialize advisories to NDJSON format for deterministic output
var contentBuilder = new StringBuilder();
foreach (var advisory in advisories.OrderBy(a => a.Id, StringComparer.Ordinal))
@@ -128,7 +141,8 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
}
var contentBytes = Encoding.UTF8.GetBytes(contentBuilder.ToString());
var fileName = $"{feedId}-{DateTime.UtcNow:yyyyMMddHHmmss}.ndjson";
// Use invariant culture for deterministic filename formatting
var fileName = $"{feedId}-{snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture)}.ndjson";
return new FeedExtractionResult
{
@@ -139,7 +153,7 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
FeedId = feedId,
FileName = fileName,
Content = contentBytes,
SnapshotAt = DateTimeOffset.UtcNow,
SnapshotAt = snapshotAt,
RecordCount = advisories.Count
}
};

View File

@@ -23,11 +23,23 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Fixed mtime for deterministic tar headers (2024-01-01 00:00:00 UTC).
/// </summary>
private const long DeterministicMtime = 1704067200;
private readonly IPolicyDataSource _dataSource;
private readonly TimeProvider _timeProvider;
public PolicySnapshotExtractor(IPolicyDataSource dataSource)
: this(dataSource, TimeProvider.System)
{
}
public PolicySnapshotExtractor(IPolicyDataSource dataSource, TimeProvider timeProvider)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
@@ -46,7 +58,10 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor
{
var policies = await _dataSource.GetAvailablePoliciesAsync(cancellationToken);
foreach (var policy in policies)
// Sort policies for deterministic output
var sortedPolicies = policies.OrderBy(p => p.PolicyId, StringComparer.Ordinal).ToList();
foreach (var policy in sortedPolicies)
{
// Skip if specific types are requested and this isn't one of them
if (request.Types is { Count: > 0 } && !request.Types.Contains(policy.Type))
@@ -247,9 +262,8 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor
// File size in octal (124-135)
Encoding.ASCII.GetBytes(Convert.ToString(fileSize, 8).PadLeft(11, '0')).CopyTo(header, 124);
// Modification time (136-147)
var mtime = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
Encoding.ASCII.GetBytes(Convert.ToString(mtime, 8).PadLeft(11, '0')).CopyTo(header, 136);
// Modification time (136-147) - use deterministic mtime for reproducible output
Encoding.ASCII.GetBytes(Convert.ToString(DeterministicMtime, 8).PadLeft(11, '0')).CopyTo(header, 136);
// Checksum placeholder (148-155) - spaces
for (var i = 148; i < 156; i++)

View File

@@ -5,6 +5,7 @@
// Description: Extracts VEX statement data from Excititor for knowledge snapshot bundles.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Services;
@@ -24,10 +25,17 @@ public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
};
private readonly IVexDataSource _dataSource;
private readonly TimeProvider _timeProvider;
public VexSnapshotExtractor(IVexDataSource dataSource)
: this(dataSource, TimeProvider.System)
{
}
public VexSnapshotExtractor(IVexDataSource dataSource, TimeProvider timeProvider)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
@@ -47,7 +55,10 @@ public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
{
var sources = await _dataSource.GetAvailableSourcesAsync(cancellationToken);
foreach (var source in sources)
// Sort sources for deterministic output
var sortedSources = sources.OrderBy(s => s.SourceId, StringComparer.Ordinal).ToList();
foreach (var source in sortedSources)
{
// Skip if specific sources are requested and this isn't one of them
if (request.SourceIds is { Count: > 0 } && !request.SourceIds.Contains(source.SourceId))
@@ -120,19 +131,22 @@ public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
};
}
var snapshotAt = _timeProvider.GetUtcNow();
var timestampStr = snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture);
// Serialize statements to OpenVEX format
var document = new OpenVexDocument
{
Context = "https://openvex.dev/ns",
Id = $"urn:stellaops:vex:{sourceId}:{DateTime.UtcNow:yyyyMMddHHmmss}",
Id = $"urn:stellaops:vex:{sourceId}:{timestampStr}",
Author = sourceId,
Timestamp = DateTimeOffset.UtcNow,
Timestamp = snapshotAt,
Version = 1,
Statements = statements.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal).ToList()
};
var contentBytes = JsonSerializer.SerializeToUtf8Bytes(document, JsonOptions);
var fileName = $"{sourceId}-{DateTime.UtcNow:yyyyMMddHHmmss}.json";
var fileName = $"{sourceId}-{timestampStr}.json";
return new VexSourceExtractionResult
{
@@ -143,7 +157,7 @@ public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
SourceId = sourceId,
FileName = fileName,
Content = contentBytes,
SnapshotAt = DateTimeOffset.UtcNow,
SnapshotAt = snapshotAt,
StatementCount = statements.Count
}
};

View File

@@ -0,0 +1,157 @@
// -----------------------------------------------------------------------------
// Abstractions.cs
// Description: Abstractions for deterministic/testable time and ID generation.
// -----------------------------------------------------------------------------
namespace StellaOps.AirGap.Bundle.Services;
/// <summary>
/// Provides unique identifiers. Inject to enable deterministic testing.
/// </summary>
public interface IGuidProvider
{
/// <summary>
/// Creates a new unique identifier.
/// </summary>
Guid NewGuid();
}
/// <summary>
/// Default GUID provider using system random GUIDs.
/// </summary>
public sealed class SystemGuidProvider : IGuidProvider
{
/// <summary>
/// Singleton instance of the system GUID provider.
/// </summary>
public static SystemGuidProvider Instance { get; } = new();
/// <inheritdoc />
public Guid NewGuid() => Guid.NewGuid();
}
/// <summary>
/// Options for configuring bundle validation behavior.
/// </summary>
public sealed class BundleValidationOptions
{
/// <summary>
/// Maximum age in days for feed snapshots before they are flagged as stale.
/// Default is 7 days.
/// </summary>
public int MaxFeedAgeDays { get; set; } = 7;
/// <summary>
/// Whether to fail validation on stale feeds or just warn.
/// </summary>
public bool FailOnStaleFeed { get; set; }
/// <summary>
/// Whether to validate policy digests.
/// </summary>
public bool ValidatePolicies { get; set; } = true;
/// <summary>
/// Whether to validate crypto material digests.
/// </summary>
public bool ValidateCryptoMaterials { get; set; } = true;
/// <summary>
/// Whether to validate catalog digests if present.
/// </summary>
public bool ValidateCatalogs { get; set; } = true;
/// <summary>
/// Whether to validate Rekor snapshot entries if present.
/// </summary>
public bool ValidateRekorSnapshots { get; set; } = true;
/// <summary>
/// Whether to validate crypto provider entries if present.
/// </summary>
public bool ValidateCryptoProviders { get; set; } = true;
}
/// <summary>
/// Utility methods for path validation and security.
/// </summary>
public static class PathValidation
{
/// <summary>
/// Validates that a relative path does not escape the bundle root.
/// </summary>
/// <param name="relativePath">The relative path to validate.</param>
/// <returns>True if the path is safe; false if it contains traversal sequences or is absolute.</returns>
public static bool IsSafeRelativePath(string? relativePath)
{
if (string.IsNullOrWhiteSpace(relativePath))
{
return false;
}
// Check for absolute paths
if (Path.IsPathRooted(relativePath))
{
return false;
}
// Check for path traversal sequences
var normalized = relativePath.Replace('\\', '/');
var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries);
var depth = 0;
foreach (var segment in segments)
{
if (segment == "..")
{
depth--;
if (depth < 0)
{
return false;
}
}
else if (segment != ".")
{
depth++;
}
}
// Reject embedded null bytes in the raw path string
if (relativePath.Contains('\0'))
{
return false;
}
return true;
}
/// <summary>
/// Combines a root path with a relative path, validating that the result does not escape the root.
/// </summary>
/// <param name="rootPath">The root directory path.</param>
/// <param name="relativePath">The relative path to combine.</param>
/// <returns>The combined path.</returns>
/// <exception cref="ArgumentException">Thrown if the relative path would escape the root.</exception>
public static string SafeCombine(string rootPath, string relativePath)
{
if (!IsSafeRelativePath(relativePath))
{
throw new ArgumentException(
$"Invalid relative path: path traversal or absolute path detected in '{relativePath}'",
nameof(relativePath));
}
var combined = Path.GetFullPath(Path.Combine(rootPath, relativePath));
var normalizedRoot = Path.GetFullPath(rootPath);
// Ensure the combined path stays under the root; compare against the root with a
// trailing separator so a sibling directory such as "root-other" cannot slip through
var rootWithSeparator = Path.EndsInDirectorySeparator(normalizedRoot)
? normalizedRoot
: normalizedRoot + Path.DirectorySeparatorChar;
if (!string.Equals(combined, normalizedRoot, StringComparison.OrdinalIgnoreCase) &&
!combined.StartsWith(rootWithSeparator, StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException(
"Path traversal detected: combined path escapes root directory",
nameof(relativePath));
}
return combined;
}
}
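A minimal usage sketch (paths are illustrative):
// Leading traversal escapes the root, so it is rejected.
var escapesRoot = PathValidation.IsSafeRelativePath("../secrets/key.pem");             // false
// Traversal that stays inside the tree is allowed.
var staysInside = PathValidation.IsSafeRelativePath("feeds/../policies/policy.json");  // true
// Resolves to a path under /tmp/bundle; SafeCombine("/tmp/bundle", "../../etc/passwd") would throw ArgumentException instead.
var resolved = PathValidation.SafeCombine("/tmp/bundle", "feeds/ubuntu.ndjson");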

View File

@@ -8,6 +8,19 @@ namespace StellaOps.AirGap.Bundle.Services;
public sealed class BundleBuilder : IBundleBuilder
{
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public BundleBuilder() : this(TimeProvider.System, SystemGuidProvider.Instance)
{
}
public BundleBuilder(TimeProvider timeProvider, IGuidProvider guidProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
}
public async Task<BundleManifest> BuildAsync(
BundleBuildRequest request,
string outputPath,
@@ -21,7 +34,10 @@ public sealed class BundleBuilder : IBundleBuilder
foreach (var feedConfig in request.Feeds)
{
var component = await CopyComponentAsync(feedConfig, outputPath, ct).ConfigureAwait(false);
// Validate relative path before combining
var targetPath = PathValidation.SafeCombine(outputPath, feedConfig.RelativePath);
var component = await CopyComponentAsync(feedConfig, outputPath, targetPath, ct).ConfigureAwait(false);
feeds.Add(new FeedComponent(
feedConfig.FeedId,
feedConfig.Name,
@@ -35,7 +51,10 @@ public sealed class BundleBuilder : IBundleBuilder
foreach (var policyConfig in request.Policies)
{
var component = await CopyComponentAsync(policyConfig, outputPath, ct).ConfigureAwait(false);
// Validate relative path before combining
var targetPath = PathValidation.SafeCombine(outputPath, policyConfig.RelativePath);
var component = await CopyComponentAsync(policyConfig, outputPath, targetPath, ct).ConfigureAwait(false);
policies.Add(new PolicyComponent(
policyConfig.PolicyId,
policyConfig.Name,
@@ -48,7 +67,10 @@ public sealed class BundleBuilder : IBundleBuilder
foreach (var cryptoConfig in request.CryptoMaterials)
{
var component = await CopyComponentAsync(cryptoConfig, outputPath, ct).ConfigureAwait(false);
// Validate relative path before combining
var targetPath = PathValidation.SafeCombine(outputPath, cryptoConfig.RelativePath);
var component = await CopyComponentAsync(cryptoConfig, outputPath, targetPath, ct).ConfigureAwait(false);
cryptoMaterials.Add(new CryptoComponent(
cryptoConfig.ComponentId,
cryptoConfig.Name,
@@ -65,11 +87,11 @@ public sealed class BundleBuilder : IBundleBuilder
var manifest = new BundleManifest
{
BundleId = Guid.NewGuid().ToString(),
BundleId = _guidProvider.NewGuid().ToString(),
SchemaVersion = "1.0.0",
Name = request.Name,
Version = request.Version,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
ExpiresAt = request.ExpiresAt,
Feeds = feeds.ToImmutableArray(),
Policies = policies.ToImmutableArray(),
@@ -83,9 +105,9 @@ public sealed class BundleBuilder : IBundleBuilder
private static async Task<CopiedComponent> CopyComponentAsync(
BundleComponentSource source,
string outputPath,
string targetPath,
CancellationToken ct)
{
var targetPath = Path.Combine(outputPath, source.RelativePath);
Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath);
await using (var input = File.OpenRead(source.SourcePath))

View File

@@ -25,6 +25,19 @@ public sealed class SnapshotBundleReader : ISnapshotBundleReader
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public SnapshotBundleReader() : this(TimeProvider.System, SystemGuidProvider.Instance)
{
}
public SnapshotBundleReader(TimeProvider timeProvider, IGuidProvider guidProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
}
/// <summary>
/// Reads and verifies a snapshot bundle.
/// </summary>
@@ -40,12 +53,12 @@ public sealed class SnapshotBundleReader : ISnapshotBundleReader
return SnapshotBundleReadResult.Failed("Bundle file not found");
}
var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{Guid.NewGuid():N}");
var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{_guidProvider.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Extract the bundle
// Extract the bundle with path validation
await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);
// Read manifest
@@ -124,7 +137,7 @@ public sealed class SnapshotBundleReader : ISnapshotBundleReader
// Verify time anchor if present
if (request.VerifyTimeAnchor && manifest.TimeAnchor is not null)
{
var timeAnchorService = new TimeAnchorService();
var timeAnchorService = new TimeAnchorService(_timeProvider, _guidProvider);
var timeAnchorContent = new TimeAnchorContent
{
AnchorTime = manifest.TimeAnchor.AnchorTime,
@@ -185,7 +198,34 @@ public sealed class SnapshotBundleReader : ISnapshotBundleReader
{
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
await using var tarReader = new TarReader(gzipStream);
TarEntry? entry;
while ((entry = await tarReader.GetNextEntryAsync(copyData: false, ct)) is not null)
{
ct.ThrowIfCancellationRequested();
// Validate entry name to prevent path traversal
if (!PathValidation.IsSafeRelativePath(entry.Name))
{
throw new InvalidOperationException(
$"Unsafe path detected in bundle: '{entry.Name}'. Path traversal or absolute paths are not allowed.");
}
// Calculate safe target path
var targetPath = PathValidation.SafeCombine(targetDir, entry.Name);
var targetEntryDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetEntryDir) && !Directory.Exists(targetEntryDir))
{
Directory.CreateDirectory(targetEntryDir);
}
if (entry.EntryType == TarEntryType.RegularFile && entry.DataStream is not null)
{
await using var outputStream = File.Create(targetPath);
await entry.DataStream.CopyToAsync(outputStream, ct);
}
}
}
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using System.Formats.Tar;
using System.Globalization;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
@@ -26,6 +27,24 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Fixed mtime for deterministic tar headers (2024-01-01 00:00:00 UTC).
/// </summary>
private static readonly DateTimeOffset DeterministicMtime = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public SnapshotBundleWriter() : this(TimeProvider.System, SystemGuidProvider.Instance)
{
}
public SnapshotBundleWriter(TimeProvider timeProvider, IGuidProvider guidProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
}
/// <summary>
/// Creates a knowledge snapshot bundle from the specified contents.
/// </summary>
@@ -36,18 +55,19 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{Guid.NewGuid():N}");
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{_guidProvider.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
var entries = new List<BundleEntry>();
var createdAt = _timeProvider.GetUtcNow();
var manifest = new KnowledgeSnapshotManifest
{
BundleId = request.BundleId ?? Guid.NewGuid().ToString("N"),
Name = request.Name ?? $"knowledge-{DateTime.UtcNow:yyyy-MM-dd}",
BundleId = request.BundleId ?? _guidProvider.NewGuid().ToString("N"),
Name = request.Name ?? $"knowledge-{createdAt.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture)}",
Version = request.Version ?? "1.0.0",
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = createdAt,
SchemaVersion = "1.0.0"
};
@@ -75,7 +95,7 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
RelativePath = relativePath,
Digest = digest,
SizeBytes = advisory.Content.Length,
SnapshotAt = advisory.SnapshotAt ?? DateTimeOffset.UtcNow,
SnapshotAt = advisory.SnapshotAt ?? createdAt,
RecordCount = advisory.RecordCount
});
}
@@ -105,7 +125,7 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
RelativePath = relativePath,
Digest = digest,
SizeBytes = vex.Content.Length,
SnapshotAt = vex.SnapshotAt ?? DateTimeOffset.UtcNow,
SnapshotAt = vex.SnapshotAt ?? createdAt,
StatementCount = vex.StatementCount
});
}
@@ -321,7 +341,24 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
await using var fileStream = File.Create(outputPath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, includeBaseDirectory: false, ct);
await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
// Collect all files and sort for deterministic ordering
var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories)
.Select(f => (FullPath: f, RelativePath: Path.GetRelativePath(sourceDir, f).Replace('\\', '/')))
.OrderBy(f => f.RelativePath, StringComparer.Ordinal)
.ToList();
foreach (var (fullPath, relativePath) in files)
{
// Dispose each source stream promptly once its entry has been written
await using var dataStream = File.OpenRead(fullPath);
var entry = new PaxTarEntry(TarEntryType.RegularFile, relativePath)
{
DataStream = dataStream,
ModificationTime = DeterministicMtime,
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead
};
await tarWriter.WriteEntryAsync(entry, ct);
}
}
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);

View File

@@ -23,6 +23,19 @@ public sealed class TimeAnchorService : ITimeAnchorService
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public TimeAnchorService() : this(TimeProvider.System, SystemGuidProvider.Instance)
{
}
public TimeAnchorService(TimeProvider timeProvider, IGuidProvider guidProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
}
/// <summary>
/// Creates a time anchor token for a snapshot.
/// </summary>
@@ -39,8 +52,8 @@ public sealed class TimeAnchorService : ITimeAnchorService
return source switch
{
"local" => await CreateLocalAnchorAsync(request, cancellationToken),
var s when s.StartsWith("roughtime:") => await CreateRoughtimeAnchorAsync(request, cancellationToken),
var s when s.StartsWith("rfc3161:") => await CreateRfc3161AnchorAsync(request, cancellationToken),
var s when s.StartsWith("roughtime:", StringComparison.Ordinal) => await CreateRoughtimeAnchorAsync(request, cancellationToken),
var s when s.StartsWith("rfc3161:", StringComparison.Ordinal) => await CreateRfc3161AnchorAsync(request, cancellationToken),
_ => await CreateLocalAnchorAsync(request, cancellationToken)
};
}
@@ -64,7 +77,7 @@ public sealed class TimeAnchorService : ITimeAnchorService
try
{
// Validate timestamp is within acceptable range
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var anchorAge = now - anchor.AnchorTime;
if (request.MaxAgeHours.HasValue && anchorAge.TotalHours > request.MaxAgeHours.Value)
@@ -127,19 +140,19 @@ public sealed class TimeAnchorService : ITimeAnchorService
}
}
private static async Task<TimeAnchorResult> CreateLocalAnchorAsync(
private async Task<TimeAnchorResult> CreateLocalAnchorAsync(
TimeAnchorRequest request,
CancellationToken cancellationToken)
{
await Task.CompletedTask;
var anchorTime = DateTimeOffset.UtcNow;
var anchorTime = _timeProvider.GetUtcNow();
// Create a local anchor with a signed timestamp
var anchorData = new LocalAnchorData
{
Timestamp = anchorTime,
Nonce = Guid.NewGuid().ToString("N"),
Nonce = _guidProvider.NewGuid().ToString("N"),
MerkleRoot = request.MerkleRoot
};
@@ -160,7 +173,7 @@ public sealed class TimeAnchorService : ITimeAnchorService
};
}
private static async Task<TimeAnchorResult> CreateRoughtimeAnchorAsync(
private async Task<TimeAnchorResult> CreateRoughtimeAnchorAsync(
TimeAnchorRequest request,
CancellationToken cancellationToken)
{
@@ -169,14 +182,14 @@ public sealed class TimeAnchorService : ITimeAnchorService
var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003";
// For now, fallback to local with indication of intended source
var anchorTime = DateTimeOffset.UtcNow;
var anchorTime = _timeProvider.GetUtcNow();
var anchorData = new RoughtimeAnchorData
{
Timestamp = anchorTime,
Server = serverUrl,
Midpoint = anchorTime.ToUnixTimeSeconds(),
Radius = 1000000, // 1 second radius in microseconds
Nonce = Guid.NewGuid().ToString("N"),
Nonce = _guidProvider.NewGuid().ToString("N"),
MerkleRoot = request.MerkleRoot
};
@@ -200,7 +213,7 @@ public sealed class TimeAnchorService : ITimeAnchorService
};
}
private static async Task<TimeAnchorResult> CreateRfc3161AnchorAsync(
private async Task<TimeAnchorResult> CreateRfc3161AnchorAsync(
TimeAnchorRequest request,
CancellationToken cancellationToken)
{
@@ -208,12 +221,12 @@ public sealed class TimeAnchorService : ITimeAnchorService
// This is a placeholder implementation - full implementation would use a TSA client
var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com";
var anchorTime = DateTimeOffset.UtcNow;
var anchorTime = _timeProvider.GetUtcNow();
var anchorData = new Rfc3161AnchorData
{
Timestamp = anchorTime,
TsaUrl = tsaUrl,
SerialNumber = Guid.NewGuid().ToString("N"),
SerialNumber = _guidProvider.NewGuid().ToString("N"),
PolicyOid = "2.16.840.1.114412.2.1", // DigiCert timestamp policy
MerkleRoot = request.MerkleRoot
};

View File

@@ -4,6 +4,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -2,11 +2,25 @@
using System.Text;
using StellaOps.AirGap.Bundle.Models;
using StellaOps.AirGap.Bundle.Serialization;
using StellaOps.AirGap.Bundle.Services;
namespace StellaOps.AirGap.Bundle.Validation;
public sealed class BundleValidator : IBundleValidator
{
private readonly TimeProvider _timeProvider;
private readonly BundleValidationOptions _options;
public BundleValidator() : this(TimeProvider.System, new BundleValidationOptions())
{
}
public BundleValidator(TimeProvider timeProvider, BundleValidationOptions options)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_options = options ?? throw new ArgumentNullException(nameof(options));
}
public async Task<BundleValidationResult> ValidateAsync(
BundleManifest manifest,
string bundlePath,
@@ -14,6 +28,7 @@ public sealed class BundleValidator : IBundleValidator
{
var errors = new List<BundleValidationError>();
var warnings = new List<BundleValidationWarning>();
var now = _timeProvider.GetUtcNow();
if (manifest.Feeds.Length == 0)
{
@@ -25,9 +40,18 @@ public sealed class BundleValidator : IBundleValidator
errors.Add(new BundleValidationError("CryptoMaterials", "Trust roots required"));
}
// Validate feed digests and paths
foreach (var feed in manifest.Feeds)
{
var filePath = Path.Combine(bundlePath, feed.RelativePath);
// Validate path safety
if (!PathValidation.IsSafeRelativePath(feed.RelativePath))
{
errors.Add(new BundleValidationError("Feeds",
$"Feed {feed.FeedId} has unsafe relative path: {feed.RelativePath}"));
continue;
}
var filePath = PathValidation.SafeCombine(bundlePath, feed.RelativePath);
var result = await VerifyFileDigestAsync(filePath, feed.Digest, ct).ConfigureAwait(false);
if (!result.IsValid)
{
@@ -36,21 +60,75 @@ public sealed class BundleValidator : IBundleValidator
}
}
if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < DateTimeOffset.UtcNow)
// Validate policy digests if enabled
if (_options.ValidatePolicies)
{
foreach (var policy in manifest.Policies)
{
if (!PathValidation.IsSafeRelativePath(policy.RelativePath))
{
errors.Add(new BundleValidationError("Policies",
$"Policy {policy.PolicyId} has unsafe relative path: {policy.RelativePath}"));
continue;
}
var filePath = PathValidation.SafeCombine(bundlePath, policy.RelativePath);
var result = await VerifyFileDigestAsync(filePath, policy.Digest, ct).ConfigureAwait(false);
if (!result.IsValid)
{
errors.Add(new BundleValidationError("Policies",
$"Policy {policy.PolicyId} digest mismatch: expected {policy.Digest}, got {result.ActualDigest}"));
}
}
}
// Validate crypto material digests if enabled
if (_options.ValidateCryptoMaterials)
{
foreach (var crypto in manifest.CryptoMaterials)
{
if (!PathValidation.IsSafeRelativePath(crypto.RelativePath))
{
errors.Add(new BundleValidationError("CryptoMaterials",
$"Crypto material {crypto.ComponentId} has unsafe relative path: {crypto.RelativePath}"));
continue;
}
var filePath = PathValidation.SafeCombine(bundlePath, crypto.RelativePath);
var result = await VerifyFileDigestAsync(filePath, crypto.Digest, ct).ConfigureAwait(false);
if (!result.IsValid)
{
errors.Add(new BundleValidationError("CryptoMaterials",
$"Crypto material {crypto.ComponentId} digest mismatch: expected {crypto.Digest}, got {result.ActualDigest}"));
}
}
}
// Check bundle expiration
if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < now)
{
warnings.Add(new BundleValidationWarning("ExpiresAt", "Bundle has expired"));
}
// Check feed staleness using configurable threshold
foreach (var feed in manifest.Feeds)
{
var age = DateTimeOffset.UtcNow - feed.SnapshotAt;
if (age.TotalDays > 7)
var age = now - feed.SnapshotAt;
if (age.TotalDays > _options.MaxFeedAgeDays)
{
warnings.Add(new BundleValidationWarning("Feeds",
$"Feed {feed.FeedId} is {age.TotalDays:F0} days old"));
var message = $"Feed {feed.FeedId} is {age.TotalDays:F0} days old (threshold: {_options.MaxFeedAgeDays} days)";
if (_options.FailOnStaleFeed)
{
errors.Add(new BundleValidationError("Feeds", message));
}
else
{
warnings.Add(new BundleValidationWarning("Feeds", message));
}
}
}
// Verify bundle digest if present
if (manifest.BundleDigest is not null)
{
var computed = ComputeBundleDigest(manifest);
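A minimal sketch of adjusting validation behaviour through the new options (values are illustrative):
using StellaOps.AirGap.Bundle.Services;
using StellaOps.AirGap.Bundle.Validation;
var validator = new BundleValidator(TimeProvider.System, new BundleValidationOptions
{
    MaxFeedAgeDays = 30,    // widen the default 7-day staleness window
    FailOnStaleFeed = true  // report stale feeds as errors instead of warnings
});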

View File

@@ -2,6 +2,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<RootNamespace>StellaOps.AirGap.Persistence</RootNamespace>

View File

@@ -4,6 +4,7 @@
<TargetFramework>netstandard2.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<LangVersion>preview</LangVersion>
<IncludeBuildOutput>false</IncludeBuildOutput>
<AnalysisLevel>latest</AnalysisLevel>

View File

@@ -3,6 +3,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Attestor.Bundle</RootNamespace>
<Description>Sigstore Bundle v0.3 implementation for DSSE envelope packaging and offline verification.</Description>
</PropertyGroup>

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Attestor.TrustVerdict</RootNamespace>
<LangVersion>preview</LangVersion>
<Description>TrustVerdict attestation library for signed VEX trust evaluations</Description>

View File

@@ -5,7 +5,7 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -5,7 +5,7 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -6,7 +6,7 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -6,7 +6,7 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -1,25 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Bun/StellaOps.Scanner.Analyzers.Lang.Bun.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Bench.ScannerAnalyzers.Tests" />
</ItemGroup>
</Project>

View File

@@ -1,7 +1,7 @@
// -----------------------------------------------------------------------------
// ApkBuildSecfixesExtractor.cs
// Sprint: SPRINT_20251226_012_BINIDX_backport_handling
// Task: BACKPORT-17 Implement APKBUILD secfixes extraction
// Task: BACKPORT-17 - Implement APKBUILD secfixes extraction
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;

View File

@@ -6,6 +6,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -5,7 +5,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.WebService</RootNamespace>
</PropertyGroup>
<ItemGroup>

View File

@@ -4,6 +4,7 @@
<TargetFramework>netstandard2.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<LangVersion>preview</LangVersion>
<IncludeBuildOutput>false</IncludeBuildOutput>
<AnalysisLevel>latest</AnalysisLevel>

View File

@@ -3,6 +3,7 @@
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<RootNamespace>StellaOps.Concelier.Connector.Astra</RootNamespace>

View File

@@ -5,7 +5,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.BackportProof</RootNamespace>
</PropertyGroup>

View File

@@ -6,7 +6,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.Cache.Valkey</RootNamespace>
<AssemblyName>StellaOps.Concelier.Cache.Valkey</AssemblyName>
<Description>Valkey/Redis caching for Concelier canonical advisories</Description>

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using StellaOps.Concelier.Documents;
@@ -32,7 +33,11 @@ internal sealed record UbuntuCursor(
lastPublished = value.DocumentType switch
{
DocumentType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
DocumentType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
DocumentType.String when DateTimeOffset.TryParse(
value.AsString,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
out var parsed) => parsed.ToUniversalTime(),
_ => null
};
}
@@ -47,10 +52,14 @@ internal sealed record UbuntuCursor(
public DocumentObject ToDocumentObject()
{
// Sort collections for deterministic serialization
var sortedPendingDocs = PendingDocuments.OrderBy(id => id).Select(id => id.ToString());
var sortedPendingMaps = PendingMappings.OrderBy(id => id).Select(id => id.ToString());
var doc = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()))
["pendingDocuments"] = new DocumentArray(sortedPendingDocs),
["pendingMappings"] = new DocumentArray(sortedPendingMaps)
};
if (LastPublished.HasValue)
@@ -60,13 +69,16 @@ internal sealed record UbuntuCursor(
if (ProcessedNoticeIds.Count > 0)
{
doc["processedIds"] = new DocumentArray(ProcessedNoticeIds);
// Sort processed IDs for deterministic output
var sortedProcessedIds = ProcessedNoticeIds.OrderBy(id => id, StringComparer.Ordinal);
doc["processedIds"] = new DocumentArray(sortedProcessedIds);
}
if (FetchCache.Count > 0)
{
var cacheDoc = new DocumentObject();
foreach (var (key, entry) in FetchCache)
// Sort fetch cache keys for deterministic output
foreach (var (key, entry) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
cacheDoc[key] = entry.ToDocumentObject();
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Globalization;
using StellaOps.Concelier.Documents;
using StorageContracts = StellaOps.Concelier.Storage.Contracts;
@@ -31,7 +32,11 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
lastModified = modifiedValue.DocumentType switch
{
DocumentType.DateTime => DateTime.SpecifyKind(modifiedValue.ToUniversalTime(), DateTimeKind.Utc),
DocumentType.String when DateTimeOffset.TryParse(modifiedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
DocumentType.String when DateTimeOffset.TryParse(
modifiedValue.AsString,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
out var parsed) => parsed.ToUniversalTime(),
_ => null
};
}

View File

@@ -32,7 +32,8 @@ internal static class UbuntuNoticeParser
continue;
}
var published = ParseDate(noticeElement, "published") ?? DateTimeOffset.UtcNow;
// Use MinValue instead of UtcNow for deterministic fallback on invalid/missing dates
var published = ParseDate(noticeElement, "published") ?? DateTimeOffset.MinValue;
var title = noticeElement.TryGetProperty("title", out var titleElement)
? titleElement.GetString() ?? noticeId
: noticeId;
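
Swapping DateTimeOffset.UtcNow for DateTimeOffset.MinValue in the fallback keeps the parser output a pure function of its input: re-parsing the same notice always yields the same value instead of capturing the wall clock. A hedged sketch of the pattern (JsonElement-based, assuming the property is a string when present; the notice payload is an example value):

using System;
using System.Globalization;
using System.Text.Json;

static DateTimeOffset ReadPublished(JsonElement notice) =>
    notice.TryGetProperty("published", out var value)
        && value.ValueKind == JsonValueKind.String
        && DateTimeOffset.TryParse(value.GetString(), CultureInfo.InvariantCulture,
               DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)
        ? parsed
        : DateTimeOffset.MinValue; // deterministic fallback for missing or unparsable dates

using var doc = JsonDocument.Parse("{\"id\":\"USN-0001-1\"}");
Console.WriteLine(ReadPublished(doc.RootElement) == DateTimeOffset.MinValue); // True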

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -150,7 +150,8 @@ public sealed class UbuntuConnector : IFeedConnector
var dtoDocument = ToDocument(notice);
var sha256 = ComputeNoticeHash(dtoDocument);
var documentId = existing?.Id ?? Guid.NewGuid();
// Use existing ID or derive deterministic ID from source + uri hash
var documentId = existing?.Id ?? ComputeDeterministicId(SourceName, detailUri.AbsoluteUri);
var record = new DocumentRecord(
documentId,
SourceName,
@@ -167,7 +168,9 @@ public sealed class UbuntuConnector : IFeedConnector
await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
var dtoRecord = new DtoRecord(Guid.NewGuid(), record.Id, SourceName, "ubuntu.notice.v1", dtoDocument, now);
// Derive deterministic DTO ID from document ID + schema
var dtoId = ComputeDeterministicId(record.Id.ToString(), "ubuntu.notice.v1");
var dtoRecord = new DtoRecord(dtoId, record.Id, SourceName, "ubuntu.notice.v1", dtoDocument, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
pendingMappings.Add(record.Id);
@@ -435,6 +438,15 @@ public sealed class UbuntuConnector : IFeedConnector
return Convert.ToHexString(hash).ToLowerInvariant();
}
private Guid ComputeDeterministicId(string source, string identifier)
{
// Deterministic GUID based on SHA-256 hash of source + identifier
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
// Use first 16 bytes of hash as GUID
return new Guid(hash.AsSpan(0, 16));
}
private static DocumentObject ToDocument(UbuntuNoticeDto notice)
{
var packages = new DocumentArray();
@@ -486,14 +498,19 @@ public sealed class UbuntuConnector : IFeedConnector
private static UbuntuNoticeDto FromDocument(DocumentObject document)
{
var noticeId = document.GetValue("noticeId", string.Empty).AsString;
// Use MinValue instead of UtcNow for deterministic fallback on invalid/missing dates
var published = document.TryGetValue("published", out var publishedValue)
? publishedValue.DocumentType switch
{
DocumentType.DateTime => DateTime.SpecifyKind(publishedValue.ToUniversalTime(), DateTimeKind.Utc),
DocumentType.String when DateTimeOffset.TryParse(publishedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => DateTimeOffset.UtcNow
DocumentType.String when DateTimeOffset.TryParse(
publishedValue.AsString,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
out var parsed) => parsed.ToUniversalTime(),
_ => DateTimeOffset.MinValue
}
: DateTimeOffset.UtcNow;
: DateTimeOffset.MinValue;
var title = document.GetValue("title", noticeId).AsString;
var summary = document.GetValue("summary", string.Empty).AsString;
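
The ComputeDeterministicId helpers introduced above all follow the same recipe: hash "source:identifier" with SHA-256 and take the first 16 bytes as the GUID, so document and DTO identifiers are reproducible across runs. A minimal sketch of that recipe, with the BCL SHA256 type standing in for the project's ICryptoHash abstraction (whose exact signature is not shown here); the URL is an example value:

using System;
using System.Security.Cryptography;
using System.Text;

static Guid DeterministicId(string source, string identifier)
{
    byte[] hash = SHA256.HashData(Encoding.UTF8.GetBytes($"{source}:{identifier}"));
    return new Guid(hash.AsSpan(0, 16)); // first 16 bytes of the 32-byte digest
}

var first = DeterministicId("ubuntu", "https://ubuntu.com/security/notices/USN-0001-1");
var second = DeterministicId("ubuntu", "https://ubuntu.com/security/notices/USN-0001-1");
Console.WriteLine(first == second); // True: same inputs, same GUID, no Guid.NewGuid() involved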

View File

@@ -223,7 +223,8 @@ public sealed class EpssConnector : IFeedConnector
continue;
}
var publishedDate = session.PublishedDate ?? TryParseDateFromMetadata(document.Metadata) ?? DateOnly.FromDateTime(document.CreatedAt.UtcDateTime);
// Use MinValue as deterministic fallback when published date cannot be determined
var publishedDate = session.PublishedDate ?? TryParseDateFromMetadata(document.Metadata) ?? DateOnly.MinValue;
var modelVersion = string.IsNullOrWhiteSpace(session.ModelVersionTag) ? "unknown" : session.ModelVersionTag!;
var contentHash = session.DecompressedSha256 ?? string.Empty;
@@ -235,8 +236,10 @@ public sealed class EpssConnector : IFeedConnector
["contentHash"] = contentHash
};
// Derive deterministic DTO ID from document ID + schema
var dtoId = ComputeDeterministicId(document.Id.ToString(), DtoSchemaVersion);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
dtoId,
document.Id,
SourceName,
DtoSchemaVersion,
@@ -467,7 +470,8 @@ public sealed class EpssConnector : IFeedConnector
}
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, fetchResult.SourceUri, cancellationToken).ConfigureAwait(false);
var recordId = existing?.Id ?? Guid.NewGuid();
// Use existing ID or derive deterministic ID from source + uri
var recordId = existing?.Id ?? ComputeDeterministicId(SourceName, fetchResult.SourceUri);
await _rawDocumentStorage.UploadAsync(
SourceName,
@@ -760,6 +764,15 @@ public sealed class EpssConnector : IFeedConnector
return _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken);
}
private Guid ComputeDeterministicId(string source, string identifier)
{
// Deterministic GUID based on SHA-256 hash of source + identifier
var input = System.Text.Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
// Use first 16 bytes of hash as GUID
return new Guid(hash.AsSpan(0, 16));
}
private sealed record EpssFetchResult(
DateOnly SnapshotDate,
string SourceUri,

View File

@@ -27,10 +27,14 @@ internal sealed record EpssCursor(
public DocumentObject ToDocumentObject()
{
// Sort collections for deterministic serialization
var sortedPendingDocs = PendingDocuments.OrderBy(id => id).Select(id => id.ToString());
var sortedPendingMaps = PendingMappings.OrderBy(id => id).Select(id => id.ToString());
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()))
["pendingDocuments"] = new DocumentArray(sortedPendingDocs),
["pendingMappings"] = new DocumentArray(sortedPendingMaps)
};
if (!string.IsNullOrWhiteSpace(ModelVersion))

View File

@@ -5,6 +5,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -2,6 +2,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -14,6 +15,7 @@ using StellaOps.Concelier.Connector.Ghsa.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Ghsa;
@@ -36,6 +38,7 @@ public sealed class GhsaConnector : IFeedConnector
private readonly GhsaDiagnostics _diagnostics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<GhsaConnector> _logger;
private readonly ICryptoHash _hash;
private readonly ICanonicalAdvisoryService? _canonicalService;
private readonly object _rateLimitWarningLock = new();
private readonly Dictionary<(string Phase, string Resource), bool> _rateLimitWarnings = new();
@@ -51,6 +54,7 @@ public sealed class GhsaConnector : IFeedConnector
GhsaDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<GhsaConnector> logger,
ICryptoHash cryptoHash,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
@@ -64,6 +68,7 @@ public sealed class GhsaConnector : IFeedConnector
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_canonicalService = canonicalService; // Optional - canonical ingest
}
@@ -322,8 +327,9 @@ public sealed class GhsaConnector : IFeedConnector
}
var payload = DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoId = ComputeDeterministicId(document.Id.ToString(), "ghsa/1.0");
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
dtoId,
document.Id,
SourceName,
"ghsa/1.0",
@@ -640,4 +646,15 @@ public sealed class GhsaConnector : IFeedConnector
}
}
}
/// <summary>
/// Computes a deterministic GUID from source and identifier using SHA-256 hash.
/// </summary>
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
// Use first 16 bytes of hash as GUID
return new Guid(hash.AsSpan(0, 16));
}
}

View File

@@ -28,8 +28,8 @@ internal sealed record GhsaCursor(
var document = new DocumentObject
{
["nextPage"] = NextPage,
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastUpdatedExclusive.HasValue)

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -28,6 +28,7 @@ using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Normalization.SemVer;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Ics.Cisa;
@@ -52,6 +53,7 @@ public sealed class IcsCisaConnector : IFeedConnector
private readonly IcsCisaDiagnostics _diagnostics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<IcsCisaConnector> _logger;
private readonly ICryptoHash _hash;
private readonly HtmlContentSanitizer _htmlSanitizer = new();
private readonly HtmlParser _htmlParser = new();
@@ -66,7 +68,8 @@ public sealed class IcsCisaConnector : IFeedConnector
IcsCisaFeedParser parser,
IcsCisaDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<IcsCisaConnector> logger)
ILogger<IcsCisaConnector> logger,
ICryptoHash cryptoHash)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -80,6 +83,7 @@ public sealed class IcsCisaConnector : IFeedConnector
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
public string SourceName => IcsCisaConnectorPlugin.SourceName;
@@ -323,8 +327,9 @@ public sealed class IcsCisaConnector : IFeedConnector
WriteIndented = false,
});
var doc = DocumentObject.Parse(json);
var dtoId = ComputeDeterministicId(document.Id.ToString(), SchemaVersion);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
dtoId,
document.Id,
SourceName,
SchemaVersion,
@@ -1416,4 +1421,15 @@ public sealed class IcsCisaConnector : IFeedConnector
private Task UpdateCursorAsync(IcsCisaCursor cursor, CancellationToken cancellationToken)
=> _stateRepository.UpdateCursorAsync(SourceName, cursor.ToDocumentObject(), _timeProvider.GetUtcNow(), cancellationToken);
/// <summary>
/// Computes a deterministic GUID from source and identifier using SHA-256 hash.
/// </summary>
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
// Use first 16 bytes of hash as GUID
return new Guid(hash.AsSpan(0, 16));
}
}

View File

@@ -16,8 +16,8 @@ internal sealed record IcsCisaCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(static id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(static id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(static id => id).Select(static id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(static id => id).Select(static id => id.ToString())),
};
if (LastPublished.HasValue)

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -21,8 +21,8 @@ internal sealed record KasperskyCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastPublished.HasValue)
@@ -33,7 +33,7 @@ internal sealed record KasperskyCursor(
if (FetchCache.Count > 0)
{
var cacheArray = new DocumentArray();
foreach (var (uri, metadata) in FetchCache)
foreach (var (uri, metadata) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
var cacheDocument = new DocumentObject
{

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -16,6 +17,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Ics.Kaspersky;
@@ -39,6 +41,7 @@ public sealed class KasperskyConnector : IFeedConnector
private readonly KasperskyOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<KasperskyConnector> _logger;
private readonly ICryptoHash _hash;
public KasperskyConnector(
KasperskyFeedClient feedClient,
@@ -50,7 +53,8 @@ public sealed class KasperskyConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<KasperskyOptions> options,
TimeProvider? timeProvider,
ILogger<KasperskyConnector> logger)
ILogger<KasperskyConnector> logger,
ICryptoHash cryptoHash)
{
_feedClient = feedClient ?? throw new ArgumentNullException(nameof(feedClient));
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
@@ -63,6 +67,7 @@ public sealed class KasperskyConnector : IFeedConnector
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
public string SourceName => KasperskyConnectorPlugin.SourceName;
@@ -255,7 +260,7 @@ public sealed class KasperskyConnector : IFeedConnector
: document.FetchedAt;
var summary = metadata.TryGetValue("kaspersky.summary", out var summaryValue) ? summaryValue : null;
var slug = metadata.TryGetValue("kaspersky.slug", out var slugValue) ? slugValue : ExtractSlug(new Uri(link, UriKind.Absolute));
var advisoryKey = string.IsNullOrWhiteSpace(slug) ? Guid.NewGuid().ToString("N") : slug;
var advisoryKey = string.IsNullOrWhiteSpace(slug) ? ComputeDeterministicId(document.Id.ToString(), "kaspersky.advisory").ToString("N") : slug;
byte[] rawBytes;
try
@@ -270,7 +275,8 @@ public sealed class KasperskyConnector : IFeedConnector
var dto = KasperskyAdvisoryParser.Parse(advisoryKey, title, link, published, summary, rawBytes);
var payload = DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ics.kaspersky/1", payload, _timeProvider.GetUtcNow());
var dtoId = ComputeDeterministicId(document.Id.ToString(), "ics.kaspersky/1");
var dtoRecord = new DtoRecord(dtoId, document.Id, SourceName, "ics.kaspersky/1", payload, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -461,4 +467,15 @@ public sealed class KasperskyConnector : IFeedConnector
var last = segments[^1].Trim('/');
return string.IsNullOrWhiteSpace(last) && segments.Length > 1 ? segments[^2].Trim('/') : last;
}
/// <summary>
/// Computes a deterministic GUID from source and identifier using SHA-256 hash.
/// </summary>
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
// Use first 16 bytes of hash as GUID
return new Guid(hash.AsSpan(0, 16));
}
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -31,8 +31,8 @@ internal sealed record JvnCursor(
document["lastCompletedWindowEnd"] = LastCompletedWindowEnd.Value.UtcDateTime;
}
document["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(static id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.Select(static id => id.ToString()));
document["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(static id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(static id => id.ToString()));
return document;
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -14,6 +15,7 @@ using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.JpFlags;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Jvn;
@@ -35,6 +37,7 @@ public sealed class JvnConnector : IFeedConnector
private readonly IAdvisoryStore _advisoryStore;
private readonly IJpFlagStore _jpFlagStore;
private readonly ISourceStateRepository _stateRepository;
private readonly ICryptoHash _hash;
private readonly TimeProvider _timeProvider;
private readonly JvnOptions _options;
private readonly ILogger<JvnConnector> _logger;
@@ -48,6 +51,7 @@ public sealed class JvnConnector : IFeedConnector
IAdvisoryStore advisoryStore,
IJpFlagStore jpFlagStore,
ISourceStateRepository stateRepository,
ICryptoHash cryptoHash,
IOptions<JvnOptions> options,
TimeProvider? timeProvider,
ILogger<JvnConnector> logger)
@@ -60,6 +64,7 @@ public sealed class JvnConnector : IFeedConnector
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_jpFlagStore = jpFlagStore ?? throw new ArgumentNullException(nameof(jpFlagStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -225,7 +230,7 @@ public sealed class JvnConnector : IFeedConnector
var sanitizedJson = JsonSerializer.Serialize(detail, SerializerOptions);
var payload = DocumentObject.Parse(sanitizedJson);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
ComputeDeterministicId(document.Id.ToString(), "jvn/1.0"),
document.Id,
SourceName,
JvnConstants.DtoSchemaVersion,
@@ -322,4 +327,11 @@ public sealed class JvnConnector : IFeedConnector
var cursorDocument = cursor.ToDocumentObject();
await _stateRepository.UpdateCursorAsync(SourceName, cursorDocument, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Include="Schemas\*.xsd" />

View File

@@ -17,8 +17,8 @@ internal sealed record KevCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(static id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(static id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(static id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(static id => id.ToString())),
};
if (!string.IsNullOrEmpty(CatalogVersion))

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
@@ -18,6 +19,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Kev;
@@ -40,6 +42,7 @@ public sealed class KevConnector : IFeedConnector
private readonly ISourceStateRepository _stateRepository;
private readonly KevOptions _options;
private readonly IJsonSchemaValidator _schemaValidator;
private readonly ICryptoHash _hash;
private readonly TimeProvider _timeProvider;
private readonly ILogger<KevConnector> _logger;
private readonly KevDiagnostics _diagnostics;
@@ -53,6 +56,7 @@ public sealed class KevConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<KevOptions> options,
IJsonSchemaValidator schemaValidator,
ICryptoHash cryptoHash,
KevDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<KevConnector> logger)
@@ -66,6 +70,7 @@ public sealed class KevConnector : IFeedConnector
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_schemaValidator = schemaValidator ?? throw new ArgumentNullException(nameof(schemaValidator));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -273,7 +278,7 @@ public sealed class KevConnector : IFeedConnector
_diagnostics.CatalogParsed(catalog.CatalogVersion, entryCount);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
ComputeDeterministicId(document.Id.ToString(), "kev/1.0"),
document.Id,
SourceName,
SchemaVersion,
@@ -438,4 +443,11 @@ public sealed class KevConnector : IFeedConnector
private static Uri? TryParseUri(string? value)
=> Uri.TryCreate(value, UriKind.Absolute, out var uri) ? uri : null;
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -36,9 +36,9 @@ internal sealed record KisaCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["knownIds"] = new DocumentArray(KnownIds),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
["knownIds"] = new DocumentArray(KnownIds.OrderBy(id => id, StringComparer.Ordinal)),
};
if (LastPublished.HasValue)

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -15,6 +16,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Kisa;
@@ -37,6 +39,7 @@ public sealed class KisaConnector : IFeedConnector
private readonly ISourceStateRepository _stateRepository;
private readonly KisaOptions _options;
private readonly KisaDiagnostics _diagnostics;
private readonly ICryptoHash _hash;
private readonly TimeProvider _timeProvider;
private readonly ILogger<KisaConnector> _logger;
@@ -51,6 +54,7 @@ public sealed class KisaConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<KisaOptions> options,
KisaDiagnostics diagnostics,
ICryptoHash cryptoHash,
TimeProvider? timeProvider,
ILogger<KisaConnector> logger)
{
@@ -65,6 +69,7 @@ public sealed class KisaConnector : IFeedConnector
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -288,7 +293,7 @@ public sealed class KisaConnector : IFeedConnector
_logger.LogDebug("KISA parsed detail for {DocumentId} ({Category})", document.Id, category ?? "unknown");
var dtoDoc = DocumentObject.Parse(JsonSerializer.Serialize(parsed, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "kisa.detail.v1", dtoDoc, now);
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "kisa/1.0"), document.Id, SourceName, "kisa.detail.v1", dtoDoc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -421,4 +426,11 @@ public sealed class KisaConnector : IFeedConnector
var completedAt = cursor.LastFetchAt ?? _timeProvider.GetUtcNow();
return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken);
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -15,8 +15,8 @@ internal sealed record NvdCursor(
{
var document = new DocumentObject();
Window.WriteTo(document);
document["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()));
document["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString()));
return document;
}

View File

@@ -213,7 +213,7 @@ public sealed class NvdConnector : IFeedConnector
var payload = DocumentObject.Parse(sanitized);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
ComputeDeterministicId(document.Id.ToString(), "nvd/1.0"),
document.Id,
SourceName,
"nvd.cve.v2",
@@ -473,7 +473,7 @@ public sealed class NvdConnector : IFeedConnector
: document.Sha256;
var record = new ChangeHistoryRecord(
Guid.NewGuid(),
ComputeDeterministicId($"{current.AdvisoryKey}:{document.Id}", "nvd-change/1.0"),
SourceName,
current.AdvisoryKey,
document.Id,
@@ -544,6 +544,13 @@ public sealed class NvdConnector : IFeedConnector
return $"sha256:{hex}";
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
private async Task<NvdCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Include="Schemas\nvd-vulnerability.schema.json" />

View File

@@ -27,14 +27,14 @@ internal sealed record OsvCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastModifiedByEcosystem.Count > 0)
{
var lastModifiedDoc = new DocumentObject();
foreach (var (ecosystem, timestamp) in LastModifiedByEcosystem)
foreach (var (ecosystem, timestamp) in LastModifiedByEcosystem.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
lastModifiedDoc[ecosystem] = timestamp.HasValue ? DocumentValue.Create(timestamp.Value.UtcDateTime) : DocumentNull.Value;
}
@@ -45,9 +45,9 @@ internal sealed record OsvCursor(
if (ProcessedIdsByEcosystem.Count > 0)
{
var processedDoc = new DocumentObject();
foreach (var (ecosystem, ids) in ProcessedIdsByEcosystem)
foreach (var (ecosystem, ids) in ProcessedIdsByEcosystem.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
processedDoc[ecosystem] = new DocumentArray(ids.Select(id => id));
processedDoc[ecosystem] = new DocumentArray(ids.OrderBy(id => id, StringComparer.Ordinal).Select(id => id));
}
document["processed"] = processedDoc;
@@ -56,7 +56,7 @@ internal sealed record OsvCursor(
if (ArchiveMetadataByEcosystem.Count > 0)
{
var metadataDoc = new DocumentObject();
foreach (var (ecosystem, metadata) in ArchiveMetadataByEcosystem)
foreach (var (ecosystem, metadata) in ArchiveMetadataByEcosystem.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
var element = new DocumentObject();
if (!string.IsNullOrWhiteSpace(metadata.ETag))

View File

@@ -192,7 +192,7 @@ public sealed class OsvConnector : IFeedConnector
var sanitized = JsonSerializer.Serialize(dto, SerializerOptions);
var payload = StellaOps.Concelier.Documents.DocumentObject.Parse(sanitized);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
ComputeDeterministicId(document.Id.ToString(), "osv/1.0"),
document.Id,
SourceName,
"osv.v1",
@@ -434,7 +434,7 @@ public sealed class OsvConnector : IFeedConnector
continue;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var recordId = existing?.Id ?? ComputeDeterministicId(documentUri, "osv-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, bytes, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
@@ -613,4 +613,11 @@ public sealed class OsvConnector : IFeedConnector
}
}
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = System.Text.Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />

View File

@@ -24,8 +24,8 @@ internal sealed record RuBduCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastSuccessfulFetch.HasValue)

View File

@@ -269,7 +269,7 @@ public sealed class RuBduConnector : IFeedConnector
}
var doc = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-bdu.v1", doc, _timeProvider.GetUtcNow());
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "ru-bdu/1.0"), document.Id, SourceName, "ru-bdu.v1", doc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
_diagnostics.ParseSuccess(
@@ -411,7 +411,7 @@ public sealed class RuBduConnector : IFeedConnector
continue;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var recordId = existing?.Id ?? ComputeDeterministicId(documentUri, "ru-bdu-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
@@ -530,4 +530,11 @@ public sealed class RuBduConnector : IFeedConnector
var completedAt = cursor.LastSuccessfulFetch ?? _timeProvider.GetUtcNow();
return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken);
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = System.Text.Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -29,9 +29,9 @@ internal sealed record RuNkckiCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["knownBulletins"] = new DocumentArray(KnownBulletins),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
["knownBulletins"] = new DocumentArray(KnownBulletins.OrderBy(id => id, StringComparer.Ordinal)),
};
if (LastListingFetchAt.HasValue)

View File

@@ -339,7 +339,7 @@ public sealed class RuNkckiConnector : IFeedConnector
}
var doc = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-nkcki.v1", doc, _timeProvider.GetUtcNow());
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "ru-nkcki/1.0"), document.Id, SourceName, "ru-nkcki.v1", doc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -609,7 +609,7 @@ public sealed class RuNkckiConnector : IFeedConnector
return false;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var recordId = existing?.Id ?? ComputeDeterministicId(documentUri, "ru-nkcki-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
@@ -725,12 +725,12 @@ public sealed class RuNkckiConnector : IFeedConnector
return new ListingPageResult(attachments, uniquePagination);
}
private static string DeriveBulletinId(Uri uri)
private string DeriveBulletinId(Uri uri)
{
var fileName = Path.GetFileName(uri.AbsolutePath);
if (string.IsNullOrWhiteSpace(fileName))
{
return Guid.NewGuid().ToString("N");
return ComputeDeterministicSlug(uri.AbsoluteUri, "bulletin-id");
}
if (fileName.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
@@ -746,7 +746,7 @@ public sealed class RuNkckiConnector : IFeedConnector
return fileName.Replace('_', '-');
}
private static string BuildDocumentUri(RuNkckiVulnerabilityDto dto)
private string BuildDocumentUri(RuNkckiVulnerabilityDto dto)
{
if (!string.IsNullOrWhiteSpace(dto.FstecId))
{
@@ -761,7 +761,10 @@ public sealed class RuNkckiConnector : IFeedConnector
return $"https://nvd.nist.gov/vuln/detail/{dto.MitreId}";
}
return $"https://cert.gov.ru/materialy/uyazvimosti/{Guid.NewGuid():N}";
// Fallback: deterministic slug based on dto content
var dtoJson = JsonSerializer.Serialize(dto, SerializerOptions);
var slug2 = ComputeDeterministicSlug(dtoJson, "nkcki-doc");
return $"https://cert.gov.ru/materialy/uyazvimosti/{slug2}";
}
private string ResolveCacheDirectory(string? configuredPath)
@@ -791,7 +794,7 @@ public sealed class RuNkckiConnector : IFeedConnector
private string GetBulletinCachePath(string bulletinId)
{
var fileStem = string.IsNullOrWhiteSpace(bulletinId)
? Guid.NewGuid().ToString("N")
? ComputeDeterministicSlug("unknown-bulletin", _timeProvider.GetUtcNow().ToString("O"))
: Uri.EscapeDataString(bulletinId);
return Path.Combine(_cacheDirectory, $"{fileStem}.json.zip");
}
@@ -947,4 +950,17 @@ public sealed class RuNkckiConnector : IFeedConnector
return new ListingFetchSummary(attachments, visited);
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
private string ComputeDeterministicSlug(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
return _hash.ComputeHashHex(input, HashAlgorithms.Sha256)[..32];
}
}
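
ComputeDeterministicSlug is the string-valued cousin of the GUID helper: the same SHA-256 digest, hex-encoded and truncated, gives a stable filename or URL segment where a GUID would be awkward. A short sketch under the same assumption (BCL SHA256 standing in for ICryptoHash; the URI is an example value):

using System;
using System.Security.Cryptography;
using System.Text;

static string DeterministicSlug(string source, string identifier)
{
    byte[] hash = SHA256.HashData(Encoding.UTF8.GetBytes($"{source}:{identifier}"));
    return Convert.ToHexString(hash).ToLowerInvariant()[..32]; // 32 hex chars = first 16 bytes
}

Console.WriteLine(DeterministicSlug("https://cert.gov.ru/some/listing/page", "bulletin-id")); // stable across runs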

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -25,8 +25,8 @@ internal sealed record StellaOpsMirrorCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (!string.IsNullOrWhiteSpace(ExportId))

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -226,7 +226,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
return existing;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var recordId = existing?.Id ?? ComputeDeterministicId(absolute, "mirror-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, ExpiresAt: null, cancellationToken, recordId).ConfigureAwait(false);
var now = _timeProvider.GetUtcNow();
var sha = ComputeSha256(payload);
@@ -423,7 +423,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
}
var dtoDoc = DocumentObject.Parse(json);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoDoc, now);
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "mirror/1.0"), document.Id, Source, BundleDtoSchemaVersion, dtoDoc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -566,6 +566,13 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
pendingMappings.Count);
}
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}
file static class UriExtensions

View File

@@ -21,6 +21,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.PsirtFlags;
using StellaOps.Concelier.Models;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Vndr.Adobe;
@@ -39,6 +40,7 @@ public sealed class AdobeConnector : IFeedConnector
private readonly TimeProvider _timeProvider;
private readonly IHttpClientFactory _httpClientFactory;
private readonly AdobeDiagnostics _diagnostics;
private readonly ICryptoHash _hash;
private readonly ILogger<AdobeConnector> _logger;
private static readonly JsonSchema Schema = AdobeSchemaProvider.Schema;
@@ -61,6 +63,7 @@ public sealed class AdobeConnector : IFeedConnector
TimeProvider? timeProvider,
IHttpClientFactory httpClientFactory,
AdobeDiagnostics diagnostics,
ICryptoHash cryptoHash,
ILogger<AdobeConnector> logger)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
@@ -76,6 +79,7 @@ public sealed class AdobeConnector : IFeedConnector
_timeProvider = timeProvider ?? TimeProvider.System;
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -497,7 +501,7 @@ public sealed class AdobeConnector : IFeedConnector
var payload = StellaOps.Concelier.Documents.DocumentObject.Parse(json);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
ComputeDeterministicId(document.Id.ToString(), "adobe/1.0"),
document.Id,
SourceName,
"adobe.bulletin.v1",
@@ -754,4 +758,11 @@ public sealed class AdobeConnector : IFeedConnector
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules.ToArray();
}
private Guid ComputeDeterministicId(string source, string identifier)
{
var input = Encoding.UTF8.GetBytes($"{source}:{identifier}");
var hash = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hash.AsSpan()[..16]);
}
}

View File

@@ -21,13 +21,13 @@ internal sealed record AdobeCursor(
document["lastPublished"] = LastPublished.Value.UtcDateTime;
}
document["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()));
document["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString()));
if (FetchCache is { Count: > 0 })
{
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
foreach (var (key, entry) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
cacheDocument[key] = entry.ToDocument();
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
@@ -17,7 +18,7 @@
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
</ItemGroup>

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
@@ -17,6 +18,7 @@ using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.PsirtFlags;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Vndr.Apple;
@@ -36,6 +38,7 @@ public sealed class AppleConnector : IFeedConnector
private readonly IAdvisoryStore _advisoryStore;
private readonly IPsirtFlagStore _psirtFlagStore;
private readonly ISourceStateRepository _stateRepository;
private readonly ICryptoHash _hash;
private readonly AppleOptions _options;
private readonly AppleDiagnostics _diagnostics;
private readonly TimeProvider _timeProvider;
@@ -49,6 +52,7 @@ public sealed class AppleConnector : IFeedConnector
IAdvisoryStore advisoryStore,
IPsirtFlagStore psirtFlagStore,
ISourceStateRepository stateRepository,
ICryptoHash hash,
AppleDiagnostics diagnostics,
IOptions<AppleOptions> options,
TimeProvider? timeProvider,
@@ -61,6 +65,7 @@ public sealed class AppleConnector : IFeedConnector
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_psirtFlagStore = psirtFlagStore ?? throw new ArgumentNullException(nameof(psirtFlagStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
@@ -70,6 +75,16 @@ public sealed class AppleConnector : IFeedConnector
public string SourceName => VndrAppleConnectorPlugin.SourceName;
/// <summary>
/// Computes a deterministic GUID from the source namespace and identifier using SHA-256.
/// </summary>
private Guid ComputeDeterministicId(string identifier, string sourceNamespace)
{
var input = Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}");
var hashBytes = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hashBytes[..16]);
}
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
@@ -259,7 +274,7 @@ public sealed class AppleConnector : IFeedConnector
var existingDto = await _dtoStore.FindByDocumentIdAsync(document.Id, cancellationToken).ConfigureAwait(false);
var dtoRecord = existingDto is null
? new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "apple.security.update.v1", payload, validatedAt)
? new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "apple/1.0"), document.Id, SourceName, "apple.security.update.v1", payload, validatedAt)
: existingDto with
{
Payload = payload,

View File

@@ -20,8 +20,8 @@ internal sealed record AppleCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastPosted.HasValue)
@@ -31,7 +31,7 @@ internal sealed record AppleCursor(
if (ProcessedIds.Count > 0)
{
document["processedIds"] = new DocumentArray(ProcessedIds);
document["processedIds"] = new DocumentArray(ProcessedIds.OrderBy(id => id, StringComparer.Ordinal));
}
return document;

View File

@@ -5,9 +5,11 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />

View File

@@ -17,6 +17,7 @@ using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.PsirtFlags;
using StellaOps.Cryptography;
using StellaOps.Plugin;
using Json.Schema;
@@ -40,6 +41,7 @@ public sealed class ChromiumConnector : IFeedConnector
private readonly IPsirtFlagStore _psirtFlagStore;
private readonly ISourceStateRepository _stateRepository;
private readonly IJsonSchemaValidator _schemaValidator;
private readonly ICryptoHash _hash;
private readonly ChromiumOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ChromiumDiagnostics _diagnostics;
@@ -55,6 +57,7 @@ public sealed class ChromiumConnector : IFeedConnector
IPsirtFlagStore psirtFlagStore,
ISourceStateRepository stateRepository,
IJsonSchemaValidator schemaValidator,
ICryptoHash hash,
IOptions<ChromiumOptions> options,
TimeProvider? timeProvider,
ChromiumDiagnostics diagnostics,
@@ -69,6 +72,7 @@ public sealed class ChromiumConnector : IFeedConnector
_psirtFlagStore = psirtFlagStore ?? throw new ArgumentNullException(nameof(psirtFlagStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_schemaValidator = schemaValidator ?? throw new ArgumentNullException(nameof(schemaValidator));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -78,6 +82,16 @@ public sealed class ChromiumConnector : IFeedConnector
public string SourceName => VndrChromiumConnectorPlugin.SourceName;
/// <summary>
/// Computes a deterministic GUID from the source namespace and identifier using SHA-256.
/// </summary>
private Guid ComputeDeterministicId(string identifier, string sourceNamespace)
{
var input = Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}");
var hashBytes = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hashBytes[..16]);
}
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
@@ -261,7 +275,7 @@ public sealed class ChromiumConnector : IFeedConnector
var validatedAt = _timeProvider.GetUtcNow();
var dtoRecord = existingDto is null
? new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "chromium.post.v1", payload, validatedAt)
? new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "chromium/1.0"), document.Id, SourceName, "chromium.post.v1", payload, validatedAt)
: existingDto with
{
Payload = payload,

View File

@@ -20,13 +20,13 @@ internal sealed record ChromiumCursor(
document["lastPublished"] = LastPublished.Value.UtcDateTime;
}
document["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()));
document["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString()));
document["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString()));
if (FetchCache.Count > 0)
{
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
foreach (var (key, entry) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
cacheDocument[key] = entry.ToDocument();
}

View File

@@ -5,6 +5,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
@@ -17,6 +18,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />

View File

@@ -1,6 +1,6 @@
using System.Globalization;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
@@ -14,6 +14,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Vndr.Cisco;
@@ -45,6 +46,7 @@ public sealed class CiscoConnector : IFeedConnector
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly CiscoDtoFactory _dtoFactory;
private readonly ICryptoHash _hash;
private readonly CiscoDiagnostics _diagnostics;
private readonly IOptions<CiscoOptions> _options;
private readonly TimeProvider _timeProvider;
@@ -58,6 +60,7 @@ public sealed class CiscoConnector : IFeedConnector
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
CiscoDtoFactory dtoFactory,
ICryptoHash hash,
CiscoDiagnostics diagnostics,
IOptions<CiscoOptions> options,
TimeProvider? timeProvider,
@@ -70,6 +73,7 @@ public sealed class CiscoConnector : IFeedConnector
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_dtoFactory = dtoFactory ?? throw new ArgumentNullException(nameof(dtoFactory));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -78,6 +82,25 @@ public sealed class CiscoConnector : IFeedConnector
public string SourceName => VndrCiscoConnectorPlugin.SourceName;
/// <summary>
/// Computes a deterministic GUID from the source namespace and identifier using SHA-256.
/// </summary>
private Guid ComputeDeterministicId(string identifier, string sourceNamespace)
{
var input = Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}");
var hashBytes = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hashBytes[..16]);
}
/// <summary>
/// Computes a SHA-256 hash of the payload and returns it as a lowercase hex string.
/// </summary>
private string ComputeSha256(byte[] payload)
{
var hashBytes = _hash.ComputeHash(payload, HashAlgorithms.Sha256);
return Convert.ToHexString(hashBytes).ToLowerInvariant();
}
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
@@ -137,7 +160,7 @@ public sealed class CiscoConnector : IFeedConnector
continue;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var recordId = existing?.Id ?? ComputeDeterministicId(documentUri, "cisco-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(
SourceName,
documentUri,
@@ -326,7 +349,7 @@ public sealed class CiscoConnector : IFeedConnector
{
var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions);
var dtoDoc = DocumentObject.Parse(dtoJson);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoDoc, _timeProvider.GetUtcNow());
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "cisco/1.0"), document.Id, SourceName, DtoSchemaVersion, dtoDoc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
@@ -463,13 +486,6 @@ public sealed class CiscoConnector : IFeedConnector
}
}
private static string ComputeSha256(byte[] payload)
{
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(payload, hash);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static bool ShouldProcess(CiscoAdvisoryItem advisory, DateTimeOffset? checkpoint, string? checkpointId)
{
if (checkpoint is null || advisory.LastUpdated is null)
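
Note: the deterministic-ID helper introduced above replaces Guid.NewGuid() so that re-processing the same advisory always lands on the same record identity. A minimal, self-contained sketch of the idea, using the BCL SHA256 directly as a stand-in for the injected ICryptoHash (the namespace string and URL below are illustrative, not real connector inputs):

using System;
using System.Security.Cryptography;
using System.Text;

static Guid DeterministicGuid(string sourceNamespace, string identifier)
{
    // Same (namespace, identifier) pair => same 32-byte digest => same GUID on every run.
    byte[] digest = SHA256.HashData(Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}"));
    return new Guid(digest.AsSpan(0, 16)); // first 16 digest bytes form the GUID
}

Console.WriteLine(DeterministicGuid("cisco-doc/1.0", "https://example.test/advisory/123"));
Console.WriteLine(DeterministicGuid("cisco-doc/1.0", "https://example.test/advisory/123")); // identical output

Truncating SHA-256 to 128 bits leaves collision risk negligible for advisory-scale identifier sets.
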

View File

@@ -16,8 +16,8 @@ internal sealed record CiscoCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastModified.HasValue)
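
Note: the OrderBy calls exist purely for determinism. If the pending collections are backed by an unordered set, the serialized cursor can differ from run to run even when its contents are identical. An illustrative sketch (the collection and values are invented):

using System;
using System.Collections.Generic;
using System.Linq;

var pending = new HashSet<Guid>
{
    Guid.Parse("00000000-0000-0000-0000-000000000002"),
    Guid.Parse("00000000-0000-0000-0000-000000000001"),
};

// Canonical projection: sorted GUIDs serialize the same way regardless of insertion order.
var canonical = pending.OrderBy(id => id).Select(id => id.ToString()).ToArray();
Console.WriteLine(string.Join(",", canonical));
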

View File

@@ -5,9 +5,11 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />

View File

@@ -27,8 +27,8 @@ internal sealed record MsrcCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastModifiedCursor.HasValue)

View File

@@ -1,7 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
@@ -18,6 +18,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Vndr.Msrc;
@@ -39,6 +40,7 @@ public sealed class MsrcConnector : IFeedConnector
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly MsrcOptions _options;
private readonly ICryptoHash _hash;
private readonly TimeProvider _timeProvider;
private readonly ILogger<MsrcConnector> _logger;
private readonly MsrcDiagnostics _diagnostics;
@@ -52,6 +54,7 @@ public sealed class MsrcConnector : IFeedConnector
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
ICryptoHash hash,
IOptions<MsrcOptions> options,
TimeProvider? timeProvider,
MsrcDiagnostics diagnostics,
@@ -65,6 +68,7 @@ public sealed class MsrcConnector : IFeedConnector
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -74,6 +78,25 @@ public sealed class MsrcConnector : IFeedConnector
public string SourceName => MsrcConnectorPlugin.SourceName;
/// <summary>
/// Computes a deterministic GUID from the source namespace and identifier using SHA-256.
/// </summary>
private Guid ComputeDeterministicId(string identifier, string sourceNamespace)
{
var input = Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}");
var hashBytes = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hashBytes[..16]);
}
/// <summary>
/// Computes a SHA-256 hash of the payload and returns it as a lowercase hex string.
/// </summary>
private string ComputeSha256(byte[] payload)
{
var hashBytes = _hash.ComputeHash(payload, HashAlgorithms.Sha256);
return Convert.ToHexString(hashBytes).ToLowerInvariant();
}
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
@@ -139,9 +162,9 @@ public sealed class MsrcConnector : IFeedConnector
}
var bytes = await _apiClient.FetchDetailAsync(vulnerabilityId, cancellationToken).ConfigureAwait(false);
var sha = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
var sha = ComputeSha256(bytes);
var documentId = existing?.Id ?? Guid.NewGuid();
var documentId = existing?.Id ?? ComputeDeterministicId(detailUri, "msrc-doc/1.0");
_ = await _rawDocumentStorage.UploadAsync(
SourceName,
@@ -294,7 +317,7 @@ public sealed class MsrcConnector : IFeedConnector
var dto = _detailParser.Parse(detail);
var doc = DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "msrc.detail.v1", doc, now);
var dtoRecord = new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "msrc/1.0"), document.Id, SourceName, "msrc.detail.v1", doc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
remainingDocuments.Remove(documentId);
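
Note: routing payload hashing through the injected ICryptoHash, instead of calling SHA256.HashData statically, lets the crypto provider be swapped centrally (for example the third-party GOST provider mentioned in the commit notes). A rough sketch of the pattern with an assumed, simplified interface; IPayloadHasher below is illustrative and not the real StellaOps.Cryptography surface:

using System;
using System.Security.Cryptography;

public interface IPayloadHasher
{
    byte[] ComputeHash(byte[] payload);
}

public sealed class Sha256PayloadHasher : IPayloadHasher
{
    public byte[] ComputeHash(byte[] payload) => SHA256.HashData(payload);
}

public sealed class PayloadFingerprinter
{
    private readonly IPayloadHasher _hasher;

    public PayloadFingerprinter(IPayloadHasher hasher)
        => _hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));

    // Same lowercase-hex contract as the removed static helper, but resolvable via DI.
    public string Fingerprint(byte[] payload)
        => Convert.ToHexString(_hasher.ComputeHash(payload)).ToLowerInvariant();
}
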

View File

@@ -5,9 +5,11 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />

View File

@@ -23,8 +23,8 @@ internal sealed record OracleCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastProcessed.HasValue)
@@ -35,7 +35,7 @@ internal sealed record OracleCursor(
if (FetchCache.Count > 0)
{
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
foreach (var (key, entry) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
cacheDocument[key] = entry.ToDocumentObject();
}
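
Note: ordering the cache keys with StringComparer.Ordinal, rather than the default culture-sensitive comparer, keeps the serialized cursor identical across machines with different locales. Illustrative only; the keys and values are invented:

using System;
using System.Collections.Generic;
using System.Linq;

var fetchCache = new Dictionary<string, string>
{
    ["cpuapr2025"] = "etag-a",
    ["cpujan2025"] = "etag-b",
};

foreach (var (key, entry) in fetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
    Console.WriteLine($"{key} => {entry}"); // ordinal order is locale-independent
}
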

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -15,6 +16,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage.Contracts;
using StellaOps.Concelier.Storage.PsirtFlags;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Vndr.Oracle;
@@ -35,6 +37,7 @@ public sealed class OracleConnector : IFeedConnector
private readonly IPsirtFlagStore _psirtFlagStore;
private readonly ISourceStateRepository _stateRepository;
private readonly OracleCalendarFetcher _calendarFetcher;
private readonly ICryptoHash _hash;
private readonly OracleOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<OracleConnector> _logger;
@@ -48,6 +51,7 @@ public sealed class OracleConnector : IFeedConnector
IPsirtFlagStore psirtFlagStore,
ISourceStateRepository stateRepository,
OracleCalendarFetcher calendarFetcher,
ICryptoHash hash,
IOptions<OracleOptions> options,
TimeProvider? timeProvider,
ILogger<OracleConnector> logger)
@@ -60,6 +64,7 @@ public sealed class OracleConnector : IFeedConnector
_psirtFlagStore = psirtFlagStore ?? throw new ArgumentNullException(nameof(psirtFlagStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_calendarFetcher = calendarFetcher ?? throw new ArgumentNullException(nameof(calendarFetcher));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -68,6 +73,16 @@ public sealed class OracleConnector : IFeedConnector
public string SourceName => VndrOracleConnectorPlugin.SourceName;
/// <summary>
/// Computes a deterministic GUID from the source namespace and identifier using SHA-256.
/// </summary>
private Guid ComputeDeterministicId(string identifier, string sourceNamespace)
{
var input = Encoding.UTF8.GetBytes($"{sourceNamespace}:{identifier}");
var hashBytes = _hash.ComputeHash(input, HashAlgorithms.Sha256);
return new Guid(hashBytes[..16]);
}
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
@@ -227,7 +242,7 @@ public sealed class OracleConnector : IFeedConnector
var existingDto = await _dtoStore.FindByDocumentIdAsync(document.Id, cancellationToken).ConfigureAwait(false);
var dtoRecord = existingDto is null
? new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "oracle.advisory.v1", payload, validatedAt)
? new DtoRecord(ComputeDeterministicId(document.Id.ToString(), "oracle/1.0"), document.Id, SourceName, "oracle.advisory.v1", payload, validatedAt)
: existingDto with
{
Payload = payload,
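
Note: the pattern here is "reuse or derive": an existing DTO record keeps its stored id and is refreshed via a `with` expression, while a brand-new record gets a deterministic id instead of Guid.NewGuid(). A simplified sketch with stand-in types (not the real DtoRecord shape):

using System;

public sealed record DtoRecordSketch(Guid Id, Guid DocumentId, string Schema, string Payload, DateTimeOffset ValidatedAt);

public static class DtoRecordFactory
{
    public static DtoRecordSketch CreateOrRefresh(
        DtoRecordSketch? existing,
        Guid documentId,
        string schema,
        string payload,
        DateTimeOffset validatedAt,
        Func<string, Guid> deterministicId)
    {
        // Reuse the stored identity when present; otherwise derive one from the document id.
        return existing is null
            ? new DtoRecordSketch(deterministicId(documentId.ToString()), documentId, schema, payload, validatedAt)
            : existing with { Payload = payload, ValidatedAt = validatedAt };
    }
}
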

View File

@@ -5,9 +5,11 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />

View File

@@ -23,8 +23,8 @@ internal sealed record VmwareCursor(
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString())),
["pendingDocuments"] = new DocumentArray(PendingDocuments.OrderBy(id => id).Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.OrderBy(id => id).Select(id => id.ToString())),
};
if (LastModified.HasValue)
@@ -34,13 +34,13 @@ internal sealed record VmwareCursor(
if (ProcessedIds.Count > 0)
{
document["processedIds"] = new DocumentArray(ProcessedIds);
document["processedIds"] = new DocumentArray(ProcessedIds.OrderBy(id => id, StringComparer.Ordinal));
}
if (FetchCache.Count > 0)
{
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
foreach (var (key, entry) in FetchCache.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
cacheDocument[key] = entry.ToDocumentObject();
}
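
Note: one way to sanity-check these cursor changes (not part of this commit, since test projects were excluded) is to canonicalize the same logical state from two differently ordered inputs and compare the serialized output byte for byte:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;

static string Canonical(IEnumerable<string> processedIds) =>
    JsonSerializer.Serialize(processedIds.OrderBy(id => id, StringComparer.Ordinal));

var runA = Canonical(new[] { "VMSA-2025-0002", "VMSA-2025-0001" });
var runB = Canonical(new[] { "VMSA-2025-0001", "VMSA-2025-0002" });
Console.WriteLine(runA == runB); // True: identical state => identical serialization
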

View File

@@ -5,9 +5,11 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />

Some files were not shown because too many files have changed in this diff.