Add unit tests for AST parsing and security sink detection
- Created `StellaOps.AuditPack.Tests.csproj` for unit testing the AuditPack library.
- Implemented comprehensive unit tests in `index.test.js` for AST parsing, covering various JavaScript and TypeScript constructs including functions, classes, decorators, and JSX.
- Added `sink-detect.test.js` to test security sink detection patterns, validating command injection, SQL injection, file write, deserialization, SSRF, NoSQL injection, and more.
- Included tests for taint source detection in various contexts such as Express, Koa, and AWS Lambda.
@@ -0,0 +1,258 @@
// -----------------------------------------------------------------------------
// ConcelierAdvisoryImportTarget.cs
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
// Tasks: SEAL-015 - Apply snapshot advisory content to Concelier database
// Description: Adapter implementing IAdvisoryImportTarget for Concelier module.
// -----------------------------------------------------------------------------

using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Models;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.RawModels;

namespace StellaOps.AirGap.Bundle.Services;

/// <summary>
/// Implements IAdvisoryImportTarget by adapting to Concelier's IAdvisoryRawRepository.
/// Parses NDJSON advisory content and upserts records to the advisory database.
/// </summary>
public sealed class ConcelierAdvisoryImportTarget : IAdvisoryImportTarget
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IAdvisoryRawRepository _repository;
    private readonly string _tenant;

    public ConcelierAdvisoryImportTarget(
        IAdvisoryRawRepository repository,
        string tenant = "default")
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _tenant = tenant;
    }

    /// <inheritdoc />
    public async Task<ModuleImportResultData> ImportAdvisoriesAsync(
        AdvisoryImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty advisory content"
            };
        }

        var created = 0;
        var updated = 0;
        var failed = 0;
        var errors = new List<string>();

        try
        {
            // Parse NDJSON content - each line is a complete AdvisoryRawDocument
            var contentString = Encoding.UTF8.GetString(data.Content);
            var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries);

            foreach (var line in lines)
            {
                cancellationToken.ThrowIfCancellationRequested();

                try
                {
                    var document = JsonSerializer.Deserialize<AdvisoryRawDocument>(line.Trim(), JsonOptions);
                    if (document is null)
                    {
                        failed++;
                        errors.Add("Failed to parse advisory line");
                        continue;
                    }

                    // Ensure tenant is set correctly
                    var tenantedDocument = document with { Tenant = _tenant };

                    var result = await _repository.UpsertAsync(tenantedDocument, cancellationToken);

                    if (result.Inserted)
                    {
                        created++;
                    }
                    else
                    {
                        updated++;
                    }
                }
                catch (JsonException ex)
                {
                    failed++;
                    errors.Add($"JSON parse error: {ex.Message}");
                }
                catch (Exception ex)
                {
                    failed++;
                    errors.Add($"Advisory import error: {ex.Message}");
                }
            }
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Failed = failed + 1,
                Error = $"Import failed: {ex.Message}"
            };
        }

        return new ModuleImportResultData
        {
            Created = created,
            Updated = updated,
            Failed = failed,
            Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null
        };
    }
}

/// <summary>
/// Lightweight in-memory implementation of IAdvisoryRawRepository for air-gap scenarios.
/// Used when direct database access is unavailable.
/// </summary>
public sealed class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository
{
    private readonly Dictionary<string, AdvisoryRawRecord> _records = new();
    private readonly object _lock = new();

    public Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
    {
        var contentHash = ComputeHash(document);
        var key = $"{document.Tenant}:{contentHash}";
        var now = DateTimeOffset.UtcNow;

        lock (_lock)
        {
            if (_records.TryGetValue(key, out var existing))
            {
                return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: false, Record: existing));
            }

            var record = new AdvisoryRawRecord(
                Id: Guid.NewGuid().ToString(),
                Document: document,
                IngestedAt: now,
                CreatedAt: now);

            _records[key] = record;
            return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: true, Record: record));
        }
    }

    public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var record = _records.Values.FirstOrDefault(r => r.Document.Tenant == tenant && r.Id == id);
            return Task.FromResult(record);
        }
    }

    public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values.Where(r => r.Document.Tenant == options.Tenant);

            if (!options.Vendors.IsEmpty)
            {
                query = query.Where(r => options.Vendors.Contains(r.Document.Source.Vendor));
            }

            if (options.Since.HasValue)
            {
                query = query.Where(r => r.IngestedAt >= options.Since.Value);
            }

            var records = query.Take(options.Limit).ToList();
            return Task.FromResult(new AdvisoryRawQueryResult(
                Records: records,
                NextCursor: records.Count == options.Limit && records.Count > 0 ? records[^1].Id : null,
                HasMore: records.Count == options.Limit));
        }
    }

    public Task<IReadOnlyList<AdvisoryRawRecord>> FindByAdvisoryKeyAsync(
        string tenant,
        IReadOnlyCollection<string> searchValues,
        IReadOnlyCollection<string> sourceVendors,
        CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values.Where(r => r.Document.Tenant == tenant);

            if (searchValues.Count > 0)
            {
                query = query.Where(r =>
                    searchValues.Contains(r.Document.AdvisoryKey) ||
                    r.Document.Identifiers.Aliases.Any(a => searchValues.Contains(a)));
            }

            if (sourceVendors.Count > 0)
            {
                query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor));
            }

            return Task.FromResult<IReadOnlyList<AdvisoryRawRecord>>(query.ToList());
        }
    }

    public Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync(
        string tenant,
        DateTimeOffset since,
        DateTimeOffset until,
        IReadOnlyCollection<string> sourceVendors,
        CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values
                .Where(r => r.Document.Tenant == tenant && r.IngestedAt >= since && r.IngestedAt <= until);

            if (sourceVendors.Count > 0)
            {
                query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor));
            }

            return Task.FromResult<IReadOnlyList<AdvisoryRawRecord>>(query.ToList());
        }
    }

    public int Count => _records.Count;

    public IEnumerable<AdvisoryRawRecord> GetAllRecords()
    {
        lock (_lock)
        {
            return _records.Values.ToList();
        }
    }

    private static string ComputeHash(AdvisoryRawDocument document)
    {
        var json = JsonSerializer.Serialize(document);
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return $"sha256:{Convert.ToHexStringLower(bytes)}";
    }
}
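A minimal usage sketch (editorial, not part of this commit) showing how the adapter above could be exercised against the in-memory repository; the NDJSON field names are illustrative only, since the real AdvisoryRawDocument schema lives in StellaOps.Concelier.RawModels.

    // Hedged sketch: ConcelierAdvisoryImportTarget backed by InMemoryAdvisoryRawRepository.
    using System.Text;
    using StellaOps.AirGap.Bundle.Services;

    var repository = new InMemoryAdvisoryRawRepository();
    var target = new ConcelierAdvisoryImportTarget(repository, tenant: "acme");

    // Two NDJSON lines; the JSON keys here are assumptions for illustration.
    var ndjson = "{\"advisoryKey\":\"CVE-2024-0001\"}\n{\"advisoryKey\":\"CVE-2024-0002\"}";

    var result = await target.ImportAdvisoriesAsync(new AdvisoryImportData
    {
        FeedId = "nvd",
        Content = Encoding.UTF8.GetBytes(ndjson),
        SnapshotAt = DateTimeOffset.UtcNow,
        RecordCount = 2
    });

    // Counts come back in ModuleImportResultData; the tenant on each record is forced to "acme".
    Console.WriteLine($"created={result.Created} updated={result.Updated} failed={result.Failed}");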
@@ -0,0 +1,259 @@
// -----------------------------------------------------------------------------
// ExcititorVexImportTarget.cs
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
// Tasks: SEAL-016 - Apply snapshot VEX content to Excititor database
// Description: Adapter implementing IVexImportTarget for Excititor module.
// -----------------------------------------------------------------------------

using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Models;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Storage;

namespace StellaOps.AirGap.Bundle.Services;

/// <summary>
/// Implements IVexImportTarget by adapting to Excititor's IVexRawDocumentSink.
/// Parses NDJSON VEX statement content and stores records to the VEX database.
/// </summary>
public sealed class ExcititorVexImportTarget : IVexImportTarget
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IVexRawDocumentSink _sink;
    private readonly string _tenant;

    public ExcititorVexImportTarget(
        IVexRawDocumentSink sink,
        string tenant = "default")
    {
        _sink = sink ?? throw new ArgumentNullException(nameof(sink));
        _tenant = tenant;
    }

    /// <inheritdoc />
    public async Task<ModuleImportResultData> ImportVexStatementsAsync(
        VexImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty VEX content"
            };
        }

        var created = 0;
        var updated = 0;
        var failed = 0;
        var errors = new List<string>();

        try
        {
            // Parse NDJSON content - each line is a VEX statement
            var contentString = Encoding.UTF8.GetString(data.Content);
            var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries);

            foreach (var line in lines)
            {
                cancellationToken.ThrowIfCancellationRequested();

                try
                {
                    var statement = JsonSerializer.Deserialize<VexStatementDto>(line.Trim(), JsonOptions);
                    if (statement is null)
                    {
                        failed++;
                        errors.Add("Failed to parse VEX statement line");
                        continue;
                    }

                    // Convert to VexRawDocument
                    var contentBytes = Encoding.UTF8.GetBytes(line.Trim());
                    var digest = ComputeDigest(contentBytes);

                    var document = new VexRawDocument(
                        ProviderId: data.SourceId,
                        Format: DetectFormat(statement),
                        SourceUri: statement.SourceUri ?? new Uri($"urn:stellaops:airgap:vex:{digest}"),
                        RetrievedAt: data.SnapshotAt,
                        Digest: digest,
                        Content: contentBytes,
                        Metadata: ImmutableDictionary<string, string>.Empty
                            .Add("importSource", "airgap-snapshot")
                            .Add("snapshotAt", data.SnapshotAt.ToString("O")));

                    await _sink.StoreAsync(document, cancellationToken);
                    created++;
                }
                catch (JsonException ex)
                {
                    failed++;
                    errors.Add($"JSON parse error: {ex.Message}");
                }
                catch (Exception ex)
                {
                    failed++;
                    errors.Add($"VEX import error: {ex.Message}");
                }
            }
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Failed = failed + 1,
                Error = $"Import failed: {ex.Message}"
            };
        }

        return new ModuleImportResultData
        {
            Created = created,
            Updated = updated,
            Failed = failed,
            Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null
        };
    }

    private static string ComputeDigest(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    private static VexDocumentFormat DetectFormat(VexStatementDto statement)
    {
        // Detect format from statement structure
        if (!string.IsNullOrEmpty(statement.Context))
        {
            if (statement.Context.Contains("openvex", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.OpenVex;
            if (statement.Context.Contains("csaf", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.Csaf;
            if (statement.Context.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.CycloneDx;
        }

        // Default to OpenVEX
        return VexDocumentFormat.OpenVex;
    }
}

/// <summary>
/// Lightweight in-memory implementation of IVexRawDocumentSink for air-gap scenarios.
/// </summary>
public sealed class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawStore
{
    private readonly Dictionary<string, VexRawRecord> _records = new();
    private readonly string _tenant;
    private readonly object _lock = new();

    public InMemoryVexRawDocumentSink(string tenant = "default")
    {
        _tenant = tenant;
    }

    public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            if (!_records.ContainsKey(document.Digest))
            {
                _records[document.Digest] = new VexRawRecord(
                    Digest: document.Digest,
                    Tenant: _tenant,
                    ProviderId: document.ProviderId,
                    Format: document.Format,
                    SourceUri: document.SourceUri,
                    RetrievedAt: document.RetrievedAt,
                    Metadata: document.Metadata,
                    Content: document.Content,
                    InlineContent: true,
                    RecordedAt: DateTimeOffset.UtcNow);
            }
        }

        return ValueTask.CompletedTask;
    }

    public ValueTask<VexRawRecord?> FindByDigestAsync(string digest, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _records.TryGetValue(digest, out var record);
            return ValueTask.FromResult(record);
        }
    }

    public ValueTask<VexRawDocumentPage> QueryAsync(VexRawQuery query, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var items = _records.Values
                .Where(r => r.Tenant == query.Tenant)
                .Where(r => query.ProviderIds.Count == 0 || query.ProviderIds.Contains(r.ProviderId))
                .Where(r => query.Digests.Count == 0 || query.Digests.Contains(r.Digest))
                .Where(r => query.Formats.Count == 0 || query.Formats.Contains(r.Format))
                .Where(r => !query.Since.HasValue || r.RetrievedAt >= query.Since.Value)
                .Where(r => !query.Until.HasValue || r.RetrievedAt <= query.Until.Value)
                .Take(query.Limit)
                .Select(r => new VexRawDocumentSummary(
                    r.Digest,
                    r.ProviderId,
                    r.Format,
                    r.SourceUri,
                    r.RetrievedAt,
                    r.InlineContent,
                    r.Metadata))
                .ToList();

            return ValueTask.FromResult(new VexRawDocumentPage(
                items,
                NextCursor: items.Count == query.Limit && items.Count > 0
                    ? new VexRawCursor(items[^1].RetrievedAt, items[^1].Digest)
                    : null,
                HasMore: items.Count == query.Limit));
        }
    }

    public int Count => _records.Count;

    public IEnumerable<VexRawRecord> GetAllRecords()
    {
        lock (_lock)
        {
            return _records.Values.ToList();
        }
    }
}

/// <summary>
/// DTO for deserializing VEX statements from NDJSON.
/// </summary>
internal sealed record VexStatementDto
{
    public string? Context { get; init; }
    public string? Id { get; init; }
    public string? Vulnerability { get; init; }
    public string? Status { get; init; }
    public string? Justification { get; init; }
    public string? Impact { get; init; }
    public string? ActionStatement { get; init; }
    public Uri? SourceUri { get; init; }
    public DateTimeOffset? Timestamp { get; init; }
    public ImmutableArray<string> Products { get; init; }
}
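A hedged usage sketch (editorial, not part of this commit) for the VEX adapter above, fed through the in-memory sink; the statement JSON is illustrative, relying only on the camelCase mapping of VexStatementDto shown earlier (the "context" value steers DetectFormat).

    // Hedged sketch: ExcititorVexImportTarget backed by InMemoryVexRawDocumentSink.
    using System.Text;
    using StellaOps.AirGap.Bundle.Services;

    var sink = new InMemoryVexRawDocumentSink(tenant: "acme");
    var target = new ExcititorVexImportTarget(sink, tenant: "acme");

    // One statement per line; keys are assumptions for illustration.
    var ndjson = "{\"context\":\"https://openvex.dev/ns\",\"vulnerability\":\"CVE-2024-0001\",\"status\":\"not_affected\"}\n"
               + "{\"context\":\"csaf_v2\",\"vulnerability\":\"CVE-2024-0002\",\"status\":\"affected\"}";

    var result = await target.ImportVexStatementsAsync(new VexImportData
    {
        SourceId = "vendor-x",
        Content = Encoding.UTF8.GetBytes(ndjson),
        SnapshotAt = DateTimeOffset.UtcNow,
        StatementCount = 2
    });

    // Each stored VexRawDocument is keyed by its sha256 digest in the in-memory sink.
    Console.WriteLine($"created={result.Created} failed={result.Failed} stored={sink.Count}");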
@@ -0,0 +1,489 @@
// -----------------------------------------------------------------------------
// KnowledgeSnapshotImporter.cs
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
// Tasks: SEAL-015, SEAL-016, SEAL-017 - Apply snapshot content to databases
// Description: Imports knowledge snapshot content to Concelier, Excititor, and Policy.
// -----------------------------------------------------------------------------

using System.IO.Compression;
using System.Formats.Tar;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Models;

namespace StellaOps.AirGap.Bundle.Services;

/// <summary>
/// Imports knowledge snapshot content to module databases.
/// </summary>
public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IAdvisoryImportTarget? _advisoryTarget;
    private readonly IVexImportTarget? _vexTarget;
    private readonly IPolicyImportTarget? _policyTarget;

    public KnowledgeSnapshotImporter(
        IAdvisoryImportTarget? advisoryTarget = null,
        IVexImportTarget? vexTarget = null,
        IPolicyImportTarget? policyTarget = null)
    {
        _advisoryTarget = advisoryTarget;
        _vexTarget = vexTarget;
        _policyTarget = policyTarget;
    }

    /// <summary>
    /// Imports all content from a verified snapshot bundle.
    /// </summary>
    public async Task<SnapshotImportResult> ImportAsync(
        SnapshotImportRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);

        if (!File.Exists(request.BundlePath))
        {
            return SnapshotImportResult.Failed("Bundle file not found");
        }

        var tempDir = Path.Combine(Path.GetTempPath(), $"import-{Guid.NewGuid():N}");
        Directory.CreateDirectory(tempDir);

        try
        {
            // Extract bundle
            await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);

            // Read manifest
            var manifestPath = Path.Combine(tempDir, "manifest.json");
            if (!File.Exists(manifestPath))
            {
                return SnapshotImportResult.Failed("Manifest not found in bundle");
            }

            var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
            var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestBytes, JsonOptions);
            if (manifest is null)
            {
                return SnapshotImportResult.Failed("Failed to parse manifest");
            }

            var result = new SnapshotImportResult
            {
                Success = true,
                BundleId = manifest.BundleId,
                StartedAt = DateTimeOffset.UtcNow
            };

            var errors = new List<string>();
            var stats = new ImportStatistics();

            // Import advisories (SEAL-015)
            if (request.ImportAdvisories && _advisoryTarget is not null)
            {
                var advisoryResult = await ImportAdvisoriesAsync(
                    tempDir, manifest.Advisories, request.DryRun, cancellationToken);

                stats.AdvisoriesProcessed = advisoryResult.Processed;
                stats.AdvisoriesCreated = advisoryResult.Created;
                stats.AdvisoriesUpdated = advisoryResult.Updated;
                stats.AdvisoriesFailed = advisoryResult.Failed;

                if (advisoryResult.Errors.Count > 0)
                {
                    errors.AddRange(advisoryResult.Errors.Select(e => $"Advisory: {e}"));
                }
            }
            else if (request.ImportAdvisories)
            {
                errors.Add("Advisory import target not configured");
            }

            // Import VEX statements (SEAL-016)
            if (request.ImportVex && _vexTarget is not null)
            {
                var vexResult = await ImportVexStatementsAsync(
                    tempDir, manifest.VexStatements, request.DryRun, cancellationToken);

                stats.VexProcessed = vexResult.Processed;
                stats.VexCreated = vexResult.Created;
                stats.VexUpdated = vexResult.Updated;
                stats.VexFailed = vexResult.Failed;

                if (vexResult.Errors.Count > 0)
                {
                    errors.AddRange(vexResult.Errors.Select(e => $"VEX: {e}"));
                }
            }
            else if (request.ImportVex)
            {
                errors.Add("VEX import target not configured");
            }

            // Import policies (SEAL-017)
            if (request.ImportPolicies && _policyTarget is not null)
            {
                var policyResult = await ImportPoliciesAsync(
                    tempDir, manifest.Policies, request.DryRun, cancellationToken);

                stats.PoliciesProcessed = policyResult.Processed;
                stats.PoliciesCreated = policyResult.Created;
                stats.PoliciesUpdated = policyResult.Updated;
                stats.PoliciesFailed = policyResult.Failed;

                if (policyResult.Errors.Count > 0)
                {
                    errors.AddRange(policyResult.Errors.Select(e => $"Policy: {e}"));
                }
            }
            else if (request.ImportPolicies)
            {
                errors.Add("Policy import target not configured");
            }

            result = result with
            {
                CompletedAt = DateTimeOffset.UtcNow,
                Statistics = stats,
                Errors = errors.Count > 0 ? [.. errors] : null,
                Success = errors.Count == 0 || !request.FailOnAnyError
            };

            return result;
        }
        catch (Exception ex)
        {
            return SnapshotImportResult.Failed($"Import failed: {ex.Message}");
        }
        finally
        {
            try
            {
                if (Directory.Exists(tempDir))
                {
                    Directory.Delete(tempDir, recursive: true);
                }
            }
            catch
            {
                // Ignore cleanup errors
            }
        }
    }

    private async Task<ModuleImportResult> ImportAdvisoriesAsync(
        string bundleDir,
        IReadOnlyList<AdvisorySnapshotEntry> entries,
        bool dryRun,
        CancellationToken ct)
    {
        var result = new ModuleImportResult();

        foreach (var entry in entries)
        {
            try
            {
                var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
                if (!File.Exists(filePath))
                {
                    result.Failed++;
                    result.Errors.Add($"File not found: {entry.RelativePath}");
                    continue;
                }

                var content = await File.ReadAllBytesAsync(filePath, ct);
                result.Processed++;

                if (!dryRun && _advisoryTarget is not null)
                {
                    var importResult = await _advisoryTarget.ImportAdvisoriesAsync(
                        new AdvisoryImportData
                        {
                            FeedId = entry.FeedId,
                            Content = content,
                            SnapshotAt = entry.SnapshotAt,
                            RecordCount = entry.RecordCount
                        },
                        ct);

                    result.Created += importResult.Created;
                    result.Updated += importResult.Updated;
                    result.Failed += importResult.Failed;

                    if (importResult.Error is not null)
                    {
                        result.Errors.Add(importResult.Error);
                    }
                }
            }
            catch (Exception ex)
            {
                result.Failed++;
                result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
            }
        }

        return result;
    }

    private async Task<ModuleImportResult> ImportVexStatementsAsync(
        string bundleDir,
        IReadOnlyList<VexSnapshotEntry> entries,
        bool dryRun,
        CancellationToken ct)
    {
        var result = new ModuleImportResult();

        foreach (var entry in entries)
        {
            try
            {
                var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
                if (!File.Exists(filePath))
                {
                    result.Failed++;
                    result.Errors.Add($"File not found: {entry.RelativePath}");
                    continue;
                }

                var content = await File.ReadAllBytesAsync(filePath, ct);
                result.Processed++;

                if (!dryRun && _vexTarget is not null)
                {
                    var importResult = await _vexTarget.ImportVexStatementsAsync(
                        new VexImportData
                        {
                            SourceId = entry.SourceId,
                            Content = content,
                            SnapshotAt = entry.SnapshotAt,
                            StatementCount = entry.StatementCount
                        },
                        ct);

                    result.Created += importResult.Created;
                    result.Updated += importResult.Updated;
                    result.Failed += importResult.Failed;

                    if (importResult.Error is not null)
                    {
                        result.Errors.Add(importResult.Error);
                    }
                }
            }
            catch (Exception ex)
            {
                result.Failed++;
                result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
            }
        }

        return result;
    }

    private async Task<ModuleImportResult> ImportPoliciesAsync(
        string bundleDir,
        IReadOnlyList<PolicySnapshotEntry> entries,
        bool dryRun,
        CancellationToken ct)
    {
        var result = new ModuleImportResult();

        foreach (var entry in entries)
        {
            try
            {
                var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
                if (!File.Exists(filePath))
                {
                    result.Failed++;
                    result.Errors.Add($"File not found: {entry.RelativePath}");
                    continue;
                }

                var content = await File.ReadAllBytesAsync(filePath, ct);
                result.Processed++;

                if (!dryRun && _policyTarget is not null)
                {
                    var importResult = await _policyTarget.ImportPolicyAsync(
                        new PolicyImportData
                        {
                            PolicyId = entry.PolicyId,
                            Content = content,
                            Version = entry.Version
                        },
                        ct);

                    result.Created += importResult.Created;
                    result.Updated += importResult.Updated;
                    result.Failed += importResult.Failed;

                    if (importResult.Error is not null)
                    {
                        result.Errors.Add(importResult.Error);
                    }
                }
            }
            catch (Exception ex)
            {
                result.Failed++;
                result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
            }
        }

        return result;
    }

    private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
    {
        await using var fileStream = File.OpenRead(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
    }

    private sealed class ModuleImportResult
    {
        public int Processed { get; set; }
        public int Created { get; set; }
        public int Updated { get; set; }
        public int Failed { get; set; }
        public List<string> Errors { get; } = [];
    }
}

/// <summary>
/// Interface for knowledge snapshot importing.
/// </summary>
public interface IKnowledgeSnapshotImporter
{
    Task<SnapshotImportResult> ImportAsync(
        SnapshotImportRequest request,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Target interface for importing advisories (SEAL-015).
/// Implemented by Concelier module.
/// </summary>
public interface IAdvisoryImportTarget
{
    Task<ModuleImportResultData> ImportAdvisoriesAsync(
        AdvisoryImportData data,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Target interface for importing VEX statements (SEAL-016).
/// Implemented by Excititor module.
/// </summary>
public interface IVexImportTarget
{
    Task<ModuleImportResultData> ImportVexStatementsAsync(
        VexImportData data,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Target interface for importing policies (SEAL-017).
/// Implemented by Policy module.
/// </summary>
public interface IPolicyImportTarget
{
    Task<ModuleImportResultData> ImportPolicyAsync(
        PolicyImportData data,
        CancellationToken cancellationToken = default);
}

#region Request and Result Models

public sealed record SnapshotImportRequest
{
    public required string BundlePath { get; init; }
    public bool ImportAdvisories { get; init; } = true;
    public bool ImportVex { get; init; } = true;
    public bool ImportPolicies { get; init; } = true;
    public bool DryRun { get; init; } = false;
    public bool FailOnAnyError { get; init; } = false;
}

public sealed record SnapshotImportResult
{
    public bool Success { get; init; }
    public string? BundleId { get; init; }
    public DateTimeOffset StartedAt { get; init; }
    public DateTimeOffset CompletedAt { get; init; }
    public ImportStatistics? Statistics { get; init; }
    public IReadOnlyList<string>? Errors { get; init; }
    public string? Error { get; init; }

    public static SnapshotImportResult Failed(string error) => new()
    {
        Success = false,
        Error = error,
        StartedAt = DateTimeOffset.UtcNow,
        CompletedAt = DateTimeOffset.UtcNow
    };
}

public sealed record ImportStatistics
{
    public int AdvisoriesProcessed { get; set; }
    public int AdvisoriesCreated { get; set; }
    public int AdvisoriesUpdated { get; set; }
    public int AdvisoriesFailed { get; set; }

    public int VexProcessed { get; set; }
    public int VexCreated { get; set; }
    public int VexUpdated { get; set; }
    public int VexFailed { get; set; }

    public int PoliciesProcessed { get; set; }
    public int PoliciesCreated { get; set; }
    public int PoliciesUpdated { get; set; }
    public int PoliciesFailed { get; set; }

    public int TotalProcessed => AdvisoriesProcessed + VexProcessed + PoliciesProcessed;
    public int TotalCreated => AdvisoriesCreated + VexCreated + PoliciesCreated;
    public int TotalUpdated => AdvisoriesUpdated + VexUpdated + PoliciesUpdated;
    public int TotalFailed => AdvisoriesFailed + VexFailed + PoliciesFailed;
}

public sealed record AdvisoryImportData
{
    public required string FeedId { get; init; }
    public required byte[] Content { get; init; }
    public DateTimeOffset SnapshotAt { get; init; }
    public int RecordCount { get; init; }
}

public sealed record VexImportData
{
    public required string SourceId { get; init; }
    public required byte[] Content { get; init; }
    public DateTimeOffset SnapshotAt { get; init; }
    public int StatementCount { get; init; }
}

public sealed record PolicyImportData
{
    public required string PolicyId { get; init; }
    public required byte[] Content { get; init; }
    public string? Version { get; init; }
    public DateTimeOffset SnapshotAt { get; init; }
}

public sealed record ModuleImportResultData
{
    public int Created { get; init; }
    public int Updated { get; init; }
    public int Failed { get; init; }
    public string? Error { get; init; }
}

#endregion
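A hedged end-to-end sketch (editorial, not part of this commit) composing the importer from the three adapters defined in this change; the bundle path is hypothetical, and DryRun exercises extraction and counting without calling the targets.

    // Hedged sketch: wire KnowledgeSnapshotImporter with the in-memory-backed adapters.
    using StellaOps.AirGap.Bundle.Services;

    var importer = new KnowledgeSnapshotImporter(
        advisoryTarget: new ConcelierAdvisoryImportTarget(new InMemoryAdvisoryRawRepository()),
        vexTarget: new ExcititorVexImportTarget(new InMemoryVexRawDocumentSink()),
        policyTarget: new PolicyRegistryImportTarget(new InMemoryPolicyPackImportStore()));

    var result = await importer.ImportAsync(new SnapshotImportRequest
    {
        BundlePath = "/mnt/transfer/knowledge-snapshot.tar.gz", // hypothetical path
        DryRun = true                                            // counts entries, skips target calls
    });

    if (result.Success && result.Statistics is { } stats)
    {
        Console.WriteLine($"processed={stats.TotalProcessed} failed={stats.TotalFailed}");
    }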
@@ -0,0 +1,247 @@
// -----------------------------------------------------------------------------
// PolicyRegistryImportTarget.cs
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
// Tasks: SEAL-017 - Apply snapshot policy content to Policy registry
// Description: Adapter implementing IPolicyImportTarget for Policy module.
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Bundle.Models;

namespace StellaOps.AirGap.Bundle.Services;

/// <summary>
/// Implements IPolicyImportTarget for importing policy packs from snapshots.
/// Parses policy bundle content and stores to the policy registry.
/// </summary>
public sealed class PolicyRegistryImportTarget : IPolicyImportTarget
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IPolicyPackImportStore _store;
    private readonly string _tenantId;

    public PolicyRegistryImportTarget(
        IPolicyPackImportStore store,
        string tenantId = "default")
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _tenantId = tenantId;
    }

    /// <inheritdoc />
    public async Task<ModuleImportResultData> ImportPolicyAsync(
        PolicyImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty policy content"
            };
        }

        var created = 0;
        var updated = 0;
        var failed = 0;

        try
        {
            // Compute content digest for deduplication
            var digest = ComputeDigest(data.Content);

            // Check if already exists
            var existing = await _store.FindByDigestAsync(_tenantId, digest, cancellationToken);
            if (existing is not null)
            {
                updated++;
                return new ModuleImportResultData
                {
                    Updated = updated,
                    Error = null
                };
            }

            // Parse policy bundle to validate
            var bundle = ParsePolicyBundle(data.Content);

            // Store the policy pack
            var pack = new ImportedPolicyPack(
                Id: data.PolicyId,
                TenantId: _tenantId,
                Digest: digest,
                Version: data.Version ?? "1.0.0",
                Content: data.Content,
                Metadata: bundle.Metadata,
                ImportedAt: DateTimeOffset.UtcNow);

            await _store.SaveAsync(pack, cancellationToken);
            created++;

            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Error = null
            };
        }
        catch (JsonException ex)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = $"JSON parse error: {ex.Message}"
            };
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Failed = failed + 1,
                Error = $"Policy import error: {ex.Message}"
            };
        }
    }

    private static string ComputeDigest(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    private static PolicyBundleDto ParsePolicyBundle(byte[] content)
    {
        var json = Encoding.UTF8.GetString(content);
        return JsonSerializer.Deserialize<PolicyBundleDto>(json, JsonOptions)
            ?? throw new InvalidDataException("Failed to parse policy bundle");
    }
}

/// <summary>
/// Store interface for importing policy packs from air-gap snapshots.
/// </summary>
public interface IPolicyPackImportStore
{
    /// <summary>
    /// Finds an imported policy pack by content digest.
    /// </summary>
    Task<ImportedPolicyPack?> FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken);

    /// <summary>
    /// Saves an imported policy pack.
    /// </summary>
    Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken);

    /// <summary>
    /// Lists all imported policy packs for a tenant.
    /// </summary>
    Task<IReadOnlyList<ImportedPolicyPack>> ListAsync(string tenantId, CancellationToken cancellationToken);
}

/// <summary>
/// Lightweight in-memory implementation of IPolicyPackImportStore for air-gap scenarios.
/// </summary>
public sealed class InMemoryPolicyPackImportStore : IPolicyPackImportStore
{
    private readonly Dictionary<string, ImportedPolicyPack> _packs = new();
    private readonly object _lock = new();

    public Task<ImportedPolicyPack?> FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var pack = _packs.Values.FirstOrDefault(p => p.TenantId == tenantId && p.Digest == digest);
            return Task.FromResult(pack);
        }
    }

    public Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _packs[$"{pack.TenantId}:{pack.Id}"] = pack;
        }
        return Task.CompletedTask;
    }

    public Task<IReadOnlyList<ImportedPolicyPack>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var packs = _packs.Values.Where(p => p.TenantId == tenantId).ToList();
            return Task.FromResult<IReadOnlyList<ImportedPolicyPack>>(packs);
        }
    }

    public int Count => _packs.Count;

    public IEnumerable<ImportedPolicyPack> GetAllPacks()
    {
        lock (_lock)
        {
            return _packs.Values.ToList();
        }
    }
}

/// <summary>
/// Imported policy pack record.
/// </summary>
public sealed record ImportedPolicyPack(
    string Id,
    string TenantId,
    string Digest,
    string Version,
    byte[] Content,
    PolicyPackMetadata? Metadata,
    DateTimeOffset ImportedAt);

/// <summary>
/// DTO for deserializing policy bundle.
/// </summary>
internal sealed record PolicyBundleDto
{
    public int SchemaVersion { get; init; } = 1;
    public string? DomainId { get; init; }
    public string? Name { get; init; }
    public string? Description { get; init; }
    public string? Version { get; init; }
    public PolicyPackMetadata? Metadata { get; init; }
    public IReadOnlyList<PolicyRuleDto>? Rules { get; init; }
}

/// <summary>
/// Policy pack metadata.
/// </summary>
public sealed record PolicyPackMetadata
{
    public string? Author { get; init; }
    public string? License { get; init; }
    public string? Homepage { get; init; }
    public DateTimeOffset? CreatedAt { get; init; }
    public IReadOnlyList<string>? Tags { get; init; }
}

/// <summary>
/// Policy rule within a bundle.
/// </summary>
internal sealed record PolicyRuleDto
{
    public string? Id { get; init; }
    public string? Name { get; init; }
    public string? Description { get; init; }
    public string? Severity { get; init; }
    public string? Expression { get; init; }
}
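A hedged sketch (editorial, not part of this commit) showing the digest-based deduplication path of the policy adapter above; the bundle fields follow PolicyBundleDto, and the pack id and rule values are hypothetical.

    // Hedged sketch: importing the same policy pack twice dedupes on content digest.
    using System.Text;
    using System.Text.Json;
    using StellaOps.AirGap.Bundle.Services;

    var store = new InMemoryPolicyPackImportStore();
    var target = new PolicyRegistryImportTarget(store, tenantId: "acme");

    var bundleJson = JsonSerializer.Serialize(new
    {
        schemaVersion = 1,
        name = "baseline",
        version = "1.0.0",
        metadata = new { author = "secops" },
        rules = new[] { new { id = "R-001", severity = "high", expression = "severity >= high" } }
    });

    var data = new PolicyImportData
    {
        PolicyId = "baseline",
        Content = Encoding.UTF8.GetBytes(bundleJson),
        Version = "1.0.0"
    };

    var first = await target.ImportPolicyAsync(data);  // Created = 1, pack stored
    var second = await target.ImportPolicyAsync(data); // same digest -> Updated = 1, nothing stored twice

    Console.WriteLine($"first.Created={first.Created} second.Updated={second.Updated} packs={store.Count}");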
@@ -12,6 +12,9 @@
  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
    <ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
    <ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
  </ItemGroup>

  <ItemGroup>