Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
@@ -1,378 +1,378 @@
// Regenerates Concelier connector test fixtures (OSV, GHSA, and NVD credit-parity snapshots)
// by re-running the connector mappers over the raw fixture documents and deterministic
// baseline records, then rewriting the snapshot JSON files in the test projects.
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using MongoDB.Bson;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Connector.Ghsa;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Ghsa.Internal;
using StellaOps.Concelier.Connector.Osv.Internal;
using StellaOps.Concelier.Connector.Osv;
using StellaOps.Concelier.Connector.Nvd;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;

var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};

var projectRoot = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", ".."));

var osvFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures");
var ghsaFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures");
var nvdFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Nvd.Tests", "Nvd", "Fixtures");

RewriteOsvFixtures(osvFixturesPath);
RewriteSnapshotFixtures(osvFixturesPath);
RewriteGhsaFixtures(osvFixturesPath);
RewriteCreditParityFixtures(ghsaFixturesPath, nvdFixturesPath);
return;

void RewriteOsvFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-osv.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] OSV raw fixture missing: {rawPath}");
        return;
    }

    using var document = JsonDocument.Parse(File.ReadAllText(rawPath));
    var advisories = new List<Advisory>();
    foreach (var element in document.RootElement.EnumerateArray())
    {
        var dto = JsonSerializer.Deserialize<OsvVulnerabilityDto>(element.GetRawText(), serializerOptions);
        if (dto is null)
        {
            continue;
        }

        var ecosystem = dto.Affected?.FirstOrDefault()?.Package?.Ecosystem ?? "unknown";
        var uri = new Uri($"https://osv.dev/vulnerability/{dto.Id}");
        var documentRecord = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            uri.ToString(),
            DateTimeOffset.UtcNow,
            "fixture-sha",
            DocumentStatuses.PendingMap,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["osv.ecosystem"] = ecosystem,
            },
            null,
            DateTimeOffset.UtcNow,
            null,
            null);

        var payload = BsonDocument.Parse(element.GetRawText());
        var dtoRecord = new DtoRecord(
            Guid.NewGuid(),
            documentRecord.Id,
            OsvConnectorPlugin.SourceName,
            "osv.v1",
            payload,
            DateTimeOffset.UtcNow);

        var advisory = OsvMapper.Map(dto, documentRecord, dtoRecord, ecosystem);
        advisories.Add(advisory);
    }

    advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
    var snapshot = SnapshotSerializer.ToSnapshot(advisories);
    File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.osv.json"), snapshot);
    Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.osv.json")}");
}

void RewriteSnapshotFixtures(string fixturesPath)
{
    var baselinePublished = new DateTimeOffset(2025, 1, 5, 12, 0, 0, TimeSpan.Zero);
    var baselineModified = new DateTimeOffset(2025, 1, 8, 6, 30, 0, TimeSpan.Zero);
    var baselineFetched = new DateTimeOffset(2025, 1, 8, 7, 0, 0, TimeSpan.Zero);

    var cases = new (string Ecosystem, string Purl, string PackageName, string SnapshotFile)[]
    {
        ("npm", "pkg:npm/%40scope%2Fleft-pad", "@scope/left-pad", "osv-npm.snapshot.json"),
        ("PyPI", "pkg:pypi/requests", "requests", "osv-pypi.snapshot.json"),
    };

    foreach (var (ecosystem, purl, packageName, snapshotFile) in cases)
    {
        var dto = new OsvVulnerabilityDto
        {
            Id = $"OSV-2025-{ecosystem}-0001",
            Summary = $"{ecosystem} package vulnerability",
            Details = $"Detailed description for {ecosystem} package {packageName}.",
            Published = baselinePublished,
            Modified = baselineModified,
            Aliases = new[] { $"CVE-2025-11{ecosystem.Length}", $"GHSA-{ecosystem.Length}abc-{ecosystem.Length}def-{ecosystem.Length}ghi" },
            Related = new[] { $"OSV-RELATED-{ecosystem}-42" },
            References = new[]
            {
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/advisory", Type = "ADVISORY" },
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/fix", Type = "FIX" },
            },
            Severity = new[]
            {
                new OsvSeverityDto { Type = "CVSS_V3", Score = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
            },
            Affected = new[]
            {
                new OsvAffectedPackageDto
                {
                    Package = new OsvPackageDto
                    {
                        Ecosystem = ecosystem,
                        Name = packageName,
                        Purl = purl,
                    },
                    Ranges = new[]
                    {
                        new OsvRangeDto
                        {
                            Type = "SEMVER",
                            Events = new[]
                            {
                                new OsvEventDto { Introduced = "0" },
                                new OsvEventDto { Fixed = "2.0.0" },
                            },
                        },
                    },
                    Versions = new[] { "1.0.0", "1.5.0" },
                    EcosystemSpecific = JsonDocument.Parse("{\"severity\":\"high\"}").RootElement.Clone(),
                },
            },
            DatabaseSpecific = JsonDocument.Parse("{\"source\":\"osv.dev\"}").RootElement.Clone(),
        };

        var document = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            $"https://osv.dev/vulnerability/{dto.Id}",
            baselineFetched,
            "fixture-sha",
            DocumentStatuses.PendingParse,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal) { ["osv.ecosystem"] = ecosystem },
            null,
            baselineModified,
            null);

        var payload = BsonDocument.Parse(JsonSerializer.Serialize(dto, serializerOptions));
        var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, OsvConnectorPlugin.SourceName, "osv.v1", payload, baselineModified);

        var advisory = OsvMapper.Map(dto, document, dtoRecord, ecosystem);
        var snapshot = SnapshotSerializer.ToSnapshot(advisory);
        File.WriteAllText(Path.Combine(fixturesPath, snapshotFile), snapshot);
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, snapshotFile)}");
    }
}

void RewriteGhsaFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-ghsa.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] GHSA raw fixture missing: {rawPath}");
        return;
    }

    JsonDocument document;
    try
    {
        document = JsonDocument.Parse(File.ReadAllText(rawPath));
    }
    catch (JsonException ex)
    {
        Console.WriteLine($"[FixtureUpdater] Failed to parse GHSA raw fixture '{rawPath}': {ex.Message}");
        return;
    }

    using (document)
    {
        var advisories = new List<Advisory>();
        foreach (var element in document.RootElement.EnumerateArray())
        {
            GhsaRecordDto dto;
            try
            {
                dto = GhsaRecordParser.Parse(Encoding.UTF8.GetBytes(element.GetRawText()));
            }
            catch (JsonException)
            {
                continue;
            }

            var uri = new Uri($"https://github.com/advisories/{dto.GhsaId}");
            var documentRecord = new DocumentRecord(
                Guid.NewGuid(),
                GhsaConnectorPlugin.SourceName,
                uri.ToString(),
                DateTimeOffset.UtcNow,
                "fixture-sha",
                DocumentStatuses.PendingMap,
                "application/json",
                null,
                new Dictionary<string, string>(StringComparer.Ordinal),
                null,
                DateTimeOffset.UtcNow,
                null,
                null);

            var advisory = GhsaMapper.Map(dto, documentRecord, DateTimeOffset.UtcNow);
            advisories.Add(advisory);
        }

        advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
        var snapshot = SnapshotSerializer.ToSnapshot(advisories);
        File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.ghsa.json"), snapshot);
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.ghsa.json")}");
    }
}

void RewriteCreditParityFixtures(string ghsaFixturesPath, string nvdFixturesPath)
{
    Directory.CreateDirectory(ghsaFixturesPath);
    Directory.CreateDirectory(nvdFixturesPath);

    var advisoryKeyGhsa = "GHSA-credit-parity";
    var advisoryKeyNvd = "CVE-2025-5555";
    var recordedAt = new DateTimeOffset(2025, 10, 10, 15, 0, 0, TimeSpan.Zero);
    var published = new DateTimeOffset(2025, 10, 9, 18, 30, 0, TimeSpan.Zero);
    var modified = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero);

    AdvisoryCredit[] CreateCredits(string source) =>
    [
        CreateCredit("Alice Researcher", "reporter", new[] { "mailto:alice.researcher@example.com" }, source),
        CreateCredit("Bob Maintainer", "remediation_developer", new[] { "https://github.com/acme/bob-maintainer" }, source)
    ];

    AdvisoryCredit CreateCredit(string displayName, string role, IReadOnlyList<string> contacts, string source)
    {
        var provenance = new AdvisoryProvenance(
            source,
            "credit",
            $"{source}:{displayName.ToLowerInvariant().Replace(' ', '-')}",
            recordedAt,
            new[] { ProvenanceFieldMasks.Credits });

        return new AdvisoryCredit(displayName, role, contacts, provenance);
    }

    AdvisoryReference[] CreateReferences(string sourceName, params (string Url, string Kind)[] entries)
    {
        if (entries is null || entries.Length == 0)
        {
            return Array.Empty<AdvisoryReference>();
        }

        var references = new List<AdvisoryReference>(entries.Length);
        foreach (var entry in entries)
        {
            var provenance = new AdvisoryProvenance(
                sourceName,
                "reference",
                entry.Url,
                recordedAt,
                new[] { ProvenanceFieldMasks.References });

            references.Add(new AdvisoryReference(
                entry.Url,
                entry.Kind,
                sourceTag: null,
                summary: null,
                provenance));
        }

        return references.ToArray();
    }

    Advisory CreateAdvisory(
        string sourceName,
        string advisoryKey,
        IEnumerable<string> aliases,
        AdvisoryCredit[] credits,
        AdvisoryReference[] references,
        string documentValue)
    {
        var documentProvenance = new AdvisoryProvenance(
            sourceName,
            "document",
            documentValue,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });
        var mappingProvenance = new AdvisoryProvenance(
            sourceName,
            "mapping",
            advisoryKey,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });

        return new Advisory(
            advisoryKey,
            "Credit parity regression fixture",
            "Credit parity regression fixture",
            "en",
            published,
            modified,
            "moderate",
            exploitKnown: false,
            aliases,
            credits,
            references,
            Array.Empty<AffectedPackage>(),
            Array.Empty<CvssMetric>(),
            new[] { documentProvenance, mappingProvenance });
    }

    var ghsa = CreateAdvisory(
        "ghsa",
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits("ghsa"),
        CreateReferences(
            "ghsa",
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ("https://example.com/ghsa/patch", "patch")),
        $"security/advisories/{advisoryKeyGhsa}");

    var osv = CreateAdvisory(
        OsvConnectorPlugin.SourceName,
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits(OsvConnectorPlugin.SourceName),
        CreateReferences(
            OsvConnectorPlugin.SourceName,
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ($"https://osv.dev/vulnerability/{advisoryKeyGhsa}", "advisory")),
        $"https://osv.dev/vulnerability/{advisoryKeyGhsa}");

    var nvd = CreateAdvisory(
        NvdConnectorPlugin.SourceName,
        advisoryKeyNvd,
        new[] { advisoryKeyNvd, advisoryKeyGhsa },
        CreateCredits(NvdConnectorPlugin.SourceName),
        CreateReferences(
            NvdConnectorPlugin.SourceName,
            ($"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}", "advisory"),
            ("https://example.com/nvd/reference", "report")),
        $"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}");

    var ghsaSnapshot = SnapshotSerializer.ToSnapshot(ghsa);
    var osvSnapshot = SnapshotSerializer.ToSnapshot(osv);
    var nvdSnapshot = SnapshotSerializer.ToSnapshot(nvd);

    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);

    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);

    Console.WriteLine($"[FixtureUpdater] Updated credit parity fixtures under {ghsaFixturesPath} and {nvdFixturesPath}");
}
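For orientation, here is a minimal sketch (not part of this commit) of how a connector test might consume one of the regenerated snapshots; the test name, the MapGhsaAdvisoryUnderTest helper, and the xunit wiring are assumptions for illustration only:

    [Fact]
    public void CreditParitySnapshotMatchesMappedAdvisory()
    {
        // Hypothetical regression check: compare a freshly mapped advisory against the checked-in snapshot.
        var expected = File.ReadAllText(Path.Combine("Fixtures", "credit-parity.ghsa.json"));
        var advisory = MapGhsaAdvisoryUnderTest(); // placeholder for the mapping exercised by the real tests
        var actual = SnapshotSerializer.ToSnapshot(advisory);
        Assert.Equal(expected.Trim(), actual.Trim());
    }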
@@ -1,16 +1,16 @@
// Smoke harness for language analyzer plug-ins: loads the restart-only plug-in from the
// repository plug-in directory, runs each fixture scenario cold and warm, and verifies
// that the output is deterministic and stays within the time budgets defined below.
using System.Collections.Immutable;
using System.Diagnostics;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Core.Security;

internal sealed record SmokeScenario(string Name, string[] UsageHintRelatives)
{
    public IReadOnlyList<string> ResolveUsageHints(string scenarioRoot)
@@ -80,15 +80,15 @@ internal sealed class SmokeOptions
    {
        var options = new SmokeOptions();

        for (var index = 0; index < args.Length; index++)
        {
            var current = args[index];
            switch (current)
            {
                case "--repo-root":
                case "-r":
                    options.RepoRoot = RequireValue(args, ref index, current);
                    break;
                case "--plugin-directory":
                case "-p":
                    options.PluginDirectoryName = RequireValue(args, ref index, current);
@@ -107,10 +107,10 @@ internal sealed class SmokeOptions
                case "-h":
                    PrintUsage();
                    Environment.Exit(0);
                    break;
                default:
                    throw new ArgumentException($"Unknown argument '{current}'. Use --help for usage.");
            }
        }

        options.RepoRoot = Path.GetFullPath(options.RepoRoot);
@@ -135,22 +135,22 @@ internal sealed class SmokeOptions

    private static string RequireValue(string[] args, ref int index, string switchName)
    {
        if (index + 1 >= args.Length)
        {
            throw new ArgumentException($"Missing value for '{switchName}'.");
        }

        index++;
        var value = args[index];
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException($"Value for '{switchName}' cannot be empty.");
        }

        return value;
    }

    private static void PrintUsage()
    {
        Console.WriteLine("Language Analyzer Smoke Harness");
        Console.WriteLine("Usage: dotnet run --project src/Tools/LanguageAnalyzerSmoke -- [options]");
@@ -162,57 +162,57 @@ internal sealed class SmokeOptions
        Console.WriteLine(" -f, --fixture-path <path> Relative path to fixtures root (defaults to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)");
        Console.WriteLine(" -h, --help Show usage information");
    }
}

internal sealed record PluginManifest
{
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = string.Empty;

    [JsonPropertyName("id")]
    public string Id { get; init; } = string.Empty;

    [JsonPropertyName("displayName")]
    public string DisplayName { get; init; } = string.Empty;

    [JsonPropertyName("version")]
    public string Version { get; init; } = string.Empty;

    [JsonPropertyName("requiresRestart")]
    public bool RequiresRestart { get; init; }

    [JsonPropertyName("entryPoint")]
    public PluginEntryPoint EntryPoint { get; init; } = new();

    [JsonPropertyName("capabilities")]
    public IReadOnlyList<string> Capabilities { get; init; } = Array.Empty<string>();

    [JsonPropertyName("metadata")]
    public IReadOnlyDictionary<string, string> Metadata { get; init; } = ImmutableDictionary<string, string>.Empty;
}

internal sealed record PluginEntryPoint
{
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    [JsonPropertyName("assembly")]
    public string Assembly { get; init; } = string.Empty;

    [JsonPropertyName("typeName")]
    public string TypeName { get; init; } = string.Empty;
}

file static class Program
{
    private static readonly SmokeScenario[] PythonScenarios =
    {
        new("simple-venv", new[] { Path.Combine("bin", "simple-tool") }),
        new("pip-cache", new[] { Path.Combine("lib", "python3.11", "site-packages", "cache_pkg-1.2.3.data", "scripts", "cache-tool") }),
        new("layered-editable", new[] { Path.Combine("layer1", "usr", "bin", "layered-cli") })
    };

    public static async Task<int> Main(string[] args)
    {
        try
        {
@@ -224,7 +224,7 @@ file static class Program
        catch (Exception ex)
        {
            Console.Error.WriteLine($"❌ {ex.Message}");
            return 1;
        }
    }

@@ -240,13 +240,13 @@ file static class Program
        var pluginRoot = Path.Combine(options.RepoRoot, "plugins", "scanner", "analyzers", "lang", options.PluginDirectoryName);
        var manifestPath = Path.Combine(pluginRoot, "manifest.json");
        if (!File.Exists(manifestPath))
        {
            throw new FileNotFoundException($"Plug-in manifest not found at '{manifestPath}'.", manifestPath);
        }

        using var manifestStream = File.OpenRead(manifestPath);
        var manifest = JsonSerializer.Deserialize<PluginManifest>(manifestStream, new JsonSerializerOptions
        {
            PropertyNameCaseInsensitive = true,
            ReadCommentHandling = JsonCommentHandling.Skip
        }) ?? throw new InvalidOperationException($"Unable to parse manifest '{manifestPath}'.");
@@ -257,21 +257,21 @@ file static class Program
        if (!File.Exists(pluginAssemblyPath))
        {
            throw new FileNotFoundException($"Plug-in assembly '{manifest.EntryPoint.Assembly}' not found under '{pluginRoot}'.", pluginAssemblyPath);
        }

        var sha256 = ComputeSha256(pluginAssemblyPath);
        Console.WriteLine($"→ Plug-in assembly SHA-256: {sha256}");

        using var serviceProvider = BuildServiceProvider();
        var catalog = new LanguageAnalyzerPluginCatalog(new RestartOnlyPluginGuard(), NullLogger<LanguageAnalyzerPluginCatalog>.Instance);
        catalog.LoadFromDirectory(pluginRoot, seal: true);

        if (catalog.Plugins.Count == 0)
        {
            throw new InvalidOperationException($"No analyzer plug-ins were loaded from '{pluginRoot}'.");
        }

        var analyzerSet = catalog.CreateAnalyzers(serviceProvider);
        if (analyzerSet.Count == 0)
        {
            throw new InvalidOperationException("Language analyzer plug-ins reported no analyzers.");
@@ -298,104 +298,104 @@ file static class Program

        return profile;
    }

    private static ServiceProvider BuildServiceProvider()
    {
        var services = new ServiceCollection();
        services.AddLogging();
        return services.BuildServiceProvider();
    }

    private static async Task RunScenarioAsync(SmokeScenario scenario, string fixtureRoot, ILanguageAnalyzerPluginCatalog catalog, IServiceProvider services)
    {
        var scenarioRoot = Path.Combine(fixtureRoot, scenario.Name);
        if (!Directory.Exists(scenarioRoot))
        {
            throw new DirectoryNotFoundException($"Scenario '{scenario.Name}' directory missing at '{scenarioRoot}'.");
        }

        var goldenPath = Path.Combine(scenarioRoot, "expected.json");
        string? goldenNormalized = null;
        if (File.Exists(goldenPath))
        {
            goldenNormalized = NormalizeJson(await File.ReadAllTextAsync(goldenPath).ConfigureAwait(false));
        }

        var usageHints = new LanguageUsageHints(scenario.ResolveUsageHints(scenarioRoot));
        var context = new LanguageAnalyzerContext(scenarioRoot, TimeProvider.System, usageHints, services);

        var coldEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var coldStopwatch = Stopwatch.StartNew();
        var coldResult = await coldEngine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false);
        coldStopwatch.Stop();

        if (coldResult.Components.Count == 0)
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced no components during cold run.");
        }

        var coldJson = NormalizeJson(coldResult.ToJson(indent: true));
        if (goldenNormalized is string expected && !string.Equals(coldJson, expected, StringComparison.Ordinal))
        {
            Console.WriteLine($"⚠️ Scenario '{scenario.Name}' output deviates from repository golden snapshot.");
        }

        var warmEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var warmStopwatch = Stopwatch.StartNew();
        var warmResult = await warmEngine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false);
        warmStopwatch.Stop();

        var warmJson = NormalizeJson(warmResult.ToJson(indent: true));
        if (!string.Equals(coldJson, warmJson, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced different outputs between cold and warm runs.");
        }

        EnsureDurationWithinBudget(scenario.Name, coldStopwatch.Elapsed, warmStopwatch.Elapsed);

        Console.WriteLine($"✓ Scenario '{scenario.Name}' — components {coldResult.Components.Count}, cold {coldStopwatch.Elapsed.TotalMilliseconds:F1} ms, warm {warmStopwatch.Elapsed.TotalMilliseconds:F1} ms");
    }

    private static void EnsureDurationWithinBudget(string scenarioName, TimeSpan coldDuration, TimeSpan warmDuration)
    {
        var coldBudget = TimeSpan.FromSeconds(30);
        var warmBudget = TimeSpan.FromSeconds(5);

        if (coldDuration > coldBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' cold run exceeded budget ({coldDuration.TotalSeconds:F2}s > {coldBudget.TotalSeconds:F2}s).");
        }

        if (warmDuration > warmBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' warm run exceeded budget ({warmDuration.TotalSeconds:F2}s > {warmBudget.TotalSeconds:F2}s).");
        }
    }

    private static string NormalizeJson(string json)
        => json.Replace("\r\n", "\n", StringComparison.Ordinal).TrimEnd();

    private static void ValidateOptions(SmokeOptions options)
    {
        if (!Directory.Exists(options.RepoRoot))
        {
            throw new DirectoryNotFoundException($"Repository root '{options.RepoRoot}' does not exist.");
        }
    }

    private static void ValidateManifest(PluginManifest manifest, AnalyzerProfile profile, string pluginDirectoryName)
    {
        if (!string.Equals(manifest.SchemaVersion, "1.0", StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected manifest schema version '{manifest.SchemaVersion}'.");
        }

        if (!manifest.RequiresRestart)
        {
            throw new InvalidOperationException("Language analyzer plug-in must be marked as restart-only.");
        }

        if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Unsupported entry point type '{manifest.EntryPoint.Type}'.");
        }
@@ -418,17 +418,17 @@ file static class Program
            throw new InvalidOperationException($"Manifest id '{manifest.Id}' does not match expected plug-in id for directory '{pluginDirectoryName}'.");
        }
    }

    private static string ComputeSha256(string path)
    {
        using var hash = SHA256.Create();
        using var stream = File.OpenRead(path);
        var digest = hash.ComputeHash(stream);
        var builder = new StringBuilder(digest.Length * 2);
        foreach (var b in digest)
        {
            builder.Append(b.ToString("x2"));
        }
        return builder.ToString();
    }
}

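As a reading aid (not part of this commit), here is a sketch of a manifest that would pass the ValidateManifest checks above, expressed with the PluginManifest record from this file; the id, assembly, and type name values are placeholders:

    var exampleManifest = new PluginManifest
    {
        SchemaVersion = "1.0",                      // must be exactly "1.0"
        Id = "stellaops.analyzers.lang.python",     // placeholder; must match the id expected for the plug-in directory
        DisplayName = "Python Language Analyzer",
        Version = "1.0.0",
        RequiresRestart = true,                     // restart-only plug-ins are required
        EntryPoint = new PluginEntryPoint
        {
            Type = "dotnet",                        // only "dotnet" entry points are accepted
            Assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll",                        // placeholder
            TypeName = "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin",       // placeholder
        },
    };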
@@ -1,198 +1,198 @@
|
||||
using System.Globalization;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using StackExchange.Redis;
|
||||
|
||||
static string RequireEnv(string name)
|
||||
{
|
||||
var value = Environment.GetEnvironmentVariable(name);
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
throw new InvalidOperationException($"Environment variable '{name}' is required for Notify smoke validation.");
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
static string? GetField(StreamEntry entry, string fieldName)
|
||||
{
|
||||
foreach (var pair in entry.Values)
|
||||
{
|
||||
if (string.Equals(pair.Name, fieldName, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return pair.Value.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
static void Ensure(bool condition, string message)
|
||||
{
|
||||
if (!condition)
|
||||
{
|
||||
throw new InvalidOperationException(message);
|
||||
}
|
||||
}
|
||||
|
||||
var redisDsn = RequireEnv("NOTIFY_SMOKE_REDIS_DSN");
|
||||
var redisStream = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_STREAM");
|
||||
if (string.IsNullOrWhiteSpace(redisStream))
|
||||
{
|
||||
redisStream = "stella.events";
|
||||
}
|
||||
|
||||
var expectedKindsEnv = RequireEnv("NOTIFY_SMOKE_EXPECT_KINDS");
|
||||
|
||||
var expectedKinds = expectedKindsEnv
|
||||
.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
|
||||
.Select(kind => kind.ToLowerInvariant())
|
||||
.Distinct()
|
||||
.ToArray();
|
||||
Ensure(expectedKinds.Length > 0, "Expected at least one event kind in NOTIFY_SMOKE_EXPECT_KINDS.");
|
||||
|
||||
var lookbackMinutesEnv = RequireEnv("NOTIFY_SMOKE_LOOKBACK_MINUTES");
|
||||
if (!double.TryParse(lookbackMinutesEnv, NumberStyles.Any, CultureInfo.InvariantCulture, out var lookbackMinutes))
|
||||
{
|
||||
throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be numeric.");
|
||||
}
|
||||
Ensure(lookbackMinutes > 0, "NOTIFY_SMOKE_LOOKBACK_MINUTES must be greater than zero.");
|
||||
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var sinceThreshold = now - TimeSpan.FromMinutes(Math.Max(1, lookbackMinutes));
|
||||
|
||||
Console.WriteLine($"ℹ️ Checking Redis stream '{redisStream}' for kinds [{string.Join(", ", expectedKinds)}] within the last {lookbackMinutes:F1} minutes.");
|
||||
|
||||
var redisConfig = ConfigurationOptions.Parse(redisDsn);
|
||||
redisConfig.AbortOnConnectFail = false;
|
||||
|
||||
await using var redisConnection = await ConnectionMultiplexer.ConnectAsync(redisConfig);
|
||||
var database = redisConnection.GetDatabase();
|
||||
|
||||
var streamEntries = await database.StreamRangeAsync(redisStream, "-", "+", count: 200);
|
||||
if (streamEntries.Length > 1)
|
||||
{
|
||||
Array.Reverse(streamEntries);
|
||||
}
|
||||
Ensure(streamEntries.Length > 0, $"Redis stream '{redisStream}' is empty.");
|
||||
|
||||
var recentEntries = new List<StreamEntry>();
|
||||
foreach (var entry in streamEntries)
|
||||
{
|
||||
var timestampText = GetField(entry, "ts");
|
||||
if (timestampText is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!DateTimeOffset.TryParse(timestampText, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var entryTimestamp))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entryTimestamp >= sinceThreshold)
|
||||
{
|
||||
recentEntries.Add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
Ensure(recentEntries.Count > 0, $"No Redis events newer than {sinceThreshold:u} located in stream '{redisStream}'.");
|
||||
|
||||
var missingKinds = new List<string>();
|
||||
foreach (var kind in expectedKinds)
|
||||
{
|
||||
var match = recentEntries.FirstOrDefault(entry =>
|
||||
{
|
||||
var entryKind = GetField(entry, "kind")?.ToLowerInvariant();
|
||||
return entryKind == kind;
|
||||
});
|
||||
|
||||
if (match.Equals(default(StreamEntry)))
|
||||
{
|
||||
missingKinds.Add(kind);
|
||||
}
|
||||
}
|
||||
|
||||
Ensure(missingKinds.Count == 0, $"Missing expected Redis events for kinds: {string.Join(", ", missingKinds)}");
|
||||
|
||||
Console.WriteLine("✅ Redis event stream contains the expected scanner events.");
|
||||
|
||||
var notifyBaseUrl = RequireEnv("NOTIFY_SMOKE_NOTIFY_BASEURL").TrimEnd('/');
|
||||
var notifyToken = RequireEnv("NOTIFY_SMOKE_NOTIFY_TOKEN");
|
||||
var notifyTenant = RequireEnv("NOTIFY_SMOKE_NOTIFY_TENANT");
|
||||
var notifyTenantHeader = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TENANT_HEADER");
|
||||
if (string.IsNullOrWhiteSpace(notifyTenantHeader))
|
||||
{
|
||||
notifyTenantHeader = "X-StellaOps-Tenant";
|
||||
}
|
||||
|
||||
var notifyTimeoutSeconds = 30;
|
||||
var notifyTimeoutEnv = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TIMEOUT_SECONDS");
|
||||
if (!string.IsNullOrWhiteSpace(notifyTimeoutEnv) && int.TryParse(notifyTimeoutEnv, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedTimeout))
|
||||
{
|
||||
notifyTimeoutSeconds = Math.Max(5, parsedTimeout);
|
||||
}
|
||||
|
||||
using var httpClient = new HttpClient
|
||||
{
|
||||
Timeout = TimeSpan.FromSeconds(notifyTimeoutSeconds),
|
||||
};
|
||||
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", notifyToken);
|
||||
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
|
||||
httpClient.DefaultRequestHeaders.Add(notifyTenantHeader, notifyTenant);
|
||||
|
||||
var sinceQuery = Uri.EscapeDataString(sinceThreshold.ToString("O", CultureInfo.InvariantCulture));
|
||||
var deliveriesUrl = $"{notifyBaseUrl}/api/v1/deliveries?since={sinceQuery}&limit=200";
|
||||
|
||||
Console.WriteLine($"ℹ️ Querying Notify deliveries via {deliveriesUrl}.");
|
||||
|
||||
using var response = await httpClient.GetAsync(deliveriesUrl);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var body = await response.Content.ReadAsStringAsync();
|
||||
throw new InvalidOperationException($"Notify deliveries request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync();
|
||||
if (string.IsNullOrWhiteSpace(json))
|
||||
{
|
||||
throw new InvalidOperationException("Notify deliveries response body was empty.");
|
||||
}
|
||||
|
||||
using var document = JsonDocument.Parse(json);
|
||||
var root = document.RootElement;
|
||||
|
||||
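// The deliveries endpoint may return either a bare JSON array or an object wrapping an 'items' array.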
IEnumerable<JsonElement> EnumerateDeliveries(JsonElement element)
|
||||
{
|
||||
return element.ValueKind switch
|
||||
{
|
||||
JsonValueKind.Array => element.EnumerateArray(),
|
||||
JsonValueKind.Object when element.TryGetProperty("items", out var items) && items.ValueKind == JsonValueKind.Array => items.EnumerateArray(),
|
||||
_ => throw new InvalidOperationException("Notify deliveries response was not an array or did not contain an 'items' collection.")
|
||||
};
|
||||
}
|
||||
|
||||
var deliveries = EnumerateDeliveries(root).ToArray();
|
||||
Ensure(deliveries.Length > 0, "Notify deliveries response did not return any records.");
|
||||
|
||||
var missingDeliveryKinds = new List<string>();
|
||||
foreach (var kind in expectedKinds)
|
||||
{
|
||||
var found = deliveries.Any(delivery =>
|
||||
delivery.TryGetProperty("kind", out var kindProperty) &&
|
||||
kindProperty.GetString()?.Equals(kind, StringComparison.OrdinalIgnoreCase) == true &&
|
||||
delivery.TryGetProperty("status", out var statusProperty) &&
|
||||
!string.Equals(statusProperty.GetString(), "failed", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (!found)
|
||||
{
|
||||
missingDeliveryKinds.Add(kind);
|
||||
}
|
||||
}
|
||||
|
||||
Ensure(missingDeliveryKinds.Count == 0, $"Notify deliveries missing successful records for kinds: {string.Join(", ", missingDeliveryKinds)}");
|
||||
|
||||
Console.WriteLine("✅ Notify deliveries include the expected scanner events.");
|
||||
Console.WriteLine("🎉 Notify smoke validation completed successfully.");
@@ -1,56 +1,56 @@
|
||||
using StellaOps.Policy;

if (args.Length == 0)
{
    Console.Error.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]");
    Console.Error.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies");
    return 64; // EX_USAGE
}

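// Minimal argument parsing: flags toggle behaviour, every other argument is treated as an input path or glob.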
var inputs = new List<string>();
var strict = false;
var outputJson = false;

foreach (var arg in args)
{
    switch (arg)
    {
        case "--strict":
        case "-s":
            strict = true;
            break;

        case "--json":
        case "-j":
            outputJson = true;
            break;

        case "--help":
        case "-h":
        case "-?":
            Console.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]");
            Console.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies");
            return 0;

        default:
            inputs.Add(arg);
            break;
    }
}

if (inputs.Count == 0)
{
    Console.Error.WriteLine("No input files or directories provided.");
    return 64; // EX_USAGE
}

var options = new PolicyValidationCliOptions
{
    Inputs = inputs,
    Strict = strict,
    OutputJson = outputJson,
};

var cli = new PolicyValidationCli();
var exitCode = await cli.RunAsync(options, CancellationToken.None);
return exitCode;
@@ -1,48 +1,48 @@
|
||||
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
using NJsonSchema;
using NJsonSchema.Generation;
using NJsonSchema.Generation.SystemTextJson;
using Newtonsoft.Json;
using StellaOps.Scheduler.Models;

var output = args.Length switch
{
    0 => Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "docs", "schemas")),
    1 => Path.GetFullPath(args[0]),
    _ => throw new ArgumentException("Usage: dotnet run --project src/Tools/PolicySchemaExporter -- [outputDirectory]")
};

Directory.CreateDirectory(output);

var generatorSettings = new SystemTextJsonSchemaGeneratorSettings
{
    SchemaType = SchemaType.JsonSchema,
    DefaultReferenceTypeNullHandling = ReferenceTypeNullHandling.NotNull,
    SerializerOptions = new JsonSerializerOptions
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    },
};

var generator = new JsonSchemaGenerator(generatorSettings);

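// One schema file per exported contract type; add a tuple here to publish additional schemas.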
var exports = ImmutableArray.Create(
    (FileName: "policy-run-request.schema.json", Type: typeof(PolicyRunRequest)),
    (FileName: "policy-run-status.schema.json", Type: typeof(PolicyRunStatus)),
    (FileName: "policy-diff-summary.schema.json", Type: typeof(PolicyDiffSummary)),
    (FileName: "policy-explain-trace.schema.json", Type: typeof(PolicyExplainTrace))
);

foreach (var export in exports)
{
    var schema = generator.Generate(export.Type);
    schema.Title = export.Type.Name;
    schema.AllowAdditionalProperties = false;

    var outputPath = Path.Combine(output, export.FileName);
    await File.WriteAllTextAsync(outputPath, schema.ToJson(Formatting.Indented) + Environment.NewLine);
    Console.WriteLine($"Wrote {outputPath}");
}
@@ -1,291 +1,291 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy;
|
||||
|
||||
var scenarioRoot = "samples/policy/simulations";
|
||||
string? outputDir = null;
|
||||
|
||||
for (var i = 0; i < args.Length; i++)
|
||||
{
|
||||
var arg = args[i];
|
||||
switch (arg)
|
||||
{
|
||||
case "--scenario-root":
|
||||
case "-r":
|
||||
if (i + 1 >= args.Length)
|
||||
{
|
||||
Console.Error.WriteLine("Missing value for --scenario-root.");
|
||||
return 64;
|
||||
}
|
||||
scenarioRoot = args[++i];
|
||||
break;
|
||||
case "--output":
|
||||
case "-o":
|
||||
if (i + 1 >= args.Length)
|
||||
{
|
||||
Console.Error.WriteLine("Missing value for --output.");
|
||||
return 64;
|
||||
}
|
||||
outputDir = args[++i];
|
||||
break;
|
||||
case "--help":
|
||||
case "-h":
|
||||
case "-?":
|
||||
PrintUsage();
|
||||
return 0;
|
||||
default:
|
||||
Console.Error.WriteLine($"Unknown argument '{arg}'.");
|
||||
PrintUsage();
|
||||
return 64;
|
||||
}
|
||||
}
|
||||
|
||||
if (!Directory.Exists(scenarioRoot))
|
||||
{
|
||||
Console.Error.WriteLine($"Scenario root '{scenarioRoot}' does not exist.");
|
||||
return 66;
|
||||
}
|
||||
|
||||
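// Each scenario.json found under the scenario root drives one policy preview run.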
var scenarioFiles = Directory.GetFiles(scenarioRoot, "scenario.json", SearchOption.AllDirectories);
|
||||
if (scenarioFiles.Length == 0)
|
||||
{
|
||||
Console.Error.WriteLine($"No scenario.json files found under '{scenarioRoot}'.");
|
||||
return 0;
|
||||
}
|
||||
|
||||
var loggerFactory = NullLoggerFactory.Instance;
|
||||
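// The null repositories keep the preview entirely in memory; nothing is persisted during the smoke run.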
var snapshotStore = new PolicySnapshotStore(
|
||||
new NullPolicySnapshotRepository(),
|
||||
new NullPolicyAuditRepository(),
|
||||
TimeProvider.System,
|
||||
loggerFactory.CreateLogger<PolicySnapshotStore>());
|
||||
var previewService = new PolicyPreviewService(snapshotStore, loggerFactory.CreateLogger<PolicyPreviewService>());
|
||||
|
||||
var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip,
|
||||
};
|
||||
|
||||
var summary = new List<ScenarioResult>();
|
||||
var success = true;
|
||||
|
||||
foreach (var scenarioFile in scenarioFiles.OrderBy(static f => f, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
var scenarioText = await File.ReadAllTextAsync(scenarioFile);
|
||||
var scenario = JsonSerializer.Deserialize<PolicySimulationScenario>(scenarioText, serializerOptions);
|
||||
if (scenario is null)
|
||||
{
|
||||
Console.Error.WriteLine($"Failed to deserialize scenario '{scenarioFile}'.");
|
||||
success = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
var repoRoot = Directory.GetCurrentDirectory();
|
||||
var policyPath = Path.Combine(repoRoot, scenario.PolicyPath);
|
||||
if (!File.Exists(policyPath))
|
||||
{
|
||||
Console.Error.WriteLine($"Policy file '{scenario.PolicyPath}' referenced by scenario '{scenario.Name}' does not exist.");
|
||||
success = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
var policyContent = await File.ReadAllTextAsync(policyPath);
|
||||
var policyFormat = PolicySchema.DetectFormat(policyPath);
|
||||
var findings = scenario.Findings.Select(ToPolicyFinding).ToImmutableArray();
|
||||
var baseline = scenario.Baseline?.Select(ToPolicyVerdict).ToImmutableArray() ?? ImmutableArray<PolicyVerdict>.Empty;
|
||||
|
||||
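// Build a preview request with a synthetic image digest derived from the scenario name so results stay deterministic.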
var request = new PolicyPreviewRequest(
|
||||
ImageDigest: $"sha256:simulation-{scenario.Name}",
|
||||
Findings: findings,
|
||||
BaselineVerdicts: baseline,
|
||||
SnapshotOverride: null,
|
||||
ProposedPolicy: new PolicySnapshotContent(
|
||||
Content: policyContent,
|
||||
Format: policyFormat,
|
||||
Actor: "ci",
|
||||
Source: "ci/simulation-smoke",
|
||||
Description: $"CI simulation for scenario '{scenario.Name}'"));
|
||||
|
||||
var response = await previewService.PreviewAsync(request, CancellationToken.None);
|
||||
var scenarioResult = EvaluateScenario(scenario, response);
|
||||
summary.Add(scenarioResult);
|
||||
|
||||
if (!scenarioResult.Success)
|
||||
{
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (outputDir is not null)
|
||||
{
|
||||
Directory.CreateDirectory(outputDir);
|
||||
var summaryPath = Path.Combine(outputDir, "policy-simulation-summary.json");
|
||||
await File.WriteAllTextAsync(summaryPath, JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true }));
|
||||
}
|
||||
|
||||
return success ? 0 : 1;
|
||||
|
||||
static void PrintUsage()
|
||||
{
|
||||
Console.WriteLine("Usage: policy-simulation-smoke [--scenario-root <path>] [--output <dir>]");
|
||||
Console.WriteLine("Example: policy-simulation-smoke --scenario-root samples/policy/simulations --output artifacts/policy-simulations");
|
||||
}
|
||||
|
||||
static PolicyFinding ToPolicyFinding(ScenarioFinding finding)
|
||||
{
|
||||
var tags = finding.Tags is null ? ImmutableArray<string>.Empty : ImmutableArray.CreateRange(finding.Tags);
|
||||
var severity = Enum.Parse<PolicySeverity>(finding.Severity, ignoreCase: true);
|
||||
return new PolicyFinding(
|
||||
finding.FindingId,
|
||||
severity,
|
||||
finding.Environment,
|
||||
finding.Source,
|
||||
finding.Vendor,
|
||||
finding.License,
|
||||
finding.Image,
|
||||
finding.Repository,
|
||||
finding.Package,
|
||||
finding.Purl,
|
||||
finding.Cve,
|
||||
finding.Path,
|
||||
finding.LayerDigest,
|
||||
tags);
|
||||
}
|
||||
|
||||
static PolicyVerdict ToPolicyVerdict(ScenarioBaseline baseline)
|
||||
{
|
||||
var status = Enum.Parse<PolicyVerdictStatus>(baseline.Status, ignoreCase: true);
|
||||
var inputs = baseline.Inputs?.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableDictionary<string, double>.Empty;
|
||||
return new PolicyVerdict(
|
||||
baseline.FindingId,
|
||||
status,
|
||||
RuleName: baseline.RuleName,
|
||||
RuleAction: baseline.RuleAction,
|
||||
Notes: baseline.Notes,
|
||||
Score: baseline.Score,
|
||||
ConfigVersion: baseline.ConfigVersion ?? PolicyScoringConfig.Default.Version,
|
||||
Inputs: inputs,
|
||||
QuietedBy: null,
|
||||
Quiet: false,
|
||||
UnknownConfidence: null,
|
||||
ConfidenceBand: null,
|
||||
UnknownAgeDays: null,
|
||||
SourceTrust: null,
|
||||
Reachability: null);
|
||||
}
|
||||
|
||||
static ScenarioResult EvaluateScenario(PolicySimulationScenario scenario, PolicyPreviewResponse response)
|
||||
{
|
||||
var result = new ScenarioResult(scenario.Name);
|
||||
if (!response.Success)
|
||||
{
|
||||
result.Failures.Add("Preview failed.");
|
||||
return result with { Success = false, ChangedCount = response.ChangedCount };
|
||||
}
|
||||
|
||||
var diffs = response.Diffs.ToDictionary(diff => diff.Projected.FindingId, StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var expected in scenario.ExpectedDiffs)
|
||||
{
|
||||
if (!diffs.TryGetValue(expected.FindingId, out var diff))
|
||||
{
|
||||
result.Failures.Add($"Expected finding '{expected.FindingId}' missing from diff.");
|
||||
continue;
|
||||
}
|
||||
|
||||
var projectedStatus = diff.Projected.Status.ToString();
|
||||
result.ActualStatuses[expected.FindingId] = projectedStatus;
|
||||
if (!string.Equals(projectedStatus, expected.Status, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
result.Failures.Add($"Finding '{expected.FindingId}' expected status '{expected.Status}' but was '{projectedStatus}'.");
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var diff in diffs.Values)
|
||||
{
|
||||
if (!result.ActualStatuses.ContainsKey(diff.Projected.FindingId))
|
||||
{
|
||||
result.ActualStatuses[diff.Projected.FindingId] = diff.Projected.Status.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
var success = result.Failures.Count == 0;
|
||||
return result with
|
||||
{
|
||||
Success = success,
|
||||
ChangedCount = response.ChangedCount
|
||||
};
|
||||
}
|
||||
|
||||
internal sealed record PolicySimulationScenario
|
||||
{
|
||||
public string Name { get; init; } = "scenario";
|
||||
public string PolicyPath { get; init; } = string.Empty;
|
||||
public List<ScenarioFinding> Findings { get; init; } = new();
|
||||
public List<ScenarioExpectedDiff> ExpectedDiffs { get; init; } = new();
|
||||
public List<ScenarioBaseline>? Baseline { get; init; }
|
||||
}
|
||||
|
||||
internal sealed record ScenarioFinding
|
||||
{
|
||||
public string FindingId { get; init; } = string.Empty;
|
||||
public string Severity { get; init; } = "Low";
|
||||
public string? Environment { get; init; }
|
||||
public string? Source { get; init; }
|
||||
public string? Vendor { get; init; }
|
||||
public string? License { get; init; }
|
||||
public string? Image { get; init; }
|
||||
public string? Repository { get; init; }
|
||||
public string? Package { get; init; }
|
||||
public string? Purl { get; init; }
|
||||
public string? Cve { get; init; }
|
||||
public string? Path { get; init; }
|
||||
public string? LayerDigest { get; init; }
|
||||
public string[]? Tags { get; init; }
|
||||
}
|
||||
|
||||
internal sealed record ScenarioExpectedDiff
|
||||
{
|
||||
public string FindingId { get; init; } = string.Empty;
|
||||
public string Status { get; init; } = "Pass";
|
||||
}
|
||||
|
||||
internal sealed record ScenarioBaseline
|
||||
{
|
||||
public string FindingId { get; init; } = string.Empty;
|
||||
public string Status { get; init; } = "Pass";
|
||||
public string? RuleName { get; init; }
|
||||
public string? RuleAction { get; init; }
|
||||
public string? Notes { get; init; }
|
||||
public double Score { get; init; }
|
||||
public string? ConfigVersion { get; init; }
|
||||
public Dictionary<string, double>? Inputs { get; init; }
|
||||
}
|
||||
|
||||
internal sealed record ScenarioResult(string ScenarioName)
|
||||
{
|
||||
public bool Success { get; init; } = true;
|
||||
public int ChangedCount { get; init; }
|
||||
public List<string> Failures { get; } = new();
|
||||
public Dictionary<string, string> ActualStatuses { get; } = new(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
internal sealed class NullPolicySnapshotRepository : IPolicySnapshotRepository
|
||||
{
|
||||
public Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default) => Task.CompletedTask;
|
||||
|
||||
public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default) => Task.FromResult<PolicySnapshot?>(null);
|
||||
|
||||
public Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<IReadOnlyList<PolicySnapshot>>(Array.Empty<PolicySnapshot>());
|
||||
}
|
||||
|
||||
internal sealed class NullPolicyAuditRepository : IPolicyAuditRepository
|
||||
{
|
||||
public Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default) => Task.CompletedTask;
|
||||
|
||||
public Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<IReadOnlyList<PolicyAuditEntry>>(Array.Empty<PolicyAuditEntry>());
|
||||
}
@@ -1,286 +1,286 @@
|
||||
using Amazon;
|
||||
using Amazon.Runtime;
|
||||
using Amazon.S3;
|
||||
using Amazon.S3.Model;
|
||||
using System.Net.Http.Headers;
|
||||
|
||||
var options = MigrationOptions.Parse(args);
|
||||
if (options is null)
|
||||
{
|
||||
MigrationOptions.PrintUsage();
|
||||
return 1;
|
||||
}
|
||||
|
||||
Console.WriteLine($"RustFS migrator starting (prefix: '{options.Prefix ?? "<all>"}')");
|
||||
if (options.DryRun)
|
||||
{
|
||||
Console.WriteLine("Dry-run enabled. No objects will be written to RustFS.");
|
||||
}
|
||||
|
||||
var s3Config = new AmazonS3Config
|
||||
{
|
||||
ForcePathStyle = true,
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.S3ServiceUrl))
|
||||
{
|
||||
s3Config.ServiceURL = options.S3ServiceUrl;
|
||||
s3Config.UseHttp = options.S3ServiceUrl.StartsWith("http://", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.S3Region))
|
||||
{
|
||||
s3Config.RegionEndpoint = RegionEndpoint.GetBySystemName(options.S3Region);
|
||||
}
|
||||
|
||||
using var s3Client = CreateS3Client(options, s3Config);
|
||||
using var httpClient = CreateRustFsClient(options);
|
||||
|
||||
var listRequest = new ListObjectsV2Request
|
||||
{
|
||||
BucketName = options.S3Bucket,
|
||||
Prefix = options.Prefix,
|
||||
MaxKeys = 1000,
|
||||
};
|
||||
|
||||
var migrated = 0;
|
||||
var skipped = 0;
|
||||
|
||||
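// Page through the source bucket with ListObjectsV2 and copy each object into RustFS.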
do
|
||||
{
|
||||
var response = await s3Client.ListObjectsV2Async(listRequest).ConfigureAwait(false);
|
||||
foreach (var entry in response.S3Objects)
|
||||
{
|
||||
if (entry.Size == 0 && entry.Key.EndsWith('/'))
|
||||
{
|
||||
skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
Console.WriteLine($"Migrating {entry.Key} ({entry.Size} bytes)...");
|
||||
|
||||
if (options.DryRun)
|
||||
{
|
||||
migrated++;
|
||||
continue;
|
||||
}
|
||||
|
||||
using var getResponse = await s3Client.GetObjectAsync(new GetObjectRequest
|
||||
{
|
||||
BucketName = options.S3Bucket,
|
||||
Key = entry.Key,
|
||||
}).ConfigureAwait(false);
|
||||
|
||||
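// The object is buffered fully in memory before upload; very large objects may warrant a streaming approach instead.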
await using var memory = new MemoryStream();
|
||||
await getResponse.ResponseStream.CopyToAsync(memory).ConfigureAwait(false);
|
||||
memory.Position = 0;
|
||||
|
||||
using var request = new HttpRequestMessage(HttpMethod.Put, BuildRustFsUri(options, entry.Key))
|
||||
{
|
||||
Content = new ByteArrayContent(memory.ToArray()),
|
||||
};
|
||||
request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/octet-stream");
|
||||
|
||||
if (options.Immutable)
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation("X-RustFS-Immutable", "true");
|
||||
}
|
||||
|
||||
if (options.RetentionSeconds is { } retainSeconds)
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation("X-RustFS-Retain-Seconds", retainSeconds.ToString());
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation(options.RustFsApiKeyHeader!, options.RustFsApiKey!);
|
||||
}
|
||||
|
||||
using var responseMessage = await httpClient.SendAsync(request).ConfigureAwait(false);
|
||||
if (!responseMessage.IsSuccessStatusCode)
|
||||
{
|
||||
var error = await responseMessage.Content.ReadAsStringAsync().ConfigureAwait(false);
|
||||
Console.Error.WriteLine($"Failed to upload {entry.Key}: {(int)responseMessage.StatusCode} {responseMessage.ReasonPhrase}\n{error}");
|
||||
return 2;
|
||||
}
|
||||
|
||||
migrated++;
|
||||
}
|
||||
|
||||
listRequest.ContinuationToken = response.NextContinuationToken;
|
||||
} while (!string.IsNullOrEmpty(listRequest.ContinuationToken));
|
||||
|
||||
Console.WriteLine($"Migration complete. Migrated {migrated} objects. Skipped {skipped} directory markers.");
|
||||
return 0;
|
||||
|
||||
static AmazonS3Client CreateS3Client(MigrationOptions options, AmazonS3Config config)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(options.S3AccessKey) && !string.IsNullOrWhiteSpace(options.S3SecretKey))
|
||||
{
|
||||
var credentials = new BasicAWSCredentials(options.S3AccessKey, options.S3SecretKey);
|
||||
return new AmazonS3Client(credentials, config);
|
||||
}
|
||||
|
||||
return new AmazonS3Client(config);
|
||||
}
|
||||
|
||||
static HttpClient CreateRustFsClient(MigrationOptions options)
|
||||
{
|
||||
var client = new HttpClient
|
||||
{
|
||||
BaseAddress = new Uri(options.RustFsEndpoint, UriKind.Absolute),
|
||||
Timeout = TimeSpan.FromMinutes(5),
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
|
||||
{
|
||||
client.DefaultRequestHeaders.TryAddWithoutValidation(options.RustFsApiKeyHeader, options.RustFsApiKey);
|
||||
}
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
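// Escape each key segment individually so '/' separators are preserved in the RustFS object path.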
static Uri BuildRustFsUri(MigrationOptions options, string key)
|
||||
{
|
||||
var normalized = string.Join('/', key
|
||||
.Split('/', StringSplitOptions.RemoveEmptyEntries)
|
||||
.Select(Uri.EscapeDataString));
|
||||
|
||||
var builder = new UriBuilder(options.RustFsEndpoint)
|
||||
{
|
||||
Path = $"/api/v1/buckets/{Uri.EscapeDataString(options.RustFsBucket)}/objects/{normalized}",
|
||||
};
|
||||
|
||||
return builder.Uri;
|
||||
}
|
||||
|
||||
internal sealed record MigrationOptions
|
||||
{
|
||||
public string S3Bucket { get; init; } = string.Empty;

public string? S3ServiceUrl { get; init; } = null;

public string? S3Region { get; init; } = null;

public string? S3AccessKey { get; init; } = null;

public string? S3SecretKey { get; init; } = null;

public string RustFsEndpoint { get; init; } = string.Empty;

public string RustFsBucket { get; init; } = string.Empty;

public string? RustFsApiKeyHeader { get; init; } = null;

public string? RustFsApiKey { get; init; } = null;

public string? Prefix { get; init; } = null;

public bool Immutable { get; init; } = false;

public int? RetentionSeconds { get; init; } = null;

public bool DryRun { get; init; } = false;
|
||||
|
||||
public static MigrationOptions? Parse(string[] args)
|
||||
{
|
||||
var builder = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
for (var i = 0; i < args.Length; i++)
|
||||
{
|
||||
var key = args[i];
|
||||
if (key.StartsWith("--", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
var normalized = key[2..];
|
||||
if (string.Equals(normalized, "immutable", StringComparison.OrdinalIgnoreCase) || string.Equals(normalized, "dry-run", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
builder[normalized] = "true";
|
||||
continue;
|
||||
}
|
||||
|
||||
if (i + 1 >= args.Length)
|
||||
{
|
||||
Console.Error.WriteLine($"Missing value for argument '{key}'.");
|
||||
return null;
|
||||
}
|
||||
|
||||
builder[normalized] = args[++i];
|
||||
}
|
||||
}
|
||||
|
||||
if (!builder.TryGetValue("s3-bucket", out var bucket) || string.IsNullOrWhiteSpace(bucket))
|
||||
{
|
||||
Console.Error.WriteLine("--s3-bucket is required.");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!builder.TryGetValue("rustfs-endpoint", out var rustFsEndpoint) || string.IsNullOrWhiteSpace(rustFsEndpoint))
|
||||
{
|
||||
Console.Error.WriteLine("--rustfs-endpoint is required.");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!builder.TryGetValue("rustfs-bucket", out var rustFsBucket) || string.IsNullOrWhiteSpace(rustFsBucket))
|
||||
{
|
||||
Console.Error.WriteLine("--rustfs-bucket is required.");
|
||||
return null;
|
||||
}
|
||||
|
||||
int? retentionSeconds = null;
|
||||
if (builder.TryGetValue("retain-days", out var retainStr) && !string.IsNullOrWhiteSpace(retainStr))
|
||||
{
|
||||
if (double.TryParse(retainStr, out var days) && days > 0)
|
||||
{
|
||||
retentionSeconds = (int)Math.Ceiling(days * 24 * 60 * 60);
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.Error.WriteLine("--retain-days must be a positive number.");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return new MigrationOptions
|
||||
{
|
||||
S3Bucket = bucket,
|
||||
S3ServiceUrl = builder.TryGetValue("s3-endpoint", out var s3Endpoint) ? s3Endpoint : null,
|
||||
S3Region = builder.TryGetValue("s3-region", out var s3Region) ? s3Region : null,
|
||||
S3AccessKey = builder.TryGetValue("s3-access-key", out var s3AccessKey) ? s3AccessKey : null,
|
||||
S3SecretKey = builder.TryGetValue("s3-secret-key", out var s3SecretKey) ? s3SecretKey : null,
|
||||
RustFsEndpoint = rustFsEndpoint!,
|
||||
RustFsBucket = rustFsBucket!,
|
||||
RustFsApiKeyHeader = builder.TryGetValue("rustfs-api-key-header", out var apiKeyHeader) ? apiKeyHeader : null,
|
||||
RustFsApiKey = builder.TryGetValue("rustfs-api-key", out var apiKey) ? apiKey : null,
|
||||
Prefix = builder.TryGetValue("prefix", out var prefix) ? prefix : null,
|
||||
Immutable = builder.ContainsKey("immutable"),
|
||||
RetentionSeconds = retentionSeconds,
|
||||
DryRun = builder.ContainsKey("dry-run"),
|
||||
};
|
||||
}
|
||||
|
||||
public static void PrintUsage()
|
||||
{
|
||||
Console.WriteLine(@"Usage: dotnet run --project src/Tools/RustFsMigrator -- \
|
||||
--s3-bucket <name> \
|
||||
[--s3-endpoint http://minio:9000] \
|
||||
[--s3-region us-east-1] \
|
||||
[--s3-access-key key --s3-secret-key secret] \
|
||||
--rustfs-endpoint http://rustfs:8080 \
|
||||
--rustfs-bucket scanner-artifacts \
|
||||
[--rustfs-api-key-header X-API-Key --rustfs-api-key token] \
|
||||
[--prefix scanner/] \
|
||||
[--immutable] \
|
||||
[--retain-days 365] \
|
||||
[--dry-run]");
|
||||
}
|
||||
}
|
||||
using Amazon;
|
||||
using Amazon.Runtime;
|
||||
using Amazon.S3;
|
||||
using Amazon.S3.Model;
|
||||
using System.Net.Http.Headers;
|
||||
|
||||
var options = MigrationOptions.Parse(args);
|
||||
if (options is null)
|
||||
{
|
||||
MigrationOptions.PrintUsage();
|
||||
return 1;
|
||||
}
|
||||
|
||||
Console.WriteLine($"RustFS migrator starting (prefix: '{options.Prefix ?? "<all>"}')");
|
||||
if (options.DryRun)
|
||||
{
|
||||
Console.WriteLine("Dry-run enabled. No objects will be written to RustFS.");
|
||||
}
|
||||
|
||||
var s3Config = new AmazonS3Config
|
||||
{
|
||||
ForcePathStyle = true,
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.S3ServiceUrl))
|
||||
{
|
||||
s3Config.ServiceURL = options.S3ServiceUrl;
|
||||
s3Config.UseHttp = options.S3ServiceUrl.StartsWith("http://", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.S3Region))
|
||||
{
|
||||
s3Config.RegionEndpoint = RegionEndpoint.GetBySystemName(options.S3Region);
|
||||
}
|
||||
|
||||
using var s3Client = CreateS3Client(options, s3Config);
|
||||
using var httpClient = CreateRustFsClient(options);
|
||||
|
||||
var listRequest = new ListObjectsV2Request
|
||||
{
|
||||
BucketName = options.S3Bucket,
|
||||
Prefix = options.Prefix,
|
||||
MaxKeys = 1000,
|
||||
};
|
||||
|
||||
var migrated = 0;
|
||||
var skipped = 0;
|
||||
|
||||
do
|
||||
{
|
||||
var response = await s3Client.ListObjectsV2Async(listRequest).ConfigureAwait(false);
|
||||
foreach (var entry in response.S3Objects)
|
||||
{
|
||||
if (entry.Size == 0 && entry.Key.EndsWith('/'))
|
||||
{
|
||||
skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
Console.WriteLine($"Migrating {entry.Key} ({entry.Size} bytes)...");
|
||||
|
||||
if (options.DryRun)
|
||||
{
|
||||
migrated++;
|
||||
continue;
|
||||
}
|
||||
|
||||
using var getResponse = await s3Client.GetObjectAsync(new GetObjectRequest
|
||||
{
|
||||
BucketName = options.S3Bucket,
|
||||
Key = entry.Key,
|
||||
}).ConfigureAwait(false);
|
||||
|
||||
await using var memory = new MemoryStream();
|
||||
await getResponse.ResponseStream.CopyToAsync(memory).ConfigureAwait(false);
|
||||
memory.Position = 0;
|
||||
|
||||
using var request = new HttpRequestMessage(HttpMethod.Put, BuildRustFsUri(options, entry.Key))
|
||||
{
|
||||
Content = new ByteArrayContent(memory.ToArray()),
|
||||
};
|
||||
request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/octet-stream");
|
||||
|
||||
if (options.Immutable)
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation("X-RustFS-Immutable", "true");
|
||||
}
|
||||
|
||||
if (options.RetentionSeconds is { } retainSeconds)
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation("X-RustFS-Retain-Seconds", retainSeconds.ToString());
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
|
||||
{
|
||||
request.Headers.TryAddWithoutValidation(options.RustFsApiKeyHeader!, options.RustFsApiKey!);
|
||||
}
|
||||
|
||||
using var responseMessage = await httpClient.SendAsync(request).ConfigureAwait(false);
|
||||
if (!responseMessage.IsSuccessStatusCode)
|
||||
{
|
||||
var error = await responseMessage.Content.ReadAsStringAsync().ConfigureAwait(false);
|
||||
Console.Error.WriteLine($"Failed to upload {entry.Key}: {(int)responseMessage.StatusCode} {responseMessage.ReasonPhrase}\n{error}");
|
||||
return 2;
|
||||
}
|
||||
|
||||
migrated++;
|
||||
}
|
||||
|
||||
listRequest.ContinuationToken = response.NextContinuationToken;
|
||||
} while (!string.IsNullOrEmpty(listRequest.ContinuationToken));
|
||||
|
||||
Console.WriteLine($"Migration complete. Migrated {migrated} objects. Skipped {skipped} directory markers.");
|
||||
return 0;
|
||||
|
||||
static AmazonS3Client CreateS3Client(MigrationOptions options, AmazonS3Config config)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(options.S3AccessKey) && !string.IsNullOrWhiteSpace(options.S3SecretKey))
|
||||
{
|
||||
var credentials = new BasicAWSCredentials(options.S3AccessKey, options.S3SecretKey);
|
||||
return new AmazonS3Client(credentials, config);
|
||||
}
|
||||
|
||||
return new AmazonS3Client(config);
|
||||
}
|
||||
|
||||
static HttpClient CreateRustFsClient(MigrationOptions options)
|
||||
{
|
||||
var client = new HttpClient
|
||||
{
|
||||
BaseAddress = new Uri(options.RustFsEndpoint, UriKind.Absolute),
|
||||
Timeout = TimeSpan.FromMinutes(5),
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
|
||||
{
|
||||
client.DefaultRequestHeaders.TryAddWithoutValidation(options.RustFsApiKeyHeader, options.RustFsApiKey);
|
||||
}
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
static Uri BuildRustFsUri(MigrationOptions options, string key)
|
||||
{
|
||||
var normalized = string.Join('/', key
|
||||
.Split('/', StringSplitOptions.RemoveEmptyEntries)
|
||||
.Select(Uri.EscapeDataString));
|
||||
|
||||
var builder = new UriBuilder(options.RustFsEndpoint)
|
||||
{
|
||||
Path = $"/api/v1/buckets/{Uri.EscapeDataString(options.RustFsBucket)}/objects/{normalized}",
|
||||
};
|
||||
|
||||
return builder.Uri;
|
||||
}
|
||||
|
internal sealed record MigrationOptions
{
    public string S3Bucket { get; init; } = string.Empty;

    public string? S3ServiceUrl { get; init; } = null;

    public string? S3Region { get; init; } = null;

    public string? S3AccessKey { get; init; } = null;

    public string? S3SecretKey { get; init; } = null;

    public string RustFsEndpoint { get; init; } = string.Empty;

    public string RustFsBucket { get; init; } = string.Empty;

    public string? RustFsApiKeyHeader { get; init; } = null;

    public string? RustFsApiKey { get; init; } = null;

    public string? Prefix { get; init; } = null;

    public bool Immutable { get; init; } = false;

    public int? RetentionSeconds { get; init; } = null;

    public bool DryRun { get; init; } = false;

    public static MigrationOptions? Parse(string[] args)
    {
        var builder = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase);

        for (var i = 0; i < args.Length; i++)
        {
            var key = args[i];
            if (key.StartsWith("--", StringComparison.OrdinalIgnoreCase))
            {
                var normalized = key[2..];
                if (string.Equals(normalized, "immutable", StringComparison.OrdinalIgnoreCase) || string.Equals(normalized, "dry-run", StringComparison.OrdinalIgnoreCase))
                {
                    builder[normalized] = "true";
                    continue;
                }

                if (i + 1 >= args.Length)
                {
                    Console.Error.WriteLine($"Missing value for argument '{key}'.");
                    return null;
                }

                builder[normalized] = args[++i];
            }
        }

        if (!builder.TryGetValue("s3-bucket", out var bucket) || string.IsNullOrWhiteSpace(bucket))
        {
            Console.Error.WriteLine("--s3-bucket is required.");
            return null;
        }

        if (!builder.TryGetValue("rustfs-endpoint", out var rustFsEndpoint) || string.IsNullOrWhiteSpace(rustFsEndpoint))
        {
            Console.Error.WriteLine("--rustfs-endpoint is required.");
            return null;
        }

        if (!builder.TryGetValue("rustfs-bucket", out var rustFsBucket) || string.IsNullOrWhiteSpace(rustFsBucket))
        {
            Console.Error.WriteLine("--rustfs-bucket is required.");
            return null;
        }

        int? retentionSeconds = null;
        if (builder.TryGetValue("retain-days", out var retainStr) && !string.IsNullOrWhiteSpace(retainStr))
        {
            if (double.TryParse(retainStr, out var days) && days > 0)
            {
                retentionSeconds = (int)Math.Ceiling(days * 24 * 60 * 60);
            }
            else
            {
                Console.Error.WriteLine("--retain-days must be a positive number.");
                return null;
            }
        }

        return new MigrationOptions
        {
            S3Bucket = bucket,
            S3ServiceUrl = builder.TryGetValue("s3-endpoint", out var s3Endpoint) ? s3Endpoint : null,
            S3Region = builder.TryGetValue("s3-region", out var s3Region) ? s3Region : null,
            S3AccessKey = builder.TryGetValue("s3-access-key", out var s3AccessKey) ? s3AccessKey : null,
            S3SecretKey = builder.TryGetValue("s3-secret-key", out var s3SecretKey) ? s3SecretKey : null,
            RustFsEndpoint = rustFsEndpoint!,
            RustFsBucket = rustFsBucket!,
            RustFsApiKeyHeader = builder.TryGetValue("rustfs-api-key-header", out var apiKeyHeader) ? apiKeyHeader : null,
            RustFsApiKey = builder.TryGetValue("rustfs-api-key", out var apiKey) ? apiKey : null,
            Prefix = builder.TryGetValue("prefix", out var prefix) ? prefix : null,
            Immutable = builder.ContainsKey("immutable"),
            RetentionSeconds = retentionSeconds,
            DryRun = builder.ContainsKey("dry-run"),
        };
    }

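    // Illustrative example of how Parse maps a command line:
    //   --s3-bucket scanner-artifacts --rustfs-endpoint http://rustfs:8080 \
    //   --rustfs-bucket scanner-artifacts --retain-days 365 --immutable --dry-run
    // yields Immutable = true, DryRun = true and RetentionSeconds = 365 * 24 * 60 * 60 = 31_536_000.
    // Unrecognized "--key value" pairs are read into the dictionary but not mapped to any option.
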
    public static void PrintUsage()
    {
        Console.WriteLine(@"Usage: dotnet run --project src/Tools/RustFsMigrator -- \
  --s3-bucket <name> \
  [--s3-endpoint http://minio:9000] \
  [--s3-region us-east-1] \
  [--s3-access-key key --s3-secret-key secret] \
  --rustfs-endpoint http://rustfs:8080 \
  --rustfs-bucket scanner-artifacts \
  [--rustfs-api-key-header X-API-Key --rustfs-api-key token] \
  [--prefix scanner/] \
  [--immutable] \
  [--retain-days 365] \
  [--dry-run]");
    }
}

@@ -1,346 +0,0 @@
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common.State;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace SourceStateSeeder;

internal static class Program
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = SeedOptions.Parse(args);
            if (options is null)
            {
                SeedOptions.PrintUsage();
                return 1;
            }

            var seed = await LoadSpecificationAsync(options.InputPath).ConfigureAwait(false);
            var sourceName = seed.Source ?? options.SourceName;
            if (string.IsNullOrWhiteSpace(sourceName))
            {
                Console.Error.WriteLine("Source name must be supplied via --source or the seed file.");
                return 1;
            }

            var specification = await BuildSpecificationAsync(seed, sourceName, options.InputPath, CancellationToken.None).ConfigureAwait(false);

            var client = new MongoClient(options.ConnectionString);
            var database = client.GetDatabase(options.DatabaseName);
            var loggerFactory = NullLoggerFactory.Instance;

            var documentStore = new DocumentStore(database, loggerFactory.CreateLogger<DocumentStore>());
            var rawStorage = new RawDocumentStorage(database);
            var stateRepository = new MongoSourceStateRepository(database, loggerFactory.CreateLogger<MongoSourceStateRepository>());

            var processor = new SourceStateSeedProcessor(
                documentStore,
                rawStorage,
                stateRepository,
                TimeProvider.System,
                loggerFactory.CreateLogger<SourceStateSeedProcessor>());

            var result = await processor.ProcessAsync(specification, CancellationToken.None).ConfigureAwait(false);

            Console.WriteLine(
                $"Seeded {result.DocumentsProcessed} document(s) for {sourceName} " +
                $"(pendingDocuments+= {result.PendingDocumentsAdded}, pendingMappings+= {result.PendingMappingsAdded}, knownAdvisories+= {result.KnownAdvisoriesAdded.Count}).");
            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Error: {ex.Message}");
            return 1;
        }
    }

    private static async Task<StateSeed> LoadSpecificationAsync(string inputPath)
    {
        await using var stream = File.OpenRead(inputPath);
        var seed = await JsonSerializer.DeserializeAsync<StateSeed>(stream, JsonOptions).ConfigureAwait(false)
            ?? throw new InvalidOperationException("Input file deserialized to null.");
        return seed;
    }

    private static async Task<SourceStateSeedSpecification> BuildSpecificationAsync(
        StateSeed seed,
        string sourceName,
        string inputPath,
        CancellationToken cancellationToken)
    {
        var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(inputPath)) ?? Directory.GetCurrentDirectory();
        var documents = new List<SourceStateSeedDocument>(seed.Documents.Count);

        foreach (var documentSeed in seed.Documents)
        {
            documents.Add(await BuildDocumentAsync(documentSeed, baseDirectory, cancellationToken).ConfigureAwait(false));
        }

        return new SourceStateSeedSpecification
        {
            Source = sourceName,
            Documents = documents.AsReadOnly(),
            Cursor = BuildCursor(seed.Cursor),
            KnownAdvisories = NormalizeStrings(seed.KnownAdvisories),
            CompletedAt = seed.CompletedAt,
        };
    }

    private static async Task<SourceStateSeedDocument> BuildDocumentAsync(
        DocumentSeed seed,
        string baseDirectory,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(seed.Uri))
        {
            throw new InvalidOperationException("Seed entry missing 'uri'.");
        }

        if (string.IsNullOrWhiteSpace(seed.ContentFile))
        {
            throw new InvalidOperationException($"Seed entry for '{seed.Uri}' missing 'contentFile'.");
        }

        var contentPath = ResolvePath(seed.ContentFile, baseDirectory);
        if (!File.Exists(contentPath))
        {
            throw new FileNotFoundException($"Content file not found for '{seed.Uri}'.", contentPath);
        }

        var contentBytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false);

        var metadata = seed.Metadata is null
            ? null
            : new Dictionary<string, string>(seed.Metadata, StringComparer.OrdinalIgnoreCase);

        var headers = seed.Headers is null
            ? null
            : new Dictionary<string, string>(seed.Headers, StringComparer.OrdinalIgnoreCase);

        if (!string.IsNullOrWhiteSpace(seed.ContentType))
        {
            headers ??= new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            if (!headers.ContainsKey("content-type"))
            {
                headers["content-type"] = seed.ContentType!;
            }
        }

        return new SourceStateSeedDocument
        {
            Uri = seed.Uri,
            DocumentId = seed.DocumentId,
            Content = contentBytes,
            ContentType = seed.ContentType,
            Status = string.IsNullOrWhiteSpace(seed.Status) ? DocumentStatuses.PendingParse : seed.Status,
            Headers = headers,
            Metadata = metadata,
            Etag = seed.Etag,
            LastModified = ParseOptionalDate(seed.LastModified),
            ExpiresAt = seed.ExpiresAt,
            FetchedAt = ParseOptionalDate(seed.FetchedAt),
            AddToPendingDocuments = seed.AddToPendingDocuments,
            AddToPendingMappings = seed.AddToPendingMappings,
            KnownIdentifiers = NormalizeStrings(seed.KnownIdentifiers),
        };
    }

    private static SourceStateSeedCursor? BuildCursor(CursorSeed? cursorSeed)
    {
        if (cursorSeed is null)
        {
            return null;
        }

        return new SourceStateSeedCursor
        {
            PendingDocuments = NormalizeGuids(cursorSeed.PendingDocuments),
            PendingMappings = NormalizeGuids(cursorSeed.PendingMappings),
            KnownAdvisories = NormalizeStrings(cursorSeed.KnownAdvisories),
            LastModifiedCursor = cursorSeed.LastModifiedCursor,
            LastFetchAt = cursorSeed.LastFetchAt,
            Additional = cursorSeed.Additional is null
                ? null
                : new Dictionary<string, string>(cursorSeed.Additional, StringComparer.OrdinalIgnoreCase),
        };
    }

    private static IReadOnlyCollection<Guid>? NormalizeGuids(IEnumerable<Guid>? values)
    {
        if (values is null)
        {
            return null;
        }

        var set = new HashSet<Guid>();
        foreach (var guid in values)
        {
            if (guid != Guid.Empty)
            {
                set.Add(guid);
            }
        }

        return set.Count == 0 ? null : set.ToList();
    }

    private static IReadOnlyCollection<string>? NormalizeStrings(IEnumerable<string>? values)
    {
        if (values is null)
        {
            return null;
        }

        var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var value in values)
        {
            if (!string.IsNullOrWhiteSpace(value))
            {
                set.Add(value.Trim());
            }
        }

        return set.Count == 0 ? null : set.ToList();
    }

    private static DateTimeOffset? ParseOptionalDate(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        return DateTimeOffset.Parse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
    }

    private static string ResolvePath(string path, string baseDirectory)
        => Path.IsPathRooted(path) ? path : Path.GetFullPath(Path.Combine(baseDirectory, path));
}

internal sealed record SeedOptions
{
    public required string ConnectionString { get; init; }
    public required string DatabaseName { get; init; }
    public required string InputPath { get; init; }
    public string? SourceName { get; init; }

    public static SeedOptions? Parse(string[] args)
    {
        string? connectionString = null;
        string? database = null;
        string? input = null;
        string? source = null;

        for (var i = 0; i < args.Length; i++)
        {
            var arg = args[i];
            switch (arg)
            {
                case "--connection-string":
                case "-c":
                    connectionString = TakeValue(args, ref i, arg);
                    break;
                case "--database":
                case "-d":
                    database = TakeValue(args, ref i, arg);
                    break;
                case "--input":
                case "-i":
                    input = TakeValue(args, ref i, arg);
                    break;
                case "--source":
                case "-s":
                    source = TakeValue(args, ref i, arg);
                    break;
                case "--help":
                case "-h":
                    return null;
                default:
                    Console.Error.WriteLine($"Unrecognized argument '{arg}'.");
                    return null;
            }
        }

        if (string.IsNullOrWhiteSpace(connectionString) || string.IsNullOrWhiteSpace(database) || string.IsNullOrWhiteSpace(input))
        {
            return null;
        }

        return new SeedOptions
        {
            ConnectionString = connectionString,
            DatabaseName = database,
            InputPath = input,
            SourceName = source,
        };
    }

    public static void PrintUsage()
    {
        Console.WriteLine("Usage: dotnet run --project src/Tools/SourceStateSeeder -- --connection-string <connection> --database <name> --input <seed.json> [--source <source>]");
    }

    private static string TakeValue(string[] args, ref int index, string arg)
    {
        if (index + 1 >= args.Length)
        {
            throw new ArgumentException($"Missing value for {arg}.");
        }

        index++;
        return args[index];
    }
}

internal sealed record StateSeed
{
    public string? Source { get; init; }
    public List<DocumentSeed> Documents { get; init; } = new();
    public CursorSeed? Cursor { get; init; }
    public List<string>? KnownAdvisories { get; init; }
    public DateTimeOffset? CompletedAt { get; init; }
}

internal sealed record DocumentSeed
{
    public string Uri { get; init; } = string.Empty;
    public string ContentFile { get; init; } = string.Empty;
    public Guid? DocumentId { get; init; }
    public string? ContentType { get; init; }
    public Dictionary<string, string>? Metadata { get; init; }
    public Dictionary<string, string>? Headers { get; init; }
    public string Status { get; init; } = DocumentStatuses.PendingParse;
    public bool AddToPendingDocuments { get; init; } = true;
    public bool AddToPendingMappings { get; init; }
    public string? LastModified { get; init; }
    public string? FetchedAt { get; init; }
    public string? Etag { get; init; }
    public DateTimeOffset? ExpiresAt { get; init; }
    public List<string>? KnownIdentifiers { get; init; }
}

internal sealed record CursorSeed
{
    public List<Guid>? PendingDocuments { get; init; }
    public List<Guid>? PendingMappings { get; init; }
    public List<string>? KnownAdvisories { get; init; }
    public DateTimeOffset? LastModifiedCursor { get; init; }
    public DateTimeOffset? LastFetchAt { get; init; }
    public Dictionary<string, string>? Additional { get; init; }
}
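// Illustrative seed-file sketch (field names follow the records above and are read
// case-insensitively with web defaults; the source name, URI, and fixture path are placeholders):
// {
//   "source": "ghsa",
//   "documents": [
//     {
//       "uri": "https://api.github.com/advisories/GHSA-xxxx-xxxx-xxxx",
//       "contentFile": "fixtures/ghsa-advisory.json",
//       "contentType": "application/json",
//       "addToPendingMappings": true,
//       "knownIdentifiers": ["GHSA-xxxx-xxxx-xxxx"]
//     }
//   ],
//   "knownAdvisories": ["GHSA-xxxx-xxxx-xxxx"]
// }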
@@ -1,12 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\src\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
    <ProjectReference Include="..\Concelier\__Libraries\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
  </ItemGroup>
</Project>