Resolve Concelier/Excititor merge conflicts
@@ -1,20 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
-    <ProjectReference Include="../../src/StellaOps.Feedser.Source.Osv/StellaOps.Feedser.Source.Osv.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Source.Ghsa/StellaOps.Feedser.Source.Ghsa.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Source.Nvd/StellaOps.Feedser.Source.Nvd.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
-    <ProjectReference Include="../../src/StellaOps.Feedser.Testing/StellaOps.Feedser.Testing.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
+    <ProjectReference Include="../../src/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
  </ItemGroup>

</Project>
@@ -1,378 +1,378 @@
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using MongoDB.Bson;
-using StellaOps.Feedser.Models;
-using StellaOps.Feedser.Source.Ghsa;
-using StellaOps.Feedser.Source.Common;
-using StellaOps.Feedser.Source.Ghsa.Internal;
-using StellaOps.Feedser.Source.Osv.Internal;
-using StellaOps.Feedser.Source.Osv;
-using StellaOps.Feedser.Source.Nvd;
-using StellaOps.Feedser.Storage.Mongo.Documents;
-using StellaOps.Feedser.Storage.Mongo.Dtos;
+using StellaOps.Concelier.Models;
+using StellaOps.Concelier.Connector.Ghsa;
+using StellaOps.Concelier.Connector.Common;
+using StellaOps.Concelier.Connector.Ghsa.Internal;
+using StellaOps.Concelier.Connector.Osv.Internal;
+using StellaOps.Concelier.Connector.Osv;
+using StellaOps.Concelier.Connector.Nvd;
+using StellaOps.Concelier.Storage.Mongo.Documents;
+using StellaOps.Concelier.Storage.Mongo.Dtos;

var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};

var projectRoot = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", ".."));

-var osvFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Feedser.Source.Osv.Tests", "Fixtures");
-var ghsaFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Feedser.Source.Ghsa.Tests", "Fixtures");
-var nvdFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Feedser.Source.Nvd.Tests", "Nvd", "Fixtures");
+var osvFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures");
+var ghsaFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures");
+var nvdFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Nvd.Tests", "Nvd", "Fixtures");

RewriteOsvFixtures(osvFixturesPath);
RewriteSnapshotFixtures(osvFixturesPath);
RewriteGhsaFixtures(osvFixturesPath);
RewriteCreditParityFixtures(ghsaFixturesPath, nvdFixturesPath);
return;

void RewriteOsvFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-osv.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] OSV raw fixture missing: {rawPath}");
        return;
    }

    using var document = JsonDocument.Parse(File.ReadAllText(rawPath));
    var advisories = new List<Advisory>();
    foreach (var element in document.RootElement.EnumerateArray())
    {
        var dto = JsonSerializer.Deserialize<OsvVulnerabilityDto>(element.GetRawText(), serializerOptions);
        if (dto is null)
        {
            continue;
        }

        var ecosystem = dto.Affected?.FirstOrDefault()?.Package?.Ecosystem ?? "unknown";
        var uri = new Uri($"https://osv.dev/vulnerability/{dto.Id}");
        var documentRecord = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            uri.ToString(),
            DateTimeOffset.UtcNow,
            "fixture-sha",
            DocumentStatuses.PendingMap,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["osv.ecosystem"] = ecosystem,
            },
            null,
            DateTimeOffset.UtcNow,
            null,
            null);

        var payload = BsonDocument.Parse(element.GetRawText());
        var dtoRecord = new DtoRecord(
            Guid.NewGuid(),
            documentRecord.Id,
            OsvConnectorPlugin.SourceName,
            "osv.v1",
            payload,
            DateTimeOffset.UtcNow);

        var advisory = OsvMapper.Map(dto, documentRecord, dtoRecord, ecosystem);
        advisories.Add(advisory);
    }

    advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
    var snapshot = SnapshotSerializer.ToSnapshot(advisories);
    File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.osv.json"), snapshot);
    Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.osv.json")}");
}

void RewriteSnapshotFixtures(string fixturesPath)
{
    var baselinePublished = new DateTimeOffset(2025, 1, 5, 12, 0, 0, TimeSpan.Zero);
    var baselineModified = new DateTimeOffset(2025, 1, 8, 6, 30, 0, TimeSpan.Zero);
    var baselineFetched = new DateTimeOffset(2025, 1, 8, 7, 0, 0, TimeSpan.Zero);

    var cases = new (string Ecosystem, string Purl, string PackageName, string SnapshotFile)[]
    {
        ("npm", "pkg:npm/%40scope%2Fleft-pad", "@scope/left-pad", "osv-npm.snapshot.json"),
        ("PyPI", "pkg:pypi/requests", "requests", "osv-pypi.snapshot.json"),
    };

    foreach (var (ecosystem, purl, packageName, snapshotFile) in cases)
    {
        var dto = new OsvVulnerabilityDto
        {
            Id = $"OSV-2025-{ecosystem}-0001",
            Summary = $"{ecosystem} package vulnerability",
            Details = $"Detailed description for {ecosystem} package {packageName}.",
            Published = baselinePublished,
            Modified = baselineModified,
            Aliases = new[] { $"CVE-2025-11{ecosystem.Length}", $"GHSA-{ecosystem.Length}abc-{ecosystem.Length}def-{ecosystem.Length}ghi" },
            Related = new[] { $"OSV-RELATED-{ecosystem}-42" },
            References = new[]
            {
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/advisory", Type = "ADVISORY" },
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/fix", Type = "FIX" },
            },
            Severity = new[]
            {
                new OsvSeverityDto { Type = "CVSS_V3", Score = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
            },
            Affected = new[]
            {
                new OsvAffectedPackageDto
                {
                    Package = new OsvPackageDto
                    {
                        Ecosystem = ecosystem,
                        Name = packageName,
                        Purl = purl,
                    },
                    Ranges = new[]
                    {
                        new OsvRangeDto
                        {
                            Type = "SEMVER",
                            Events = new[]
                            {
                                new OsvEventDto { Introduced = "0" },
                                new OsvEventDto { Fixed = "2.0.0" },
                            },
                        },
                    },
                    Versions = new[] { "1.0.0", "1.5.0" },
                    EcosystemSpecific = JsonDocument.Parse("{\"severity\":\"high\"}").RootElement.Clone(),
                },
            },
            DatabaseSpecific = JsonDocument.Parse("{\"source\":\"osv.dev\"}").RootElement.Clone(),
        };

        var document = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            $"https://osv.dev/vulnerability/{dto.Id}",
            baselineFetched,
            "fixture-sha",
            DocumentStatuses.PendingParse,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal) { ["osv.ecosystem"] = ecosystem },
            null,
            baselineModified,
            null);

        var payload = BsonDocument.Parse(JsonSerializer.Serialize(dto, serializerOptions));
        var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, OsvConnectorPlugin.SourceName, "osv.v1", payload, baselineModified);

        var advisory = OsvMapper.Map(dto, document, dtoRecord, ecosystem);
        var snapshot = SnapshotSerializer.ToSnapshot(advisory);
        File.WriteAllText(Path.Combine(fixturesPath, snapshotFile), snapshot);
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, snapshotFile)}");
    }
}

void RewriteGhsaFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-ghsa.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] GHSA raw fixture missing: {rawPath}");
        return;
    }

    JsonDocument document;
    try
    {
        document = JsonDocument.Parse(File.ReadAllText(rawPath));
    }
    catch (JsonException ex)
    {
        Console.WriteLine($"[FixtureUpdater] Failed to parse GHSA raw fixture '{rawPath}': {ex.Message}");
        return;
    }
    using (document)
    {
        var advisories = new List<Advisory>();
        foreach (var element in document.RootElement.EnumerateArray())
        {
            GhsaRecordDto dto;
            try
            {
                dto = GhsaRecordParser.Parse(Encoding.UTF8.GetBytes(element.GetRawText()));
            }
            catch (JsonException)
            {
                continue;
            }

            var uri = new Uri($"https://github.com/advisories/{dto.GhsaId}");
            var documentRecord = new DocumentRecord(
                Guid.NewGuid(),
                GhsaConnectorPlugin.SourceName,
                uri.ToString(),
                DateTimeOffset.UtcNow,
                "fixture-sha",
                DocumentStatuses.PendingMap,
                "application/json",
                null,
                new Dictionary<string, string>(StringComparer.Ordinal),
                null,
                DateTimeOffset.UtcNow,
                null,
                null);

            var advisory = GhsaMapper.Map(dto, documentRecord, DateTimeOffset.UtcNow);
            advisories.Add(advisory);
        }

        advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
        var snapshot = SnapshotSerializer.ToSnapshot(advisories);
        File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.ghsa.json"), snapshot);
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.ghsa.json")}");
    }
}

void RewriteCreditParityFixtures(string ghsaFixturesPath, string nvdFixturesPath)
{
    Directory.CreateDirectory(ghsaFixturesPath);
    Directory.CreateDirectory(nvdFixturesPath);

    var advisoryKeyGhsa = "GHSA-credit-parity";
    var advisoryKeyNvd = "CVE-2025-5555";
    var recordedAt = new DateTimeOffset(2025, 10, 10, 15, 0, 0, TimeSpan.Zero);
    var published = new DateTimeOffset(2025, 10, 9, 18, 30, 0, TimeSpan.Zero);
    var modified = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero);

    AdvisoryCredit[] CreateCredits(string source) =>
    [
        CreateCredit("Alice Researcher", "reporter", new[] { "mailto:alice.researcher@example.com" }, source),
        CreateCredit("Bob Maintainer", "remediation_developer", new[] { "https://github.com/acme/bob-maintainer" }, source)
    ];

    AdvisoryCredit CreateCredit(string displayName, string role, IReadOnlyList<string> contacts, string source)
    {
        var provenance = new AdvisoryProvenance(
            source,
            "credit",
            $"{source}:{displayName.ToLowerInvariant().Replace(' ', '-')}",
            recordedAt,
            new[] { ProvenanceFieldMasks.Credits });

        return new AdvisoryCredit(displayName, role, contacts, provenance);
    }

    AdvisoryReference[] CreateReferences(string sourceName, params (string Url, string Kind)[] entries)
    {
        if (entries is null || entries.Length == 0)
        {
            return Array.Empty<AdvisoryReference>();
        }

        var references = new List<AdvisoryReference>(entries.Length);
        foreach (var entry in entries)
        {
            var provenance = new AdvisoryProvenance(
                sourceName,
                "reference",
                entry.Url,
                recordedAt,
                new[] { ProvenanceFieldMasks.References });

            references.Add(new AdvisoryReference(
                entry.Url,
                entry.Kind,
                sourceTag: null,
                summary: null,
                provenance));
        }

        return references.ToArray();
    }

    Advisory CreateAdvisory(
        string sourceName,
        string advisoryKey,
        IEnumerable<string> aliases,
        AdvisoryCredit[] credits,
        AdvisoryReference[] references,
        string documentValue)
    {
        var documentProvenance = new AdvisoryProvenance(
            sourceName,
            "document",
            documentValue,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });
        var mappingProvenance = new AdvisoryProvenance(
            sourceName,
            "mapping",
            advisoryKey,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });

        return new Advisory(
            advisoryKey,
            "Credit parity regression fixture",
            "Credit parity regression fixture",
            "en",
            published,
            modified,
            "moderate",
            exploitKnown: false,
            aliases,
            credits,
            references,
            Array.Empty<AffectedPackage>(),
            Array.Empty<CvssMetric>(),
            new[] { documentProvenance, mappingProvenance });
    }

    var ghsa = CreateAdvisory(
        "ghsa",
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits("ghsa"),
        CreateReferences(
            "ghsa",
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ("https://example.com/ghsa/patch", "patch")),
        $"security/advisories/{advisoryKeyGhsa}");

    var osv = CreateAdvisory(
        OsvConnectorPlugin.SourceName,
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits(OsvConnectorPlugin.SourceName),
        CreateReferences(
            OsvConnectorPlugin.SourceName,
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ($"https://osv.dev/vulnerability/{advisoryKeyGhsa}", "advisory")),
        $"https://osv.dev/vulnerability/{advisoryKeyGhsa}");

    var nvd = CreateAdvisory(
        NvdConnectorPlugin.SourceName,
        advisoryKeyNvd,
        new[] { advisoryKeyNvd, advisoryKeyGhsa },
        CreateCredits(NvdConnectorPlugin.SourceName),
        CreateReferences(
            NvdConnectorPlugin.SourceName,
            ($"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}", "advisory"),
            ("https://example.com/nvd/reference", "report")),
        $"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}");

    var ghsaSnapshot = SnapshotSerializer.ToSnapshot(ghsa);
    var osvSnapshot = SnapshotSerializer.ToSnapshot(osv);
    var nvdSnapshot = SnapshotSerializer.ToSnapshot(nvd);

    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);

    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);

    Console.WriteLine($"[FixtureUpdater] Updated credit parity fixtures under {ghsaFixturesPath} and {nvdFixturesPath}");
}
@@ -1,14 +1,13 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Source.Common.Fetch;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Documents;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common.State;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace SourceStateSeeder;
@@ -40,58 +39,28 @@ internal static class Program
return 1;
}

var specification = await BuildSpecificationAsync(seed, sourceName, options.InputPath, CancellationToken.None).ConfigureAwait(false);

var client = new MongoClient(options.ConnectionString);
var database = client.GetDatabase(options.DatabaseName);

var loggerFactory = NullLoggerFactory.Instance;

var documentStore = new DocumentStore(database, loggerFactory.CreateLogger<DocumentStore>());
var rawStorage = new RawDocumentStorage(database);
var stateRepository = new MongoSourceStateRepository(database, loggerFactory.CreateLogger<MongoSourceStateRepository>());

var pendingDocumentIds = new List<Guid>();
var pendingMappingIds = new List<Guid>();
var knownAdvisories = new List<string>();

var now = DateTimeOffset.UtcNow;
var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(options.InputPath)) ?? Directory.GetCurrentDirectory();

foreach (var document in seed.Documents)
{
var (record, addedToPendingDocs, addedToPendingMaps, known) = await UpsertDocumentAsync(
documentStore,
rawStorage,
sourceName,
baseDirectory,
now,
document,
cancellationToken: default).ConfigureAwait(false);

if (addedToPendingDocs)
{
pendingDocumentIds.Add(record.Id);
}

if (addedToPendingMaps)
{
pendingMappingIds.Add(record.Id);
}

if (known is not null)
{
knownAdvisories.AddRange(known);
}
}

await UpdateCursorAsync(
var processor = new SourceStateSeedProcessor(
documentStore,
rawStorage,
stateRepository,
sourceName,
seed.Cursor,
pendingDocumentIds,
pendingMappingIds,
knownAdvisories,
now).ConfigureAwait(false);
TimeProvider.System,
loggerFactory.CreateLogger<SourceStateSeedProcessor>());

Console.WriteLine($"Seeded {pendingDocumentIds.Count + pendingMappingIds.Count} documents for {sourceName}.");
var result = await processor.ProcessAsync(specification, CancellationToken.None).ConfigureAwait(false);

Console.WriteLine(
$"Seeded {result.DocumentsProcessed} document(s) for {sourceName} " +
$"(pendingDocuments+= {result.PendingDocumentsAdded}, pendingMappings+= {result.PendingMappingsAdded}, knownAdvisories+= {result.KnownAdvisoriesAdded.Count}).");
return 0;
}
catch (Exception ex)
@@ -109,13 +78,33 @@ internal static class Program
return seed;
}

private static async Task<(DocumentRecord Record, bool PendingDoc, bool PendingMap, IReadOnlyCollection<string>? Known)> UpsertDocumentAsync(
DocumentStore documentStore,
RawDocumentStorage rawStorage,
private static async Task<SourceStateSeedSpecification> BuildSpecificationAsync(
StateSeed seed,
string sourceName,
string baseDirectory,
DateTimeOffset fetchedAt,
string inputPath,
CancellationToken cancellationToken)
{
var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(inputPath)) ?? Directory.GetCurrentDirectory();
var documents = new List<SourceStateSeedDocument>(seed.Documents.Count);

foreach (var documentSeed in seed.Documents)
{
documents.Add(await BuildDocumentAsync(documentSeed, baseDirectory, cancellationToken).ConfigureAwait(false));
}

return new SourceStateSeedSpecification
{
Source = sourceName,
Documents = documents.AsReadOnly(),
Cursor = BuildCursor(seed.Cursor),
KnownAdvisories = NormalizeStrings(seed.KnownAdvisories),
CompletedAt = seed.CompletedAt,
};
}

private static async Task<SourceStateSeedDocument> BuildDocumentAsync(
DocumentSeed seed,
string baseDirectory,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(seed.Uri))
@@ -128,152 +117,120 @@ internal static class Program
throw new InvalidOperationException($"Seed entry for '{seed.Uri}' missing 'contentFile'.");
}

var contentPath = Path.IsPathRooted(seed.ContentFile)
? seed.ContentFile
: Path.GetFullPath(Path.Combine(baseDirectory, seed.ContentFile));

var contentPath = ResolvePath(seed.ContentFile, baseDirectory);
if (!File.Exists(contentPath))
{
throw new FileNotFoundException($"Content file not found for '{seed.Uri}'.", contentPath);
}

var contentBytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false);
var sha256 = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant();
var gridId = await rawStorage.UploadAsync(
sourceName,
seed.Uri,
contentBytes,
seed.ContentType,
seed.ExpiresAt,
cancellationToken).ConfigureAwait(false);

var metadata = seed.Metadata is null
? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
? null
: new Dictionary<string, string>(seed.Metadata, StringComparer.OrdinalIgnoreCase);

var headers = seed.Headers is null
? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
? null
: new Dictionary<string, string>(seed.Headers, StringComparer.OrdinalIgnoreCase);

if (!headers.ContainsKey("content-type") && !string.IsNullOrWhiteSpace(seed.ContentType))
if (!string.IsNullOrWhiteSpace(seed.ContentType))
{
headers["content-type"] = seed.ContentType!;
headers ??= new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (!headers.ContainsKey("content-type"))
{
headers["content-type"] = seed.ContentType!;
}
}

var lastModified = seed.LastModified is null
? (DateTimeOffset?)null
: DateTimeOffset.Parse(seed.LastModified, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);

var record = new DocumentRecord(
Guid.NewGuid(),
sourceName,
seed.Uri,
fetchedAt,
sha256,
string.IsNullOrWhiteSpace(seed.Status) ? DocumentStatuses.PendingParse : seed.Status,
seed.ContentType,
headers,
metadata,
seed.Etag,
lastModified,
gridId,
seed.ExpiresAt);

var upserted = await documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);

return (upserted, seed.AddToPendingDocuments, seed.AddToPendingMappings, seed.KnownIdentifiers);
return new SourceStateSeedDocument
{
Uri = seed.Uri,
DocumentId = seed.DocumentId,
Content = contentBytes,
ContentType = seed.ContentType,
Status = string.IsNullOrWhiteSpace(seed.Status) ? DocumentStatuses.PendingParse : seed.Status,
Headers = headers,
Metadata = metadata,
Etag = seed.Etag,
LastModified = ParseOptionalDate(seed.LastModified),
ExpiresAt = seed.ExpiresAt,
FetchedAt = ParseOptionalDate(seed.FetchedAt),
AddToPendingDocuments = seed.AddToPendingDocuments,
AddToPendingMappings = seed.AddToPendingMappings,
KnownIdentifiers = NormalizeStrings(seed.KnownIdentifiers),
};
}

private static async Task UpdateCursorAsync(
ISourceStateRepository repository,
string sourceName,
CursorSeed? cursorSeed,
IReadOnlyCollection<Guid> pendingDocuments,
IReadOnlyCollection<Guid> pendingMappings,
IReadOnlyCollection<string> knownAdvisories,
DateTimeOffset completedAt)
private static SourceStateSeedCursor? BuildCursor(CursorSeed? cursorSeed)
{
var state = await repository.TryGetAsync(sourceName, CancellationToken.None).ConfigureAwait(false);
var cursor = state?.Cursor ?? new BsonDocument();

MergeGuidArray(cursor, "pendingDocuments", pendingDocuments);
MergeGuidArray(cursor, "pendingMappings", pendingMappings);

if (knownAdvisories.Count > 0)
if (cursorSeed is null)
{
MergeStringArray(cursor, "knownAdvisories", knownAdvisories);
return null;
}

if (cursorSeed is not null)
return new SourceStateSeedCursor
{
if (cursorSeed.LastModifiedCursor.HasValue)
{
cursor["lastModifiedCursor"] = cursorSeed.LastModifiedCursor.Value.UtcDateTime;
}

if (cursorSeed.LastFetchAt.HasValue)
{
cursor["lastFetchAt"] = cursorSeed.LastFetchAt.Value.UtcDateTime;
}

if (cursorSeed.Additional is not null)
{
foreach (var kvp in cursorSeed.Additional)
{
cursor[kvp.Key] = kvp.Value;
}
}
}

cursor["lastSeededAt"] = completedAt.UtcDateTime;

await repository.UpdateCursorAsync(sourceName, cursor, completedAt, CancellationToken.None).ConfigureAwait(false);
PendingDocuments = NormalizeGuids(cursorSeed.PendingDocuments),
PendingMappings = NormalizeGuids(cursorSeed.PendingMappings),
KnownAdvisories = NormalizeStrings(cursorSeed.KnownAdvisories),
LastModifiedCursor = cursorSeed.LastModifiedCursor,
LastFetchAt = cursorSeed.LastFetchAt,
Additional = cursorSeed.Additional is null
? null
: new Dictionary<string, string>(cursorSeed.Additional, StringComparer.OrdinalIgnoreCase),
};
}

private static void MergeGuidArray(BsonDocument cursor, string field, IReadOnlyCollection<Guid> values)
private static IReadOnlyCollection<Guid>? NormalizeGuids(IEnumerable<Guid>? values)
{
if (values.Count == 0)
if (values is null)
{
return;
return null;
}

var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array
? array.Select(v => Guid.TryParse(v?.AsString, out var parsed) ? parsed : Guid.Empty)
.Where(g => g != Guid.Empty)
.ToHashSet()
: new HashSet<Guid>();

var set = new HashSet<Guid>();
foreach (var guid in values)
{
existing.Add(guid);
}

cursor[field] = new BsonArray(existing.Select(g => g.ToString()));
}

private static void MergeStringArray(BsonDocument cursor, string field, IReadOnlyCollection<string> values)
{
if (values.Count == 0)
{
return;
}

var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array
? array.Select(v => v?.AsString ?? string.Empty)
.Where(s => !string.IsNullOrWhiteSpace(s))
.ToHashSet(StringComparer.OrdinalIgnoreCase)
: new HashSet<string>(StringComparer.OrdinalIgnoreCase);

foreach (var entry in values)
{
if (!string.IsNullOrWhiteSpace(entry))
if (guid != Guid.Empty)
{
existing.Add(entry.Trim());
set.Add(guid);
}
}

cursor[field] = new BsonArray(existing.OrderBy(s => s, StringComparer.OrdinalIgnoreCase));
return set.Count == 0 ? null : set.ToList();
}

private static IReadOnlyCollection<string>? NormalizeStrings(IEnumerable<string>? values)
{
if (values is null)
{
return null;
}

var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var value in values)
{
if (!string.IsNullOrWhiteSpace(value))
{
set.Add(value.Trim());
}
}

return set.Count == 0 ? null : set.ToList();
}

private static DateTimeOffset? ParseOptionalDate(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}

return DateTimeOffset.Parse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
}

private static string ResolvePath(string path, string baseDirectory)
=> Path.IsPathRooted(path) ? path : Path.GetFullPath(Path.Combine(baseDirectory, path));
}

internal sealed record SeedOptions
@@ -356,12 +313,15 @@ internal sealed record StateSeed
public string? Source { get; init; }
public List<DocumentSeed> Documents { get; init; } = new();
public CursorSeed? Cursor { get; init; }
public List<string>? KnownAdvisories { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
}

internal sealed record DocumentSeed
{
public string Uri { get; init; } = string.Empty;
public string ContentFile { get; init; } = string.Empty;
public Guid? DocumentId { get; init; }
public string? ContentType { get; init; }
public Dictionary<string, string>? Metadata { get; init; }
public Dictionary<string, string>? Headers { get; init; }
@@ -369,13 +329,17 @@ internal sealed record DocumentSeed
public bool AddToPendingDocuments { get; init; } = true;
public bool AddToPendingMappings { get; init; }
public string? LastModified { get; init; }
public string? FetchedAt { get; init; }
public string? Etag { get; init; }
public DateTimeOffset? ExpiresAt { get; init; }
public IReadOnlyCollection<string>? KnownIdentifiers { get; init; }
public List<string>? KnownIdentifiers { get; init; }
}

internal sealed record CursorSeed
{
public List<Guid>? PendingDocuments { get; init; }
public List<Guid>? PendingMappings { get; init; }
public List<string>? KnownAdvisories { get; init; }
public DateTimeOffset? LastModifiedCursor { get; init; }
public DateTimeOffset? LastFetchAt { get; init; }
public Dictionary<string, string>? Additional { get; init; }
@@ -6,7 +6,7 @@
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\StellaOps.Feedser.Source.Common\StellaOps.Feedser.Source.Common.csproj" />
<ProjectReference Include="..\..\src\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
<ProjectReference Include="..\..\src\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\..\src\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>
@@ -67,7 +67,7 @@ class CertBundClient:

raise RuntimeError(
"CERT-Bund XSRF token not available. Provide --xsrf-token or a cookie file "
"containing XSRF-TOKEN (see docs/ops/feedser-certbund-operations.md)."
"containing XSRF-TOKEN (see docs/ops/concelier-certbund-operations.md)."
)

def fetch_search_pages(
@@ -281,7 +281,7 @@ def _build_search_record(path: Path) -> Dict[str, Any]:
return {
"type": "search",
"path": path,
"source": "feedser.cert-bund.search",
"source": "concelier.cert-bund.search",
"itemCount": len(content),
"from": range_from,
"to": range_to,
@@ -301,7 +301,7 @@ def _build_export_record(path: Path) -> Dict[str, Any]:
return {
"type": "export",
"path": path,
"source": "feedser.cert-bund.export",
"source": "concelier.cert-bund.export",
"itemCount": None,
"from": from_value,
"to": to_value,
@@ -358,7 +358,7 @@ def build_manifest(root: Path, records: Iterable[Dict[str, Any]], manifest_path:

manifest_path.parent.mkdir(parents=True, exist_ok=True)
manifest_document = {
"source": "feedser.cert-bund",
"source": "concelier.cert-bund",
"generatedAt": dt.datetime.now(tz=UTC).isoformat(),
"artifacts": manifest_entries,
}