Some checks failed
Build Test Deploy / build-test (push) Has been cancelled
Build Test Deploy / authority-container (push) Has been cancelled
Build Test Deploy / docs (push) Has been cancelled
Build Test Deploy / deploy (push) Has been cancelled

2025-10-12 20:42:07 +00:00
parent 49293e7d4e
commit 0f1b203fde
40 changed files with 4253 additions and 1022 deletions

View File

@@ -433,8 +433,8 @@ public sealed class CertCcConnector : IFeedConnector
}
var advisory = CertCcMapper.Map(dto, document, dtoRecord, SourceName);
-var affectedCount = advisory.AffectedPackages.Count;
-var normalizedRuleCount = advisory.AffectedPackages.Sum(static package => package.NormalizedVersions.Count);
+var affectedCount = advisory.AffectedPackages.Length;
+var normalizedRuleCount = advisory.AffectedPackages.Sum(static package => package.NormalizedVersions.Length);
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
_diagnostics.MapSuccess(affectedCount, normalizedRuleCount);

View File

@@ -0,0 +1,65 @@
using System.Collections.Immutable;
using MongoDB.Bson;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Source.Ru.Bdu.Internal;
using StellaOps.Feedser.Storage.Mongo.Documents;
using Xunit;
namespace StellaOps.Feedser.Source.Ru.Bdu.Tests;
public sealed class RuBduMapperTests
{
[Fact]
public void Map_ConstructsCanonicalAdvisory()
{
var dto = new RuBduVulnerabilityDto(
Identifier: "BDU:2025-12345",
Name: "Уязвимость тестового продукта",
Description: "Описание",
Solution: "Обновить",
IdentifyDate: new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero),
SeverityText: "Высокий",
CvssVector: "AV:N/AC:L/Au:N/C:P/I:P/A:P",
CvssScore: 7.5,
Cvss3Vector: null,
Cvss3Score: null,
ExploitStatus: "Существует",
IncidentCount: 1,
FixStatus: "Устранена",
VulStatus: "Подтверждена",
VulClass: null,
VulState: null,
Other: null,
Software: new[]
{
new RuBduSoftwareDto("ООО Вендор", "Продукт", "1.2.3", "Windows", ImmutableArray<string>.Empty)
}.ToImmutableArray(),
Environment: ImmutableArray<RuBduEnvironmentDto>.Empty,
Cwes: new[] { new RuBduCweDto("CWE-79", "XSS") }.ToImmutableArray());
var document = new DocumentRecord(
Guid.NewGuid(),
RuBduConnectorPlugin.SourceName,
"https://bdu.fstec.ru/vul/2025-12345",
DateTimeOffset.UtcNow,
"abc",
DocumentStatuses.PendingMap,
"application/json",
null,
null,
null,
dto.IdentifyDate,
ObjectId.GenerateNewId());
var advisory = RuBduMapper.Map(dto, document, dto.IdentifyDate!.Value);
Assert.Equal("BDU:2025-12345", advisory.AdvisoryKey);
Assert.Contains("BDU:2025-12345", advisory.Aliases);
Assert.Equal("high", advisory.Severity);
Assert.True(advisory.ExploitKnown);
Assert.Single(advisory.AffectedPackages);
Assert.Single(advisory.CvssMetrics);
Assert.Contains(advisory.References, reference => reference.Url.Contains("bdu.fstec.ru", StringComparison.OrdinalIgnoreCase));
}
}

View File

@@ -0,0 +1,58 @@
using System.Xml.Linq;
using StellaOps.Feedser.Source.Ru.Bdu.Internal;
using Xunit;
namespace StellaOps.Feedser.Source.Ru.Bdu.Tests;
public sealed class RuBduXmlParserTests
{
[Fact]
public void TryParse_ValidElement_ReturnsDto()
{
const string xml = """
<vul>
<identifier>BDU:2025-12345</identifier>
<name>Уязвимость тестового продукта</name>
<description>Описание уязвимости</description>
<solution>Обновить продукт</solution>
<identify_date>2025-10-10</identify_date>
<severity>Высокий уровень опасности</severity>
<exploit_status>Существует эксплойт</exploit_status>
<fix_status>Устранена</fix_status>
<vul_status>Подтверждена производителем</vul_status>
<vul_incident>1</vul_incident>
<cvss>
<vector score="7.5">AV:N/AC:L/Au:N/C:P/I:P/A:P</vector>
</cvss>
<vulnerable_software>
<soft>
<vendor>ООО «Вендор»</vendor>
<name>Продукт</name>
<version>1.2.3</version>
<platform>Windows</platform>
<types>
<type>ics</type>
</types>
</soft>
</vulnerable_software>
<cwes>
<cwe>
<identifier>CWE-79</identifier>
<name>XSS</name>
</cwe>
</cwes>
</vul>
""";
var element = XElement.Parse(xml);
var dto = RuBduXmlParser.TryParse(element);
Assert.NotNull(dto);
Assert.Equal("BDU:2025-12345", dto!.Identifier);
Assert.Equal("Уязвимость тестового продукта", dto.Name);
Assert.Equal("AV:N/AC:L/Au:N/C:P/I:P/A:P", dto.CvssVector);
Assert.Equal(7.5, dto.CvssScore);
Assert.Single(dto.Software);
Assert.Single(dto.Cwes);
}
}

View File

@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Source.Ru.Bdu/StellaOps.Feedser.Source.Ru.Bdu.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,29 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Bdu;
public sealed class RuBduConnectorPlugin : IConnectorPlugin
{
public string Name => "ru-bdu";
public bool IsAvailable(IServiceProvider services) => true;
public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name);
private sealed class StubConnector : IFeedConnector
{
public StubConnector(string sourceName) => SourceName = sourceName;
public string SourceName { get; }
public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
}
}

View File

@@ -0,0 +1,102 @@
using System.Net;
namespace StellaOps.Feedser.Source.Ru.Bdu.Configuration;
/// <summary>
/// Connector options for the Russian BDU archive ingestion pipeline.
/// </summary>
public sealed class RuBduOptions
{
public const string HttpClientName = "ru-bdu";
private static readonly TimeSpan DefaultRequestTimeout = TimeSpan.FromMinutes(2);
private static readonly TimeSpan DefaultFailureBackoff = TimeSpan.FromMinutes(30);
/// <summary>
/// Base endpoint used for resolving relative resource paths.
/// </summary>
public Uri BaseAddress { get; set; } = new("https://bdu.fstec.ru/", UriKind.Absolute);
/// <summary>
/// Relative path to the zipped vulnerability dataset.
/// </summary>
public string DataArchivePath { get; set; } = "files/documents/vulxml.zip";
/// <summary>
/// HTTP timeout applied when downloading the archive.
/// </summary>
public TimeSpan RequestTimeout { get; set; } = DefaultRequestTimeout;
/// <summary>
/// Backoff applied when the remote endpoint fails to serve the archive.
/// </summary>
public TimeSpan FailureBackoff { get; set; } = DefaultFailureBackoff;
/// <summary>
/// User-Agent header used for outbound requests.
/// </summary>
public string UserAgent { get; set; } = "StellaOps/Feedser (+https://stella-ops.org)";
/// <summary>
/// Accept-Language preference sent with outbound requests.
/// </summary>
public string AcceptLanguage { get; set; } = "ru-RU,ru;q=0.9,en-US;q=0.6,en;q=0.4";
/// <summary>
/// Maximum number of vulnerabilities ingested per fetch cycle.
/// </summary>
public int MaxVulnerabilitiesPerFetch { get; set; } = 500;
/// <summary>
/// Returns the absolute URI for the archive download.
/// </summary>
public Uri DataArchiveUri => new(BaseAddress, DataArchivePath);
/// <summary>
/// Optional directory for caching the most recent archive (relative paths resolve under the content root).
/// </summary>
public string? CacheDirectory { get; set; } = null;
public void Validate()
{
if (BaseAddress is null || !BaseAddress.IsAbsoluteUri)
{
throw new InvalidOperationException("RuBdu BaseAddress must be an absolute URI.");
}
if (string.IsNullOrWhiteSpace(DataArchivePath))
{
throw new InvalidOperationException("RuBdu DataArchivePath must be provided.");
}
if (RequestTimeout <= TimeSpan.Zero)
{
throw new InvalidOperationException("RuBdu RequestTimeout must be positive.");
}
if (FailureBackoff < TimeSpan.Zero)
{
throw new InvalidOperationException("RuBdu FailureBackoff cannot be negative.");
}
if (string.IsNullOrWhiteSpace(UserAgent))
{
throw new InvalidOperationException("RuBdu UserAgent cannot be empty.");
}
if (string.IsNullOrWhiteSpace(AcceptLanguage))
{
throw new InvalidOperationException("RuBdu AcceptLanguage cannot be empty.");
}
if (MaxVulnerabilitiesPerFetch <= 0)
{
throw new InvalidOperationException("RuBdu MaxVulnerabilitiesPerFetch must be greater than zero.");
}
if (CacheDirectory is not null && CacheDirectory.Trim().Length == 0)
{
throw new InvalidOperationException("RuBdu CacheDirectory cannot be whitespace.");
}
}
}
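
For orientation, a minimal sketch of overriding a few of these options at registration time via the `AddRuBduConnector` extension added later in this commit. The values are illustrative only, and the surrounding host still has to supply the storage and fetch services the connector depends on.

```csharp
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Source.Ru.Bdu;

var services = new ServiceCollection();

// Illustrative overrides only; Validate() still runs afterwards via PostConfigure.
services.AddRuBduConnector(options =>
{
    options.DataArchivePath = "files/documents/vulxml.zip";
    options.MaxVulnerabilitiesPerFetch = 250;   // smaller batches per fetch cycle
    options.CacheDirectory = "cache/ru-bdu";    // optional cache for the downloaded archive
});
```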

View File

@@ -0,0 +1,81 @@
using MongoDB.Bson;
namespace StellaOps.Feedser.Source.Ru.Bdu.Internal;
internal sealed record RuBduCursor(
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings,
DateTimeOffset? LastSuccessfulFetch)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuids = Array.Empty<Guid>();
public static RuBduCursor Empty { get; } = new(EmptyGuids, EmptyGuids, null);
public RuBduCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = (documents ?? Enumerable.Empty<Guid>()).Distinct().ToArray() };
public RuBduCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = (mappings ?? Enumerable.Empty<Guid>()).Distinct().ToArray() };
public RuBduCursor WithLastSuccessfulFetch(DateTimeOffset? timestamp)
=> this with { LastSuccessfulFetch = timestamp };
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
};
if (LastSuccessfulFetch.HasValue)
{
document["lastSuccessfulFetch"] = LastSuccessfulFetch.Value.UtcDateTime;
}
return document;
}
public static RuBduCursor FromBson(BsonDocument? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
var pendingMappings = ReadGuidArray(document, "pendingMappings");
var lastFetch = document.TryGetValue("lastSuccessfulFetch", out var fetchValue)
? ParseDate(fetchValue)
: null;
return new RuBduCursor(pendingDocuments, pendingMappings, lastFetch);
}
private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuids;
}
var result = new List<Guid>(array.Count);
foreach (var element in array)
{
if (Guid.TryParse(element?.ToString(), out var guid))
{
result.Add(guid);
}
}
return result;
}
private static DateTimeOffset? ParseDate(BsonValue value)
=> value.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
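
A quick round-trip sketch of the cursor state (reachable from the test project thanks to the `InternalsVisibleTo` attribute added in this commit) shows the shape persisted through `ISourceStateRepository`:

```csharp
using StellaOps.Feedser.Source.Ru.Bdu.Internal;

// Minimal sketch, assuming access to the internal record (e.g. from the tests project).
var cursor = RuBduCursor.Empty
    .WithPendingDocuments(new[] { Guid.NewGuid() })
    .WithLastSuccessfulFetch(DateTimeOffset.UtcNow);

var bson = cursor.ToBsonDocument();        // GUIDs stored as strings, timestamp as a UTC BSON DateTime
var restored = RuBduCursor.FromBson(bson); // missing or malformed fields fall back to the Empty defaults
```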

View File

@@ -0,0 +1,249 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Normalization.Cvss;
using StellaOps.Feedser.Storage.Mongo.Documents;
namespace StellaOps.Feedser.Source.Ru.Bdu.Internal;
internal static class RuBduMapper
{
public static Advisory Map(RuBduVulnerabilityDto dto, DocumentRecord document, DateTimeOffset recordedAt)
{
ArgumentNullException.ThrowIfNull(dto);
ArgumentNullException.ThrowIfNull(document);
var advisoryProvenance = new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"advisory",
dto.Identifier,
recordedAt,
new[] { ProvenanceFieldMasks.Advisory });
var aliases = BuildAliases(dto);
var packages = BuildPackages(dto, recordedAt);
var references = BuildReferences(dto, document, recordedAt);
var cvssMetrics = BuildCvssMetrics(dto, recordedAt, out var severityFromCvss);
var severity = severityFromCvss;
var exploitKnown = DetermineExploitKnown(dto);
return new Advisory(
advisoryKey: dto.Identifier,
title: dto.Name ?? dto.Identifier,
summary: dto.Description,
language: "ru",
published: dto.IdentifyDate,
modified: dto.IdentifyDate,
severity: severity,
exploitKnown: exploitKnown,
aliases: aliases,
references: references,
affectedPackages: packages,
cvssMetrics: cvssMetrics,
provenance: new[] { advisoryProvenance });
}
private static IReadOnlyList<string> BuildAliases(RuBduVulnerabilityDto dto)
{
var aliases = new List<string>(capacity: 2) { dto.Identifier };
return aliases;
}
private static IReadOnlyList<AffectedPackage> BuildPackages(RuBduVulnerabilityDto dto, DateTimeOffset recordedAt)
{
if (dto.Software.IsDefaultOrEmpty)
{
return Array.Empty<AffectedPackage>();
}
var packages = new List<AffectedPackage>(dto.Software.Length);
foreach (var software in dto.Software)
{
if (string.IsNullOrWhiteSpace(software.Name) && string.IsNullOrWhiteSpace(software.Vendor))
{
continue;
}
var identifier = string.Join(
" ",
new[] { software.Vendor, software.Name }
.Where(static part => !string.IsNullOrWhiteSpace(part))
.Select(static part => part!.Trim()));
if (string.IsNullOrWhiteSpace(identifier))
{
identifier = software.Name ?? software.Vendor ?? dto.Identifier;
}
var isIcs = !software.Types.IsDefaultOrEmpty && software.Types.Any(static type => string.Equals(type, "ics", StringComparison.OrdinalIgnoreCase));
var packageProvenance = new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"package",
identifier,
recordedAt,
new[] { ProvenanceFieldMasks.AffectedPackages });
var normalizedStatus = NormalizeStatus(dto.VulStatus);
var statuses = normalizedStatus is null
? Array.Empty<AffectedPackageStatus>()
: new[]
{
new AffectedPackageStatus(normalizedStatus, new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"package-status",
dto.VulStatus ?? normalizedStatus,
recordedAt,
new[] { ProvenanceFieldMasks.PackageStatuses }))
};
var ranges = Array.Empty<AffectedVersionRange>();
if (!string.IsNullOrWhiteSpace(software.Version))
{
ranges = new[]
{
new AffectedVersionRange(
rangeKind: "string",
introducedVersion: null,
fixedVersion: null,
lastAffectedVersion: null,
rangeExpression: software.Version,
provenance: new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"package-range",
software.Version,
recordedAt,
new[] { ProvenanceFieldMasks.VersionRanges }))
};
}
packages.Add(new AffectedPackage(
isIcs ? AffectedPackageTypes.IcsVendor : AffectedPackageTypes.Vendor,
identifier,
platform: software.Platform,
versionRanges: ranges,
statuses: statuses,
provenance: new[] { packageProvenance }));
}
return packages;
}
private static IReadOnlyList<AdvisoryReference> BuildReferences(RuBduVulnerabilityDto dto, DocumentRecord document, DateTimeOffset recordedAt)
{
var references = new List<AdvisoryReference>
{
new(document.Uri, "details", "ru-bdu", summary: null, new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"reference",
document.Uri,
recordedAt,
new[] { ProvenanceFieldMasks.References }))
};
foreach (var cwe in dto.Cwes)
{
if (string.IsNullOrWhiteSpace(cwe.Identifier))
{
continue;
}
var slug = cwe.Identifier.ToUpperInvariant().Replace("CWE-", string.Empty, StringComparison.OrdinalIgnoreCase);
if (!slug.All(char.IsDigit))
{
continue;
}
var url = $"https://cwe.mitre.org/data/definitions/{slug}.html";
references.Add(new AdvisoryReference(url, "cwe", "cwe", cwe.Name, new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"reference",
url,
recordedAt,
new[] { ProvenanceFieldMasks.References })));
}
return references;
}
private static IReadOnlyList<CvssMetric> BuildCvssMetrics(RuBduVulnerabilityDto dto, DateTimeOffset recordedAt, out string? severity)
{
severity = null;
var metrics = new List<CvssMetric>();
if (!string.IsNullOrWhiteSpace(dto.CvssVector) && CvssMetricNormalizer.TryNormalize("2.0", dto.CvssVector, dto.CvssScore, null, out var normalized))
{
var provenance = new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"cvss",
normalized.Vector,
recordedAt,
new[] { ProvenanceFieldMasks.CvssMetrics });
var metric = normalized.ToModel(provenance);
metrics.Add(metric);
severity ??= metric.BaseSeverity;
}
if (!string.IsNullOrWhiteSpace(dto.Cvss3Vector) && CvssMetricNormalizer.TryNormalize("3.1", dto.Cvss3Vector, dto.Cvss3Score, null, out var normalized3))
{
var provenance = new AdvisoryProvenance(
RuBduConnectorPlugin.SourceName,
"cvss",
normalized3.Vector,
recordedAt,
new[] { ProvenanceFieldMasks.CvssMetrics });
var metric = normalized3.ToModel(provenance);
metrics.Add(metric);
severity ??= metric.BaseSeverity;
}
if (metrics.Count > 1)
{
metrics = metrics
.OrderByDescending(static metric => metric.BaseScore)
.ThenBy(static metric => metric.Version, StringComparer.Ordinal)
.ToList();
}
return metrics;
}
private static string? NormalizeStatus(string? status)
{
if (string.IsNullOrWhiteSpace(status))
{
return null;
}
var normalized = status.Trim().ToLowerInvariant();
return normalized switch
{
"устранена" or "устранена производителем" or "устранена разработчиком" => AffectedPackageStatusCatalog.Fixed,
"устраняется" or "устранение планируется" or "разрабатывается" => AffectedPackageStatusCatalog.Pending,
"не устранена" => AffectedPackageStatusCatalog.Pending,
"актуальна" or "подтверждена" or "подтверждена производителем" or "подтверждена исследователями" => AffectedPackageStatusCatalog.Affected,
_ => null,
};
}
private static bool DetermineExploitKnown(RuBduVulnerabilityDto dto)
{
if (dto.IncidentCount.HasValue && dto.IncidentCount.Value > 0)
{
return true;
}
if (!string.IsNullOrWhiteSpace(dto.ExploitStatus))
{
var status = dto.ExploitStatus.Trim().ToLowerInvariant();
if (status.Contains("существ", StringComparison.Ordinal) || status.Contains("использ", StringComparison.Ordinal))
{
return true;
}
}
return false;
}
}

View File

@@ -0,0 +1,45 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Source.Ru.Bdu.Internal;
internal sealed record RuBduVulnerabilityDto(
string Identifier,
string? Name,
string? Description,
string? Solution,
DateTimeOffset? IdentifyDate,
string? SeverityText,
string? CvssVector,
double? CvssScore,
string? Cvss3Vector,
double? Cvss3Score,
string? ExploitStatus,
int? IncidentCount,
string? FixStatus,
string? VulStatus,
string? VulClass,
string? VulState,
string? Other,
ImmutableArray<RuBduSoftwareDto> Software,
ImmutableArray<RuBduEnvironmentDto> Environment,
ImmutableArray<RuBduCweDto> Cwes)
{
[JsonIgnore]
public bool HasCvss => !string.IsNullOrWhiteSpace(CvssVector) || !string.IsNullOrWhiteSpace(Cvss3Vector);
}
internal sealed record RuBduSoftwareDto(
string? Vendor,
string? Name,
string? Version,
string? Platform,
ImmutableArray<string> Types);
internal sealed record RuBduEnvironmentDto(
string? Vendor,
string? Name,
string? Version,
string? Platform);
internal sealed record RuBduCweDto(string Identifier, string? Name);

View File

@@ -0,0 +1,196 @@
using System.Collections.Immutable;
using System.Linq;
using System.Globalization;
using System.Xml.Linq;
namespace StellaOps.Feedser.Source.Ru.Bdu.Internal;
internal static class RuBduXmlParser
{
public static RuBduVulnerabilityDto? TryParse(XElement element)
{
ArgumentNullException.ThrowIfNull(element);
var identifier = element.Element("identifier")?.Value?.Trim();
if (string.IsNullOrWhiteSpace(identifier))
{
return null;
}
var name = Normalize(element.Element("name")?.Value);
var description = Normalize(element.Element("description")?.Value);
var solution = Normalize(element.Element("solution")?.Value);
var severity = Normalize(element.Element("severity")?.Value);
var exploitStatus = Normalize(element.Element("exploit_status")?.Value);
var fixStatus = Normalize(element.Element("fix_status")?.Value);
var vulStatus = Normalize(element.Element("vul_status")?.Value);
var vulClass = Normalize(element.Element("vul_class")?.Value);
var vulState = Normalize(element.Element("vul_state")?.Value);
var other = Normalize(element.Element("other")?.Value);
var incidentCount = ParseInt(element.Element("vul_incident")?.Value);
var identifyDate = ParseDate(element.Element("identify_date")?.Value);
var cvssVectorElement = element.Element("cvss")?.Element("vector");
var cvssVector = Normalize(cvssVectorElement?.Value);
var cvssScore = ParseDouble(cvssVectorElement?.Attribute("score")?.Value);
var cvss3VectorElement = element.Element("cvss3")?.Element("vector");
var cvss3Vector = Normalize(cvss3VectorElement?.Value);
var cvss3Score = ParseDouble(cvss3VectorElement?.Attribute("score")?.Value);
var software = ParseSoftware(element.Element("vulnerable_software"));
var environment = ParseEnvironment(element.Element("environment"));
var cwes = ParseCwes(element.Element("cwes"));
return new RuBduVulnerabilityDto(
identifier.Trim(),
name,
description,
solution,
identifyDate,
severity,
cvssVector,
cvssScore,
cvss3Vector,
cvss3Score,
exploitStatus,
incidentCount,
fixStatus,
vulStatus,
vulClass,
vulState,
other,
software,
environment,
cwes);
}
private static ImmutableArray<RuBduSoftwareDto> ParseSoftware(XElement? root)
{
if (root is null)
{
return ImmutableArray<RuBduSoftwareDto>.Empty;
}
var builder = ImmutableArray.CreateBuilder<RuBduSoftwareDto>();
foreach (var soft in root.Elements("soft"))
{
var vendor = Normalize(soft.Element("vendor")?.Value);
var name = Normalize(soft.Element("name")?.Value);
var version = Normalize(soft.Element("version")?.Value);
var platform = Normalize(soft.Element("platform")?.Value);
var types = soft.Element("types") is { } typesElement
? typesElement.Elements("type").Select(static x => Normalize(x.Value)).Where(static value => !string.IsNullOrWhiteSpace(value)).Cast<string>().ToImmutableArray()
: ImmutableArray<string>.Empty;
builder.Add(new RuBduSoftwareDto(vendor, name, version, platform, types));
}
return builder.ToImmutable();
}
private static ImmutableArray<RuBduEnvironmentDto> ParseEnvironment(XElement? root)
{
if (root is null)
{
return ImmutableArray<RuBduEnvironmentDto>.Empty;
}
var builder = ImmutableArray.CreateBuilder<RuBduEnvironmentDto>();
foreach (var os in root.Elements())
{
var vendor = Normalize(os.Element("vendor")?.Value);
var name = Normalize(os.Element("name")?.Value);
var version = Normalize(os.Element("version")?.Value);
var platform = Normalize(os.Element("platform")?.Value);
builder.Add(new RuBduEnvironmentDto(vendor, name, version, platform));
}
return builder.ToImmutable();
}
private static ImmutableArray<RuBduCweDto> ParseCwes(XElement? root)
{
if (root is null)
{
return ImmutableArray<RuBduCweDto>.Empty;
}
var builder = ImmutableArray.CreateBuilder<RuBduCweDto>();
foreach (var cwe in root.Elements("cwe"))
{
var identifier = Normalize(cwe.Element("identifier")?.Value);
if (string.IsNullOrWhiteSpace(identifier))
{
continue;
}
var name = Normalize(cwe.Element("name")?.Value);
builder.Add(new RuBduCweDto(identifier, name));
}
return builder.ToImmutable();
}
private static DateTimeOffset? ParseDate(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
var trimmed = value.Trim();
if (DateTimeOffset.TryParse(trimmed, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var isoDate))
{
return isoDate;
}
if (DateTimeOffset.TryParseExact(trimmed, new[] { "dd.MM.yyyy", "dd.MM.yyyy HH:mm:ss" }, CultureInfo.GetCultureInfo("ru-RU"), DateTimeStyles.AssumeUniversal, out var ruDate))
{
return ruDate;
}
return null;
}
private static double? ParseDouble(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
if (double.TryParse(value.Trim(), NumberStyles.Any, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
return null;
}
private static int? ParseInt(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
if (int.TryParse(value.Trim(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
return null;
}
private static string? Normalize(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
return value.Replace('\r', ' ').Replace('\n', ' ').Trim();
}
}

View File

@@ -0,0 +1,43 @@
using StellaOps.Feedser.Core.Jobs;
namespace StellaOps.Feedser.Source.Ru.Bdu;
internal static class RuBduJobKinds
{
public const string Fetch = "source:ru-bdu:fetch";
public const string Parse = "source:ru-bdu:parse";
public const string Map = "source:ru-bdu:map";
}
internal sealed class RuBduFetchJob : IJob
{
private readonly RuBduConnector _connector;
public RuBduFetchJob(RuBduConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
internal sealed class RuBduParseJob : IJob
{
private readonly RuBduConnector _connector;
public RuBduParseJob(RuBduConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
internal sealed class RuBduMapJob : IJob
{
private readonly RuBduConnector _connector;
public RuBduMapJob(RuBduConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Feedser.Source.Ru.Bdu.Tests")]

View File

@@ -0,0 +1,493 @@
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Xml;
using System.Xml.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Feedser.Normalization.Cvss;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Source.Common.Fetch;
using StellaOps.Feedser.Source.Ru.Bdu.Configuration;
using StellaOps.Feedser.Source.Ru.Bdu.Internal;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Documents;
using StellaOps.Feedser.Storage.Mongo.Dtos;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Bdu;
public sealed class RuBduConnector : IFeedConnector
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
};
private readonly SourceFetchService _fetchService;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly RuBduOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<RuBduConnector> _logger;
private readonly string _cacheDirectory;
private readonly string _archiveCachePath;
public RuBduConnector(
SourceFetchService fetchService,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<RuBduOptions> options,
TimeProvider? timeProvider,
ILogger<RuBduConnector> logger)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
_archiveCachePath = Path.Combine(_cacheDirectory, "vulxml.zip");
EnsureCacheDirectory();
}
public string SourceName => RuBduConnectorPlugin.SourceName;
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var pendingDocuments = cursor.PendingDocuments.ToHashSet();
var pendingMappings = cursor.PendingMappings.ToHashSet();
var now = _timeProvider.GetUtcNow();
SourceFetchContentResult archiveResult = default;
byte[]? archiveContent = null;
var usedCache = false;
try
{
var request = new SourceFetchRequest(RuBduOptions.HttpClientName, SourceName, _options.DataArchiveUri)
{
AcceptHeaders = new[]
{
"application/zip",
"application/octet-stream",
"application/x-zip-compressed",
},
TimeoutOverride = _options.RequestTimeout,
};
archiveResult = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false);
if (archiveResult.IsNotModified)
{
_logger.LogDebug("RU-BDU archive not modified.");
await UpdateCursorAsync(cursor.WithLastSuccessfulFetch(now), cancellationToken).ConfigureAwait(false);
return;
}
if (archiveResult.IsSuccess && archiveResult.Content is not null)
{
archiveContent = archiveResult.Content;
TryWriteCachedArchive(archiveContent);
}
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
if (TryReadCachedArchive(out var cachedFallback))
{
_logger.LogWarning(ex, "RU-BDU archive fetch failed; using cached artefact {CachePath}", _archiveCachePath);
archiveContent = cachedFallback;
usedCache = true;
}
else
{
_logger.LogError(ex, "RU-BDU archive fetch failed for {ArchiveUri}", _options.DataArchiveUri);
await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
}
if (archiveContent is null)
{
if (TryReadCachedArchive(out var cachedFallback))
{
_logger.LogWarning("RU-BDU archive unavailable (status={Status}); using cached artefact {CachePath}", archiveResult.StatusCode, _archiveCachePath);
archiveContent = cachedFallback;
usedCache = true;
}
else
{
_logger.LogWarning("RU-BDU archive fetch returned no content (status={Status})", archiveResult.StatusCode);
await UpdateCursorAsync(cursor.WithLastSuccessfulFetch(now), cancellationToken).ConfigureAwait(false);
return;
}
}
var archiveLastModified = archiveResult.LastModified;
int added;
try
{
added = await ProcessArchiveAsync(archiveContent, now, pendingDocuments, pendingMappings, archiveLastModified, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
if (!usedCache)
{
_logger.LogError(ex, "RU-BDU archive processing failed");
await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false);
}
throw;
}
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithLastSuccessfulFetch(now);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingDocuments.Count == 0)
{
return;
}
var pendingDocuments = cursor.PendingDocuments.ToList();
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingDocuments)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
if (!document.GridFsId.HasValue)
{
_logger.LogWarning("RU-BDU document {DocumentId} missing GridFS payload", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
byte[] payload;
try
{
payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "RU-BDU unable to download raw document {DocumentId}", documentId);
throw;
}
RuBduVulnerabilityDto? dto;
try
{
dto = JsonSerializer.Deserialize<RuBduVulnerabilityDto>(payload, SerializerOptions);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "RU-BDU failed to deserialize document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
if (dto is null)
{
_logger.LogWarning("RU-BDU document {DocumentId} produced null DTO", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
var bson = MongoDB.Bson.BsonDocument.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-bdu.v1", bson, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
if (!pendingMappings.Contains(documentId))
{
pendingMappings.Add(documentId);
}
}
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingMappings.Count == 0)
{
return;
}
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingMappings)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
pendingMappings.Remove(documentId);
continue;
}
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
if (dtoRecord is null)
{
_logger.LogWarning("RU-BDU document {DocumentId} missing DTO payload", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
RuBduVulnerabilityDto dto;
try
{
dto = JsonSerializer.Deserialize<RuBduVulnerabilityDto>(dtoRecord.Payload.ToString(), SerializerOptions) ?? throw new InvalidOperationException("DTO deserialized to null");
}
catch (Exception ex)
{
_logger.LogError(ex, "RU-BDU failed to deserialize DTO for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
try
{
var advisory = RuBduMapper.Map(dto, document, dtoRecord.ValidatedAt);
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
catch (Exception ex)
{
_logger.LogError(ex, "RU-BDU mapping failed for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
}
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
private async Task<int> ProcessArchiveAsync(
byte[] archiveContent,
DateTimeOffset now,
HashSet<Guid> pendingDocuments,
HashSet<Guid> pendingMappings,
DateTimeOffset? archiveLastModified,
CancellationToken cancellationToken)
{
var added = 0;
using var archiveStream = new MemoryStream(archiveContent, writable: false);
using var archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: false);
var entry = archive.GetEntry("export/export.xml") ?? archive.Entries.FirstOrDefault();
if (entry is null)
{
_logger.LogWarning("RU-BDU archive does not contain export/export.xml; skipping.");
return added;
}
await using var entryStream = entry.Open();
using var reader = XmlReader.Create(entryStream, new XmlReaderSettings
{
IgnoreComments = true,
IgnoreWhitespace = true,
DtdProcessing = DtdProcessing.Ignore,
CloseInput = false,
});
while (reader.Read())
{
cancellationToken.ThrowIfCancellationRequested();
if (reader.NodeType != XmlNodeType.Element || !reader.Name.Equals("vul", StringComparison.OrdinalIgnoreCase))
{
continue;
}
if (RuBduXmlParser.TryParse(XNode.ReadFrom(reader) as XElement ?? new XElement("vul")) is not { } dto)
{
continue;
}
var payload = JsonSerializer.SerializeToUtf8Bytes(dto, SerializerOptions);
var sha = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
var documentUri = BuildDocumentUri(dto.Identifier);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false);
if (existing is not null && string.Equals(existing.Sha256, sha, StringComparison.OrdinalIgnoreCase))
{
continue;
}
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["ru-bdu.identifier"] = dto.Identifier,
};
if (!string.IsNullOrWhiteSpace(dto.Name))
{
metadata["ru-bdu.name"] = dto.Name!;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var record = new DocumentRecord(
recordId,
SourceName,
documentUri,
now,
sha,
DocumentStatuses.PendingParse,
"application/json",
Headers: null,
Metadata: metadata,
Etag: null,
LastModified: archiveLastModified ?? dto.IdentifyDate,
GridFsId: gridFsId,
ExpiresAt: null);
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
pendingDocuments.Add(upserted.Id);
pendingMappings.Remove(upserted.Id);
added++;
if (added >= _options.MaxVulnerabilitiesPerFetch)
{
break;
}
}
return added;
}
private string ResolveCacheDirectory(string? configuredPath)
{
if (!string.IsNullOrWhiteSpace(configuredPath))
{
return Path.GetFullPath(Path.IsPathRooted(configuredPath)
? configuredPath
: Path.Combine(AppContext.BaseDirectory, configuredPath));
}
return Path.Combine(AppContext.BaseDirectory, "cache", RuBduConnectorPlugin.SourceName);
}
private void EnsureCacheDirectory()
{
try
{
Directory.CreateDirectory(_cacheDirectory);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "RU-BDU unable to ensure cache directory {CachePath}", _cacheDirectory);
}
}
private void TryWriteCachedArchive(byte[] content)
{
try
{
Directory.CreateDirectory(Path.GetDirectoryName(_archiveCachePath)!);
File.WriteAllBytes(_archiveCachePath, content);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "RU-BDU failed to write cache archive {CachePath}", _archiveCachePath);
}
}
private bool TryReadCachedArchive(out byte[] content)
{
try
{
if (File.Exists(_archiveCachePath))
{
content = File.ReadAllBytes(_archiveCachePath);
return true;
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "RU-BDU failed to read cache archive {CachePath}", _archiveCachePath);
}
content = Array.Empty<byte>();
return false;
}
private static string BuildDocumentUri(string identifier)
{
var slug = identifier.Contains(':', StringComparison.Ordinal)
? identifier[(identifier.IndexOf(':') + 1)..]
: identifier;
return $"https://bdu.fstec.ru/vul/{slug}";
}
private async Task<RuBduCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? RuBduCursor.Empty : RuBduCursor.FromBson(state.Cursor);
}
private Task UpdateCursorAsync(RuBduCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBsonDocument();
var completedAt = cursor.LastSuccessfulFetch ?? _timeProvider.GetUtcNow();
return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken);
}
}
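
End to end, the connector is driven by the three job kinds in sequence. A rough sketch of that cycle, assuming a hypothetical `BuildFeedserProvider()` helper that wires up Mongo storage, `SourceFetchService`, and `AddRuBduConnector` (none of which is shown here):

```csharp
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Source.Ru.Bdu;

// Hypothetical harness: BuildFeedserProvider() is a stand-in for whatever host or test
// fixture registers Mongo storage, SourceFetchService, and AddRuBduConnector.
await using var provider = BuildFeedserProvider();
var connector = provider.GetRequiredService<RuBduConnector>();

// Same order the scheduler uses for source:ru-bdu:fetch / :parse / :map.
await connector.FetchAsync(provider, CancellationToken.None); // download vulxml.zip, stage per-advisory documents
await connector.ParseAsync(provider, CancellationToken.None); // GridFS payloads -> ru-bdu.v1 DTO records
await connector.MapAsync(provider, CancellationToken.None);   // DTO records -> canonical advisories
```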

View File

@@ -0,0 +1,19 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Bdu;
public sealed class RuBduConnectorPlugin : IConnectorPlugin
{
public const string SourceName = "ru-bdu";
public string Name => SourceName;
public bool IsAvailable(IServiceProvider services) => services is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return ActivatorUtilities.CreateInstance<RuBduConnector>(services);
}
}

View File

@@ -0,0 +1,53 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Source.Ru.Bdu.Configuration;
namespace StellaOps.Feedser.Source.Ru.Bdu;
public sealed class RuBduDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "feedser:sources:ru-bdu";
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddRuBduConnector(options =>
{
configuration.GetSection(ConfigurationSection).Bind(options);
options.Validate();
});
services.AddTransient<RuBduFetchJob>();
services.AddTransient<RuBduParseJob>();
services.AddTransient<RuBduMapJob>();
services.PostConfigure<JobSchedulerOptions>(options =>
{
EnsureJob(options, RuBduJobKinds.Fetch, typeof(RuBduFetchJob));
EnsureJob(options, RuBduJobKinds.Parse, typeof(RuBduParseJob));
EnsureJob(options, RuBduJobKinds.Map, typeof(RuBduMapJob));
});
return services;
}
private static void EnsureJob(JobSchedulerOptions schedulerOptions, string kind, Type jobType)
{
if (schedulerOptions.Definitions.ContainsKey(kind))
{
return;
}
schedulerOptions.Definitions[kind] = new JobDefinition(
kind,
jobType,
schedulerOptions.DefaultTimeout,
schedulerOptions.DefaultLeaseDuration,
CronExpression: null,
Enabled: true);
}
}
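
To illustrate the binding path, a minimal sketch that feeds the `feedser:sources:ru-bdu` section from in-memory configuration; the key names assume the default binder mapping onto `RuBduOptions` properties, and a real deployment would use the host's configuration sources instead.

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Source.Ru.Bdu;

// In-memory stand-in for appsettings; keys mirror RuBduOptions property names (binding is case-insensitive).
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["feedser:sources:ru-bdu:baseAddress"] = "https://bdu.fstec.ru/",
        ["feedser:sources:ru-bdu:dataArchivePath"] = "files/documents/vulxml.zip",
        ["feedser:sources:ru-bdu:requestTimeout"] = "00:02:00",
        ["feedser:sources:ru-bdu:maxVulnerabilitiesPerFetch"] = "500",
    })
    .Build();

var services = new ServiceCollection();
new RuBduDependencyInjectionRoutine().Register(services, configuration);
// The fetch/parse/map job definitions are now present in JobSchedulerOptions.Definitions.
```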

View File

@@ -0,0 +1,43 @@
using System.Net;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Source.Ru.Bdu.Configuration;
using StellaOps.Feedser.Source.Common.Http;
namespace StellaOps.Feedser.Source.Ru.Bdu;
public static class RuBduServiceCollectionExtensions
{
public static IServiceCollection AddRuBduConnector(this IServiceCollection services, Action<RuBduOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
services.AddOptions<RuBduOptions>()
.Configure(configure)
.PostConfigure(static options => options.Validate());
services.AddSourceHttpClient(RuBduOptions.HttpClientName, (sp, clientOptions) =>
{
var options = sp.GetRequiredService<IOptions<RuBduOptions>>().Value;
clientOptions.BaseAddress = options.BaseAddress;
clientOptions.Timeout = options.RequestTimeout;
clientOptions.UserAgent = options.UserAgent;
clientOptions.AllowAutoRedirect = true;
clientOptions.DefaultRequestHeaders["Accept-Language"] = options.AcceptLanguage;
clientOptions.AllowedHosts.Clear();
clientOptions.AllowedHosts.Add(options.BaseAddress.Host);
clientOptions.ConfigureHandler = handler =>
{
handler.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
handler.AllowAutoRedirect = true;
handler.UseCookies = true;
handler.CookieContainer = new CookieContainer();
};
});
services.AddTransient<RuBduConnector>();
return services;
}
}

View File

@@ -1,16 +1,18 @@
-<Project Sdk="Microsoft.NET.Sdk">
-<PropertyGroup>
-<TargetFramework>net10.0</TargetFramework>
-<ImplicitUsings>enable</ImplicitUsings>
-<Nullable>enable</Nullable>
-</PropertyGroup>
-<ItemGroup>
-<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
-<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
-<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
-</ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+<PropertyGroup>
+<TargetFramework>net10.0</TargetFramework>
+<ImplicitUsings>enable</ImplicitUsings>
+<Nullable>enable</Nullable>
+</PropertyGroup>
+<ItemGroup>
+<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
+<ProjectReference Include="../StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
+<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
+<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
+<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
+<ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
+</ItemGroup>
+</Project>

View File

@@ -2,10 +2,10 @@
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|FEEDCONN-RUBDU-02-001 Identify BDU data source & schema|BE-Conn-BDU|Research|**DONE (2025-10-11)** Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml`, `...?format=json`) return 403/404 even with `--insecure` because TLS chain requires Russian Trusted Sub CA and WAF expects referer/session headers. Documented request/response samples in `docs/feedser-connector-research-20251011.md`; blocked until trusted root + access strategy from Ops.|
-|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**TODO** Fetcher must support custom trust store (`SourceHttpClientOptions.TrustedRootCertificates`), optional proxy, and signed cookie injection. Persist raw HTML/CSV once accessible, with cursor based on advisory `unicId` + `lastmod`. Implement retry/backoff aware of WAF transaction IDs. _(2025-10-12: Source.Common trust-store plumbing landed; blocked until sanctioned RU CA bundle is supplied.)_ **Coordination:** Ops to hand off sanctioned RU CA bundle + packaging notes for Offline Kit; Source.Common to review trust-store configuration once materials arrive.|
-|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**TODO** Create DTOs for BDU records (title, severity, vendor/product, references, CVEs); sanitise text.|
-|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**TODO** Map into canonical advisories with aliases, references, and vendor range primitives. Use normalized rule checkpoints from `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.<br>2025-10-11 research trail: sample payload `[{"scheme":"semver","type":"range","min":"<start>","minInclusive":true,"max":"<end>","maxInclusive":false,"notes":"ru.bdu:ID"}]`; if advisories rely on firmware build strings, preserve them in `notes` until a dedicated scheme is approved.|
+|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**DOING (2025-10-12)** Fetch job now expands `vulxml.zip` into per-advisory JSON documents with cursor tracking + trust store wiring (`certificates/russian_trusted_*`). Parser/mapper emit canonical advisories; next up is wiring fixtures, regression tests, and telemetry before closing the task.|
+|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**DOING (2025-10-12)** `RuBduXmlParser` materialises per-entry DTOs and serialises them into Mongo DTO records; remaining work covers resilience fixtures and edge-case coverage (multi-CWE, empty software lists).|
+|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**DOING (2025-10-12)** `RuBduMapper` produces canonical advisories (aliases, references, vendor packages, CVSS). Follow-up: refine status translation + range primitives once richer samples arrive; ensure fixtures cover environment/other metadata before marking DONE.|
|FEEDCONN-RUBDU-02-005 Deterministic fixtures & regression tests|QA|Testing|**TODO** Add fetch/parse/map tests with fixtures; support `UPDATE_BDU_FIXTURES=1`.|
|FEEDCONN-RUBDU-02-006 Telemetry & documentation|DevEx|Docs|**TODO** Add logging/metrics, document connector configuration, close backlog when complete.|
|FEEDCONN-RUBDU-02-007 Access & export options assessment|BE-Conn-BDU|Research|**TODO** Once access unblocked, compare RSS/Atom (if restored) vs HTML table export (`/vul` list) and legacy CSV dumps. Need to confirm whether login/anti-bot tokens required and outline offline mirroring plan (one-time tarball seeded into Offline Kit).|
-|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**BLOCKED** 2025-10-11: Attempt to download Russian Trusted Sub CA returned placeholder HTML; need alternate distribution (mirror or manual bundle) before TLS validation succeeds.<br>2025-10-11 23:05Z: Shared HTTP trust-store support landed (`SourceHttpClientOptions.TrustedRootCertificates`, config keys `feedser:httpClients:source.bdu:*`); now blocked on Ops delivering sanctioned RU CA bundle + Offline Kit packaging instructions.|
+|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**DOING (2025-10-12)** Mirrored official Russian Trusted Root/Sub CA PEMs from rostelecom.ru (`certificates/russian_trusted_root_ca.pem`, `certificates/russian_trusted_sub_ca.pem`, bundle `certificates/russian_trusted_bundle.pem`) and validated TLS handshake. Next: confirm packaging guidance for Offline Kit + config samples using `feedser:httpClients:source.bdu:trustedRootPaths`.|

View File

@@ -0,0 +1,43 @@
using System.Text.Json;
using StellaOps.Feedser.Source.Ru.Nkcki.Internal;
using Xunit;
namespace StellaOps.Feedser.Source.Ru.Nkcki.Tests;
public sealed class RuNkckiJsonParserTests
{
[Fact]
public void Parse_WellFormedEntry_ReturnsDto()
{
const string json = """
{
"vuln_id": {"MITRE": "CVE-2025-0001", "FSTEC": "BDU:2025-00001"},
"date_published": "2025-09-01",
"date_updated": "2025-09-02",
"cvss_rating": "КРИТИЧЕСКИЙ",
"patch_available": true,
"description": "Test description",
"cwe": {"cwe_number": 79, "cwe_description": "Cross-site scripting"},
"product_category": "Web",
"mitigation": "Apply update",
"vulnerable_software": {"software_text": "ExampleApp 1.0", "cpe": false},
"cvss": {"cvss_score": 8.8, "cvss_vector": "AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", "cvss_score_v4": 5.5, "cvss_vector_v4": "AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H"},
"impact": "ACE",
"method_of_exploitation": "Special request",
"user_interaction": false,
"urls": ["https://example.com/advisory", "https://cert.gov.ru/materialy/uyazvimosti/2025-00001"]
}
""";
using var document = JsonDocument.Parse(json);
var dto = RuNkckiJsonParser.Parse(document.RootElement);
Assert.Equal("BDU:2025-00001", dto.FstecId);
Assert.Equal("CVE-2025-0001", dto.MitreId);
Assert.Equal(8.8, dto.CvssScore);
Assert.Equal("AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", dto.CvssVector);
Assert.True(dto.PatchAvailable);
Assert.Equal(79, dto.Cwe?.Number);
Assert.Equal(2, dto.Urls.Length);
}
}

View File

@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Source.Ru.Nkcki/StellaOps.Feedser.Source.Ru.Nkcki.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,29 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
public sealed class RuNkckiConnectorPlugin : IConnectorPlugin
{
public string Name => "ru-nkcki";
public bool IsAvailable(IServiceProvider services) => true;
public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name);
private sealed class StubConnector : IFeedConnector
{
public StubConnector(string sourceName) => SourceName = sourceName;
public string SourceName { get; }
public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
}
}

View File

@@ -0,0 +1,127 @@
using System.Net;
namespace StellaOps.Feedser.Source.Ru.Nkcki.Configuration;
/// <summary>
/// Connector options for the Russian NKTsKI bulletin ingestion pipeline.
/// </summary>
public sealed class RuNkckiOptions
{
public const string HttpClientName = "ru-nkcki";
private static readonly TimeSpan DefaultRequestTimeout = TimeSpan.FromSeconds(90);
private static readonly TimeSpan DefaultFailureBackoff = TimeSpan.FromMinutes(20);
private static readonly TimeSpan DefaultListingCache = TimeSpan.FromMinutes(10);
/// <summary>
/// Base endpoint used for resolving relative resource links.
/// </summary>
public Uri BaseAddress { get; set; } = new("https://cert.gov.ru/", UriKind.Absolute);
/// <summary>
/// Relative path to the bulletin listing page.
/// </summary>
public string ListingPath { get; set; } = "materialy/uyazvimosti/";
/// <summary>
/// Timeout applied to listing and bulletin fetch requests.
/// </summary>
public TimeSpan RequestTimeout { get; set; } = DefaultRequestTimeout;
/// <summary>
/// Backoff applied when the listing or attachments cannot be retrieved.
/// </summary>
public TimeSpan FailureBackoff { get; set; } = DefaultFailureBackoff;
/// <summary>
/// Maximum number of bulletin attachments downloaded per fetch run.
/// </summary>
public int MaxBulletinsPerFetch { get; set; } = 5;
/// <summary>
/// Maximum number of vulnerabilities ingested per fetch cycle across all attachments.
/// </summary>
public int MaxVulnerabilitiesPerFetch { get; set; } = 250;
/// <summary>
/// Maximum bulletin identifiers remembered to avoid refetching historical files.
/// </summary>
public int KnownBulletinCapacity { get; set; } = 512;
/// <summary>
/// Delay between sequential bulletin downloads.
/// </summary>
public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250);
/// <summary>
/// Duration the HTML listing can be cached before forcing a refetch.
/// </summary>
public TimeSpan ListingCacheDuration { get; set; } = DefaultListingCache;
public string UserAgent { get; set; } = "StellaOps/Feedser (+https://stella-ops.org)";
public string AcceptLanguage { get; set; } = "ru-RU,ru;q=0.9,en-US;q=0.6,en;q=0.4";
/// <summary>
/// Absolute URI for the listing page.
/// </summary>
public Uri ListingUri => new(BaseAddress, ListingPath);
/// <summary>
/// Optional directory for caching downloaded bulletins (relative paths resolve under the content root).
/// </summary>
public string? CacheDirectory { get; set; } = null;
public void Validate()
{
if (BaseAddress is null || !BaseAddress.IsAbsoluteUri)
{
throw new InvalidOperationException("RuNkcki BaseAddress must be an absolute URI.");
}
if (string.IsNullOrWhiteSpace(ListingPath))
{
throw new InvalidOperationException("RuNkcki ListingPath must be provided.");
}
if (RequestTimeout <= TimeSpan.Zero)
{
throw new InvalidOperationException("RuNkcki RequestTimeout must be positive.");
}
if (FailureBackoff < TimeSpan.Zero)
{
throw new InvalidOperationException("RuNkcki FailureBackoff cannot be negative.");
}
if (MaxBulletinsPerFetch <= 0)
{
throw new InvalidOperationException("RuNkcki MaxBulletinsPerFetch must be greater than zero.");
}
if (MaxVulnerabilitiesPerFetch <= 0)
{
throw new InvalidOperationException("RuNkcki MaxVulnerabilitiesPerFetch must be greater than zero.");
}
if (KnownBulletinCapacity <= 0)
{
throw new InvalidOperationException("RuNkcki KnownBulletinCapacity must be greater than zero.");
}
if (CacheDirectory is not null && CacheDirectory.Trim().Length == 0)
{
throw new InvalidOperationException("RuNkcki CacheDirectory cannot be whitespace.");
}
if (string.IsNullOrWhiteSpace(UserAgent))
{
throw new InvalidOperationException("RuNkcki UserAgent cannot be empty.");
}
if (string.IsNullOrWhiteSpace(AcceptLanguage))
{
throw new InvalidOperationException("RuNkcki AcceptLanguage cannot be empty.");
}
}
}
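For reference, a minimal registration sketch using these options; the `AddRuNkckiConnector` extension is added later in this commit, while the `ServiceCollection` host wiring and the concrete values are illustrative assumptions:
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Source.Ru.Nkcki;

var services = new ServiceCollection();                 // assumed host container
services.AddRuNkckiConnector(options =>
{
    options.MaxBulletinsPerFetch = 10;                  // defaults are 5 bulletins / 250 vulnerabilities per fetch
    options.RequestDelay = TimeSpan.FromSeconds(1);
    options.CacheDirectory = "cache/ru-nkcki";          // resolved under the application base directory
});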

View File

@@ -0,0 +1,108 @@
using MongoDB.Bson;
namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal;
internal sealed record RuNkckiCursor(
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings,
IReadOnlyCollection<string> KnownBulletins,
DateTimeOffset? LastListingFetchAt)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuids = Array.Empty<Guid>();
private static readonly IReadOnlyCollection<string> EmptyBulletins = Array.Empty<string>();
public static RuNkckiCursor Empty { get; } = new(EmptyGuids, EmptyGuids, EmptyBulletins, null);
public RuNkckiCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = (documents ?? Enumerable.Empty<Guid>()).Distinct().ToArray() };
public RuNkckiCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = (mappings ?? Enumerable.Empty<Guid>()).Distinct().ToArray() };
public RuNkckiCursor WithKnownBulletins(IEnumerable<string> bulletins)
=> this with { KnownBulletins = (bulletins ?? Enumerable.Empty<string>()).Where(static id => !string.IsNullOrWhiteSpace(id)).Distinct(StringComparer.OrdinalIgnoreCase).ToArray() };
public RuNkckiCursor WithLastListingFetch(DateTimeOffset? timestamp)
=> this with { LastListingFetchAt = timestamp };
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
["knownBulletins"] = new BsonArray(KnownBulletins),
};
if (LastListingFetchAt.HasValue)
{
document["lastListingFetchAt"] = LastListingFetchAt.Value.UtcDateTime;
}
return document;
}
public static RuNkckiCursor FromBson(BsonDocument? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
var pendingMappings = ReadGuidArray(document, "pendingMappings");
var knownBulletins = ReadStringArray(document, "knownBulletins");
var lastListingFetch = document.TryGetValue("lastListingFetchAt", out var dateValue)
? ParseDate(dateValue)
: null;
return new RuNkckiCursor(pendingDocuments, pendingMappings, knownBulletins, lastListingFetch);
}
private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuids;
}
var result = new List<Guid>(array.Count);
foreach (var element in array)
{
if (Guid.TryParse(element?.ToString(), out var guid))
{
result.Add(guid);
}
}
return result;
}
private static IReadOnlyCollection<string> ReadStringArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyBulletins;
}
var result = new List<string>(array.Count);
foreach (var element in array)
{
var text = element?.ToString();
if (!string.IsNullOrWhiteSpace(text))
{
result.Add(text);
}
}
return result;
}
private static DateTimeOffset? ParseDate(BsonValue value)
=> value.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
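A short round-trip sketch for the cursor (internal type, so callable from this assembly or the test project granted InternalsVisibleTo; values are illustrative):
var cursor = RuNkckiCursor.Empty
    .WithKnownBulletins(new[] { "2025-01-vulnerabilities" })
    .WithLastListingFetch(DateTimeOffset.UtcNow);
var persisted = cursor.ToBsonDocument();                // stored on the source state record
var restored = RuNkckiCursor.FromBson(persisted);       // yields the same bulletin list and timestamp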

View File

@@ -0,0 +1,169 @@
using System.Collections.Immutable;
using System.Linq;
using System.Globalization;
using System.Text.Json;
namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal;
internal static class RuNkckiJsonParser
{
public static RuNkckiVulnerabilityDto Parse(JsonElement element)
{
var fstecId = element.TryGetProperty("vuln_id", out var vulnIdElement) && vulnIdElement.TryGetProperty("FSTEC", out var fstec)
    ? Normalize(fstec.GetString())
    : null;
var mitreId = element.TryGetProperty("vuln_id", out vulnIdElement) && vulnIdElement.TryGetProperty("MITRE", out var mitre)
    ? Normalize(mitre.GetString())
    : null;
var datePublished = ParseDate(element.TryGetProperty("date_published", out var published) ? published.GetString() : null);
var dateUpdated = ParseDate(element.TryGetProperty("date_updated", out var updated) ? updated.GetString() : null);
var cvssRating = Normalize(element.TryGetProperty("cvss_rating", out var rating) ? rating.GetString() : null);
bool? patchAvailable = element.TryGetProperty("patch_available", out var patch) ? patch.ValueKind switch
{
JsonValueKind.True => true,
JsonValueKind.False => false,
_ => null,
} : null;
var description = Normalize(element.TryGetProperty("description", out var desc) ? desc.GetString() : null);
var mitigation = Normalize(element.TryGetProperty("mitigation", out var mitigationElement) ? mitigationElement.GetString() : null);
var productCategory = Normalize(element.TryGetProperty("product_category", out var category) ? category.GetString() : null);
var impact = Normalize(element.TryGetProperty("impact", out var impactElement) ? impactElement.GetString() : null);
var method = Normalize(element.TryGetProperty("method_of_exploitation", out var methodElement) ? methodElement.GetString() : null);
bool? userInteraction = element.TryGetProperty("user_interaction", out var uiElement) ? uiElement.ValueKind switch
{
JsonValueKind.True => true,
JsonValueKind.False => false,
_ => null,
} : null;
string? softwareText = null;
bool? softwareHasCpe = null;
if (element.TryGetProperty("vulnerable_software", out var softwareElement))
{
if (softwareElement.TryGetProperty("software_text", out var textElement))
{
softwareText = Normalize(textElement.GetString()?.Replace('\r', ' '));
}
if (softwareElement.TryGetProperty("cpe", out var cpeElement))
{
softwareHasCpe = cpeElement.ValueKind switch
{
JsonValueKind.True => true,
JsonValueKind.False => false,
_ => null,
};
}
}
RuNkckiCweDto? cweDto = null;
if (element.TryGetProperty("cwe", out var cweElement))
{
int? number = null;
if (cweElement.TryGetProperty("cwe_number", out var numberElement))
{
if (numberElement.ValueKind == JsonValueKind.Number && numberElement.TryGetInt32(out var parsed))
{
number = parsed;
}
else if (int.TryParse(numberElement.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedInt))
{
number = parsedInt;
}
}
var cweDescription = Normalize(cweElement.TryGetProperty("cwe_description", out var descElement) ? descElement.GetString() : null);
if (number.HasValue || !string.IsNullOrWhiteSpace(cweDescription))
{
cweDto = new RuNkckiCweDto(number, cweDescription);
}
}
double? cvssScore = element.TryGetProperty("cvss", out var cvssElement) && cvssElement.TryGetProperty("cvss_score", out var scoreElement)
? ParseDouble(scoreElement)
: null;
var cvssVector = element.TryGetProperty("cvss", out cvssElement) && cvssElement.TryGetProperty("cvss_vector", out var vectorElement)
? Normalize(vectorElement.GetString())
: null;
double? cvssScoreV4 = element.TryGetProperty("cvss", out cvssElement) && cvssElement.TryGetProperty("cvss_score_v4", out var scoreV4Element)
? ParseDouble(scoreV4Element)
: null;
var cvssVectorV4 = element.TryGetProperty("cvss", out cvssElement) && cvssElement.TryGetProperty("cvss_vector_v4", out var vectorV4Element)
? Normalize(vectorV4Element.GetString())
: null;
var urls = element.TryGetProperty("urls", out var urlsElement) && urlsElement.ValueKind == JsonValueKind.Array
? urlsElement.EnumerateArray()
.Select(static url => Normalize(url.GetString()))
.Where(static url => !string.IsNullOrWhiteSpace(url))
.Cast<string>()
.ToImmutableArray()
: ImmutableArray<string>.Empty;
return new RuNkckiVulnerabilityDto(
fstecId,
mitreId,
datePublished,
dateUpdated,
cvssRating,
patchAvailable,
description,
cweDto,
productCategory,
mitigation,
softwareText,
softwareHasCpe,
cvssScore,
cvssVector,
cvssScoreV4,
cvssVectorV4,
impact,
method,
userInteraction,
urls);
}
private static double? ParseDouble(JsonElement element)
{
if (element.ValueKind == JsonValueKind.Number && element.TryGetDouble(out var value))
{
return value;
}
if (element.ValueKind == JsonValueKind.String && double.TryParse(element.GetString(), NumberStyles.Any, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
return null;
}
private static DateTimeOffset? ParseDate(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed))
{
return parsed;
}
if (DateTimeOffset.TryParse(value, CultureInfo.GetCultureInfo("ru-RU"), DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var ruParsed))
{
return ruParsed;
}
return null;
}
private static string? Normalize(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
return value.Replace('\r', ' ').Replace('\n', ' ').Trim();
}
}
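An illustrative payload shape for the parser above; the property names mirror what `Parse` reads, and every value here is invented:
var sampleJson = """
{
  "vuln_id": { "FSTEC": "2025-00123", "MITRE": "CVE-2025-0123" },
  "date_published": "2025-10-01",
  "cvss_rating": "High",
  "patch_available": true,
  "cvss": { "cvss_score": 7.5, "cvss_vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
  "urls": [ "https://example.invalid/advisory" ]
}
""";
using var document = JsonDocument.Parse(sampleJson);
var dto = RuNkckiJsonParser.Parse(document.RootElement);   // dto.FstecId == "2025-00123", dto.CvssScore == 7.5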

View File

@@ -0,0 +1,36 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal;
internal sealed record RuNkckiVulnerabilityDto(
string? FstecId,
string? MitreId,
DateTimeOffset? DatePublished,
DateTimeOffset? DateUpdated,
string? CvssRating,
bool? PatchAvailable,
string? Description,
RuNkckiCweDto? Cwe,
string? ProductCategory,
string? Mitigation,
string? VulnerableSoftwareText,
bool? VulnerableSoftwareHasCpe,
double? CvssScore,
string? CvssVector,
double? CvssScoreV4,
string? CvssVectorV4,
string? Impact,
string? MethodOfExploitation,
bool? UserInteraction,
ImmutableArray<string> Urls)
{
[JsonIgnore]
public string AdvisoryKey => !string.IsNullOrWhiteSpace(FstecId)
? FstecId!
: !string.IsNullOrWhiteSpace(MitreId)
? MitreId!
: Guid.NewGuid().ToString();
}
internal sealed record RuNkckiCweDto(int? Number, string? Description);

View File

@@ -0,0 +1,43 @@
using StellaOps.Feedser.Core.Jobs;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
internal static class RuNkckiJobKinds
{
public const string Fetch = "source:ru-nkcki:fetch";
public const string Parse = "source:ru-nkcki:parse";
public const string Map = "source:ru-nkcki:map";
}
internal sealed class RuNkckiFetchJob : IJob
{
private readonly RuNkckiConnector _connector;
public RuNkckiFetchJob(RuNkckiConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
internal sealed class RuNkckiParseJob : IJob
{
private readonly RuNkckiConnector _connector;
public RuNkckiParseJob(RuNkckiConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
internal sealed class RuNkckiMapJob : IJob
{
private readonly RuNkckiConnector _connector;
public RuNkckiMapJob(RuNkckiConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Feedser.Source.Ru.Nkcki.Tests")]

View File

@@ -0,0 +1,825 @@
using System.Collections.Immutable;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Net;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using AngleSharp.Html.Parser;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Source.Common.Fetch;
using StellaOps.Feedser.Source.Ru.Nkcki.Configuration;
using StellaOps.Feedser.Source.Ru.Nkcki.Internal;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Documents;
using StellaOps.Feedser.Storage.Mongo.Dtos;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
public sealed class RuNkckiConnector : IFeedConnector
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
};
private static readonly string[] ListingAcceptHeaders =
{
"text/html",
"application/xhtml+xml;q=0.9",
"text/plain;q=0.1",
};
private static readonly string[] BulletinAcceptHeaders =
{
"application/zip",
"application/octet-stream",
"application/x-zip-compressed",
};
private readonly SourceFetchService _fetchService;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly RuNkckiOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<RuNkckiConnector> _logger;
private readonly string _cacheDirectory;
private readonly HtmlParser _htmlParser = new();
public RuNkckiConnector(
SourceFetchService fetchService,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<RuNkckiOptions> options,
TimeProvider? timeProvider,
ILogger<RuNkckiConnector> logger)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
EnsureCacheDirectory();
}
public string SourceName => RuNkckiConnectorPlugin.SourceName;
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var pendingDocuments = cursor.PendingDocuments.ToHashSet();
var pendingMappings = cursor.PendingMappings.ToHashSet();
var knownBulletins = cursor.KnownBulletins.ToHashSet(StringComparer.OrdinalIgnoreCase);
var now = _timeProvider.GetUtcNow();
var processed = 0;
IReadOnlyList<BulletinAttachment> attachments = Array.Empty<BulletinAttachment>();
try
{
var listingResult = await FetchListingAsync(cancellationToken).ConfigureAwait(false);
if (!listingResult.IsSuccess || listingResult.Content is null)
{
_logger.LogWarning("NKCKI listing fetch returned no content (status={Status})", listingResult.StatusCode);
processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false);
await UpdateCursorAsync(cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithKnownBulletins(NormalizeBulletins(knownBulletins))
.WithLastListingFetch(now), cancellationToken).ConfigureAwait(false);
return;
}
attachments = await ParseListingAsync(listingResult.Content, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
_logger.LogWarning(ex, "NKCKI listing fetch failed; attempting cached bulletins");
processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false);
await UpdateCursorAsync(cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithKnownBulletins(NormalizeBulletins(knownBulletins))
.WithLastListingFetch(cursor.LastListingFetchAt ?? now), cancellationToken).ConfigureAwait(false);
return;
}
if (attachments.Count == 0)
{
_logger.LogDebug("NKCKI listing contained no bulletin attachments");
processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false);
await UpdateCursorAsync(cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithKnownBulletins(NormalizeBulletins(knownBulletins))
.WithLastListingFetch(now), cancellationToken).ConfigureAwait(false);
return;
}
var newAttachments = attachments
.Where(attachment => !knownBulletins.Contains(attachment.Id))
.Take(_options.MaxBulletinsPerFetch)
.ToList();
if (newAttachments.Count == 0)
{
await UpdateCursorAsync(cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithKnownBulletins(NormalizeBulletins(knownBulletins))
.WithLastListingFetch(now), cancellationToken).ConfigureAwait(false);
return;
}
foreach (var attachment in newAttachments)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var request = new SourceFetchRequest(RuNkckiOptions.HttpClientName, SourceName, attachment.Uri)
{
AcceptHeaders = BulletinAcceptHeaders,
TimeoutOverride = _options.RequestTimeout,
};
var attachmentResult = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false);
if (!attachmentResult.IsSuccess || attachmentResult.Content is null)
{
if (TryReadCachedBulletin(attachment.Id, out var cachedBytes))
{
_logger.LogWarning("NKCKI bulletin {BulletinId} unavailable (status={Status}); using cached artefact", attachment.Id, attachmentResult.StatusCode);
processed = await ProcessBulletinEntriesAsync(cachedBytes, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false);
knownBulletins.Add(attachment.Id);
}
else
{
_logger.LogWarning("NKCKI bulletin {BulletinId} returned no content (status={Status})", attachment.Id, attachmentResult.StatusCode);
}
continue;
}
TryWriteCachedBulletin(attachment.Id, attachmentResult.Content);
processed = await ProcessBulletinEntriesAsync(attachmentResult.Content, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false);
knownBulletins.Add(attachment.Id);
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
if (TryReadCachedBulletin(attachment.Id, out var cachedBytes))
{
_logger.LogWarning(ex, "NKCKI bulletin fetch failed for {BulletinId}; using cached artefact", attachment.Id);
processed = await ProcessBulletinEntriesAsync(cachedBytes, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false);
knownBulletins.Add(attachment.Id);
}
else
{
_logger.LogWarning(ex, "NKCKI bulletin fetch failed for {BulletinId}", attachment.Id);
await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
}
if (processed >= _options.MaxVulnerabilitiesPerFetch)
{
break;
}
if (_options.RequestDelay > TimeSpan.Zero)
{
try
{
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
break;
}
}
}
var normalizedBulletins = NormalizeBulletins(knownBulletins);
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithKnownBulletins(normalizedBulletins)
.WithLastListingFetch(now);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingDocuments.Count == 0)
{
return;
}
var pendingDocuments = cursor.PendingDocuments.ToList();
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingDocuments)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
if (!document.GridFsId.HasValue)
{
_logger.LogWarning("NKCKI document {DocumentId} missing GridFS payload", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
byte[] payload;
try
{
payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "NKCKI unable to download raw document {DocumentId}", documentId);
throw;
}
RuNkckiVulnerabilityDto? dto;
try
{
dto = JsonSerializer.Deserialize<RuNkckiVulnerabilityDto>(payload, SerializerOptions);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "NKCKI failed to deserialize document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
if (dto is null)
{
_logger.LogWarning("NKCKI document {DocumentId} produced null DTO", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
var bson = BsonDocument.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-nkcki.v1", bson, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
if (!pendingMappings.Contains(documentId))
{
pendingMappings.Add(documentId);
}
}
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingMappings.Count == 0)
{
return;
}
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingMappings)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
pendingMappings.Remove(documentId);
continue;
}
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
if (dtoRecord is null)
{
_logger.LogWarning("NKCKI document {DocumentId} missing DTO payload", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
RuNkckiVulnerabilityDto dto;
try
{
dto = JsonSerializer.Deserialize<RuNkckiVulnerabilityDto>(dtoRecord.Payload.ToString(), SerializerOptions) ?? throw new InvalidOperationException("DTO deserialized to null");
}
catch (Exception ex)
{
_logger.LogError(ex, "NKCKI failed to deserialize DTO for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
try
{
var advisory = RuNkckiMapper.Map(dto, document, dtoRecord.ValidatedAt);
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
catch (Exception ex)
{
_logger.LogError(ex, "NKCKI mapping failed for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
}
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
private async Task<int> ProcessCachedBulletinsAsync(
HashSet<Guid> pendingDocuments,
HashSet<Guid> pendingMappings,
HashSet<string> knownBulletins,
DateTimeOffset now,
int processed,
CancellationToken cancellationToken)
{
if (!Directory.Exists(_cacheDirectory))
{
return processed;
}
var updated = processed;
var cacheFiles = Directory
.EnumerateFiles(_cacheDirectory, "*.json.zip", SearchOption.TopDirectoryOnly)
.OrderBy(static path => path, StringComparer.OrdinalIgnoreCase)
.ToList();
foreach (var filePath in cacheFiles)
{
cancellationToken.ThrowIfCancellationRequested();
var bulletinId = ExtractBulletinIdFromCachePath(filePath);
if (string.IsNullOrWhiteSpace(bulletinId) || knownBulletins.Contains(bulletinId))
{
continue;
}
byte[] content;
try
{
content = File.ReadAllBytes(filePath);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "NKCKI failed to read cached bulletin at {CachePath}", filePath);
continue;
}
updated = await ProcessBulletinEntriesAsync(content, bulletinId, pendingDocuments, pendingMappings, now, updated, cancellationToken).ConfigureAwait(false);
knownBulletins.Add(bulletinId);
if (updated >= _options.MaxVulnerabilitiesPerFetch)
{
break;
}
}
return updated;
}
private async Task<int> ProcessBulletinEntriesAsync(
byte[] content,
string bulletinId,
HashSet<Guid> pendingDocuments,
HashSet<Guid> pendingMappings,
DateTimeOffset now,
int processed,
CancellationToken cancellationToken)
{
if (content.Length == 0)
{
return processed;
}
var updated = processed;
using var archiveStream = new MemoryStream(content, writable: false);
using var archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: false);
foreach (var entry in archive.Entries.OrderBy(static e => e.FullName, StringComparer.OrdinalIgnoreCase))
{
cancellationToken.ThrowIfCancellationRequested();
if (!entry.FullName.EndsWith(".json", StringComparison.OrdinalIgnoreCase))
{
continue;
}
using var entryStream = entry.Open();
using var buffer = new MemoryStream();
await entryStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
if (buffer.Length == 0)
{
continue;
}
buffer.Position = 0;
using var document = await JsonDocument.ParseAsync(buffer, cancellationToken: cancellationToken).ConfigureAwait(false);
updated = await ProcessBulletinJsonElementAsync(document.RootElement, entry.FullName, bulletinId, pendingDocuments, pendingMappings, now, updated, cancellationToken).ConfigureAwait(false);
if (updated >= _options.MaxVulnerabilitiesPerFetch)
{
break;
}
}
return updated;
}
private async Task<int> ProcessBulletinJsonElementAsync(
JsonElement element,
string entryName,
string bulletinId,
HashSet<Guid> pendingDocuments,
HashSet<Guid> pendingMappings,
DateTimeOffset now,
int processed,
CancellationToken cancellationToken)
{
var updated = processed;
switch (element.ValueKind)
{
case JsonValueKind.Array:
foreach (var child in element.EnumerateArray())
{
cancellationToken.ThrowIfCancellationRequested();
if (updated >= _options.MaxVulnerabilitiesPerFetch)
{
break;
}
if (child.ValueKind != JsonValueKind.Object)
{
continue;
}
if (await ProcessVulnerabilityObjectAsync(child, entryName, bulletinId, pendingDocuments, pendingMappings, now, cancellationToken).ConfigureAwait(false))
{
updated++;
}
}
break;
case JsonValueKind.Object:
if (await ProcessVulnerabilityObjectAsync(element, entryName, bulletinId, pendingDocuments, pendingMappings, now, cancellationToken).ConfigureAwait(false))
{
updated++;
}
break;
}
return updated;
}
private async Task<bool> ProcessVulnerabilityObjectAsync(
JsonElement element,
string entryName,
string bulletinId,
HashSet<Guid> pendingDocuments,
HashSet<Guid> pendingMappings,
DateTimeOffset now,
CancellationToken cancellationToken)
{
RuNkckiVulnerabilityDto dto;
try
{
dto = RuNkckiJsonParser.Parse(element);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "NKCKI failed to parse vulnerability in bulletin {BulletinId} entry {Entry}", bulletinId, entryName);
return false;
}
var payload = JsonSerializer.SerializeToUtf8Bytes(dto, SerializerOptions);
var sha = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
var documentUri = BuildDocumentUri(dto);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false);
if (existing is not null && string.Equals(existing.Sha256, sha, StringComparison.OrdinalIgnoreCase))
{
return false;
}
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["ru-nkcki.bulletin"] = bulletinId,
["ru-nkcki.entry"] = entryName,
};
if (!string.IsNullOrWhiteSpace(dto.FstecId))
{
metadata["ru-nkcki.fstec_id"] = dto.FstecId!;
}
if (!string.IsNullOrWhiteSpace(dto.MitreId))
{
metadata["ru-nkcki.mitre_id"] = dto.MitreId!;
}
var recordId = existing?.Id ?? Guid.NewGuid();
var lastModified = dto.DateUpdated ?? dto.DatePublished;
var record = new DocumentRecord(
recordId,
SourceName,
documentUri,
now,
sha,
DocumentStatuses.PendingParse,
"application/json",
Headers: null,
Metadata: metadata,
Etag: null,
LastModified: lastModified,
GridFsId: gridFsId,
ExpiresAt: null);
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
pendingDocuments.Add(upserted.Id);
pendingMappings.Remove(upserted.Id);
return true;
}
private async Task<SourceFetchContentResult> FetchListingAsync(CancellationToken cancellationToken)
{
try
{
var request = new SourceFetchRequest(RuNkckiOptions.HttpClientName, SourceName, _options.ListingUri)
{
AcceptHeaders = ListingAcceptHeaders,
TimeoutOverride = _options.RequestTimeout,
};
return await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
_logger.LogError(ex, "NKCKI listing fetch failed for {ListingUri}", _options.ListingUri);
await _stateRepository.MarkFailureAsync(SourceName, _timeProvider.GetUtcNow(), _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
}
private async Task<IReadOnlyList<BulletinAttachment>> ParseListingAsync(byte[] content, CancellationToken cancellationToken)
{
var html = Encoding.UTF8.GetString(content);
var document = await _htmlParser.ParseDocumentAsync(html, cancellationToken).ConfigureAwait(false);
var anchors = document.QuerySelectorAll("a[href$='.json.zip']");
var attachments = new List<BulletinAttachment>();
foreach (var anchor in anchors)
{
var href = anchor.GetAttribute("href");
if (string.IsNullOrWhiteSpace(href))
{
continue;
}
if (!Uri.TryCreate(_options.BaseAddress, href, out var absoluteUri))
{
continue;
}
var id = DeriveBulletinId(absoluteUri);
if (string.IsNullOrWhiteSpace(id))
{
continue;
}
var title = anchor.GetAttribute("title");
if (string.IsNullOrWhiteSpace(title))
{
title = anchor.TextContent?.Trim();
}
attachments.Add(new BulletinAttachment(id, absoluteUri, title ?? id));
}
return attachments;
}
private static string DeriveBulletinId(Uri uri)
{
var fileName = Path.GetFileName(uri.AbsolutePath);
if (string.IsNullOrWhiteSpace(fileName))
{
return Guid.NewGuid().ToString("N");
}
if (fileName.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
fileName = fileName[..^4];
}
if (fileName.EndsWith(".json", StringComparison.OrdinalIgnoreCase))
{
fileName = fileName[..^5];
}
return fileName.Replace('_', '-');
}
private static string BuildDocumentUri(RuNkckiVulnerabilityDto dto)
{
if (!string.IsNullOrWhiteSpace(dto.FstecId))
{
var slug = dto.FstecId.Contains(':', StringComparison.Ordinal)
? dto.FstecId[(dto.FstecId.IndexOf(':') + 1)..]
: dto.FstecId;
return $"https://cert.gov.ru/materialy/uyazvimosti/{slug}";
}
if (!string.IsNullOrWhiteSpace(dto.MitreId))
{
return $"https://nvd.nist.gov/vuln/detail/{dto.MitreId}";
}
return $"https://cert.gov.ru/materialy/uyazvimosti/{Guid.NewGuid():N}";
}
private string ResolveCacheDirectory(string? configuredPath)
{
if (!string.IsNullOrWhiteSpace(configuredPath))
{
return Path.GetFullPath(Path.IsPathRooted(configuredPath)
? configuredPath
: Path.Combine(AppContext.BaseDirectory, configuredPath));
}
return Path.Combine(AppContext.BaseDirectory, "cache", RuNkckiConnectorPlugin.SourceName);
}
private void EnsureCacheDirectory()
{
try
{
Directory.CreateDirectory(_cacheDirectory);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "NKCKI unable to ensure cache directory {CachePath}", _cacheDirectory);
}
}
private string GetBulletinCachePath(string bulletinId)
{
var fileStem = string.IsNullOrWhiteSpace(bulletinId)
? Guid.NewGuid().ToString("N")
: Uri.EscapeDataString(bulletinId);
return Path.Combine(_cacheDirectory, $"{fileStem}.json.zip");
}
private static string ExtractBulletinIdFromCachePath(string path)
{
if (string.IsNullOrWhiteSpace(path))
{
return string.Empty;
}
var fileName = Path.GetFileName(path);
if (fileName.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
fileName = fileName[..^4];
}
if (fileName.EndsWith(".json", StringComparison.OrdinalIgnoreCase))
{
fileName = fileName[..^5];
}
return Uri.UnescapeDataString(fileName);
}
private void TryWriteCachedBulletin(string bulletinId, byte[] content)
{
try
{
var cachePath = GetBulletinCachePath(bulletinId);
Directory.CreateDirectory(Path.GetDirectoryName(cachePath)!);
File.WriteAllBytes(cachePath, content);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "NKCKI failed to cache bulletin {BulletinId}", bulletinId);
}
}
private bool TryReadCachedBulletin(string bulletinId, out byte[] content)
{
var cachePath = GetBulletinCachePath(bulletinId);
try
{
if (File.Exists(cachePath))
{
content = File.ReadAllBytes(cachePath);
return true;
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "NKCKI failed to read cached bulletin {BulletinId}", bulletinId);
}
content = Array.Empty<byte>();
return false;
}
private IReadOnlyCollection<string> NormalizeBulletins(IEnumerable<string> bulletins)
{
var normalized = (bulletins ?? Enumerable.Empty<string>())
.Where(static id => !string.IsNullOrWhiteSpace(id))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static id => id, StringComparer.OrdinalIgnoreCase)
.ToList();
if (normalized.Count <= _options.KnownBulletinCapacity)
{
return normalized.ToArray();
}
var skip = normalized.Count - _options.KnownBulletinCapacity;
return normalized.Skip(skip).ToArray();
}
private async Task<RuNkckiCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? RuNkckiCursor.Empty : RuNkckiCursor.FromBson(state.Cursor);
}
private Task UpdateCursorAsync(RuNkckiCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBsonDocument();
var completedAt = cursor.LastListingFetchAt ?? _timeProvider.GetUtcNow();
return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken);
}
private readonly record struct BulletinAttachment(string Id, Uri Uri, string Title);
}
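The fetch/parse/map jobs registered earlier call into this connector in that order; a manual invocation sketch, assuming a fully configured `IServiceProvider` named `provider`:
var connector = provider.GetRequiredService<RuNkckiConnector>();
await connector.FetchAsync(provider, CancellationToken.None);   // download bulletin archives and persist raw documents
await connector.ParseAsync(provider, CancellationToken.None);   // deserialize raw documents into DTO records
await connector.MapAsync(provider, CancellationToken.None);     // map DTOs into canonical advisories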

View File

@@ -0,0 +1,19 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
public sealed class RuNkckiConnectorPlugin : IConnectorPlugin
{
public const string SourceName = "ru-nkcki";
public string Name => SourceName;
public bool IsAvailable(IServiceProvider services) => services is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return ActivatorUtilities.CreateInstance<RuNkckiConnector>(services);
}
}

View File

@@ -0,0 +1,53 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Source.Ru.Nkcki.Configuration;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
public sealed class RuNkckiDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "feedser:sources:ru-nkcki";
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddRuNkckiConnector(options =>
{
configuration.GetSection(ConfigurationSection).Bind(options);
options.Validate();
});
services.AddTransient<RuNkckiFetchJob>();
services.AddTransient<RuNkckiParseJob>();
services.AddTransient<RuNkckiMapJob>();
services.PostConfigure<JobSchedulerOptions>(options =>
{
EnsureJob(options, RuNkckiJobKinds.Fetch, typeof(RuNkckiFetchJob));
EnsureJob(options, RuNkckiJobKinds.Parse, typeof(RuNkckiParseJob));
EnsureJob(options, RuNkckiJobKinds.Map, typeof(RuNkckiMapJob));
});
return services;
}
private static void EnsureJob(JobSchedulerOptions schedulerOptions, string kind, Type jobType)
{
if (schedulerOptions.Definitions.ContainsKey(kind))
{
return;
}
schedulerOptions.Definitions[kind] = new JobDefinition(
kind,
jobType,
schedulerOptions.DefaultTimeout,
schedulerOptions.DefaultLeaseDuration,
CronExpression: null,
Enabled: true);
}
}
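A hedged sketch of the configuration keys this routine binds; the in-memory provider is used only for illustration, and any source exposing the `feedser:sources:ru-nkcki` section works the same way:
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["feedser:sources:ru-nkcki:maxBulletinsPerFetch"] = "10",
        ["feedser:sources:ru-nkcki:requestDelay"] = "00:00:01",
        ["feedser:sources:ru-nkcki:cacheDirectory"] = "cache/ru-nkcki",
    })
    .Build();
new RuNkckiDependencyInjectionRoutine().Register(new ServiceCollection(), configuration);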

View File

@@ -0,0 +1,43 @@
using System.Net;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Source.Common.Http;
using StellaOps.Feedser.Source.Ru.Nkcki.Configuration;
namespace StellaOps.Feedser.Source.Ru.Nkcki;
public static class RuNkckiServiceCollectionExtensions
{
public static IServiceCollection AddRuNkckiConnector(this IServiceCollection services, Action<RuNkckiOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
services.AddOptions<RuNkckiOptions>()
.Configure(configure)
.PostConfigure(static options => options.Validate());
services.AddSourceHttpClient(RuNkckiOptions.HttpClientName, (sp, clientOptions) =>
{
var options = sp.GetRequiredService<IOptions<RuNkckiOptions>>().Value;
clientOptions.BaseAddress = options.BaseAddress;
clientOptions.Timeout = options.RequestTimeout;
clientOptions.UserAgent = options.UserAgent;
clientOptions.AllowAutoRedirect = true;
clientOptions.DefaultRequestHeaders["Accept-Language"] = options.AcceptLanguage;
clientOptions.AllowedHosts.Clear();
clientOptions.AllowedHosts.Add(options.BaseAddress.Host);
clientOptions.ConfigureHandler = handler =>
{
handler.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
handler.AllowAutoRedirect = true;
handler.UseCookies = true;
handler.CookieContainer = new CookieContainer();
};
});
services.AddTransient<RuNkckiConnector>();
return services;
}
}

View File

@@ -1,16 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AngleSharp" Version="1.1.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

File diff suppressed because it is too large