feat: Add tests for RichGraphPublisher and RichGraphWriter

- Implement unit tests for RichGraphPublisher to verify graph publishing to CAS.
- Implement unit tests for RichGraphWriter to ensure correct writing of canonical graphs and metadata (determinism pattern sketched below).
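
A minimal, self-contained sketch of the determinism check these tests rely on. System.Text.Json and SHA-256 stand in for the real writer; RichGraphWriter's actual API is not shown in this diff, so nothing below should be read as the real contract.

```csharp
// Sketch only: System.Text.Json stands in for the canonical graph writer.
using System;
using System.Security.Cryptography;
using System.Text.Json;
using FluentAssertions;
using Xunit;

public sealed class CanonicalGraphDeterminismSketch
{
    private static readonly JsonSerializerOptions Canonical = new() { WriteIndented = false };

    [Fact]
    public void SameGraph_SerializesToIdenticalBytesAndHash()
    {
        var graph = new { nodes = new[] { "a", "b" }, edges = new[] { new[] { "a", "b" } } };

        byte[] first = JsonSerializer.SerializeToUtf8Bytes(graph, Canonical);
        byte[] second = JsonSerializer.SerializeToUtf8Bytes(graph, Canonical);

        // Publishing to CAS is only verifiable this way if the writer is byte-for-byte stable.
        Convert.ToHexString(SHA256.HashData(first))
            .Should().Be(Convert.ToHexString(SHA256.HashData(second)));
    }
}
```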

feat: Implement AOC Guard validation logic

- Add AOC Guard validation logic to enforce document structure and field constraints.
- Introduce violation codes for various validation errors (shape sketched below).
- Implement tests for AOC Guard to validate expected behavior.
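
A rough sketch of the "validate a document, return violation codes" shape described above. The codes, field names, and rules here are placeholders chosen for illustration, not the actual AOC Guard contract.

```csharp
// Illustrative only: codes and rules are placeholders, not the real AOC Guard contract.
using System.Collections.Generic;
using System.Text.Json;

public static class AocGuardSketch
{
    public const string MissingTenant = "AOC-001";   // hypothetical violation code
    public const string ForbiddenField = "AOC-002";  // hypothetical violation code

    public static IReadOnlyList<string> Validate(JsonElement document)
    {
        var violations = new List<string>();

        // Structural constraint: every ingested document must carry a tenant.
        if (!document.TryGetProperty("tenant", out _))
        {
            violations.Add(MissingTenant);
        }

        // Field constraint: derived fields must not appear in raw ingestion payloads.
        if (document.TryGetProperty("severity", out _))
        {
            violations.Add(ForbiddenField);
        }

        return violations;
    }
}
```

A caller would parse the raw payload with `JsonDocument.Parse(...)` and treat a non-empty result as a guard failure; the tests mentioned above then pin the expected codes per input.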

feat: Create Console Status API client and service

- Implement ConsoleStatusClient for fetching console status and streaming run events.
- Create ConsoleStatusService to manage console status polling and event subscriptions.
- Add tests for ConsoleStatusClient to verify API interactions.

feat: Develop Console Status component

- Create ConsoleStatusComponent for displaying console status and run events.
- Implement UI for showing status metrics and handling user interactions.
- Add styles for console status display.

test: Add tests for Console Status store

- Implement tests for ConsoleStatusStore to verify event handling and state management.
StellaOps Bot
2025-12-01 07:34:50 +02:00
parent 7df0677e34
commit c11d87d252
108 changed files with 4773 additions and 351 deletions

View File

@@ -0,0 +1,24 @@
# StellaOps.Authority.Storage.Postgres — Agent Charter
## Mission
Deliver PostgreSQL-backed persistence for Authority (tenants, users, roles, permissions, tokens, refresh tokens, API keys, sessions, audit) per `docs/db/SPECIFICATION.md` §5.1 and enable the Mongo → Postgres dual-write/backfill cutover with deterministic behaviour.
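
As a rough mental model (not the Authority contracts), the dual-write phase can be pictured as a decorator that shadows every write into Postgres while Mongo stays the read path until cutover; the interface and record below are illustrative assumptions.

```csharp
// Sketch of the dual-write shape; IUserRepository/UserRecord are assumed for illustration.
using System;
using System.Threading;
using System.Threading.Tasks;

public sealed record UserRecord(Guid Id, string TenantId, string Email);

public interface IUserRepository
{
    Task UpsertAsync(UserRecord user, CancellationToken cancellationToken = default);
    Task<UserRecord?> GetAsync(Guid id, CancellationToken cancellationToken = default);
}

public sealed class DualWriteUserRepository : IUserRepository
{
    private readonly IUserRepository _mongo;    // current source of truth
    private readonly IUserRepository _postgres; // shadow writes during backfill

    public DualWriteUserRepository(IUserRepository mongo, IUserRepository postgres)
        => (_mongo, _postgres) = (mongo, postgres);

    public async Task UpsertAsync(UserRecord user, CancellationToken cancellationToken = default)
    {
        await _mongo.UpsertAsync(user, cancellationToken);
        await _postgres.UpsertAsync(user, cancellationToken); // same payload, deterministic mapping
    }

    public Task<UserRecord?> GetAsync(Guid id, CancellationToken cancellationToken = default)
        => _mongo.GetAsync(id, cancellationToken); // reads flip to Postgres at cutover
}
```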
## Working Directory
- `src/Authority/__Libraries/StellaOps.Authority.Storage.Postgres`
## Required Reading
- docs/modules/authority/architecture.md
- docs/db/README.md
- docs/db/SPECIFICATION.md (Authority schema §5.1; shared rules)
- docs/db/RULES.md
- docs/db/VERIFICATION.md
- docs/db/tasks/PHASE_1_AUTHORITY.md
- src/Authority/StellaOps.Authority/AGENTS.md (host integration expectations)
## Working Agreement
- Update related sprint rows in `docs/implplan/SPRINT_*.md` when work starts/finishes; keep statuses `TODO → DOING → DONE/BLOCKED`.
- Keep migrations idempotent and deterministic (stable ordering, UTC timestamps). Use the curated NuGet cache at `local-nugets/`; no external feeds.
- Align schema and repository contracts to `docs/db/SPECIFICATION.md`; mirror any contract/schema change into that spec and the sprint's Decisions & Risks.
- Tests live in `src/Authority/__Tests/StellaOps.Authority.Storage.Postgres.Tests`; maintain deterministic Testcontainers config (fixed image tag, seeded data; see the sketch after this list) and cover all repositories plus determinism of token/refresh generation.
- Use `StellaOps.Cryptography` abstractions for password/hash handling; never log secrets or hashes. Ensure transaction boundaries and retries follow `docs/db/RULES.md`.
- Coordinate with Authority host service (`StellaOps.Authority`) before altering DI registrations or shared models; avoid cross-module edits unless the sprint explicitly allows and logs them.
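
A minimal sketch of the deterministic Testcontainers setup the list above asks for, using the `Testcontainers.PostgreSql` package with a pinned image tag; the fixture name and seed step are illustrative.

```csharp
// Sketch: pinned image tag + explicit seeding keeps container-backed tests deterministic.
using System.Threading.Tasks;
using Testcontainers.PostgreSql;
using Xunit;

public sealed class PostgresContainerFixture : IAsyncLifetime
{
    private readonly PostgreSqlContainer _container = new PostgreSqlBuilder()
        .WithImage("postgres:16.4-alpine") // fixed tag, never "latest"
        .Build();

    public string ConnectionString => _container.GetConnectionString();

    public async Task InitializeAsync()
    {
        await _container.StartAsync();
        // Apply migrations and seed fixed IDs / UTC timestamps here so runs are reproducible.
    }

    public Task DisposeAsync() => _container.DisposeAsync().AsTask();
}
```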

View File

@@ -45,7 +45,7 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
SELECT id, tenant_id, user_id, token_hash, token_type, scopes, client_id, issued_at, expires_at, revoked_at, revoked_by, metadata
FROM authority.tokens
WHERE tenant_id = @tenant_id AND user_id = @user_id AND revoked_at IS NULL
ORDER BY issued_at DESC
ORDER BY issued_at DESC, id ASC
""";
return await QueryAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
@@ -168,7 +168,7 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
SELECT id, tenant_id, user_id, token_hash, access_token_id, client_id, issued_at, expires_at, revoked_at, revoked_by, replaced_by, metadata
FROM authority.refresh_tokens
WHERE tenant_id = @tenant_id AND user_id = @user_id AND revoked_at IS NULL
ORDER BY issued_at DESC
ORDER BY issued_at DESC, id ASC
""";
return await QueryAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },

View File

@@ -1,3 +1,4 @@
using System.Linq;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
@@ -136,6 +137,70 @@ public sealed class RefreshTokenRepositoryTests : IAsyncLifetime
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
}
[Fact]
public async Task GetByUserId_IsDeterministic_WhenIssuedAtTies()
{
// Arrange: fixed IDs with same IssuedAt to assert stable ordering
var userId = Guid.NewGuid();
var issuedAt = new DateTimeOffset(2025, 11, 30, 12, 0, 0, TimeSpan.Zero);
var tokens = new[]
{
new RefreshTokenEntity
{
Id = Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "rhash1-" + Guid.NewGuid().ToString("N"),
AccessTokenId = Guid.Parse("10000000-0000-0000-0000-000000000000"),
ClientId = "web-app",
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddDays(30)
},
new RefreshTokenEntity
{
Id = Guid.Parse("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "rhash2-" + Guid.NewGuid().ToString("N"),
AccessTokenId = Guid.Parse("20000000-0000-0000-0000-000000000000"),
ClientId = "web-app",
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddDays(30)
},
new RefreshTokenEntity
{
Id = Guid.Parse("cccccccc-cccc-cccc-cccc-cccccccccccc"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "rhash3-" + Guid.NewGuid().ToString("N"),
AccessTokenId = Guid.Parse("30000000-0000-0000-0000-000000000000"),
ClientId = "web-app",
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddDays(30)
}
};
foreach (var token in tokens.Reverse())
{
await _repository.CreateAsync(_tenantId, token);
}
// Act
var first = await _repository.GetByUserIdAsync(_tenantId, userId);
var second = await _repository.GetByUserIdAsync(_tenantId, userId);
var expectedOrder = tokens
.OrderByDescending(t => t.IssuedAt)
.ThenBy(t => t.Id)
.Select(t => t.Id)
.ToArray();
// Assert
first.Select(t => t.Id).Should().ContainInOrder(expectedOrder);
second.Should().BeEquivalentTo(first, o => o.WithStrictOrdering());
}
private RefreshTokenEntity CreateRefreshToken(Guid userId) => new()
{
Id = Guid.NewGuid(),

View File

@@ -1,3 +1,4 @@
using System.Linq;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
@@ -121,6 +122,71 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
}
[Fact]
public async Task GetByUserId_IsDeterministic_WhenIssuedAtTies()
{
// Arrange: same IssuedAt, fixed IDs to validate ordering
var userId = Guid.NewGuid();
var issuedAt = new DateTimeOffset(2025, 11, 30, 12, 0, 0, TimeSpan.Zero);
var tokens = new[]
{
new TokenEntity
{
Id = Guid.Parse("11111111-1111-1111-1111-111111111111"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "hash1-" + Guid.NewGuid().ToString("N"),
TokenType = TokenType.Access,
Scopes = ["a"],
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddHours(1)
},
new TokenEntity
{
Id = Guid.Parse("22222222-2222-2222-2222-222222222222"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "hash2-" + Guid.NewGuid().ToString("N"),
TokenType = TokenType.Access,
Scopes = ["a"],
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddHours(1)
},
new TokenEntity
{
Id = Guid.Parse("33333333-3333-3333-3333-333333333333"),
TenantId = _tenantId,
UserId = userId,
TokenHash = "hash3-" + Guid.NewGuid().ToString("N"),
TokenType = TokenType.Access,
Scopes = ["a"],
IssuedAt = issuedAt,
ExpiresAt = issuedAt.AddHours(1)
}
};
// Insert out of order to ensure repository enforces deterministic ordering
foreach (var token in tokens.Reverse())
{
await _repository.CreateAsync(_tenantId, token);
}
// Act
var first = await _repository.GetByUserIdAsync(_tenantId, userId);
var second = await _repository.GetByUserIdAsync(_tenantId, userId);
var expectedOrder = tokens
.OrderByDescending(t => t.IssuedAt)
.ThenBy(t => t.Id)
.Select(t => t.Id)
.ToArray();
// Assert
first.Select(t => t.Id).Should().ContainInOrder(expectedOrder);
second.Should().BeEquivalentTo(first, o => o.WithStrictOrdering());
}
private TokenEntity CreateToken(Guid userId) => new()
{
Id = Guid.NewGuid(),

View File

@@ -0,0 +1,19 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents an affected package entry for an advisory.
/// </summary>
public sealed class AdvisoryAffectedEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string Ecosystem { get; init; }
public required string PackageName { get; init; }
public string? Purl { get; init; }
public string VersionRange { get; init; } = "{}";
public string[]? VersionsAffected { get; init; }
public string[]? VersionsFixed { get; init; }
public string? DatabaseSpecific { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents an advisory alias (e.g., CVE, GHSA).
/// </summary>
public sealed class AdvisoryAliasEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string AliasType { get; init; }
public required string AliasValue { get; init; }
public bool IsPrimary { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a credit entry for an advisory.
/// </summary>
public sealed class AdvisoryCreditEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string Name { get; init; }
public string? Contact { get; init; }
public string? CreditType { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,20 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a CVSS score for an advisory.
/// </summary>
public sealed class AdvisoryCvssEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string CvssVersion { get; init; }
public required string VectorString { get; init; }
public decimal BaseScore { get; init; }
public string? BaseSeverity { get; init; }
public decimal? ExploitabilityScore { get; init; }
public decimal? ImpactScore { get; init; }
public string? Source { get; init; }
public bool IsPrimary { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents an advisory reference URL.
/// </summary>
public sealed class AdvisoryReferenceEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string RefType { get; init; }
public required string Url { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a snapshot of an advisory at a point in time.
/// </summary>
public sealed class AdvisorySnapshotEntity
{
public Guid Id { get; init; }
public Guid FeedSnapshotId { get; init; }
public required string AdvisoryKey { get; init; }
public required string ContentHash { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a CWE weakness linked to an advisory.
/// </summary>
public sealed class AdvisoryWeaknessEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string CweId { get; init; }
public string? Description { get; init; }
public string? Source { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a feed snapshot record.
/// </summary>
public sealed class FeedSnapshotEntity
{
public Guid Id { get; init; }
public Guid SourceId { get; init; }
public required string SnapshotId { get; init; }
public int AdvisoryCount { get; init; }
public string? Checksum { get; init; }
public string Metadata { get; init; } = "{}";
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,20 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a Known Exploited Vulnerability flag entry.
/// </summary>
public sealed class KevFlagEntity
{
public Guid Id { get; init; }
public Guid AdvisoryId { get; init; }
public required string CveId { get; init; }
public string? VendorProject { get; init; }
public string? Product { get; init; }
public string? VulnerabilityName { get; init; }
public DateOnly DateAdded { get; init; }
public DateOnly? DueDate { get; init; }
public bool KnownRansomwareUse { get; init; }
public string? Notes { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents a merge event audit record.
/// </summary>
public sealed class MergeEventEntity
{
public long Id { get; init; }
public Guid AdvisoryId { get; init; }
public Guid? SourceId { get; init; }
public required string EventType { get; init; }
public string? OldValue { get; init; }
public string? NewValue { get; init; }
public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,19 @@
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Tracks source ingestion cursors and metrics.
/// </summary>
public sealed class SourceStateEntity
{
public Guid Id { get; init; }
public Guid SourceId { get; init; }
public string? Cursor { get; init; }
public DateTimeOffset? LastSyncAt { get; init; }
public DateTimeOffset? LastSuccessAt { get; init; }
public string? LastError { get; init; }
public long SyncCount { get; init; }
public int ErrorCount { get; init; }
public string Metadata { get; init; } = "{}";
public DateTimeOffset UpdatedAt { get; init; }
}

View File

@@ -0,0 +1,143 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory affected packages.
/// </summary>
public sealed class AdvisoryAffectedRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryAffectedRepository
{
private const string SystemTenantId = "_system";
public AdvisoryAffectedRepository(ConcelierDataSource dataSource, ILogger<AdvisoryAffectedRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryAffectedEntity> affected, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_affected WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_affected
(id, advisory_id, ecosystem, package_name, purl, version_range, versions_affected,
versions_fixed, database_specific)
VALUES
(@id, @advisory_id, @ecosystem, @package_name, @purl, @version_range::jsonb,
@versions_affected, @versions_fixed, @database_specific::jsonb)
""";
foreach (var entry in affected)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", entry.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "ecosystem", entry.Ecosystem);
AddParameter(insertCmd, "package_name", entry.PackageName);
AddParameter(insertCmd, "purl", entry.Purl);
AddJsonbParameter(insertCmd, "version_range", entry.VersionRange);
AddTextArrayParameter(insertCmd, "versions_affected", entry.VersionsAffected);
AddTextArrayParameter(insertCmd, "versions_fixed", entry.VersionsFixed);
AddJsonbParameter(insertCmd, "database_specific", entry.DatabaseSpecific);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, ecosystem, package_name, purl, version_range::text,
versions_affected, versions_fixed, database_specific::text, created_at
FROM vuln.advisory_affected
WHERE advisory_id = @advisory_id
ORDER BY ecosystem, package_name, purl NULLS LAST
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapAffected,
cancellationToken);
}
public Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByPurlAsync(string purl, int limit = 100, int offset = 0, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, ecosystem, package_name, purl, version_range::text,
versions_affected, versions_fixed, database_specific::text, created_at
FROM vuln.advisory_affected
WHERE purl = @purl
ORDER BY advisory_id, id
LIMIT @limit OFFSET @offset
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "purl", purl);
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapAffected,
cancellationToken);
}
public Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByPackageNameAsync(string ecosystem, string packageName, int limit = 100, int offset = 0, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, ecosystem, package_name, purl, version_range::text,
versions_affected, versions_fixed, database_specific::text, created_at
FROM vuln.advisory_affected
WHERE ecosystem = @ecosystem AND package_name = @package_name
ORDER BY advisory_id, id
LIMIT @limit OFFSET @offset
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "ecosystem", ecosystem);
AddParameter(cmd, "package_name", packageName);
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapAffected,
cancellationToken);
}
private static AdvisoryAffectedEntity MapAffected(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
Ecosystem = reader.GetString(2),
PackageName = reader.GetString(3),
Purl = GetNullableString(reader, 4),
VersionRange = reader.GetString(5),
VersionsAffected = reader.IsDBNull(6) ? null : reader.GetFieldValue<string[]>(6),
VersionsFixed = reader.IsDBNull(7) ? null : reader.GetFieldValue<string[]>(7),
DatabaseSpecific = GetNullableString(reader, 8),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(9)
};
}

View File

@@ -0,0 +1,100 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory aliases.
/// </summary>
public sealed class AdvisoryAliasRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryAliasRepository
{
private const string SystemTenantId = "_system";
public AdvisoryAliasRepository(ConcelierDataSource dataSource, ILogger<AdvisoryAliasRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryAliasEntity> aliases, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_aliases WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_aliases
(id, advisory_id, alias_type, alias_value, is_primary)
VALUES
(@id, @advisory_id, @alias_type, @alias_value, @is_primary)
""";
foreach (var alias in aliases)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", alias.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "alias_type", alias.AliasType);
AddParameter(insertCmd, "alias_value", alias.AliasValue);
AddParameter(insertCmd, "is_primary", alias.IsPrimary);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryAliasEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, alias_type, alias_value, is_primary, created_at
FROM vuln.advisory_aliases
WHERE advisory_id = @advisory_id
ORDER BY is_primary DESC, alias_type, alias_value
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapAlias,
cancellationToken);
}
public Task<IReadOnlyList<AdvisoryAliasEntity>> GetByAliasAsync(string aliasValue, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, alias_type, alias_value, is_primary, created_at
FROM vuln.advisory_aliases
WHERE alias_value = @alias_value
ORDER BY is_primary DESC
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "alias_value", aliasValue),
MapAlias,
cancellationToken);
}
private static AdvisoryAliasEntity MapAlias(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
AliasType = reader.GetString(2),
AliasValue = reader.GetString(3),
IsPrimary = reader.GetBoolean(4),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(5)
};
}

View File

@@ -0,0 +1,83 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory credits.
/// </summary>
public sealed class AdvisoryCreditRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryCreditRepository
{
private const string SystemTenantId = "_system";
public AdvisoryCreditRepository(ConcelierDataSource dataSource, ILogger<AdvisoryCreditRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryCreditEntity> credits, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_credits WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_credits
(id, advisory_id, name, contact, credit_type)
VALUES
(@id, @advisory_id, @name, @contact, @credit_type)
""";
foreach (var credit in credits)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", credit.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "name", credit.Name);
AddParameter(insertCmd, "contact", credit.Contact);
AddParameter(insertCmd, "credit_type", credit.CreditType);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryCreditEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, name, contact, credit_type, created_at
FROM vuln.advisory_credits
WHERE advisory_id = @advisory_id
ORDER BY name
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapCredit,
cancellationToken);
}
private static AdvisoryCreditEntity MapCredit(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
Name = reader.GetString(2),
Contact = GetNullableString(reader, 3),
CreditType = GetNullableString(reader, 4),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(5)
};
}

View File

@@ -0,0 +1,96 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory CVSS scores.
/// </summary>
public sealed class AdvisoryCvssRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryCvssRepository
{
private const string SystemTenantId = "_system";
public AdvisoryCvssRepository(ConcelierDataSource dataSource, ILogger<AdvisoryCvssRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryCvssEntity> scores, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_cvss WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_cvss
(id, advisory_id, cvss_version, vector_string, base_score, base_severity,
exploitability_score, impact_score, source, is_primary)
VALUES
(@id, @advisory_id, @cvss_version, @vector_string, @base_score, @base_severity,
@exploitability_score, @impact_score, @source, @is_primary)
""";
foreach (var score in scores)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", score.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "cvss_version", score.CvssVersion);
AddParameter(insertCmd, "vector_string", score.VectorString);
AddParameter(insertCmd, "base_score", score.BaseScore);
AddParameter(insertCmd, "base_severity", score.BaseSeverity);
AddParameter(insertCmd, "exploitability_score", score.ExploitabilityScore);
AddParameter(insertCmd, "impact_score", score.ImpactScore);
AddParameter(insertCmd, "source", score.Source);
AddParameter(insertCmd, "is_primary", score.IsPrimary);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryCvssEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, cvss_version, vector_string, base_score, base_severity,
exploitability_score, impact_score, source, is_primary, created_at
FROM vuln.advisory_cvss
WHERE advisory_id = @advisory_id
ORDER BY is_primary DESC, cvss_version
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapCvss,
cancellationToken);
}
private static AdvisoryCvssEntity MapCvss(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
CvssVersion = reader.GetString(2),
VectorString = reader.GetString(3),
BaseScore = reader.GetDecimal(4),
BaseSeverity = GetNullableString(reader, 5),
ExploitabilityScore = GetNullableDecimal(reader, 6),
ImpactScore = GetNullableDecimal(reader, 7),
Source = GetNullableString(reader, 8),
IsPrimary = reader.GetBoolean(9),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10)
};
}

View File

@@ -0,0 +1,81 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory references.
/// </summary>
public sealed class AdvisoryReferenceRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryReferenceRepository
{
private const string SystemTenantId = "_system";
public AdvisoryReferenceRepository(ConcelierDataSource dataSource, ILogger<AdvisoryReferenceRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryReferenceEntity> references, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_references WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_references
(id, advisory_id, ref_type, url)
VALUES
(@id, @advisory_id, @ref_type, @url)
""";
foreach (var reference in references)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", reference.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "ref_type", reference.RefType);
AddParameter(insertCmd, "url", reference.Url);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryReferenceEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, ref_type, url, created_at
FROM vuln.advisory_references
WHERE advisory_id = @advisory_id
ORDER BY ref_type, url
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapReference,
cancellationToken);
}
private static AdvisoryReferenceEntity MapReference(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
RefType = reader.GetString(2),
Url = reader.GetString(3),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(4)
};
}

View File

@@ -25,55 +25,24 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
}
/// <inheritdoc />
public async Task<AdvisoryEntity> UpsertAsync(AdvisoryEntity advisory, CancellationToken cancellationToken = default)
public Task<AdvisoryEntity> UpsertAsync(
AdvisoryEntity advisory,
IEnumerable<AdvisoryAliasEntity>? aliases,
IEnumerable<AdvisoryCvssEntity>? cvss,
IEnumerable<AdvisoryAffectedEntity>? affected,
IEnumerable<AdvisoryReferenceEntity>? references,
IEnumerable<AdvisoryCreditEntity>? credits,
IEnumerable<AdvisoryWeaknessEntity>? weaknesses,
IEnumerable<KevFlagEntity>? kevFlags,
CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.advisories (
id, advisory_key, primary_vuln_id, source_id, title, summary, description,
severity, published_at, modified_at, withdrawn_at, provenance, raw_payload
)
VALUES (
@id, @advisory_key, @primary_vuln_id, @source_id, @title, @summary, @description,
@severity, @published_at, @modified_at, @withdrawn_at, @provenance::jsonb, @raw_payload::jsonb
)
ON CONFLICT (advisory_key) DO UPDATE SET
primary_vuln_id = EXCLUDED.primary_vuln_id,
source_id = COALESCE(EXCLUDED.source_id, vuln.advisories.source_id),
title = COALESCE(EXCLUDED.title, vuln.advisories.title),
summary = COALESCE(EXCLUDED.summary, vuln.advisories.summary),
description = COALESCE(EXCLUDED.description, vuln.advisories.description),
severity = COALESCE(EXCLUDED.severity, vuln.advisories.severity),
published_at = COALESCE(EXCLUDED.published_at, vuln.advisories.published_at),
modified_at = COALESCE(EXCLUDED.modified_at, vuln.advisories.modified_at),
withdrawn_at = EXCLUDED.withdrawn_at,
provenance = vuln.advisories.provenance || EXCLUDED.provenance,
raw_payload = EXCLUDED.raw_payload
RETURNING id, advisory_key, primary_vuln_id, source_id, title, summary, description,
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
created_at, updated_at
""";
return UpsertInternalAsync(advisory, aliases, cvss, affected, references, credits, weaknesses, kevFlags, cancellationToken);
}
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "id", advisory.Id);
AddParameter(command, "advisory_key", advisory.AdvisoryKey);
AddParameter(command, "primary_vuln_id", advisory.PrimaryVulnId);
AddParameter(command, "source_id", advisory.SourceId);
AddParameter(command, "title", advisory.Title);
AddParameter(command, "summary", advisory.Summary);
AddParameter(command, "description", advisory.Description);
AddParameter(command, "severity", advisory.Severity);
AddParameter(command, "published_at", advisory.PublishedAt);
AddParameter(command, "modified_at", advisory.ModifiedAt);
AddParameter(command, "withdrawn_at", advisory.WithdrawnAt);
AddJsonbParameter(command, "provenance", advisory.Provenance);
AddJsonbParameter(command, "raw_payload", advisory.RawPayload);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
return MapAdvisory(reader);
/// <inheritdoc />
public Task<AdvisoryEntity> UpsertAsync(AdvisoryEntity advisory, CancellationToken cancellationToken = default)
{
return UpsertInternalAsync(advisory, null, null, null, null, null, null, null, cancellationToken);
}
/// <inheritdoc />
@@ -133,6 +102,90 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<AdvisoryEntity>> GetByAliasAsync(string aliasValue, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
a.created_at, a.updated_at
FROM vuln.advisories a
JOIN vuln.advisory_aliases al ON al.advisory_id = a.id
WHERE al.alias_value = @alias_value
ORDER BY a.modified_at DESC, a.id
""";
return await QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "alias_value", aliasValue),
MapAdvisory,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<AdvisoryEntity>> GetAffectingPackageAsync(
string purl,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
a.created_at, a.updated_at
FROM vuln.advisories a
JOIN vuln.advisory_affected af ON af.advisory_id = a.id
WHERE af.purl = @purl
ORDER BY a.modified_at DESC, a.id
LIMIT @limit OFFSET @offset
""";
return await QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "purl", purl);
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapAdvisory,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<AdvisoryEntity>> GetAffectingPackageNameAsync(
string ecosystem,
string packageName,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
a.created_at, a.updated_at
FROM vuln.advisories a
JOIN vuln.advisory_affected af ON af.advisory_id = a.id
WHERE af.ecosystem = @ecosystem AND af.package_name = @package_name
ORDER BY a.modified_at DESC, a.id
LIMIT @limit OFFSET @offset
""";
return await QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "ecosystem", ecosystem);
AddParameter(cmd, "package_name", packageName);
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapAdvisory,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<AdvisoryEntity>> SearchAsync(
string query,
@@ -299,6 +352,358 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
return results.ToDictionary(r => r.Severity, r => r.Count);
}
private async Task<AdvisoryEntity> UpsertInternalAsync(
AdvisoryEntity advisory,
IEnumerable<AdvisoryAliasEntity>? aliases,
IEnumerable<AdvisoryCvssEntity>? cvss,
IEnumerable<AdvisoryAffectedEntity>? affected,
IEnumerable<AdvisoryReferenceEntity>? references,
IEnumerable<AdvisoryCreditEntity>? credits,
IEnumerable<AdvisoryWeaknessEntity>? weaknesses,
IEnumerable<KevFlagEntity>? kevFlags,
CancellationToken cancellationToken)
{
const string sql = """
INSERT INTO vuln.advisories (
id, advisory_key, primary_vuln_id, source_id, title, summary, description,
severity, published_at, modified_at, withdrawn_at, provenance, raw_payload
)
VALUES (
@id, @advisory_key, @primary_vuln_id, @source_id, @title, @summary, @description,
@severity, @published_at, @modified_at, @withdrawn_at, @provenance::jsonb, @raw_payload::jsonb
)
ON CONFLICT (advisory_key) DO UPDATE SET
primary_vuln_id = EXCLUDED.primary_vuln_id,
source_id = COALESCE(EXCLUDED.source_id, vuln.advisories.source_id),
title = COALESCE(EXCLUDED.title, vuln.advisories.title),
summary = COALESCE(EXCLUDED.summary, vuln.advisories.summary),
description = COALESCE(EXCLUDED.description, vuln.advisories.description),
severity = COALESCE(EXCLUDED.severity, vuln.advisories.severity),
published_at = COALESCE(EXCLUDED.published_at, vuln.advisories.published_at),
modified_at = COALESCE(EXCLUDED.modified_at, vuln.advisories.modified_at),
withdrawn_at = EXCLUDED.withdrawn_at,
provenance = vuln.advisories.provenance || EXCLUDED.provenance,
raw_payload = EXCLUDED.raw_payload,
updated_at = NOW()
RETURNING id, advisory_key, primary_vuln_id, source_id, title, summary, description,
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
created_at, updated_at
""";
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
AdvisoryEntity result;
await using (var command = CreateCommand(sql, connection))
{
command.Transaction = transaction;
AddParameter(command, "id", advisory.Id);
AddParameter(command, "advisory_key", advisory.AdvisoryKey);
AddParameter(command, "primary_vuln_id", advisory.PrimaryVulnId);
AddParameter(command, "source_id", advisory.SourceId);
AddParameter(command, "title", advisory.Title);
AddParameter(command, "summary", advisory.Summary);
AddParameter(command, "description", advisory.Description);
AddParameter(command, "severity", advisory.Severity);
AddParameter(command, "published_at", advisory.PublishedAt);
AddParameter(command, "modified_at", advisory.ModifiedAt);
AddParameter(command, "withdrawn_at", advisory.WithdrawnAt);
AddJsonbParameter(command, "provenance", advisory.Provenance);
AddJsonbParameter(command, "raw_payload", advisory.RawPayload);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
result = MapAdvisory(reader);
}
// Replace child tables only when collections are provided (null = leave existing).
if (aliases is not null)
{
await ReplaceAliasesAsync(result.Id, aliases, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (cvss is not null)
{
await ReplaceCvssAsync(result.Id, cvss, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (affected is not null)
{
await ReplaceAffectedAsync(result.Id, affected, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (references is not null)
{
await ReplaceReferencesAsync(result.Id, references, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (credits is not null)
{
await ReplaceCreditsAsync(result.Id, credits, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (weaknesses is not null)
{
await ReplaceWeaknessesAsync(result.Id, weaknesses, connection, transaction, cancellationToken).ConfigureAwait(false);
}
if (kevFlags is not null)
{
await ReplaceKevFlagsAsync(result.Id, kevFlags, connection, transaction, cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
return result;
}
private static async Task ReplaceAliasesAsync(
Guid advisoryId,
IEnumerable<AdvisoryAliasEntity> aliases,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_aliases WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_aliases (id, advisory_id, alias_type, alias_value, is_primary)
VALUES (@id, @advisory_id, @alias_type, @alias_value, @is_primary)
""";
foreach (var alias in aliases)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", alias.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("alias_type", alias.AliasType);
insertCmd.Parameters.AddWithValue("alias_value", alias.AliasValue);
insertCmd.Parameters.AddWithValue("is_primary", alias.IsPrimary);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceCvssAsync(
Guid advisoryId,
IEnumerable<AdvisoryCvssEntity> scores,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_cvss WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_cvss
(id, advisory_id, cvss_version, vector_string, base_score, base_severity,
exploitability_score, impact_score, source, is_primary)
VALUES
(@id, @advisory_id, @cvss_version, @vector_string, @base_score, @base_severity,
@exploitability_score, @impact_score, @source, @is_primary)
""";
foreach (var score in scores)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", score.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("cvss_version", score.CvssVersion);
insertCmd.Parameters.AddWithValue("vector_string", score.VectorString);
insertCmd.Parameters.AddWithValue("base_score", score.BaseScore);
insertCmd.Parameters.AddWithValue("base_severity", (object?)score.BaseSeverity ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("exploitability_score", (object?)score.ExploitabilityScore ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("impact_score", (object?)score.ImpactScore ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("source", (object?)score.Source ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("is_primary", score.IsPrimary);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceAffectedAsync(
Guid advisoryId,
IEnumerable<AdvisoryAffectedEntity> affected,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_affected WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_affected
(id, advisory_id, ecosystem, package_name, purl, version_range, versions_affected,
versions_fixed, database_specific)
VALUES
(@id, @advisory_id, @ecosystem, @package_name, @purl, @version_range::jsonb,
@versions_affected, @versions_fixed, @database_specific::jsonb)
""";
foreach (var entry in affected)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", entry.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("ecosystem", entry.Ecosystem);
insertCmd.Parameters.AddWithValue("package_name", entry.PackageName);
insertCmd.Parameters.AddWithValue("purl", (object?)entry.Purl ?? DBNull.Value);
insertCmd.Parameters.Add(new NpgsqlParameter<string?>("version_range", NpgsqlTypes.NpgsqlDbType.Jsonb)
{
TypedValue = entry.VersionRange
});
insertCmd.Parameters.Add(new NpgsqlParameter<string[]?>("versions_affected", NpgsqlTypes.NpgsqlDbType.Array | NpgsqlTypes.NpgsqlDbType.Text)
{
TypedValue = entry.VersionsAffected
});
insertCmd.Parameters.Add(new NpgsqlParameter<string[]?>("versions_fixed", NpgsqlTypes.NpgsqlDbType.Array | NpgsqlTypes.NpgsqlDbType.Text)
{
TypedValue = entry.VersionsFixed
});
insertCmd.Parameters.Add(new NpgsqlParameter<string?>("database_specific", NpgsqlTypes.NpgsqlDbType.Jsonb)
{
TypedValue = entry.DatabaseSpecific
});
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceReferencesAsync(
Guid advisoryId,
IEnumerable<AdvisoryReferenceEntity> references,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_references WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_references (id, advisory_id, ref_type, url)
VALUES (@id, @advisory_id, @ref_type, @url)
""";
foreach (var reference in references)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", reference.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("ref_type", reference.RefType);
insertCmd.Parameters.AddWithValue("url", reference.Url);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceCreditsAsync(
Guid advisoryId,
IEnumerable<AdvisoryCreditEntity> credits,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_credits WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_credits (id, advisory_id, name, contact, credit_type)
VALUES (@id, @advisory_id, @name, @contact, @credit_type)
""";
foreach (var credit in credits)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", credit.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("name", credit.Name);
insertCmd.Parameters.AddWithValue("contact", (object?)credit.Contact ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("credit_type", (object?)credit.CreditType ?? DBNull.Value);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceWeaknessesAsync(
Guid advisoryId,
IEnumerable<AdvisoryWeaknessEntity> weaknesses,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.advisory_weaknesses WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_weaknesses (id, advisory_id, cwe_id, description, source)
VALUES (@id, @advisory_id, @cwe_id, @description, @source)
""";
foreach (var weakness in weaknesses)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", weakness.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("cwe_id", weakness.CweId);
insertCmd.Parameters.AddWithValue("description", (object?)weakness.Description ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("source", (object?)weakness.Source ?? DBNull.Value);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static async Task ReplaceKevFlagsAsync(
Guid advisoryId,
IEnumerable<KevFlagEntity> kevFlags,
NpgsqlConnection connection,
NpgsqlTransaction transaction,
CancellationToken cancellationToken)
{
const string deleteSql = "DELETE FROM vuln.kev_flags WHERE advisory_id = @advisory_id";
await using (var deleteCmd = new NpgsqlCommand(deleteSql, connection, transaction))
{
deleteCmd.Parameters.AddWithValue("advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.kev_flags
(id, advisory_id, cve_id, vendor_project, product, vulnerability_name,
date_added, due_date, known_ransomware_use, notes)
VALUES
(@id, @advisory_id, @cve_id, @vendor_project, @product, @vulnerability_name,
@date_added, @due_date, @known_ransomware_use, @notes)
""";
foreach (var flag in kevFlags)
{
await using var insertCmd = new NpgsqlCommand(insertSql, connection, transaction);
insertCmd.Parameters.AddWithValue("id", flag.Id);
insertCmd.Parameters.AddWithValue("advisory_id", advisoryId);
insertCmd.Parameters.AddWithValue("cve_id", flag.CveId);
insertCmd.Parameters.AddWithValue("vendor_project", (object?)flag.VendorProject ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("product", (object?)flag.Product ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("vulnerability_name", (object?)flag.VulnerabilityName ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("date_added", flag.DateAdded);
insertCmd.Parameters.AddWithValue("due_date", (object?)flag.DueDate ?? DBNull.Value);
insertCmd.Parameters.AddWithValue("known_ransomware_use", flag.KnownRansomwareUse);
insertCmd.Parameters.AddWithValue("notes", (object?)flag.Notes ?? DBNull.Value);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
}
private static AdvisoryEntity MapAdvisory(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),

View File

@@ -0,0 +1,72 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory snapshots.
/// </summary>
public sealed class AdvisorySnapshotRepository : RepositoryBase<ConcelierDataSource>, IAdvisorySnapshotRepository
{
private const string SystemTenantId = "_system";
public AdvisorySnapshotRepository(ConcelierDataSource dataSource, ILogger<AdvisorySnapshotRepository> logger)
: base(dataSource, logger)
{
}
public async Task<AdvisorySnapshotEntity> InsertAsync(AdvisorySnapshotEntity snapshot, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.advisory_snapshots
(id, feed_snapshot_id, advisory_key, content_hash)
VALUES
(@id, @feed_snapshot_id, @advisory_key, @content_hash)
ON CONFLICT (feed_snapshot_id, advisory_key) DO UPDATE SET
content_hash = EXCLUDED.content_hash
RETURNING id, feed_snapshot_id, advisory_key, content_hash, created_at
""";
return await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", snapshot.Id);
AddParameter(cmd, "feed_snapshot_id", snapshot.FeedSnapshotId);
AddParameter(cmd, "advisory_key", snapshot.AdvisoryKey);
AddParameter(cmd, "content_hash", snapshot.ContentHash);
},
MapSnapshot!,
cancellationToken).ConfigureAwait(false) ?? throw new InvalidOperationException("Insert returned null");
}
public Task<IReadOnlyList<AdvisorySnapshotEntity>> GetByFeedSnapshotAsync(Guid feedSnapshotId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, feed_snapshot_id, advisory_key, content_hash, created_at
FROM vuln.advisory_snapshots
WHERE feed_snapshot_id = @feed_snapshot_id
ORDER BY advisory_key
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "feed_snapshot_id", feedSnapshotId),
MapSnapshot,
cancellationToken);
}
private static AdvisorySnapshotEntity MapSnapshot(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
FeedSnapshotId = reader.GetGuid(1),
AdvisoryKey = reader.GetString(2),
ContentHash = reader.GetString(3),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(4)
};
}

View File

@@ -0,0 +1,83 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for advisory weaknesses (CWE).
/// </summary>
public sealed class AdvisoryWeaknessRepository : RepositoryBase<ConcelierDataSource>, IAdvisoryWeaknessRepository
{
private const string SystemTenantId = "_system";
public AdvisoryWeaknessRepository(ConcelierDataSource dataSource, ILogger<AdvisoryWeaknessRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryWeaknessEntity> weaknesses, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.advisory_weaknesses WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.advisory_weaknesses
(id, advisory_id, cwe_id, description, source)
VALUES
(@id, @advisory_id, @cwe_id, @description, @source)
""";
foreach (var weakness in weaknesses)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", weakness.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "cwe_id", weakness.CweId);
AddParameter(insertCmd, "description", weakness.Description);
AddParameter(insertCmd, "source", weakness.Source);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<AdvisoryWeaknessEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, cwe_id, description, source, created_at
FROM vuln.advisory_weaknesses
WHERE advisory_id = @advisory_id
ORDER BY cwe_id
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapWeakness,
cancellationToken);
}
private static AdvisoryWeaknessEntity MapWeakness(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
CweId = reader.GetString(2),
Description = GetNullableString(reader, 3),
Source = GetNullableString(reader, 4),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(5)
};
}

View File

@@ -0,0 +1,78 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for feed snapshots.
/// </summary>
public sealed class FeedSnapshotRepository : RepositoryBase<ConcelierDataSource>, IFeedSnapshotRepository
{
private const string SystemTenantId = "_system";
public FeedSnapshotRepository(ConcelierDataSource dataSource, ILogger<FeedSnapshotRepository> logger)
: base(dataSource, logger)
{
}
public async Task<FeedSnapshotEntity> InsertAsync(FeedSnapshotEntity snapshot, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.feed_snapshots
(id, source_id, snapshot_id, advisory_count, checksum, metadata)
VALUES
(@id, @source_id, @snapshot_id, @advisory_count, @checksum, @metadata::jsonb)
ON CONFLICT (source_id, snapshot_id) DO NOTHING
RETURNING id, source_id, snapshot_id, advisory_count, checksum, metadata::text, created_at
""";
return await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", snapshot.Id);
AddParameter(cmd, "source_id", snapshot.SourceId);
AddParameter(cmd, "snapshot_id", snapshot.SnapshotId);
AddParameter(cmd, "advisory_count", snapshot.AdvisoryCount);
AddParameter(cmd, "checksum", snapshot.Checksum);
AddJsonbParameter(cmd, "metadata", snapshot.Metadata);
},
MapSnapshot!,
cancellationToken).ConfigureAwait(false) ?? snapshot;
}
public Task<FeedSnapshotEntity?> GetBySourceAndIdAsync(Guid sourceId, string snapshotId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, source_id, snapshot_id, advisory_count, checksum, metadata::text, created_at
FROM vuln.feed_snapshots
WHERE source_id = @source_id AND snapshot_id = @snapshot_id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "source_id", sourceId);
AddParameter(cmd, "snapshot_id", snapshotId);
},
MapSnapshot,
cancellationToken);
}
private static FeedSnapshotEntity MapSnapshot(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
SourceId = reader.GetGuid(1),
SnapshotId = reader.GetString(2),
AdvisoryCount = reader.GetInt32(3),
Checksum = GetNullableString(reader, 4),
Metadata = reader.GetString(5),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(6)
};
}

View File

@@ -0,0 +1,15 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory affected package rows.
/// </summary>
public interface IAdvisoryAffectedRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryAffectedEntity> affected, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByPurlAsync(string purl, int limit = 100, int offset = 0, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryAffectedEntity>> GetByPackageNameAsync(string ecosystem, string packageName, int limit = 100, int offset = 0, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,14 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory aliases.
/// </summary>
public interface IAdvisoryAliasRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryAliasEntity> aliases, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryAliasEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryAliasEntity>> GetByAliasAsync(string aliasValue, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory credits.
/// </summary>
public interface IAdvisoryCreditRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryCreditEntity> credits, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryCreditEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory CVSS scores.
/// </summary>
public interface IAdvisoryCvssRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryCvssEntity> scores, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryCvssEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory references.
/// </summary>
public interface IAdvisoryReferenceRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryReferenceEntity> references, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryReferenceEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
}

View File

@@ -27,6 +27,30 @@ public interface IAdvisoryRepository
/// </summary>
Task<AdvisoryEntity?> GetByVulnIdAsync(string vulnId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets advisories that include the provided alias (e.g., CVE, GHSA).
/// </summary>
Task<IReadOnlyList<AdvisoryEntity>> GetByAliasAsync(string aliasValue, CancellationToken cancellationToken = default);
/// <summary>
/// Gets advisories affecting a package identified by full PURL.
/// </summary>
Task<IReadOnlyList<AdvisoryEntity>> GetAffectingPackageAsync(
string purl,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets advisories affecting a package by ecosystem/name (when PURL missing).
/// </summary>
Task<IReadOnlyList<AdvisoryEntity>> GetAffectingPackageNameAsync(
string ecosystem,
string packageName,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default);
/// <summary>
/// Searches advisories by full-text search.
/// </summary>
@@ -72,4 +96,19 @@ public interface IAdvisoryRepository
/// Counts advisories by severity.
/// </summary>
Task<IDictionary<string, long>> CountBySeverityAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Upserts an advisory and optional child records in a single transaction.
/// Passing null collections leaves existing child rows untouched; pass empty collections to replace with none.
/// </summary>
Task<AdvisoryEntity> UpsertAsync(
AdvisoryEntity advisory,
IEnumerable<Models.AdvisoryAliasEntity>? aliases,
IEnumerable<Models.AdvisoryCvssEntity>? cvss,
IEnumerable<Models.AdvisoryAffectedEntity>? affected,
IEnumerable<Models.AdvisoryReferenceEntity>? references,
IEnumerable<Models.AdvisoryCreditEntity>? credits,
IEnumerable<Models.AdvisoryWeaknessEntity>? weaknesses,
IEnumerable<Models.KevFlagEntity>? kevFlags,
CancellationToken cancellationToken = default);
}
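
A short usage sketch of the null-vs-empty contract on UpsertAsync (illustrative only; `repository`, `advisory`, `aliases`, and `ct` are assumed to exist at the call site):

// Hypothetical call site illustrating the null-vs-empty semantics.
await repository.UpsertAsync(
    advisory,
    aliases: aliases,                                  // replace alias rows with this set
    cvss: Array.Empty<Models.AdvisoryCvssEntity>(),    // remove any existing CVSS rows
    affected: null,                                    // leave existing affected rows untouched
    references: null,
    credits: null,
    weaknesses: null,
    kevFlags: null,
    cancellationToken: ct);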

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory snapshots.
/// </summary>
public interface IAdvisorySnapshotRepository
{
Task<AdvisorySnapshotEntity> InsertAsync(AdvisorySnapshotEntity snapshot, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisorySnapshotEntity>> GetByFeedSnapshotAsync(Guid feedSnapshotId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for advisory weaknesses (CWE).
/// </summary>
public interface IAdvisoryWeaknessRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<AdvisoryWeaknessEntity> weaknesses, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AdvisoryWeaknessEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for feed snapshots.
/// </summary>
public interface IFeedSnapshotRepository
{
Task<FeedSnapshotEntity> InsertAsync(FeedSnapshotEntity snapshot, CancellationToken cancellationToken = default);
Task<FeedSnapshotEntity?> GetBySourceAndIdAsync(Guid sourceId, string snapshotId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,14 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for KEV flag records.
/// </summary>
public interface IKevFlagRepository
{
Task ReplaceAsync(Guid advisoryId, IEnumerable<KevFlagEntity> flags, CancellationToken cancellationToken = default);
Task<IReadOnlyList<KevFlagEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default);
Task<IReadOnlyList<KevFlagEntity>> GetByCveAsync(string cveId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for merge event audit records.
/// </summary>
public interface IMergeEventRepository
{
Task<MergeEventEntity> InsertAsync(MergeEventEntity evt, CancellationToken cancellationToken = default);
Task<IReadOnlyList<MergeEventEntity>> GetByAdvisoryAsync(Guid advisoryId, int limit = 100, int offset = 0, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,15 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for vulnerability feed sources.
/// </summary>
public interface ISourceRepository
{
Task<SourceEntity> UpsertAsync(SourceEntity source, CancellationToken cancellationToken = default);
Task<SourceEntity?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default);
Task<SourceEntity?> GetByKeyAsync(string key, CancellationToken cancellationToken = default);
Task<IReadOnlyList<SourceEntity>> ListAsync(bool? enabled = null, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,13 @@
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository for source ingestion state.
/// </summary>
public interface ISourceStateRepository
{
Task<SourceStateEntity> UpsertAsync(SourceStateEntity state, CancellationToken cancellationToken = default);
Task<SourceStateEntity?> GetBySourceIdAsync(Guid sourceId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,114 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for KEV flags.
/// </summary>
public sealed class KevFlagRepository : RepositoryBase<ConcelierDataSource>, IKevFlagRepository
{
private const string SystemTenantId = "_system";
public KevFlagRepository(ConcelierDataSource dataSource, ILogger<KevFlagRepository> logger)
: base(dataSource, logger)
{
}
public async Task ReplaceAsync(Guid advisoryId, IEnumerable<KevFlagEntity> flags, CancellationToken cancellationToken = default)
{
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string deleteSql = "DELETE FROM vuln.kev_flags WHERE advisory_id = @advisory_id";
await using (var deleteCmd = CreateCommand(deleteSql, connection))
{
deleteCmd.Transaction = transaction;
AddParameter(deleteCmd, "advisory_id", advisoryId);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
const string insertSql = """
INSERT INTO vuln.kev_flags
(id, advisory_id, cve_id, vendor_project, product, vulnerability_name,
date_added, due_date, known_ransomware_use, notes)
VALUES
(@id, @advisory_id, @cve_id, @vendor_project, @product, @vulnerability_name,
@date_added, @due_date, @known_ransomware_use, @notes)
""";
foreach (var flag in flags)
{
await using var insertCmd = CreateCommand(insertSql, connection);
insertCmd.Transaction = transaction;
AddParameter(insertCmd, "id", flag.Id);
AddParameter(insertCmd, "advisory_id", advisoryId);
AddParameter(insertCmd, "cve_id", flag.CveId);
AddParameter(insertCmd, "vendor_project", flag.VendorProject);
AddParameter(insertCmd, "product", flag.Product);
AddParameter(insertCmd, "vulnerability_name", flag.VulnerabilityName);
AddParameter(insertCmd, "date_added", flag.DateAdded);
AddParameter(insertCmd, "due_date", flag.DueDate);
AddParameter(insertCmd, "known_ransomware_use", flag.KnownRansomwareUse);
AddParameter(insertCmd, "notes", flag.Notes);
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
}
public Task<IReadOnlyList<KevFlagEntity>> GetByAdvisoryAsync(Guid advisoryId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, cve_id, vendor_project, product, vulnerability_name,
date_added, due_date, known_ransomware_use, notes, created_at
FROM vuln.kev_flags
WHERE advisory_id = @advisory_id
ORDER BY date_added DESC, cve_id
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "advisory_id", advisoryId),
MapKev,
cancellationToken);
}
public Task<IReadOnlyList<KevFlagEntity>> GetByCveAsync(string cveId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, cve_id, vendor_project, product, vulnerability_name,
date_added, due_date, known_ransomware_use, notes, created_at
FROM vuln.kev_flags
WHERE cve_id = @cve_id
ORDER BY date_added DESC, advisory_id
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "cve_id", cveId),
MapKev,
cancellationToken);
}
private static KevFlagEntity MapKev(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
AdvisoryId = reader.GetGuid(1),
CveId = reader.GetString(2),
VendorProject = GetNullableString(reader, 3),
Product = GetNullableString(reader, 4),
VulnerabilityName = GetNullableString(reader, 5),
DateAdded = DateOnly.FromDateTime(reader.GetDateTime(6)),
DueDate = reader.IsDBNull(7) ? null : DateOnly.FromDateTime(reader.GetDateTime(7)),
KnownRansomwareUse = reader.GetBoolean(8),
Notes = GetNullableString(reader, 9),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10)
};
}

View File

@@ -0,0 +1,79 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for merge event audit records.
/// </summary>
public sealed class MergeEventRepository : RepositoryBase<ConcelierDataSource>, IMergeEventRepository
{
private const string SystemTenantId = "_system";
public MergeEventRepository(ConcelierDataSource dataSource, ILogger<MergeEventRepository> logger)
: base(dataSource, logger)
{
}
public async Task<MergeEventEntity> InsertAsync(MergeEventEntity evt, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.merge_events
(advisory_id, source_id, event_type, old_value, new_value)
VALUES
(@advisory_id, @source_id, @event_type, @old_value::jsonb, @new_value::jsonb)
RETURNING id, advisory_id, source_id, event_type, old_value::text, new_value::text, created_at
""";
return await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "advisory_id", evt.AdvisoryId);
AddParameter(cmd, "source_id", evt.SourceId);
AddParameter(cmd, "event_type", evt.EventType);
AddJsonbParameter(cmd, "old_value", evt.OldValue);
AddJsonbParameter(cmd, "new_value", evt.NewValue);
},
MapEvent!,
cancellationToken).ConfigureAwait(false) ?? throw new InvalidOperationException("Insert returned null");
}
public Task<IReadOnlyList<MergeEventEntity>> GetByAdvisoryAsync(Guid advisoryId, int limit = 100, int offset = 0, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, advisory_id, source_id, event_type, old_value::text, new_value::text, created_at
FROM vuln.merge_events
WHERE advisory_id = @advisory_id
ORDER BY created_at DESC, id DESC
LIMIT @limit OFFSET @offset
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "advisory_id", advisoryId);
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapEvent,
cancellationToken);
}
private static MergeEventEntity MapEvent(NpgsqlDataReader reader) => new()
{
Id = reader.GetInt64(0),
AdvisoryId = reader.GetGuid(1),
SourceId = GetNullableGuid(reader, 2),
EventType = reader.GetString(3),
OldValue = GetNullableString(reader, 4),
NewValue = GetNullableString(reader, 5),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(6)
};
}

View File

@@ -0,0 +1,136 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for feed sources.
/// </summary>
public sealed class SourceRepository : RepositoryBase<ConcelierDataSource>, ISourceRepository
{
private const string SystemTenantId = "_system";
public SourceRepository(ConcelierDataSource dataSource, ILogger<SourceRepository> logger)
: base(dataSource, logger)
{
}
public async Task<SourceEntity> UpsertAsync(SourceEntity source, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.sources
(id, key, name, source_type, url, priority, enabled, config, metadata)
VALUES
(@id, @key, @name, @source_type, @url, @priority, @enabled, @config::jsonb, @metadata::jsonb)
ON CONFLICT (key) DO UPDATE SET
name = EXCLUDED.name,
source_type = EXCLUDED.source_type,
url = EXCLUDED.url,
priority = EXCLUDED.priority,
enabled = EXCLUDED.enabled,
config = EXCLUDED.config,
metadata = EXCLUDED.metadata,
updated_at = NOW()
RETURNING id, key, name, source_type, url, priority, enabled,
config::text, metadata::text, created_at, updated_at
""";
return await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", source.Id);
AddParameter(cmd, "key", source.Key);
AddParameter(cmd, "name", source.Name);
AddParameter(cmd, "source_type", source.SourceType);
AddParameter(cmd, "url", source.Url);
AddParameter(cmd, "priority", source.Priority);
AddParameter(cmd, "enabled", source.Enabled);
AddJsonbParameter(cmd, "config", source.Config);
AddJsonbParameter(cmd, "metadata", source.Metadata);
},
MapSource!,
cancellationToken).ConfigureAwait(false) ?? throw new InvalidOperationException("Upsert returned null");
}
public Task<SourceEntity?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, key, name, source_type, url, priority, enabled,
config::text, metadata::text, created_at, updated_at
FROM vuln.sources
WHERE id = @id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "id", id),
MapSource,
cancellationToken);
}
public Task<SourceEntity?> GetByKeyAsync(string key, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, key, name, source_type, url, priority, enabled,
config::text, metadata::text, created_at, updated_at
FROM vuln.sources
WHERE key = @key
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "key", key),
MapSource,
cancellationToken);
}
public Task<IReadOnlyList<SourceEntity>> ListAsync(bool? enabled = null, CancellationToken cancellationToken = default)
{
var sql = """
SELECT id, key, name, source_type, url, priority, enabled,
config::text, metadata::text, created_at, updated_at
FROM vuln.sources
""";
if (enabled.HasValue)
{
sql += " WHERE enabled = @enabled";
}
sql += " ORDER BY priority DESC, key";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
if (enabled.HasValue)
{
AddParameter(cmd, "enabled", enabled.Value);
}
},
MapSource,
cancellationToken);
}
private static SourceEntity MapSource(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
Key = reader.GetString(1),
Name = reader.GetString(2),
SourceType = reader.GetString(3),
Url = GetNullableString(reader, 4),
Priority = reader.GetInt32(5),
Enabled = reader.GetBoolean(6),
Config = reader.GetString(7),
Metadata = reader.GetString(8),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(9),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(10)
};
}

View File

@@ -0,0 +1,92 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for source ingestion state.
/// </summary>
public sealed class SourceStateRepository : RepositoryBase<ConcelierDataSource>, ISourceStateRepository
{
private const string SystemTenantId = "_system";
public SourceStateRepository(ConcelierDataSource dataSource, ILogger<SourceStateRepository> logger)
: base(dataSource, logger)
{
}
public async Task<SourceStateEntity> UpsertAsync(SourceStateEntity state, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.source_states
(id, source_id, cursor, last_sync_at, last_success_at, last_error,
sync_count, error_count, metadata)
VALUES
(@id, @source_id, @cursor, @last_sync_at, @last_success_at, @last_error,
@sync_count, @error_count, @metadata::jsonb)
ON CONFLICT (source_id) DO UPDATE SET
cursor = EXCLUDED.cursor,
last_sync_at = EXCLUDED.last_sync_at,
last_success_at = EXCLUDED.last_success_at,
last_error = EXCLUDED.last_error,
sync_count = EXCLUDED.sync_count,
error_count = EXCLUDED.error_count,
metadata = EXCLUDED.metadata,
updated_at = NOW()
RETURNING id, source_id, cursor, last_sync_at, last_success_at, last_error,
sync_count, error_count, metadata::text, updated_at
""";
return await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", state.Id);
AddParameter(cmd, "source_id", state.SourceId);
AddParameter(cmd, "cursor", state.Cursor);
AddParameter(cmd, "last_sync_at", state.LastSyncAt);
AddParameter(cmd, "last_success_at", state.LastSuccessAt);
AddParameter(cmd, "last_error", state.LastError);
AddParameter(cmd, "sync_count", state.SyncCount);
AddParameter(cmd, "error_count", state.ErrorCount);
AddJsonbParameter(cmd, "metadata", state.Metadata);
},
MapState!,
cancellationToken).ConfigureAwait(false) ?? throw new InvalidOperationException("Upsert returned null");
}
public Task<SourceStateEntity?> GetBySourceIdAsync(Guid sourceId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, source_id, cursor, last_sync_at, last_success_at, last_error,
sync_count, error_count, metadata::text, updated_at
FROM vuln.source_states
WHERE source_id = @source_id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "source_id", sourceId),
MapState,
cancellationToken);
}
private static SourceStateEntity MapState(NpgsqlDataReader reader) => new()
{
Id = reader.GetGuid(0),
SourceId = reader.GetGuid(1),
Cursor = GetNullableString(reader, 2),
LastSyncAt = GetNullableDateTimeOffset(reader, 3),
LastSuccessAt = GetNullableDateTimeOffset(reader, 4),
LastError = GetNullableString(reader, 5),
SyncCount = reader.GetInt64(6),
ErrorCount = reader.GetInt32(7),
Metadata = reader.GetString(8),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(9)
};
}

View File

@@ -28,6 +28,18 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<IAdvisoryRepository, AdvisoryRepository>();
services.AddScoped<ISourceRepository, SourceRepository>();
services.AddScoped<IAdvisoryAliasRepository, AdvisoryAliasRepository>();
services.AddScoped<IAdvisoryCvssRepository, AdvisoryCvssRepository>();
services.AddScoped<IAdvisoryAffectedRepository, AdvisoryAffectedRepository>();
services.AddScoped<IAdvisoryReferenceRepository, AdvisoryReferenceRepository>();
services.AddScoped<IAdvisoryCreditRepository, AdvisoryCreditRepository>();
services.AddScoped<IAdvisoryWeaknessRepository, AdvisoryWeaknessRepository>();
services.AddScoped<IKevFlagRepository, KevFlagRepository>();
services.AddScoped<ISourceStateRepository, SourceStateRepository>();
services.AddScoped<IFeedSnapshotRepository, FeedSnapshotRepository>();
services.AddScoped<IAdvisorySnapshotRepository, AdvisorySnapshotRepository>();
services.AddScoped<IMergeEventRepository, MergeEventRepository>();
return services;
}
@@ -47,6 +59,18 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<IAdvisoryRepository, AdvisoryRepository>();
services.AddScoped<ISourceRepository, SourceRepository>();
services.AddScoped<IAdvisoryAliasRepository, AdvisoryAliasRepository>();
services.AddScoped<IAdvisoryCvssRepository, AdvisoryCvssRepository>();
services.AddScoped<IAdvisoryAffectedRepository, AdvisoryAffectedRepository>();
services.AddScoped<IAdvisoryReferenceRepository, AdvisoryReferenceRepository>();
services.AddScoped<IAdvisoryCreditRepository, AdvisoryCreditRepository>();
services.AddScoped<IAdvisoryWeaknessRepository, AdvisoryWeaknessRepository>();
services.AddScoped<IKevFlagRepository, KevFlagRepository>();
services.AddScoped<ISourceStateRepository, SourceStateRepository>();
services.AddScoped<IFeedSnapshotRepository, FeedSnapshotRepository>();
services.AddScoped<IAdvisorySnapshotRepository, AdvisorySnapshotRepository>();
services.AddScoped<IMergeEventRepository, MergeEventRepository>();
return services;
}

View File

@@ -16,6 +16,9 @@ builder.Services.Configure<DevPortalOfflineStorageOptions>(builder.Configuration
builder.Services.Configure<RiskBundleWorkerOptions>(builder.Configuration.GetSection("RiskBundles"));
builder.Services.Configure<RiskBundleManifestSigningOptions>(builder.Configuration.GetSection("RiskBundles:Signing"));
builder.Services.Configure<FileSystemRiskBundleStorageOptions>(builder.Configuration.GetSection("RiskBundles:Storage"));
builder.Services.AddSingleton<IValidateOptions<RiskBundleWorkerOptions>, RiskBundleWorkerOptionsValidator>();
builder.Services.AddSingleton<IValidateOptions<RiskBundleManifestSigningOptions>, RiskBundleSigningOptionsValidator>();
builder.Services.AddSingleton<IValidateOptions<FileSystemRiskBundleStorageOptions>, RiskBundleStorageOptionsValidator>();
builder.Services.AddSingleton<DevPortalOfflineBundleBuilder>();
builder.Services.AddSingleton<IDevPortalOfflineManifestSigner, HmacDevPortalOfflineManifestSigner>();

View File

@@ -0,0 +1,82 @@
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.RiskBundles;
namespace StellaOps.ExportCenter.Worker;
internal sealed class RiskBundleWorkerOptionsValidator : IValidateOptions<RiskBundleWorkerOptions>
{
public ValidateOptionsResult Validate(string? name, RiskBundleWorkerOptions options)
{
if (options is null)
{
return ValidateOptionsResult.Fail("Options instance is null.");
}
if (!options.Enabled)
{
return ValidateOptionsResult.Success;
}
var failures = new List<string>();
if (options.Providers is null || options.Providers.Count == 0)
{
failures.Add("At least one provider must be configured when RiskBundles.Enabled = true.");
}
else
{
foreach (var (provider, index) in options.Providers.Select((p, i) => (p, i)))
{
if (string.IsNullOrWhiteSpace(provider.ProviderId))
{
failures.Add($"Providers[{index}].ProviderId is required.");
}
if (string.IsNullOrWhiteSpace(provider.SourcePath))
{
failures.Add($"Providers[{index}].SourcePath is required.");
}
if (string.IsNullOrWhiteSpace(provider.Source))
{
failures.Add($"Providers[{index}].Source is required.");
}
}
}
return failures.Count == 0
? ValidateOptionsResult.Success
: ValidateOptionsResult.Fail(failures);
}
}
internal sealed class RiskBundleSigningOptionsValidator : IValidateOptions<RiskBundleManifestSigningOptions>
{
public ValidateOptionsResult Validate(string? name, RiskBundleManifestSigningOptions options)
{
if (options is null)
{
return ValidateOptionsResult.Fail("Signing options instance is null.");
}
if (string.IsNullOrWhiteSpace(options.Key))
{
return ValidateOptionsResult.Fail("RiskBundles:Signing:Key must be configured.");
}
return ValidateOptionsResult.Success;
}
}
internal sealed class RiskBundleStorageOptionsValidator : IValidateOptions<FileSystemRiskBundleStorageOptions>
{
public ValidateOptionsResult Validate(string? name, FileSystemRiskBundleStorageOptions options)
{
if (options is null)
{
return ValidateOptionsResult.Fail("Storage options instance is null.");
}
return string.IsNullOrWhiteSpace(options.RootPath)
? ValidateOptionsResult.Fail("RiskBundles:Storage:RootPath must be configured.")
: ValidateOptionsResult.Success;
}
}

View File

@@ -13,13 +13,16 @@ namespace StellaOps.Scanner.Worker.Processing.Reachability;
public sealed class ReachabilityPublishStageExecutor : IScanStageExecutor
{
private readonly IReachabilityUnionPublisherService _publisher;
private readonly IRichGraphPublisherService _richGraphPublisher;
private readonly ILogger<ReachabilityPublishStageExecutor> _logger;
public ReachabilityPublishStageExecutor(
IReachabilityUnionPublisherService publisher,
IRichGraphPublisherService richGraphPublisher,
ILogger<ReachabilityPublishStageExecutor> logger)
{
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
_richGraphPublisher = richGraphPublisher ?? throw new ArgumentNullException(nameof(richGraphPublisher));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -39,5 +42,14 @@ public sealed class ReachabilityPublishStageExecutor : IScanStageExecutor
context.Analysis.Set(ScanAnalysisKeys.ReachabilityUnionCas, publishResult);
_logger.LogInformation("Published reachability union graph to CAS: sha256={Sha} records={Records}", publishResult.Sha256, publishResult.Records);
var richGraphResult = await _richGraphPublisher.PublishAsync(graph, context.ScanId, cancellationToken).ConfigureAwait(false);
context.Analysis.Set(ScanAnalysisKeys.ReachabilityRichGraphCas, richGraphResult);
_logger.LogInformation(
"Published richgraph reachability to CAS: graph_hash={GraphHash} nodes={Nodes} edges={Edges}",
richGraphResult.GraphHash,
richGraphResult.NodeCount,
richGraphResult.EdgeCount);
}
}

View File

@@ -87,6 +87,9 @@ builder.Services.AddSingleton<IEntryTraceExecutionService, EntryTraceExecutionSe
builder.Services.AddSingleton<ReachabilityUnionWriter>();
builder.Services.AddSingleton<ReachabilityUnionPublisher>();
builder.Services.AddSingleton<IReachabilityUnionPublisherService, ReachabilityUnionPublisherService>();
builder.Services.AddSingleton<RichGraphWriter>();
builder.Services.AddSingleton<IRichGraphPublisher, ReachabilityRichGraphPublisher>();
builder.Services.AddSingleton<IRichGraphPublisherService, ReachabilityRichGraphPublisherService>();
builder.Services.AddSingleton<IScanStageExecutor, StellaOps.Scanner.Worker.Processing.Replay.ReplaySealedBundleStageExecutor>();
builder.Services.AddSingleton<StellaOps.Scanner.Worker.Processing.Replay.ReplayBundleFetcher>();

View File

@@ -30,6 +30,7 @@ public static class ScanAnalysisKeys
public const string ReachabilityUnionGraph = "analysis.reachability.union.graph";
public const string ReachabilityUnionCas = "analysis.reachability.union.cas";
public const string ReachabilityRichGraphCas = "analysis.reachability.richgraph.cas";
public const string FileEntries = "analysis.files.entries";
public const string EntropyReport = "analysis.entropy.report";

View File

@@ -0,0 +1,52 @@
using System;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Builds canonical CodeIDs used by richgraph-v1 to anchor symbols when names are missing.
/// </summary>
/// <remarks>
/// Format: <c>code:&lt;lang&gt;:&lt;base64url-sha256&gt;</c> where the hash is computed over a
/// canonical tuple that is stable across machines and paths.
/// </remarks>
public static class CodeId
{
public static string ForBinary(string buildId, string section, string? relativePath)
{
var tuple = $"{Norm(buildId)}\0{Norm(section)}\0{Norm(relativePath)}";
return Build("binary", tuple);
}
public static string ForDotNet(string assemblyName, string moduleName, string? mvid)
{
var tuple = $"{Norm(assemblyName)}\0{Norm(moduleName)}\0{Norm(mvid)}";
return Build("dotnet", tuple);
}
public static string ForNode(string packageName, string entryPath)
{
var tuple = $"{Norm(packageName)}\0{Norm(entryPath)}";
return Build("node", tuple);
}
public static string FromSymbolId(string symbolId)
{
ArgumentException.ThrowIfNullOrWhiteSpace(symbolId);
return Build("sym", symbolId.Trim());
}
private static string Build(string lang, string tuple)
{
using var sha = SHA256.Create();
var hash = sha.ComputeHash(Encoding.UTF8.GetBytes(tuple));
var base64 = Convert.ToBase64String(hash)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
return $"code:{lang}:{base64}";
}
private static string Norm(string? value) => (value ?? string.Empty).Trim();
}
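
A brief usage sketch (inputs are illustrative; only the output shape is fixed by the format described above):

// Illustrative inputs; any stable assembly/module/package identifiers work.
var dotnetId = CodeId.ForDotNet("Contoso.App", "Contoso.App.dll", mvid: null);
var nodeId = CodeId.ForNode("@scope/pkg", "lib/index.js");
var fallback = CodeId.FromSymbolId("sym:dotnet:Contoso.App::Program.Main");
// Each result has the form "code:<lang>:<base64url-sha256>", so equal inputs
// yield the same identifier regardless of machine or working directory.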

View File

@@ -144,17 +144,18 @@ public sealed partial class DotNetReachabilityLifter : IReachabilityLifter
// Add assembly node
var assemblySymbol = SymbolId.ForDotNet(info.AssemblyName, string.Empty, string.Empty, string.Empty);
builder.AddNode(
symbolId: assemblySymbol,
lang: SymbolId.Lang.DotNet,
kind: "assembly",
display: info.AssemblyName,
sourceFile: relativePath,
attributes: new Dictionary<string, string>
{
["target_framework"] = info.TargetFramework,
["root_namespace"] = info.RootNamespace
});
builder.AddNode(
symbolId: assemblySymbol,
lang: SymbolId.Lang.DotNet,
kind: "assembly",
display: info.AssemblyName,
sourceFile: relativePath,
attributes: new Dictionary<string, string>
{
["target_framework"] = info.TargetFramework,
["root_namespace"] = info.RootNamespace,
["code_id"] = CodeId.ForDotNet(info.AssemblyName, info.AssemblyName, null)
});
// Add namespace node
if (!string.IsNullOrWhiteSpace(info.RootNamespace))
@@ -348,7 +349,8 @@ public sealed partial class DotNetReachabilityLifter : IReachabilityLifter
attributes: new Dictionary<string, string>
{
["version"] = version,
["purl"] = $"pkg:nuget/{packageName}@{version}"
["purl"] = $"pkg:nuget/{packageName}@{version}",
["code_id"] = CodeId.ForDotNet(packageName, packageName, null)
});
// Process dependencies

View File

@@ -94,7 +94,8 @@ public sealed class NodeReachabilityLifter : IReachabilityLifter
attributes: new Dictionary<string, string>
{
["version"] = pkgVersion ?? "0.0.0",
["purl"] = $"pkg:npm/{EncodePackageName(pkgName)}@{pkgVersion}"
["purl"] = $"pkg:npm/{EncodePackageName(pkgName)}@{pkgVersion}",
["code_id"] = CodeId.ForNode(pkgName, "module")
});
// Process entrypoints (main, module, exports)
@@ -137,7 +138,11 @@ public sealed class NodeReachabilityLifter : IReachabilityLifter
lang: SymbolId.Lang.Node,
kind: "entrypoint",
display: $"{pkgName}:{mainPath}",
sourceFile: NormalizePath(mainPath));
sourceFile: NormalizePath(mainPath),
attributes: new Dictionary<string, string>
{
["code_id"] = CodeId.ForNode(pkgName, NormalizePath(mainPath))
});
builder.AddEdge(
from: moduleSymbol,
@@ -162,7 +167,11 @@ public sealed class NodeReachabilityLifter : IReachabilityLifter
lang: SymbolId.Lang.Node,
kind: "entrypoint",
display: $"{pkgName}:{modulePath} (ESM)",
sourceFile: NormalizePath(modulePath));
sourceFile: NormalizePath(modulePath),
attributes: new Dictionary<string, string>
{
["code_id"] = CodeId.ForNode(pkgName, NormalizePath(modulePath))
});
builder.AddEdge(
from: moduleSymbol,
@@ -219,7 +228,11 @@ public sealed class NodeReachabilityLifter : IReachabilityLifter
kind: "binary",
display: $"{binName} -> {binPath}",
sourceFile: NormalizePath(binPath),
attributes: new Dictionary<string, string> { ["bin_name"] = binName });
attributes: new Dictionary<string, string>
{
["bin_name"] = binName,
["code_id"] = CodeId.ForNode(pkgName, NormalizePath(binPath))
});
builder.AddEdge(
from: moduleSymbol,

View File

@@ -0,0 +1,86 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Cache.Abstractions;
namespace StellaOps.Scanner.Reachability;
public interface IRichGraphPublisher
{
Task<RichGraphPublishResult> PublishAsync(RichGraph graph, string analysisId, IFileContentAddressableStore cas, string workRoot, CancellationToken cancellationToken = default);
}
/// <summary>
/// Packages richgraph-v1 JSON + meta into a deterministic zip and stores it in CAS.
/// </summary>
public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
{
private readonly RichGraphWriter _writer;
public ReachabilityRichGraphPublisher(RichGraphWriter writer)
{
_writer = writer ?? throw new ArgumentNullException(nameof(writer));
}
public async Task<RichGraphPublishResult> PublishAsync(
RichGraph graph,
string analysisId,
IFileContentAddressableStore cas,
string workRoot,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(graph);
ArgumentNullException.ThrowIfNull(cas);
ArgumentException.ThrowIfNullOrWhiteSpace(analysisId);
ArgumentException.ThrowIfNullOrWhiteSpace(workRoot);
Directory.CreateDirectory(workRoot);
var writeResult = await _writer.WriteAsync(graph, workRoot, analysisId, cancellationToken).ConfigureAwait(false);
var folder = Path.GetDirectoryName(writeResult.GraphPath)!;
var zipPath = Path.Combine(folder, "richgraph.zip");
CreateDeterministicZip(folder, zipPath);
await using var stream = File.OpenRead(zipPath);
var sha = ComputeSha256(zipPath);
var casEntry = await cas.PutAsync(new FileCasPutRequest(sha, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
return new RichGraphPublishResult(writeResult.GraphHash, casEntry.RelativePath, writeResult.NodeCount, writeResult.EdgeCount);
}
private static void CreateDeterministicZip(string sourceDir, string destinationZip)
{
    if (File.Exists(destinationZip))
    {
        File.Delete(destinationZip);
    }
    var files = Directory.EnumerateFiles(sourceDir, "*", SearchOption.TopDirectoryOnly)
        .OrderBy(f => f, StringComparer.Ordinal)
        .ToList();
    using var zip = ZipFile.Open(destinationZip, ZipArchiveMode.Create);
    foreach (var file in files)
    {
        var entryName = Path.GetFileName(file);
        // CreateEntryFromFile would stamp the source file's last-write time into the entry,
        // making the archive bytes vary between runs; pin the timestamp to keep the zip deterministic.
        var entry = zip.CreateEntry(entryName, CompressionLevel.Optimal);
        entry.LastWriteTime = new DateTimeOffset(1980, 1, 1, 0, 0, 0, TimeSpan.Zero);
        using var entryStream = entry.Open();
        using var source = File.OpenRead(file);
        source.CopyTo(entryStream);
    }
}
private static string ComputeSha256(string path)
{
using var sha = System.Security.Cryptography.SHA256.Create();
using var stream = File.OpenRead(path);
return Convert.ToHexString(sha.ComputeHash(stream)).ToLowerInvariant();
}
}
public sealed record RichGraphPublishResult(
string GraphHash,
string RelativePath,
int NodeCount,
int EdgeCount);

View File

@@ -0,0 +1,41 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Surface.Env;
namespace StellaOps.Scanner.Reachability;
public interface IRichGraphPublisherService
{
Task<RichGraphPublishResult> PublishAsync(ReachabilityUnionGraph graph, string analysisId, CancellationToken cancellationToken = default);
}
/// <summary>
/// Service wrapper that builds richgraph-v1 from a union graph and stores it in CAS.
/// </summary>
public sealed class ReachabilityRichGraphPublisherService : IRichGraphPublisherService
{
private readonly ISurfaceEnvironment _environment;
private readonly IFileContentAddressableStore _cas;
private readonly IRichGraphPublisher _publisher;
public ReachabilityRichGraphPublisherService(
ISurfaceEnvironment environment,
IFileContentAddressableStore cas,
IRichGraphPublisher publisher)
{
_environment = environment ?? throw new ArgumentNullException(nameof(environment));
_cas = cas ?? throw new ArgumentNullException(nameof(cas));
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
}
public Task<RichGraphPublishResult> PublishAsync(ReachabilityUnionGraph graph, string analysisId, CancellationToken cancellationToken = default)
{
var richGraph = RichGraphBuilder.FromUnion(graph, "scanner.reachability", "0.1.0");
var workRoot = Path.Combine(_environment.Settings.CacheRoot.FullName, "reachability");
Directory.CreateDirectory(workRoot);
return _publisher.PublishAsync(richGraph, analysisId, _cas, workRoot, cancellationToken);
}
}

View File

@@ -0,0 +1,225 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Canonical richgraph-v1 document used for CAS storage and Signals ingestion.
/// </summary>
public sealed record RichGraph(
IReadOnlyList<RichGraphNode> Nodes,
IReadOnlyList<RichGraphEdge> Edges,
IReadOnlyList<RichGraphRoot> Roots,
RichGraphAnalyzer Analyzer,
string Schema = "richgraph-v1")
{
public RichGraph Trimmed()
{
var nodes = (Nodes ?? Array.Empty<RichGraphNode>())
.Where(n => !string.IsNullOrWhiteSpace(n.Id))
.Select(n => n.Trimmed())
.OrderBy(n => n.Id, StringComparer.Ordinal)
.ToList();
var edges = (Edges ?? Array.Empty<RichGraphEdge>())
.Where(e => !string.IsNullOrWhiteSpace(e.From) && !string.IsNullOrWhiteSpace(e.To))
.Select(e => e.Trimmed())
.OrderBy(e => e.From, StringComparer.Ordinal)
.ThenBy(e => e.To, StringComparer.Ordinal)
.ThenBy(e => e.Kind, StringComparer.Ordinal)
.ToList();
var roots = (Roots ?? Array.Empty<RichGraphRoot>())
.Select(r => r.Trimmed())
.OrderBy(r => r.Id, StringComparer.Ordinal)
.ToList();
return this with { Nodes = nodes, Edges = edges, Roots = roots, Analyzer = Analyzer.Trimmed() };
}
}
public sealed record RichGraphNode(
string Id,
string SymbolId,
string? CodeId,
string? Purl,
string Lang,
string Kind,
string? Display,
string? BuildId,
IReadOnlyList<string>? Evidence,
IReadOnlyDictionary<string, string>? Attributes,
string? SymbolDigest)
{
public RichGraphNode Trimmed()
{
return this with
{
Id = Id.Trim(),
SymbolId = SymbolId.Trim(),
CodeId = string.IsNullOrWhiteSpace(CodeId) ? null : CodeId.Trim(),
Purl = string.IsNullOrWhiteSpace(Purl) ? null : Purl.Trim(),
Lang = Lang.Trim(),
Kind = Kind.Trim(),
Display = string.IsNullOrWhiteSpace(Display) ? null : Display.Trim(),
BuildId = string.IsNullOrWhiteSpace(BuildId) ? null : BuildId.Trim(),
SymbolDigest = string.IsNullOrWhiteSpace(SymbolDigest) ? null : SymbolDigest.Trim(),
Evidence = Evidence is null
? Array.Empty<string>()
: Evidence.Where(e => !string.IsNullOrWhiteSpace(e)).Select(e => e.Trim()).OrderBy(e => e, StringComparer.Ordinal).ToArray(),
Attributes = Attributes is null
? ImmutableDictionary<string, string>.Empty
: Attributes.Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
.ToImmutableSortedDictionary(kv => kv.Key.Trim(), kv => kv.Value!.Trim(), StringComparer.Ordinal)
};
}
}
public sealed record RichGraphEdge(
string From,
string To,
string Kind,
string? Purl,
string? SymbolDigest,
IReadOnlyList<string>? Evidence,
double Confidence,
IReadOnlyList<string>? Candidates)
{
public RichGraphEdge Trimmed()
{
return this with
{
From = From.Trim(),
To = To.Trim(),
Kind = string.IsNullOrWhiteSpace(Kind) ? "call" : Kind.Trim(),
Purl = string.IsNullOrWhiteSpace(Purl) ? null : Purl.Trim(),
SymbolDigest = string.IsNullOrWhiteSpace(SymbolDigest) ? null : SymbolDigest.Trim(),
Evidence = Evidence is null
? Array.Empty<string>()
: Evidence.Where(e => !string.IsNullOrWhiteSpace(e)).Select(e => e.Trim()).OrderBy(e => e, StringComparer.Ordinal).ToArray(),
Candidates = Candidates is null
? Array.Empty<string>()
: Candidates.Where(c => !string.IsNullOrWhiteSpace(c)).Select(c => c.Trim()).OrderBy(c => c, StringComparer.Ordinal).ToArray(),
Confidence = ClampConfidence(Confidence)
};
}
private static double ClampConfidence(double value) => Math.Min(1.0, Math.Max(0.0, value));
}
public sealed record RichGraphRoot(string Id, string Phase, string? Source)
{
public RichGraphRoot Trimmed()
=> new(Id.Trim(), string.IsNullOrWhiteSpace(Phase) ? "runtime" : Phase.Trim(), string.IsNullOrWhiteSpace(Source) ? null : Source.Trim());
}
public sealed record RichGraphAnalyzer(string Name, string Version, string? ToolchainDigest)
{
public RichGraphAnalyzer Trimmed()
=> new(
string.IsNullOrWhiteSpace(Name) ? "scanner.reachability" : Name.Trim(),
string.IsNullOrWhiteSpace(Version) ? "0.1.0" : Version.Trim(),
string.IsNullOrWhiteSpace(ToolchainDigest) ? null : ToolchainDigest.Trim());
}
/// <summary>
/// Transforms the union graph into a richgraph-v1 payload with purl/symbol digests.
/// </summary>
public static class RichGraphBuilder
{
public static RichGraph FromUnion(ReachabilityUnionGraph union, string analyzerName, string analyzerVersion)
{
ArgumentNullException.ThrowIfNull(union);
var nodePurls = new Dictionary<string, string>(StringComparer.Ordinal);
var nodeDigests = new Dictionary<string, string>(StringComparer.Ordinal);
var nodes = new List<RichGraphNode>();
foreach (var node in union.Nodes ?? Array.Empty<ReachabilityUnionNode>())
{
if (string.IsNullOrWhiteSpace(node.SymbolId))
{
continue;
}
var symbolId = node.SymbolId.Trim();
var purl = node.Attributes is not null && node.Attributes.TryGetValue("purl", out var p) ? p : null;
if (!string.IsNullOrWhiteSpace(purl))
{
nodePurls[symbolId] = purl!;
}
var symbolDigest = ComputeSymbolDigest(symbolId);
nodeDigests[symbolId] = symbolDigest;
var codeId = node.Attributes is not null && node.Attributes.TryGetValue("code_id", out var cid)
? cid
: CodeId.FromSymbolId(symbolId);
nodes.Add(new RichGraphNode(
Id: symbolId,
SymbolId: symbolId,
CodeId: codeId,
Purl: purl,
Lang: node.Lang,
Kind: node.Kind,
Display: node.Display,
BuildId: node.Attributes is not null && node.Attributes.TryGetValue("build_id", out var bid) ? bid : null,
Evidence: node.Source?.Evidence is null ? Array.Empty<string>() : new[] { node.Source.Evidence },
Attributes: node.Attributes,
SymbolDigest: symbolDigest));
}
var edges = new List<RichGraphEdge>();
foreach (var edge in union.Edges ?? Array.Empty<ReachabilityUnionEdge>())
{
if (string.IsNullOrWhiteSpace(edge.From) || string.IsNullOrWhiteSpace(edge.To))
{
continue;
}
var toId = edge.To.Trim();
nodePurls.TryGetValue(toId, out var edgePurl);
nodeDigests.TryGetValue(toId, out var edgeDigest);
edges.Add(new RichGraphEdge(
From: edge.From.Trim(),
To: toId,
Kind: edge.EdgeType,
Purl: edgePurl ?? "pkg:unknown",
SymbolDigest: edgeDigest,
Evidence: edge.Source?.Evidence is null ? Array.Empty<string>() : new[] { edge.Source.Evidence },
Confidence: ConfidenceToProbability(edge.Confidence),
Candidates: edgePurl is null ? new[] { "pkg:unknown" } : Array.Empty<string>()));
}
// include any synthetic roots if provided as attributes
var roots = nodes
.Where(n => n.Attributes is not null && n.Attributes.ContainsKey("root"))
.Select(n => new RichGraphRoot(n.Id, "runtime", n.Attributes!["root"]))
.ToList();
return new RichGraph(nodes, edges, roots, new RichGraphAnalyzer(analyzerName, analyzerVersion, null)).Trimmed();
}
private static string ComputeSymbolDigest(string symbolId)
{
using var sha = SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(symbolId));
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static double ConfidenceToProbability(string? confidence)
{
return (confidence ?? string.Empty).Trim().ToLowerInvariant() switch
{
"certain" => 1.0,
"high" => 0.9,
"medium" => 0.6,
"low" => 0.3,
_ => 0.6
};
}
}
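
A minimal sketch of the union-to-rich transform (constructor shapes mirror the test fixtures added later in this commit; optional parameters are omitted):

// "high" maps to confidence 0.9 via ConfidenceToProbability; unknown values default to 0.6.
var union = new ReachabilityUnionGraph(
    Nodes: new[]
    {
        new ReachabilityUnionNode("sym:dotnet:App.Program::Main", "dotnet", "method", display: "Main")
    },
    Edges: new[]
    {
        new ReachabilityUnionEdge("sym:dotnet:App.Program::Main", "sym:dotnet:App.Worker::Run", "call", "high")
    });

var rich = RichGraphBuilder.FromUnion(union, "scanner.reachability", "0.1.0");
// Nodes gain a derived code_id and a sha256:<hex> symbol_digest; the edge carries the mapped confidence.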

View File

@@ -0,0 +1,185 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Writes richgraph-v1 documents to disk with canonical ordering and a SHA-256 graph hash.
/// </summary>
public sealed class RichGraphWriter
{
private static readonly JsonWriterOptions JsonOptions = new()
{
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
Indented = false,
SkipValidation = false
};
public async Task<RichGraphWriteResult> WriteAsync(
RichGraph graph,
string outputRoot,
string analysisId,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(graph);
ArgumentException.ThrowIfNullOrWhiteSpace(outputRoot);
ArgumentException.ThrowIfNullOrWhiteSpace(analysisId);
var trimmed = graph.Trimmed();
var root = Path.Combine(outputRoot, "reachability_graphs", analysisId);
Directory.CreateDirectory(root);
var graphPath = Path.Combine(root, "richgraph-v1.json");
await using (var stream = File.Create(graphPath))
await using (var writer = new Utf8JsonWriter(stream, JsonOptions))
{
WriteGraph(writer, trimmed);
await writer.FlushAsync(cancellationToken).ConfigureAwait(false);
}
var bytes = await File.ReadAllBytesAsync(graphPath, cancellationToken).ConfigureAwait(false);
var graphHash = ComputeSha256(bytes);
var metaPath = Path.Combine(root, "meta.json");
await using (var stream = File.Create(metaPath))
await using (var writer = new Utf8JsonWriter(stream, new JsonWriterOptions { Indented = true }))
{
writer.WriteStartObject();
writer.WriteString("schema", trimmed.Schema);
writer.WriteString("graph_hash", graphHash);
writer.WritePropertyName("files");
writer.WriteStartArray();
writer.WriteStartObject();
writer.WriteString("path", graphPath);
writer.WriteString("hash", graphHash);
writer.WriteEndObject();
writer.WriteEndArray();
writer.WriteEndObject();
await writer.FlushAsync(cancellationToken).ConfigureAwait(false);
}
return new RichGraphWriteResult(graphPath, metaPath, graphHash, trimmed.Nodes.Count, trimmed.Edges.Count);
}
private static void WriteGraph(Utf8JsonWriter writer, RichGraph graph)
{
writer.WriteStartObject();
writer.WriteString("schema", graph.Schema);
writer.WritePropertyName("analyzer");
writer.WriteStartObject();
writer.WriteString("name", graph.Analyzer.Name);
writer.WriteString("version", graph.Analyzer.Version);
if (!string.IsNullOrWhiteSpace(graph.Analyzer.ToolchainDigest))
{
writer.WriteString("toolchain_digest", graph.Analyzer.ToolchainDigest);
}
writer.WriteEndObject();
writer.WritePropertyName("nodes");
writer.WriteStartArray();
foreach (var node in graph.Nodes)
{
writer.WriteStartObject();
writer.WriteString("id", node.Id);
writer.WriteString("symbol_id", node.SymbolId);
writer.WriteString("lang", node.Lang);
writer.WriteString("kind", node.Kind);
if (!string.IsNullOrWhiteSpace(node.Display)) writer.WriteString("display", node.Display);
if (!string.IsNullOrWhiteSpace(node.CodeId)) writer.WriteString("code_id", node.CodeId);
if (!string.IsNullOrWhiteSpace(node.Purl)) writer.WriteString("purl", node.Purl);
if (!string.IsNullOrWhiteSpace(node.BuildId)) writer.WriteString("build_id", node.BuildId);
if (!string.IsNullOrWhiteSpace(node.SymbolDigest)) writer.WriteString("symbol_digest", node.SymbolDigest);
if (node.Evidence is { Count: > 0 })
{
writer.WritePropertyName("evidence");
writer.WriteStartArray();
foreach (var e in node.Evidence) writer.WriteStringValue(e);
writer.WriteEndArray();
}
if (node.Attributes is { Count: > 0 })
{
writer.WritePropertyName("attributes");
writer.WriteStartObject();
foreach (var kv in node.Attributes)
{
writer.WriteString(kv.Key, kv.Value);
}
writer.WriteEndObject();
}
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WritePropertyName("edges");
writer.WriteStartArray();
foreach (var edge in graph.Edges)
{
writer.WriteStartObject();
writer.WriteString("from", edge.From);
writer.WriteString("to", edge.To);
writer.WriteString("kind", edge.Kind);
if (!string.IsNullOrWhiteSpace(edge.Purl)) writer.WriteString("purl", edge.Purl);
if (!string.IsNullOrWhiteSpace(edge.SymbolDigest)) writer.WriteString("symbol_digest", edge.SymbolDigest);
writer.WriteNumber("confidence", edge.Confidence);
if (edge.Evidence is { Count: > 0 })
{
writer.WritePropertyName("evidence");
writer.WriteStartArray();
foreach (var e in edge.Evidence) writer.WriteStringValue(e);
writer.WriteEndArray();
}
if (edge.Candidates is { Count: > 0 })
{
writer.WritePropertyName("candidates");
writer.WriteStartArray();
foreach (var c in edge.Candidates) writer.WriteStringValue(c);
writer.WriteEndArray();
}
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WritePropertyName("roots");
writer.WriteStartArray();
foreach (var root in graph.Roots)
{
writer.WriteStartObject();
writer.WriteString("id", root.Id);
writer.WriteString("phase", root.Phase);
if (!string.IsNullOrWhiteSpace(root.Source)) writer.WriteString("source", root.Source);
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WriteEndObject();
}
private static string ComputeSha256(byte[] bytes)
{
    using var sha = SHA256.Create();
    var hash = sha.ComputeHash(bytes);
    return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
}
public sealed record RichGraphWriteResult(
string GraphPath,
string MetaPath,
string GraphHash,
int NodeCount,
int EdgeCount);

View File

@@ -0,0 +1,27 @@
using System.Threading.Tasks;
using StellaOps.Scanner.Reachability;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
public class RichGraphPublisherTests
{
[Fact]
public async Task PublishesGraphToCas()
{
var writer = new RichGraphWriter();
var publisher = new ReachabilityRichGraphPublisher(writer);
var cas = new FakeFileContentAddressableStore();
using var temp = new TempDir();
var union = new ReachabilityUnionGraph(
Nodes: new[] { new ReachabilityUnionNode("sym:node:a", "node", "module") },
Edges: new ReachabilityUnionEdge[0]);
var rich = RichGraphBuilder.FromUnion(union, "test", "1.0.0");
var result = await publisher.PublishAsync(rich, "scan-1", cas, temp.Path);
Assert.StartsWith("blake3:", result.GraphHash);
Assert.Equal(1, result.NodeCount);
}
}

View File

@@ -0,0 +1,38 @@
using System.IO;
using System.Threading.Tasks;
using StellaOps.Scanner.Reachability;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
public class RichGraphWriterTests
{
[Fact]
public async Task WritesCanonicalGraphAndMeta()
{
var writer = new RichGraphWriter();
using var temp = new TempDir();
var union = new ReachabilityUnionGraph(
Nodes: new[]
{
new ReachabilityUnionNode("sym:dotnet:B", "dotnet", "method", display: "B"),
new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method", display: "A")
},
Edges: new[]
{
new ReachabilityUnionEdge("sym:dotnet:A", "sym:dotnet:B", "call", "high")
});
var rich = RichGraphBuilder.FromUnion(union, "test-analyzer", "1.0.0");
var result = await writer.WriteAsync(rich, temp.Path, "analysis-1");
Assert.True(File.Exists(result.GraphPath));
Assert.True(File.Exists(result.MetaPath));
var json = await File.ReadAllTextAsync(result.GraphPath);
Assert.Contains("richgraph-v1", json);
Assert.StartsWith("sha256:", result.GraphHash);
Assert.Equal(2, result.NodeCount);
Assert.Equal(1, result.EdgeCount);
}
}

View File

@@ -1,4 +1,5 @@
using System.Security.Cryptography;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;

View File

@@ -16,5 +16,11 @@
]
}
},
"Postgres": {
"Timeline": {
"ConnectionString": "Host=localhost;Database=timeline;Username=timeline;Password=timeline;",
"SchemaName": "timeline"
}
},
"AllowedHosts": "*"
}

View File

@@ -3,6 +3,6 @@
| Task ID | State | Notes |
| --- | --- | --- |
| WEB-AOC-19-002 | DONE (2025-11-30) | Added provenance builder, checksum utilities, and DSSE/CMS signature verification helpers with unit tests. |
| WEB-AOC-19-003 | TODO | Analyzer/guard validation remains; will align once helper APIs settle. |
| WEB-CONSOLE-23-002 | TODO | Status/stream endpoints to proxy Scheduler once contracts finalized. |
| WEB-AOC-19-003 | DONE (2025-11-30) | Added client-side guard validator (forbidden/derived/unknown fields, provenance/signature checks) with unit fixtures. |
| WEB-CONSOLE-23-002 | DOING (2025-12-01) | Console status polling + SSE run stream client/store/UI added; tests pending until the test environment is fixed. |
| WEB-EXC-25-001 | TODO | Exceptions workflow CRUD pending policy scopes. |

View File

@@ -3,22 +3,28 @@ import { APP_INITIALIZER, ApplicationConfig } from '@angular/core';
import { provideRouter } from '@angular/router';
import { routes } from './app.routes';
import { CONCELIER_EXPORTER_API_BASE_URL } from './core/api/concelier-exporter.client';
import {
AUTHORITY_CONSOLE_API,
AUTHORITY_CONSOLE_API_BASE_URL,
AuthorityConsoleApiHttpClient,
} from './core/api/authority-console.client';
import { CONCELIER_EXPORTER_API_BASE_URL } from './core/api/concelier-exporter.client';
import {
AUTHORITY_CONSOLE_API,
AUTHORITY_CONSOLE_API_BASE_URL,
AuthorityConsoleApiHttpClient,
} from './core/api/authority-console.client';
import {
CONSOLE_API_BASE_URL,
DEFAULT_EVENT_SOURCE_FACTORY,
EVENT_SOURCE_FACTORY,
} from './core/api/console-status.client';
import {
NOTIFY_API,
NOTIFY_API_BASE_URL,
NOTIFY_TENANT_ID,
} from './core/api/notify.client';
import { AppConfigService } from './core/config/app-config.service';
import { AuthHttpInterceptor } from './core/auth/auth-http.interceptor';
import { OperatorMetadataInterceptor } from './core/orchestrator/operator-metadata.interceptor';
import { MockNotifyApiService } from './testing/mock-notify-api.service';
NOTIFY_TENANT_ID,
} from './core/api/notify.client';
import { CONSOLE_API_BASE_URL } from './core/api/console-status.client';
import { AppConfigService } from './core/config/app-config.service';
import { AuthHttpInterceptor } from './core/auth/auth-http.interceptor';
import { OperatorMetadataInterceptor } from './core/orchestrator/operator-metadata.interceptor';
import { MockNotifyApiService } from './testing/mock-notify-api.service';
export const appConfig: ApplicationConfig = {
providers: [
provideRouter(routes),
@@ -64,14 +70,33 @@ export const appConfig: ApplicationConfig = {
provide: AUTHORITY_CONSOLE_API,
useExisting: AuthorityConsoleApiHttpClient,
},
{
provide: NOTIFY_API_BASE_URL,
useValue: '/api/v1/notify',
},
{
provide: NOTIFY_TENANT_ID,
useValue: 'tenant-dev',
},
{
provide: NOTIFY_API_BASE_URL,
useValue: '/api/v1/notify',
},
{
provide: CONSOLE_API_BASE_URL,
deps: [AppConfigService],
useFactory: (config: AppConfigService) => {
const authorityBase = config.config.apiBaseUrls.authority;
try {
return new URL('/console', authorityBase).toString();
} catch {
const normalized = authorityBase.endsWith('/')
? authorityBase.slice(0, -1)
: authorityBase;
return `${normalized}/console`;
}
},
},
{
provide: EVENT_SOURCE_FACTORY,
useValue: DEFAULT_EVENT_SOURCE_FACTORY,
},
{
provide: NOTIFY_TENANT_ID,
useValue: 'tenant-dev',
},
MockNotifyApiService,
{
provide: NOTIFY_API,

View File

@@ -0,0 +1,56 @@
import { validateAocDocument } from './aoc-guard';
describe('AOC Guard (frontend)', () => {
const baseDoc = {
tenant: 'tenant-1',
source: { id: 'source-1' },
upstream: {
content_hash: 'sha256:abc',
signature: { present: true, format: 'cms+dsse', sig: 'c2ln', key_id: 'k1' },
},
content: {
raw: { advisory: 'example' },
},
linkset: {},
} as const;
it('rejects forbidden top-level fields with ERR_AOC_001', () => {
const result = validateAocDocument({ ...baseDoc, severity: 'critical' });
expect(result.some((v) => v.code === 'ERR_AOC_001' && v.path === '/severity')).toBeTrue();
});
it('rejects derived fields with ERR_AOC_006', () => {
const result = validateAocDocument({ ...baseDoc, effective_status: 'open' });
expect(result.some((v) => v.code === 'ERR_AOC_006' && v.path === '/effective_status')).toBeTrue();
});
it('rejects unknown fields with ERR_AOC_007', () => {
const result = validateAocDocument({ ...baseDoc, unexpected: true });
expect(result.some((v) => v.code === 'ERR_AOC_007' && v.path === '/unexpected')).toBeTrue();
});
it('enforces tenant presence and non-empty string', () => {
const missingTenant = validateAocDocument({ ...baseDoc, tenant: undefined });
expect(missingTenant.some((v) => v.code === 'ERR_AOC_004' && v.path === '/tenant')).toBeTrue();
const emptyTenant = validateAocDocument({ ...baseDoc, tenant: ' ' });
expect(emptyTenant.some((v) => v.code === 'ERR_AOC_004' && v.path === '/tenant')).toBeTrue();
});
it('validates signature subfields when present', () => {
const result = validateAocDocument({
...baseDoc,
upstream: { content_hash: 'sha256:abc', signature: { present: true } },
});
expect(result.some((v) => v.path === '/upstream/signature/format')).toBeTrue();
expect(result.some((v) => v.path === '/upstream/signature/sig')).toBeTrue();
expect(result.some((v) => v.path === '/upstream/signature/key_id')).toBeTrue();
expect(result.every((v) => v.code === 'ERR_AOC_005')).toBeTrue();
});
it('accepts a minimal valid document', () => {
const result = validateAocDocument({ ...baseDoc });
expect(result.length).toBe(0);
});
});

View File

@@ -0,0 +1,213 @@
export type AocViolationCode =
| 'ERR_AOC_000'
| 'ERR_AOC_001'
| 'ERR_AOC_002'
| 'ERR_AOC_003'
| 'ERR_AOC_004'
| 'ERR_AOC_005'
| 'ERR_AOC_006'
| 'ERR_AOC_007';
export interface AocGuardOptions {
requireSignatureMetadata?: boolean;
requireTenant?: boolean;
allowedTopLevelFields?: string[];
requiredTopLevelFields?: string[];
}
export interface AocViolation {
code: AocViolationCode;
path: string;
message: string;
}
const DEFAULT_REQUIRED = ['tenant', 'source', 'upstream', 'content', 'linkset'];
const DEFAULT_ALLOWED = new Set(
[
...DEFAULT_REQUIRED,
'_id',
'identifiers',
'attributes',
'supersedes',
'createdAt',
'created_at',
'ingestedAt',
'ingested_at',
'links',
'advisory_key',
].map((k) => k.toLowerCase())
);
const FORBIDDEN_FIELDS = new Set(
[
'severity',
'cvss',
'cvss_vector',
'effective_status',
'effective_range',
'merged_from',
'consensus_provider',
'reachability',
'asset_criticality',
'risk_score',
].map((k) => k.toLowerCase())
);
const isDerivedField = (name: string) => name.toLowerCase().startsWith('effective_');
const asObject = (value: unknown): Record<string, unknown> | undefined =>
value && typeof value === 'object' && !Array.isArray(value) ? (value as Record<string, unknown>) : undefined;
export function validateAocDocument(
document: Record<string, unknown>,
options: AocGuardOptions = {}
): AocViolation[] {
const violations: AocViolation[] = [];
const allowed = new Set(
(options.allowedTopLevelFields ?? Array.from(DEFAULT_ALLOWED)).map((k) => k.toLowerCase())
);
const required = options.requiredTopLevelFields ?? DEFAULT_REQUIRED;
const entries = Object.entries(document);
for (const [keyRaw, value] of entries) {
const key = keyRaw.toLowerCase();
if (isDerivedField(keyRaw)) {
violations.push({
code: 'ERR_AOC_006',
path: `/${keyRaw}`,
message: `Derived field '${keyRaw}' must not be written during ingestion.`,
});
continue;
}
if (FORBIDDEN_FIELDS.has(key)) {
violations.push({
code: 'ERR_AOC_001',
path: `/${keyRaw}`,
message: `Field '${keyRaw}' is forbidden in AOC documents.`,
});
continue;
}
if (!allowed.has(key)) {
violations.push({
code: 'ERR_AOC_007',
path: `/${keyRaw}`,
message: `Field '${keyRaw}' is not allowed in AOC documents.`,
});
continue;
}
}
for (const field of required) {
const value = document[field];
if (value === undefined || value === null) {
violations.push({
code: 'ERR_AOC_004',
path: `/${field}`,
message: `Required field '${field}' is missing.`,
});
continue;
}
if ((options.requireTenant ?? true) && field.toLowerCase() === 'tenant') {
if (typeof value !== 'string' || value.trim().length === 0) {
violations.push({
code: 'ERR_AOC_004',
path: '/tenant',
message: 'Tenant must be a non-empty string.',
});
}
}
}
const upstream = asObject(document['upstream']);
if (!upstream) {
violations.push({
code: 'ERR_AOC_004',
path: '/upstream',
message: 'Upstream metadata is required.',
});
} else {
const contentHash = upstream['content_hash'];
if (typeof contentHash !== 'string' || contentHash.trim().length === 0) {
violations.push({
code: 'ERR_AOC_004',
path: '/upstream/content_hash',
message: 'Upstream content hash is required.',
});
}
const requireSig = options.requireSignatureMetadata ?? true;
const signature = asObject(upstream['signature']);
if (requireSig && !signature) {
violations.push({
code: 'ERR_AOC_004',
path: '/upstream/signature',
message: 'Signature metadata is required.',
});
} else if (requireSig && signature) {
const present = signature['present'];
if (typeof present !== 'boolean') {
violations.push({
code: 'ERR_AOC_005',
path: '/upstream/signature/present',
message: "Signature metadata must include 'present' boolean.",
});
} else if (present) {
if (typeof signature['format'] !== 'string' || (signature['format'] as string).trim().length === 0) {
violations.push({
code: 'ERR_AOC_005',
path: '/upstream/signature/format',
message: 'Signature format is required when signature is present.',
});
}
if (typeof signature['sig'] !== 'string' || (signature['sig'] as string).trim().length === 0) {
violations.push({
code: 'ERR_AOC_005',
path: '/upstream/signature/sig',
message: 'Signature payload is required when signature is present.',
});
}
if (typeof signature['key_id'] !== 'string' || (signature['key_id'] as string).trim().length === 0) {
violations.push({
code: 'ERR_AOC_005',
path: '/upstream/signature/key_id',
message: 'Signature key identifier is required when signature is present.',
});
}
}
}
}
const content = asObject(document['content']);
if (!content) {
violations.push({
code: 'ERR_AOC_004',
path: '/content',
message: 'Content metadata is required.',
});
} else if (content['raw'] === undefined || content['raw'] === null) {
violations.push({
code: 'ERR_AOC_004',
path: '/content/raw',
message: 'Raw upstream payload must be preserved.',
});
}
const linkset = asObject(document['linkset']);
if (!linkset) {
violations.push({
code: 'ERR_AOC_004',
path: '/linkset',
message: 'Linkset metadata is required.',
});
}
return violations.sort((a, b) => a.path.localeCompare(b.path));
}
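
For context, a minimal sketch of how an ingestion-side caller might gate a write with this validator. Only `validateAocDocument` and `AocViolation` come from the module above; `persistAdvisory` and the error wording are illustrative assumptions, not part of the commit:

import { validateAocDocument, type AocViolation } from './aoc-guard';

// Hypothetical persistence hook; stands in for whatever store the caller uses.
declare function persistAdvisory(doc: Record<string, unknown>): Promise<void>;

export async function ingestAdvisory(doc: Record<string, unknown>): Promise<void> {
  // Validate before any write; the guard returns violations sorted by path.
  const violations: AocViolation[] = validateAocDocument(doc);
  if (violations.length > 0) {
    // Surface code + path so callers can map the failure to UI or log fields.
    const summary = violations.map((v) => `${v.code} ${v.path}`).join(', ');
    throw new Error(`AOC guard rejected document: ${summary}`);
  }
  await persistAdvisory(doc);
}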

View File

@@ -0,0 +1,52 @@
import { assertKeyWriteAllowed, isKeyWriteAllowed, KeyWriteRequest } from './key-guard';
const unsafePersistentTargets: KeyWriteRequest[] = [
{ kind: 'private', target: 'local-storage', label: 'tenant signing key' },
{ kind: 'private', target: 'indexeddb', label: 'orchestrator control key' },
{ kind: 'symmetric', target: 'filesystem', label: 'session token key' },
];
const unsafeSessionTargets: KeyWriteRequest[] = [
{ kind: 'symmetric', target: 'session-storage', label: 'crypto session key' },
];
const unknownKind: KeyWriteRequest = {
kind: 'unknown',
target: 'memory',
label: 'unclassified key',
};
const safeTargets: KeyWriteRequest[] = [
{ kind: 'public', target: 'local-storage', label: 'public attestation key' },
{ kind: 'public', target: 'indexeddb', label: 'cacheable public key' },
{ kind: 'private', target: 'memory', label: 'ephemeral signing key' },
{ kind: 'symmetric', target: 'memory', label: 'in-memory session key' },
];
describe('key-guard', () => {
it('rejects private/symmetric key writes to persistent storage', () => {
for (const request of unsafePersistentTargets) {
expect(() => assertKeyWriteAllowed(request)).toThrowError(/may not be written/);
expect(isKeyWriteAllowed(request)).toBeFalse();
}
});
it('rejects symmetric keys in session storage to prevent browser persistence', () => {
for (const request of unsafeSessionTargets) {
expect(() => assertKeyWriteAllowed(request)).toThrowError(/session storage/);
expect(isKeyWriteAllowed(request)).toBeFalse();
}
});
it('rejects unknown key kinds regardless of target', () => {
expect(() => assertKeyWriteAllowed(unknownKind)).toThrowError(/key kind unknown/);
expect(isKeyWriteAllowed(unknownKind)).toBeFalse();
});
it('allows public keys in any storage and private/symmetric keys in memory only', () => {
for (const request of safeTargets) {
expect(() => assertKeyWriteAllowed(request)).not.toThrow();
expect(isKeyWriteAllowed(request)).toBeTrue();
}
});
});

View File

@@ -0,0 +1,75 @@
export type KeyMaterialKind = 'private' | 'public' | 'symmetric' | 'unknown';
export type KeyStorageTarget =
| 'memory'
| 'session-storage'
| 'local-storage'
| 'indexeddb'
| 'filesystem';
export interface KeyWriteRequest {
readonly kind: KeyMaterialKind;
readonly target: KeyStorageTarget;
/**
* Optional human-readable label for audit/error messages
* (e.g. "orchestrator signing key").
*/
readonly label?: string;
}
export class ForbiddenKeyWriteError extends Error {
constructor(message: string) {
super(message);
this.name = 'ForbiddenKeyWriteError';
}
}
const persistentTargets: KeyStorageTarget[] = [
'local-storage',
'indexeddb',
'filesystem',
];
/**
* Guard to prevent storing private or symmetric key material in persistent browser storage.
* Throws an error if the write is not permitted; callers must handle/abort accordingly.
*/
export function assertKeyWriteAllowed(request: KeyWriteRequest): void {
const label = request.label ?? 'key material';
if (request.kind === 'unknown') {
throw new ForbiddenKeyWriteError(
`${label}: key kind unknown; refusing to persist without explicit classification.`
);
}
if (request.kind === 'public') {
return; // public keys are safe to persist in any target
}
if (request.target === 'memory') {
return; // ephemeral use is allowed
}
if (request.target === 'session-storage' && request.kind === 'symmetric') {
throw new ForbiddenKeyWriteError(
`${label}: symmetric keys may not be written to session storage.`
);
}
if (persistentTargets.includes(request.target)) {
throw new ForbiddenKeyWriteError(
`${label}: ${request.kind} keys may not be written to ${request.target}.`
);
}
}
/** Convenience helper for tests and callers to check allowance without throwing. */
export function isKeyWriteAllowed(request: KeyWriteRequest): boolean {
try {
assertKeyWriteAllowed(request);
return true;
} catch {
return false;
}
}
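
As a usage note, a small sketch of wrapping a cache write with the guard. The storage key prefix and the `cachePublicKey` helper are assumptions for illustration; only the guard exports above are real:

import { assertKeyWriteAllowed } from './key-guard';

// Illustrative only: persist a PEM-encoded public key for offline verification.
export function cachePublicKey(keyId: string, pem: string): void {
  // Throws ForbiddenKeyWriteError for private/symmetric material, so the write
  // below is only reached for key kinds that are safe to persist.
  assertKeyWriteAllowed({ kind: 'public', target: 'local-storage', label: `key ${keyId}` });
  localStorage.setItem(`stellaops.keys.${keyId}`, pem);
}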

View File

@@ -0,0 +1,90 @@
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { TestBed } from '@angular/core/testing';
import { AuthSessionStore } from '../auth/auth-session.store';
import { ConsoleStatusClient, CONSOLE_API_BASE_URL, EVENT_SOURCE_FACTORY } from './console-status.client';
import { ConsoleRunEventDto, ConsoleStatusDto } from './console-status.models';
class FakeAuthSessionStore {
getActiveTenantId(): string | null {
return 'tenant-dev';
}
}
class FakeEventSource {
public onmessage: ((this: EventSource, ev: MessageEvent) => any) | null = null;
public onerror: ((this: EventSource, ev: Event) => any) | null = null;
constructor(public readonly url: string) {}
close(): void {
// no-op for tests
}
}
describe('ConsoleStatusClient', () => {
let httpMock: HttpTestingController;
let client: ConsoleStatusClient;
let eventSourceFactory: jasmine.Spy<(url: string) => EventSource>;
beforeEach(() => {
eventSourceFactory = jasmine.createSpy('eventSourceFactory').and.callFake((url: string) => new FakeEventSource(url) as unknown as EventSource);
TestBed.configureTestingModule({
imports: [HttpClientTestingModule],
providers: [
ConsoleStatusClient,
{ provide: CONSOLE_API_BASE_URL, useValue: '/console' },
{ provide: AuthSessionStore, useClass: FakeAuthSessionStore },
{ provide: EVENT_SOURCE_FACTORY, useValue: eventSourceFactory },
],
});
httpMock = TestBed.inject(HttpTestingController);
client = TestBed.inject(ConsoleStatusClient);
});
afterEach(() => {
httpMock.verify();
});
it('adds tenant header and normalizes status response', () => {
const sample: Partial<ConsoleStatusDto> = {
backlog: 2,
queueLagMs: 1200,
activeRuns: 1,
pendingRuns: 1,
healthy: true,
lastCompletedRunId: null,
lastCompletedAt: null,
};
client.getStatus().subscribe((result) => {
expect(result.backlog).toBe(2);
expect(result.queueLagMs).toBe(1200);
expect(result.healthy).toBeTrue();
});
const req = httpMock.expectOne('/console/status');
expect(req.request.method).toBe('GET');
expect(req.request.headers.get('X-StellaOps-Tenant')).toBe('tenant-dev');
req.flush(sample);
});
it('creates SSE stream URL with tenant param and closes on unsubscribe', () => {
const events: ConsoleRunEventDto[] = [];
const subscription = client.streamRun('run-123').subscribe((evt) => events.push(evt));
expect(eventSourceFactory).toHaveBeenCalled();
const url = eventSourceFactory.calls.mostRecent().args[0];
expect(url).toBe('/console/runs/run-123/stream?tenant=tenant-dev');
// Simulate incoming message
const fakeSource = eventSourceFactory.calls.mostRecent().returnValue as unknown as FakeEventSource;
const message = { data: JSON.stringify({ runId: 'run-123', kind: 'progress', progressPercent: 50, updatedAt: '2025-12-01T00:00:00Z' }) } as MessageEvent;
fakeSource.onmessage?.(message);
expect(events.length).toBe(1);
expect(events[0].kind).toBe('progress');
subscription.unsubscribe();
});
});

View File

@@ -0,0 +1,86 @@
import { HttpClient, HttpHeaders, HttpParams } from '@angular/common/http';
import { Inject, Injectable, InjectionToken } from '@angular/core';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { AuthSessionStore } from '../auth/auth-session.store';
import { ConsoleRunEventDto, ConsoleStatusDto } from './console-status.models';
export const CONSOLE_API_BASE_URL = new InjectionToken<string>('CONSOLE_API_BASE_URL');
export type EventSourceFactory = (url: string) => EventSource;
export const EVENT_SOURCE_FACTORY = new InjectionToken<EventSourceFactory>('EVENT_SOURCE_FACTORY');
export const DEFAULT_EVENT_SOURCE_FACTORY: EventSourceFactory = (url: string) =>
new EventSource(url, { withCredentials: true });
@Injectable({
providedIn: 'root',
})
export class ConsoleStatusClient {
constructor(
private readonly http: HttpClient,
private readonly authSession: AuthSessionStore,
@Inject(CONSOLE_API_BASE_URL) private readonly baseUrl: string,
@Inject(EVENT_SOURCE_FACTORY) private readonly eventSourceFactory: EventSourceFactory
) {}
/**
* Poll console status (queue lag, backlog, run counts).
*/
getStatus(tenantId?: string): Observable<ConsoleStatusDto> {
const tenant = this.resolveTenant(tenantId);
const headers = new HttpHeaders({ 'X-StellaOps-Tenant': tenant });
return this.http.get<ConsoleStatusDto>(`${this.baseUrl}/status`, { headers }).pipe(
map((dto) => ({
...dto,
backlog: dto.backlog ?? 0,
queueLagMs: dto.queueLagMs ?? 0,
activeRuns: dto.activeRuns ?? 0,
pendingRuns: dto.pendingRuns ?? 0,
healthy: dto.healthy ?? false,
lastCompletedRunId: dto.lastCompletedRunId ?? null,
lastCompletedAt: dto.lastCompletedAt ?? null,
}))
);
}
/**
* Subscribe to streaming updates for a specific run via SSE.
* Caller is responsible for unsubscribing to close the connection.
*/
streamRun(runId: string, tenantId?: string): Observable<ConsoleRunEventDto> {
const tenant = this.resolveTenant(tenantId);
const params = new HttpParams().set('tenant', tenant);
const url = `${this.baseUrl}/runs/${encodeURIComponent(runId)}/stream?${params.toString()}`;
return new Observable<ConsoleRunEventDto>((observer) => {
const source = this.eventSourceFactory(url);
source.onmessage = (event) => {
try {
const parsed = JSON.parse(event.data) as ConsoleRunEventDto;
observer.next(parsed);
} catch (err) {
observer.error(err);
}
};
source.onerror = (err) => {
observer.error(err);
source.close();
};
return () => source.close();
});
}
private resolveTenant(tenantId?: string): string {
const tenant = (tenantId && tenantId.trim()) || this.authSession.getActiveTenantId();
if (!tenant) {
throw new Error('ConsoleStatusClient requires an active tenant identifier.');
}
return tenant;
}
}
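
For reference, a minimal sketch of consuming `streamRun` directly; the `ConsoleStatusService` later in this commit wraps the same pattern, and the logging sink here is purely illustrative:

import { Subscription } from 'rxjs';
import { ConsoleStatusClient } from './console-status.client';

// Illustrative helper: follow one run and log progress until the caller tears it down.
// Unsubscribing the returned Subscription closes the underlying EventSource.
export function followRun(client: ConsoleStatusClient, runId: string): Subscription {
  return client.streamRun(runId).subscribe({
    next: (evt) =>
      console.info(
        `run ${evt.runId}: ${evt.kind}` +
          (evt.progressPercent != null ? ` ${evt.progressPercent}%` : '')
      ),
    error: (err) => console.warn('run stream closed with error', err),
  });
}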

View File

@@ -0,0 +1,26 @@
export interface ConsoleStatusDto {
/** Current queue backlog size for console jobs. */
backlog: number;
/** Estimated queue lag in milliseconds. */
queueLagMs: number;
/** Active runs currently streaming. */
activeRuns: number;
/** Pending runs waiting to be processed. */
pendingRuns: number;
/** Identifier of the last completed run, if any. */
lastCompletedRunId: string | null;
/** Completion timestamp of the last run. */
lastCompletedAt: string | null;
/** Health flag emitted by backend readiness checks. */
healthy: boolean;
}
export type ConsoleRunEventKind = 'queued' | 'started' | 'progress' | 'completed' | 'failed';
export interface ConsoleRunEventDto {
runId: string;
kind: ConsoleRunEventKind;
progressPercent?: number;
message?: string;
updatedAt: string;
}
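
A small, assumed example of how a consumer might fold these events into display text; nothing here is prescribed by the DTOs themselves, and the wording is hypothetical:

import { ConsoleRunEventDto } from './console-status.models';

// Illustrative formatter: the switch covers every declared ConsoleRunEventKind,
// so adding a new kind surfaces as a missing-return compile error under strict settings.
export function describeRunEvent(evt: ConsoleRunEventDto): string {
  switch (evt.kind) {
    case 'queued':
      return `Run ${evt.runId} queued at ${evt.updatedAt}`;
    case 'started':
      return `Run ${evt.runId} started`;
    case 'progress':
      return `Run ${evt.runId} at ${evt.progressPercent ?? 0}%`;
    case 'completed':
      return `Run ${evt.runId} completed`;
    case 'failed':
      return `Run ${evt.runId} failed: ${evt.message ?? 'unknown error'}`;
  }
}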

View File

@@ -0,0 +1,75 @@
import { Injectable, inject } from '@angular/core';
import { Subscription, firstValueFrom, timer } from 'rxjs';
import { switchMap } from 'rxjs/operators';
import { ConsoleStatusClient } from '../api/console-status.client';
import { ConsoleRunEventDto, ConsoleStatusDto } from '../api/console-status.models';
import { ConsoleStatusStore } from './console-status.store';
@Injectable({
providedIn: 'root',
})
export class ConsoleStatusService {
private readonly client = inject(ConsoleStatusClient);
private readonly store = inject(ConsoleStatusStore);
/**
* Fetch a single snapshot of console status.
*/
async fetchStatus(): Promise<void> {
this.store.setLoading(true);
this.store.setError(null);
try {
const status = await firstValueFrom(this.client.getStatus());
this.store.setStatus(status as ConsoleStatusDto);
} catch (err) {
console.error('console status fetch failed', err);
this.store.setError('Unable to load console status');
} finally {
this.store.setLoading(false);
}
}
/**
* Start polling console status at the given interval (ms).
* Returns a subscription that must be unsubscribed by the caller.
*/
startPolling(intervalMs = 30000): Subscription {
this.store.setLoading(true);
this.store.setError(null);
const sub = timer(0, intervalMs)
.pipe(switchMap(() => this.client.getStatus()))
.subscribe({
next: (status) => {
this.store.setStatus(status);
this.store.setLoading(false);
},
error: (err) => {
console.error('console status poll failed', err);
this.store.setError('Unable to load console status');
this.store.setLoading(false);
},
});
return sub;
}
/**
* Subscribe to run stream events for a given run id.
*/
subscribeToRun(runId: string): Subscription {
this.store.clearEvents();
return this.client.streamRun(runId).subscribe({
next: (evt: ConsoleRunEventDto) => this.store.appendRunEvent(evt),
error: (err) => {
console.error('console run stream error', err);
this.store.setError('Run stream disconnected');
},
});
}
clear(): void {
this.store.clear();
}
}

View File

@@ -0,0 +1,18 @@
import { ConsoleStatusStore } from './console-status.store';
describe('ConsoleStatusStore', () => {
it('appends events with cap of 50', () => {
const store = new ConsoleStatusStore();
for (let i = 0; i < 60; i += 1) {
store.appendRunEvent({
runId: `r-${i}`,
kind: 'progress',
progressPercent: i,
updatedAt: `2025-12-01T00:00:${i.toString().padStart(2, '0')}Z`,
});
}
expect(store.runEvents().length).toBe(50);
expect(store.runEvents()[0].runId).toBe('r-10');
});
});

View File

@@ -0,0 +1,46 @@
import { Injectable, computed, signal } from '@angular/core';
import { ConsoleRunEventDto, ConsoleStatusDto } from '../api/console-status.models';
@Injectable({
providedIn: 'root',
})
export class ConsoleStatusStore {
private readonly statusSignal = signal<ConsoleStatusDto | null>(null);
private readonly loadingSignal = signal(false);
private readonly errorSignal = signal<string | null>(null);
private readonly runEventsSignal = signal<ConsoleRunEventDto[]>([]);
readonly status = computed(() => this.statusSignal());
readonly loading = computed(() => this.loadingSignal());
readonly error = computed(() => this.errorSignal());
readonly runEvents = computed(() => this.runEventsSignal());
setLoading(value: boolean): void {
this.loadingSignal.set(value);
}
setError(message: string | null): void {
this.errorSignal.set(message);
}
setStatus(status: ConsoleStatusDto | null): void {
this.statusSignal.set(status);
}
appendRunEvent(evt: ConsoleRunEventDto): void {
const next = [...this.runEventsSignal(), evt].slice(-50); // keep last 50 for UI
this.runEventsSignal.set(next);
}
clearEvents(): void {
this.runEventsSignal.set([]);
}
clear(): void {
this.statusSignal.set(null);
this.loadingSignal.set(false);
this.errorSignal.set(null);
this.runEventsSignal.set([]);
}
}

View File

@@ -0,0 +1,69 @@
<section class="console-status">
<header>
<div>
<h2>Console Status</h2>
<p class="hint">Queue lag, backlog, and active runs updated every 30s.</p>
</div>
<button type="button" (click)="refresh()" [disabled]="loading()">Refresh</button>
</header>
<div class="status-cards" *ngIf="status(); else statusSkeleton">
<article>
<p class="label">Queue Lag</p>
<p class="value">{{ status()?.queueLagMs ?? 0 | number }} ms</p>
</article>
<article>
<p class="label">Backlog</p>
<p class="value">{{ status()?.backlog ?? 0 }}</p>
</article>
<article>
<p class="label">Active Runs</p>
<p class="value">{{ status()?.activeRuns ?? 0 }}</p>
</article>
<article>
<p class="label">Pending Runs</p>
<p class="value">{{ status()?.pendingRuns ?? 0 }}</p>
</article>
<article>
<p class="label">Health</p>
<p class="value" [class.ok]="status()?.healthy" [class.warn]="!status()?.healthy">
{{ status()?.healthy ? 'Healthy' : 'Degraded' }}
</p>
</article>
</div>
<div class="error" *ngIf="error()">{{ error() }}</div>
<section class="run-stream">
<header>
<h3>Run Stream</h3>
<label>
Run ID
<input type="text" [value]="runId()" (input)="runId.set(($event.target as HTMLInputElement).value)" (change)="startRunStream()" />
</label>
</header>
<div class="events">
<div class="event" *ngFor="let evt of runEvents()">
<div class="meta">
<span class="kind">{{ evt.kind }}</span>
<span class="time">{{ evt.updatedAt }}</span>
</div>
<div class="detail">
<span class="run">Run {{ evt.runId }}</span>
<span class="message">{{ evt.message || '...' }}</span>
<span *ngIf="evt.progressPercent != null" class="progress">{{ evt.progressPercent }}%</span>
</div>
</div>
<p *ngIf="runEvents().length === 0" class="empty">No events yet.</p>
</div>
</section>
</section>
<ng-template #statusSkeleton>
<div class="status-cards skeleton">
<article *ngFor="let i of [1,2,3,4,5]">
<p class="label">Loading…</p>
<p class="value"></p>
</article>
</div>
</ng-template>

View File

@@ -0,0 +1,125 @@
.console-status {
display: flex;
flex-direction: column;
gap: 1rem;
}
header {
display: flex;
align-items: center;
justify-content: space-between;
gap: 1rem;
}
.hint {
color: #69707a;
margin: 0;
}
.status-cards {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(140px, 1fr));
gap: 0.75rem;
}
.status-cards article {
background: #0d1117;
border: 1px solid #1f2a36;
border-radius: 8px;
padding: 0.75rem;
}
.status-cards .label {
margin: 0;
color: #9da9bb;
font-size: 0.85rem;
}
.status-cards .value {
margin: 0.2rem 0 0;
font-size: 1.3rem;
font-weight: 600;
}
.value.ok {
color: #2dc98c;
}
.value.warn {
color: #f0ad4e;
}
.run-stream header {
display: flex;
align-items: center;
gap: 1rem;
}
.run-stream label {
display: flex;
align-items: center;
gap: 0.5rem;
}
.events {
border: 1px solid #1f2a36;
border-radius: 8px;
padding: 0.5rem;
background: #0b0f14;
}
.event {
border-bottom: 1px solid #1f2a36;
padding: 0.5rem 0;
}
.event:last-child {
border-bottom: none;
}
.meta {
display: flex;
gap: 0.75rem;
font-size: 0.85rem;
color: #9da9bb;
}
.detail {
display: flex;
flex-wrap: wrap;
gap: 0.75rem;
margin-top: 0.25rem;
}
.kind {
text-transform: uppercase;
letter-spacing: 0.08em;
}
.progress {
color: #2dc98c;
}
.empty {
color: #69707a;
margin: 0.5rem 0 0;
}
.error {
color: #f05d5d;
}
.skeleton article {
background: linear-gradient(90deg, #0d1117, #111824, #0d1117);
background-size: 200% 100%;
animation: shimmer 2s infinite;
}
@keyframes shimmer {
0% {
background-position: 200% 0;
}
100% {
background-position: -200% 0;
}
}

View File

@@ -0,0 +1,46 @@
import { CommonModule } from '@angular/common';
import { ChangeDetectionStrategy, Component, OnDestroy, OnInit, inject, signal } from '@angular/core';
import { ConsoleStatusService } from '../../core/console/console-status.service';
import { ConsoleStatusStore } from '../../core/console/console-status.store';
@Component({
selector: 'app-console-status',
standalone: true,
imports: [CommonModule],
templateUrl: './console-status.component.html',
styleUrls: ['./console-status.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ConsoleStatusComponent implements OnInit, OnDestroy {
private readonly service = inject(ConsoleStatusService);
private readonly store = inject(ConsoleStatusStore);
private pollSub: ReturnType<ConsoleStatusService['startPolling']> | null = null;
private runSub: ReturnType<ConsoleStatusService['subscribeToRun']> | null = null;
readonly status = this.store.status;
readonly loading = this.store.loading;
readonly error = this.store.error;
readonly runEvents = this.store.runEvents;
readonly runId = signal<string>('last');
ngOnInit(): void {
this.pollSub = this.service.startPolling(30000);
this.startRunStream();
}
ngOnDestroy(): void {
this.pollSub?.unsubscribe();
this.runSub?.unsubscribe();
}
refresh(): void {
this.service.fetchStatus();
}
startRunStream(): void {
this.runSub?.unsubscribe();
this.runSub = this.service.subscribeToRun(this.runId());
}
}

View File

@@ -125,6 +125,12 @@ public abstract class RepositoryBase<TDataSource> where TDataSource : DataSource
protected static long? GetNullableInt64(NpgsqlDataReader reader, int ordinal)
=> reader.IsDBNull(ordinal) ? null : reader.GetInt64(ordinal);
/// <summary>
/// Gets a nullable decimal from the reader.
/// </summary>
protected static decimal? GetNullableDecimal(NpgsqlDataReader reader, int ordinal)
=> reader.IsDBNull(ordinal) ? null : reader.GetDecimal(ordinal);
/// <summary>
/// Gets a nullable DateTimeOffset from the reader.
/// </summary>