Finish off old sprints
This commit is contained in:
@@ -6,12 +6,22 @@
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<IsIntegrationTest>true</IsIntegrationTest>
|
||||
<UseXunitV3>true</UseXunitV3>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
<TestingPlatformDotnetTestSupport>true</TestingPlatformDotnetTestSupport>
|
||||
<NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003;CS8602;CS8604;CS8601;CS8634;CS8714;CS8424</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Moq" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -333,7 +333,7 @@ public class SchemaIsolationServiceTests : IDisposable
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.GeneratedStatements.Should().Contain(s =>
|
||||
s.Contains("sys_period_start") && s.Contains("sys_period_end"));
|
||||
s.Contains("valid_from") && s.Contains("valid_to"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -12,7 +12,7 @@ public sealed class FieldOwnershipValidatorTests
|
||||
|
||||
private static VerificationReceipt CreateFullReceipt() => new()
|
||||
{
|
||||
ProofBundleId = new ProofBundleId("abc123"),
|
||||
ProofBundleId = new ProofBundleId("abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1"),
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
VerifierVersion = "1.0.0",
|
||||
AnchorId = new TrustAnchorId(Guid.Parse("00000001-0001-0001-0001-000000000001")),
|
||||
@@ -35,7 +35,7 @@ public sealed class FieldOwnershipValidatorTests
|
||||
|
||||
private static VerificationReceipt CreateMinimalReceipt() => new()
|
||||
{
|
||||
ProofBundleId = new ProofBundleId("min-123"),
|
||||
ProofBundleId = new ProofBundleId("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
VerifierVersion = "1.0.0",
|
||||
AnchorId = new TrustAnchorId(Guid.Parse("00000002-0002-0002-0002-000000000002")),
|
||||
@@ -202,7 +202,7 @@ public sealed class FieldOwnershipValidatorTests
|
||||
{
|
||||
var receipt = new VerificationReceipt
|
||||
{
|
||||
ProofBundleId = new ProofBundleId("abc"),
|
||||
ProofBundleId = new ProofBundleId("abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabca"),
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
VerifierVersion = "1.0.0",
|
||||
AnchorId = new TrustAnchorId(Guid.Parse("00000003-0003-0003-0003-000000000003")),
|
||||
@@ -222,7 +222,7 @@ public sealed class FieldOwnershipValidatorTests
|
||||
{
|
||||
var receipt = new VerificationReceipt
|
||||
{
|
||||
ProofBundleId = new ProofBundleId("abc"),
|
||||
ProofBundleId = new ProofBundleId("abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabca"),
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
VerifierVersion = "1.0.0",
|
||||
AnchorId = new TrustAnchorId(Guid.Parse("00000003-0003-0003-0003-000000000003")),
|
||||
|
||||
@@ -150,6 +150,14 @@ public sealed class ReceiptSidebarModelsTests
|
||||
public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
{
|
||||
private static readonly Guid AnchorGuid = Guid.Parse("11111111-1111-1111-1111-111111111111");
|
||||
|
||||
// Valid 64-char lowercase hex digests for test fixtures
|
||||
private const string DigestAbc123 = "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1";
|
||||
private const string DigestDefault = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
|
||||
private const string DigestAbc = "abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabca";
|
||||
private const string DigestCtx = "cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1cc1c";
|
||||
private const string DigestFallback = "fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00fb00";
|
||||
|
||||
private readonly TestSidebarMeterFactory _meterFactory = new();
|
||||
private readonly ReceiptSidebarService _sut;
|
||||
|
||||
@@ -165,7 +173,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_maps_bundle_id()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:abc123");
|
||||
var receipt = CreateReceipt(DigestAbc123);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
detail.BundleId.Should().Contain("abc123");
|
||||
}
|
||||
@@ -173,7 +181,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_maps_anchor_id()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x");
|
||||
var receipt = CreateReceipt(DigestDefault);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
detail.AnchorId.Should().Be(AnchorGuid.ToString());
|
||||
}
|
||||
@@ -181,7 +189,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_maps_verifier_version()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x");
|
||||
var receipt = CreateReceipt(DigestDefault);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
detail.VerifierVersion.Should().Be("2.1.0");
|
||||
}
|
||||
@@ -189,7 +197,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_all_pass_returns_verified()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("dsse-signature", VerificationResult.Pass),
|
||||
MakeCheck("rekor-inclusion", VerificationResult.Pass)
|
||||
]);
|
||||
@@ -201,7 +209,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_mixed_returns_partially_verified()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("dsse-signature", VerificationResult.Pass),
|
||||
MakeCheck("policy-check", VerificationResult.Fail)
|
||||
]);
|
||||
@@ -213,7 +221,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_all_fail_returns_failed()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("sig", VerificationResult.Fail),
|
||||
MakeCheck("hash", VerificationResult.Fail)
|
||||
]);
|
||||
@@ -225,7 +233,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_no_checks_returns_unverified()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", []);
|
||||
var receipt = CreateReceipt(DigestDefault, []);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
detail.VerificationStatus.Should().Be(ReceiptVerificationStatus.Unverified);
|
||||
}
|
||||
@@ -233,7 +241,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_sets_dsse_verified_when_dsse_check_passes()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("dsse-envelope-signature", VerificationResult.Pass)
|
||||
]);
|
||||
|
||||
@@ -244,7 +252,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_dsse_not_verified_when_dsse_check_fails()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("dsse-envelope-signature", VerificationResult.Fail)
|
||||
]);
|
||||
|
||||
@@ -255,7 +263,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_sets_rekor_verified_when_rekor_check_passes()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("rekor-inclusion-proof", VerificationResult.Pass, logIndex: 100)
|
||||
]);
|
||||
|
||||
@@ -266,7 +274,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_rekor_not_verified_when_absent()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("basic-hash", VerificationResult.Pass)
|
||||
]);
|
||||
|
||||
@@ -277,7 +285,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_maps_check_details()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("sig-check", VerificationResult.Pass, keyId: "key-1", details: "Valid signature")
|
||||
]);
|
||||
|
||||
@@ -294,7 +302,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_formats_expected_actual_when_no_details()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
new VerificationCheck
|
||||
{
|
||||
Check = "digest-match",
|
||||
@@ -312,7 +320,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_maps_tool_digests()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", toolDigests: new Dictionary<string, string>
|
||||
var receipt = CreateReceipt(DigestDefault, toolDigests: new Dictionary<string, string>
|
||||
{
|
||||
["verifier"] = "sha256:vvv",
|
||||
["scanner"] = "sha256:sss"
|
||||
@@ -327,7 +335,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void FormatReceipt_null_tool_digests_stays_null()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x");
|
||||
var receipt = CreateReceipt(DigestDefault);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
detail.ToolDigests.Should().BeNull();
|
||||
}
|
||||
@@ -352,7 +360,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public async Task GetDetailAsync_returns_detail_for_registered_receipt()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:abc");
|
||||
var receipt = CreateReceipt(DigestAbc);
|
||||
_sut.Register(receipt);
|
||||
|
||||
var request = new ReceiptSidebarRequest { BundleId = receipt.ProofBundleId.ToString() };
|
||||
@@ -365,7 +373,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public async Task GetDetailAsync_excludes_checks_when_requested()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:abc", [
|
||||
var receipt = CreateReceipt(DigestAbc, [
|
||||
MakeCheck("sig", VerificationResult.Pass)
|
||||
]);
|
||||
_sut.Register(receipt);
|
||||
@@ -384,7 +392,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public async Task GetDetailAsync_excludes_tool_digests_when_not_requested()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:abc", toolDigests: new Dictionary<string, string>
|
||||
var receipt = CreateReceipt(DigestAbc, toolDigests: new Dictionary<string, string>
|
||||
{
|
||||
["tool"] = "sha256:ttt"
|
||||
});
|
||||
@@ -420,7 +428,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public async Task GetContextAsync_returns_registered_context()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:ctx");
|
||||
var receipt = CreateReceipt(DigestCtx);
|
||||
var detail = _sut.FormatReceipt(receipt);
|
||||
var ctx = new VexReceiptSidebarContext
|
||||
{
|
||||
@@ -439,7 +447,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public async Task GetContextAsync_falls_back_to_receipt_only_context()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:fallback");
|
||||
var receipt = CreateReceipt(DigestFallback);
|
||||
_sut.Register(receipt);
|
||||
|
||||
var result = await _sut.GetContextAsync(receipt.ProofBundleId.ToString());
|
||||
@@ -465,7 +473,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void DeriveVerificationStatus_handles_single_pass()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("only", VerificationResult.Pass)
|
||||
]);
|
||||
|
||||
@@ -476,7 +484,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void DeriveVerificationStatus_handles_single_fail()
|
||||
{
|
||||
var receipt = CreateReceipt("sha256:x", [
|
||||
var receipt = CreateReceipt(DigestDefault, [
|
||||
MakeCheck("only", VerificationResult.Fail)
|
||||
]);
|
||||
|
||||
@@ -496,7 +504,7 @@ public sealed class ReceiptSidebarServiceTests : IDisposable
|
||||
[Fact]
|
||||
public void RegisterContext_throws_on_null_or_empty_bundleId()
|
||||
{
|
||||
var detail = _sut.FormatReceipt(CreateReceipt("sha256:x", []));
|
||||
var detail = _sut.FormatReceipt(CreateReceipt(DigestDefault, []));
|
||||
var ctx = new VexReceiptSidebarContext { Receipt = detail };
|
||||
|
||||
var act1 = () => _sut.RegisterContext(null!, ctx);
|
||||
|
||||
@@ -362,7 +362,7 @@ public sealed class DsseEnvelopeSizeGuardTests
|
||||
var manifest1 = guard.BuildChunkManifest(data);
|
||||
var manifest2 = guard.BuildChunkManifest(data);
|
||||
|
||||
manifest1.Should().Be(manifest2);
|
||||
manifest1.Should().BeEquivalentTo(manifest2);
|
||||
}
|
||||
|
||||
// --- Size tracking ---
|
||||
|
||||
@@ -280,7 +280,7 @@ public sealed class AstraConnector : IFeedConnector
|
||||
|
||||
// Create base provenance record
|
||||
var baseProvenance = new AdvisoryProvenance(
|
||||
source: AstraOptions.SourceName,
|
||||
source: AstraConnectorPlugin.SourceName,
|
||||
kind: "oval-definition",
|
||||
value: definition.DefinitionId,
|
||||
recordedAt: recordedAt,
|
||||
@@ -379,7 +379,7 @@ public sealed class AstraConnector : IFeedConnector
|
||||
/// <remarks>
|
||||
/// Temporary model until full OVAL schema mapping is implemented.
|
||||
/// </remarks>
|
||||
internal sealed record AstraVulnerabilityDefinition
|
||||
public sealed record AstraVulnerabilityDefinition
|
||||
{
|
||||
public required string DefinitionId { get; init; }
|
||||
public required string Title { get; init; }
|
||||
@@ -393,7 +393,7 @@ internal sealed record AstraVulnerabilityDefinition
|
||||
/// <summary>
|
||||
/// Represents an affected package from OVAL test/state elements.
|
||||
/// </summary>
|
||||
internal sealed record AstraAffectedPackage
|
||||
public sealed record AstraAffectedPackage
|
||||
{
|
||||
public required string PackageName { get; init; }
|
||||
public string? MinVersion { get; init; }
|
||||
|
||||
@@ -58,6 +58,13 @@ public sealed class OvalParser
|
||||
return Array.Empty<AstraVulnerabilityDefinition>();
|
||||
}
|
||||
|
||||
// Validate this is an OVAL document by checking root element namespace
|
||||
if (root.Name.Namespace != OvalDefsNs)
|
||||
{
|
||||
throw new OvalParseException(
|
||||
$"Invalid OVAL document: expected root element in namespace '{OvalDefsNs}', got '{root.Name.Namespace}'");
|
||||
}
|
||||
|
||||
// Extract definitions, tests, objects, and states
|
||||
var definitions = ExtractDefinitions(root);
|
||||
var tests = ExtractTests(root);
|
||||
@@ -91,6 +98,10 @@ public sealed class OvalParser
|
||||
_logger.LogDebug("Parsed {Count} vulnerability definitions from OVAL XML", results.Count);
|
||||
return results;
|
||||
}
|
||||
catch (OvalParseException)
|
||||
{
|
||||
throw; // Re-throw validation exceptions as-is
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to parse OVAL XML");
|
||||
@@ -281,15 +292,17 @@ public sealed class OvalParser
|
||||
continue;
|
||||
}
|
||||
|
||||
var evrElement = stateElement.Element(DpkgNs + "evr");
|
||||
var version = evrElement?.Value ?? string.Empty;
|
||||
var operation = evrElement?.Attribute("operation")?.Value ?? "less than";
|
||||
var evrElements = stateElement.Elements(DpkgNs + "evr").ToList();
|
||||
var constraints = evrElements.Select(evr => new OvalVersionConstraint
|
||||
{
|
||||
Version = evr.Value ?? string.Empty,
|
||||
Operation = evr.Attribute("operation")?.Value ?? "less than"
|
||||
}).ToList();
|
||||
|
||||
states.Add(new OvalState
|
||||
{
|
||||
Id = id,
|
||||
Version = version,
|
||||
Operation = operation
|
||||
Constraints = constraints
|
||||
});
|
||||
}
|
||||
|
||||
@@ -318,17 +331,32 @@ public sealed class OvalParser
|
||||
|
||||
string? fixedVersion = null;
|
||||
string? maxVersion = null;
|
||||
string? minVersion = null;
|
||||
|
||||
if (!string.IsNullOrEmpty(test.StateRef) && stateLookup.TryGetValue(test.StateRef, out var state))
|
||||
{
|
||||
// Parse operation to determine if this is a fixed version or affected version range
|
||||
if (state.Operation.Contains("less than", StringComparison.OrdinalIgnoreCase))
|
||||
foreach (var constraint in state.Constraints)
|
||||
{
|
||||
fixedVersion = state.Version; // Versions less than this are affected
|
||||
}
|
||||
else
|
||||
{
|
||||
maxVersion = state.Version;
|
||||
if (constraint.Operation.Contains("less than", StringComparison.OrdinalIgnoreCase) &&
|
||||
!constraint.Operation.Contains("or equal", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// "less than" -> versions below this are affected; this is the fixed version
|
||||
fixedVersion = constraint.Version;
|
||||
}
|
||||
else if (constraint.Operation.Contains("less than or equal", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// "less than or equal" -> upper bound of affected range
|
||||
maxVersion = constraint.Version;
|
||||
}
|
||||
else if (constraint.Operation.Contains("greater than or equal", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// "greater than or equal" -> lower bound of affected range
|
||||
minVersion = constraint.Version;
|
||||
}
|
||||
else if (constraint.Operation.Contains("greater than", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
minVersion = constraint.Version;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -340,7 +368,7 @@ public sealed class OvalParser
|
||||
PackageName = obj.PackageName,
|
||||
FixedVersion = fixedVersion,
|
||||
MaxVersion = maxVersion,
|
||||
MinVersion = null
|
||||
MinVersion = minVersion
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -377,6 +405,11 @@ public sealed class OvalParser
|
||||
private sealed record OvalState
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public required List<OvalVersionConstraint> Constraints { get; init; }
|
||||
}
|
||||
|
||||
private sealed record OvalVersionConstraint
|
||||
{
|
||||
public required string Version { get; init; }
|
||||
public required string Operation { get; init; }
|
||||
}
|
||||
|
||||
@@ -498,23 +498,3 @@ public sealed class AstraConnectorIntegrationTests
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
// Make internal types accessible for testing
|
||||
internal sealed record AstraVulnerabilityDefinition
|
||||
{
|
||||
public required string DefinitionId { get; init; }
|
||||
public required string Title { get; init; }
|
||||
public string? Description { get; init; }
|
||||
public required string[] CveIds { get; init; }
|
||||
public string? Severity { get; init; }
|
||||
public DateTimeOffset? PublishedDate { get; init; }
|
||||
public required AstraAffectedPackage[] AffectedPackages { get; init; }
|
||||
}
|
||||
|
||||
internal sealed record AstraAffectedPackage
|
||||
{
|
||||
public required string PackageName { get; init; }
|
||||
public string? MinVersion { get; init; }
|
||||
public string? MaxVersion { get; init; }
|
||||
public string? FixedVersion { get; init; }
|
||||
}
|
||||
|
||||
@@ -63,6 +63,10 @@ public sealed class FeedSnapshotPinningServiceTests
|
||||
.Setup(x => x.GetLatestAsync("test-site-01", It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((SyncLedgerEntity?)null);
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.GetBySourceAndIdAsync(sourceId, snapshotId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity?)null);
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.InsertAsync(It.IsAny<FeedSnapshotEntity>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity e, CancellationToken _) => e);
|
||||
@@ -130,6 +134,10 @@ public sealed class FeedSnapshotPinningServiceTests
|
||||
BundleHash = "sha256:prev"
|
||||
});
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.GetBySourceAndIdAsync(sourceId, snapshotId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity?)null);
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.InsertAsync(It.IsAny<FeedSnapshotEntity>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity e, CancellationToken _) => e);
|
||||
@@ -388,6 +396,10 @@ public sealed class FeedSnapshotPinningServiceTests
|
||||
.Setup(x => x.GetLatestAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((SyncLedgerEntity?)null);
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.GetBySourceAndIdAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity?)null);
|
||||
|
||||
_snapshotRepositoryMock
|
||||
.Setup(x => x.InsertAsync(It.IsAny<FeedSnapshotEntity>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedSnapshotEntity e, CancellationToken _) => e);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -10,6 +10,7 @@ using System.Net.Http.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
@@ -17,6 +18,7 @@ using Moq;
|
||||
using StellaOps.Concelier.Core.Jobs;
|
||||
using StellaOps.Concelier.Interest;
|
||||
using StellaOps.Concelier.Interest.Models;
|
||||
using StellaOps.Concelier.WebService.Options;
|
||||
using Xunit;
|
||||
|
||||
using StellaOps.TestKit;
|
||||
@@ -307,12 +309,26 @@ public sealed class InterestScoreEndpointTests : IClassFixture<InterestScoreEndp
|
||||
/// </summary>
|
||||
public sealed class InterestScoreTestFactory : WebApplicationFactory<Program>
|
||||
{
|
||||
private const string TestConnectionString = "Host=localhost;Port=5432;Database=test-interest";
|
||||
|
||||
public Guid ExistingCanonicalId { get; } = Guid.NewGuid();
|
||||
public Guid ComputeCanonicalId { get; } = Guid.NewGuid();
|
||||
public Guid E2ECanonicalId { get; } = Guid.NewGuid();
|
||||
|
||||
private readonly Dictionary<Guid, List<SbomMatch>> _sbomMatches = new();
|
||||
|
||||
public InterestScoreTestFactory()
|
||||
{
|
||||
// Set environment variables before Program.Main executes.
|
||||
// Program.cs reads these during configuration binding in the Testing environment.
|
||||
Environment.SetEnvironmentVariable("CONCELIER__POSTGRESSTORAGE__CONNECTIONSTRING", TestConnectionString);
|
||||
Environment.SetEnvironmentVariable("CONCELIER__POSTGRESSTORAGE__COMMANDTIMEOUTSECONDS", "30");
|
||||
Environment.SetEnvironmentVariable("CONCELIER_TEST_STORAGE_DSN", TestConnectionString);
|
||||
Environment.SetEnvironmentVariable("CONCELIER_SKIP_OPTIONS_VALIDATION", "1");
|
||||
Environment.SetEnvironmentVariable("DOTNET_ENVIRONMENT", "Testing");
|
||||
Environment.SetEnvironmentVariable("ASPNETCORE_ENVIRONMENT", "Testing");
|
||||
}
|
||||
|
||||
public void AddSbomMatchForCanonical(Guid canonicalId)
|
||||
{
|
||||
if (!_sbomMatches.ContainsKey(canonicalId))
|
||||
@@ -338,18 +354,62 @@ public sealed class InterestScoreEndpointTests : IClassFixture<InterestScoreEndp
|
||||
|
||||
protected override void ConfigureWebHost(IWebHostBuilder builder)
|
||||
{
|
||||
Environment.SetEnvironmentVariable("CONCELIER__STORAGE__DSN", "Host=localhost;Port=5432;Database=test-interest");
|
||||
Environment.SetEnvironmentVariable("CONCELIER__STORAGE__DRIVER", "postgres");
|
||||
Environment.SetEnvironmentVariable("CONCELIER_SKIP_OPTIONS_VALIDATION", "1");
|
||||
Environment.SetEnvironmentVariable("DOTNET_ENVIRONMENT", "Testing");
|
||||
Environment.SetEnvironmentVariable("ASPNETCORE_ENVIRONMENT", "Testing");
|
||||
|
||||
builder.UseEnvironment("Testing");
|
||||
|
||||
builder.ConfigureAppConfiguration((_, config) =>
|
||||
{
|
||||
var overrides = new Dictionary<string, string?>
|
||||
{
|
||||
{"PostgresStorage:ConnectionString", TestConnectionString},
|
||||
{"PostgresStorage:CommandTimeoutSeconds", "30"},
|
||||
{"Telemetry:Enabled", "false"}
|
||||
};
|
||||
config.AddInMemoryCollection(overrides);
|
||||
});
|
||||
|
||||
builder.UseSetting("CONCELIER__POSTGRESSTORAGE__CONNECTIONSTRING", TestConnectionString);
|
||||
builder.UseSetting("CONCELIER__POSTGRESSTORAGE__COMMANDTIMEOUTSECONDS", "30");
|
||||
builder.UseSetting("CONCELIER__TELEMETRY__ENABLED", "false");
|
||||
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<ILeaseStore>();
|
||||
services.AddSingleton<ILeaseStore, Fixtures.TestLeaseStore>();
|
||||
|
||||
// Inject ConcelierOptions with proper Postgres configuration
|
||||
services.AddSingleton(new ConcelierOptions
|
||||
{
|
||||
PostgresStorage = new ConcelierOptions.PostgresStorageOptions
|
||||
{
|
||||
ConnectionString = TestConnectionString,
|
||||
CommandTimeoutSeconds = 30
|
||||
},
|
||||
Telemetry = new ConcelierOptions.TelemetryOptions
|
||||
{
|
||||
Enabled = false
|
||||
}
|
||||
});
|
||||
|
||||
services.AddSingleton<IConfigureOptions<ConcelierOptions>>(sp => new ConfigureOptions<ConcelierOptions>(opts =>
|
||||
{
|
||||
opts.PostgresStorage ??= new ConcelierOptions.PostgresStorageOptions();
|
||||
opts.PostgresStorage.ConnectionString = TestConnectionString;
|
||||
opts.PostgresStorage.CommandTimeoutSeconds = 30;
|
||||
|
||||
opts.Telemetry ??= new ConcelierOptions.TelemetryOptions();
|
||||
opts.Telemetry.Enabled = false;
|
||||
}));
|
||||
|
||||
services.PostConfigure<ConcelierOptions>(opts =>
|
||||
{
|
||||
opts.PostgresStorage ??= new ConcelierOptions.PostgresStorageOptions();
|
||||
opts.PostgresStorage.ConnectionString = TestConnectionString;
|
||||
opts.PostgresStorage.CommandTimeoutSeconds = 30;
|
||||
|
||||
opts.Telemetry ??= new ConcelierOptions.TelemetryOptions();
|
||||
opts.Telemetry.Enabled = false;
|
||||
});
|
||||
|
||||
// Remove existing registrations
|
||||
var scoringServiceDescriptor = services
|
||||
.SingleOrDefault(d => d.ServiceType == typeof(IInterestScoringService));
|
||||
|
||||
BIN
src/Web/StellaOps.Web/qa-sidebar-manual-screens/security.png
Normal file
BIN
src/Web/StellaOps.Web/qa-sidebar-manual-screens/security.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 104 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 81 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 89 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 79 KiB |
BIN
src/Web/StellaOps.Web/qa-sidebar-manual-screens/security_vex.png
Normal file
BIN
src/Web/StellaOps.Web/qa-sidebar-manual-screens/security_vex.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 96 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 76 KiB |
@@ -1,129 +1,549 @@
|
||||
// Sprint: SPRINT_20251229_030_FE - Dead-Letter Management UI
|
||||
import { Injectable, inject } from '@angular/core';
|
||||
import { HttpClient, HttpParams } from '@angular/common/http';
|
||||
import { Observable } from 'rxjs';
|
||||
import {
|
||||
DeadLetterEntry,
|
||||
DeadLetterListResponse,
|
||||
DeadLetterStatsSummary,
|
||||
DeadLetterFilter,
|
||||
ReplayRequest,
|
||||
ReplayResponse,
|
||||
Observable,
|
||||
catchError,
|
||||
forkJoin,
|
||||
map,
|
||||
of,
|
||||
switchMap,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
BatchReplayProgress,
|
||||
BatchReplayRequest,
|
||||
BatchReplayResponse,
|
||||
BatchReplayProgress,
|
||||
ResolveRequest,
|
||||
DeadLetterAuditEvent,
|
||||
DeadLetterEntry,
|
||||
DeadLetterEntrySummary,
|
||||
DeadLetterFilter,
|
||||
DeadLetterListResponse,
|
||||
DeadLetterState,
|
||||
DeadLetterStatsSummary,
|
||||
ErrorCode,
|
||||
ReplayRequest,
|
||||
ReplayResponse,
|
||||
ResolveRequest,
|
||||
} from './deadletter.models';
|
||||
|
||||
interface ApiDeadLetterEntry {
|
||||
entryId?: string;
|
||||
id?: string;
|
||||
originalJobId?: string;
|
||||
jobId?: string;
|
||||
runId?: string | null;
|
||||
sourceId?: string | null;
|
||||
jobType?: string;
|
||||
tenantId?: string;
|
||||
tenantName?: string;
|
||||
status?: string;
|
||||
state?: string;
|
||||
errorCode?: string;
|
||||
failureReason?: string;
|
||||
errorMessage?: string;
|
||||
category?: string;
|
||||
payload?: unknown;
|
||||
replayAttempts?: number;
|
||||
retryCount?: number;
|
||||
maxReplayAttempts?: number;
|
||||
maxRetries?: number;
|
||||
createdAt?: string;
|
||||
updatedAt?: string;
|
||||
failedAt?: string;
|
||||
resolvedAt?: string | null;
|
||||
resolutionNotes?: string | null;
|
||||
updatedBy?: string;
|
||||
}
|
||||
|
||||
interface ApiDeadLetterListResponse {
|
||||
entries?: ApiDeadLetterEntry[];
|
||||
items?: ApiDeadLetterEntry[];
|
||||
totalCount?: number;
|
||||
total?: number;
|
||||
nextCursor?: string;
|
||||
cursor?: string;
|
||||
}
|
||||
|
||||
interface ApiDeadLetterStatsResponse {
|
||||
totalEntries?: number;
|
||||
pendingEntries?: number;
|
||||
replayingEntries?: number;
|
||||
replayedEntries?: number;
|
||||
resolvedEntries?: number;
|
||||
exhaustedEntries?: number;
|
||||
expiredEntries?: number;
|
||||
retryableEntries?: number;
|
||||
topErrorCodes?: Record<string, number>;
|
||||
stats?: DeadLetterStatsSummary['stats'];
|
||||
byErrorType?: DeadLetterStatsSummary['byErrorType'];
|
||||
byTenant?: DeadLetterStatsSummary['byTenant'];
|
||||
trend?: DeadLetterStatsSummary['trend'];
|
||||
}
|
||||
|
||||
interface ApiDeadLetterSummary {
|
||||
errorCode?: string;
|
||||
entryCount?: number;
|
||||
}
|
||||
|
||||
interface ApiDeadLetterSummaryResponse {
|
||||
summaries?: ApiDeadLetterSummary[];
|
||||
}
|
||||
|
||||
interface ApiReplayResponse {
|
||||
success?: boolean;
|
||||
newJobId?: string | null;
|
||||
error?: string | null;
|
||||
errorMessage?: string | null;
|
||||
}
|
||||
|
||||
interface ApiBatchResultResponse {
|
||||
attempted?: number;
|
||||
succeeded?: number;
|
||||
failed?: number;
|
||||
queued?: number;
|
||||
skipped?: number;
|
||||
batchId?: string;
|
||||
}
|
||||
|
||||
interface ApiResolveBatchResponse {
|
||||
resolvedCount?: number;
|
||||
}
|
||||
|
||||
interface ApiReplayAuditRecord {
|
||||
auditId?: string;
|
||||
entryId?: string;
|
||||
success?: boolean;
|
||||
newJobId?: string | null;
|
||||
errorMessage?: string | null;
|
||||
triggeredBy?: string;
|
||||
triggeredAt?: string;
|
||||
attemptNumber?: number;
|
||||
}
|
||||
|
||||
interface ApiReplayAuditListResponse {
|
||||
audits?: ApiReplayAuditRecord[];
|
||||
}
|
||||
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class DeadLetterClient {
|
||||
private readonly http = inject(HttpClient);
|
||||
private readonly baseUrl = '/api/v1/orchestrator/deadletter';
|
||||
private readonly batchProgressById = new Map<string, BatchReplayProgress>();
|
||||
|
||||
/**
|
||||
* List dead-letter entries with filters.
|
||||
*/
|
||||
list(
|
||||
filter?: DeadLetterFilter,
|
||||
limit: number = 50,
|
||||
limit = 50,
|
||||
cursor?: string
|
||||
): Observable<DeadLetterListResponse> {
|
||||
let params = new HttpParams().set('limit', limit.toString());
|
||||
|
||||
if (cursor) params = params.set('cursor', cursor);
|
||||
if (filter?.state) params = params.set('state', filter.state);
|
||||
if (filter?.errorCode) params = params.set('errorCode', filter.errorCode);
|
||||
if (filter?.tenantId) params = params.set('tenantId', filter.tenantId);
|
||||
if (filter?.jobType) params = params.set('jobType', filter.jobType);
|
||||
if (filter?.olderThanHours) params = params.set('olderThanHours', filter.olderThanHours.toString());
|
||||
if (filter?.search) params = params.set('search', filter.search);
|
||||
if (filter?.dateFrom) params = params.set('dateFrom', filter.dateFrom);
|
||||
if (filter?.dateTo) params = params.set('dateTo', filter.dateTo);
|
||||
|
||||
return this.http.get<DeadLetterListResponse>(this.baseUrl, { params });
|
||||
const params = this.buildListParams(filter, limit, cursor);
|
||||
return this.http
|
||||
.get<ApiDeadLetterListResponse>(this.baseUrl, { params })
|
||||
.pipe(map((response) => this.mapListResponse(response)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get dead-letter entry details.
|
||||
*/
|
||||
getEntry(entryId: string): Observable<DeadLetterEntry> {
|
||||
return this.http.get<DeadLetterEntry>(`${this.baseUrl}/${entryId}`);
|
||||
return this.http
|
||||
.get<ApiDeadLetterEntry>(`${this.baseUrl}/${entryId}`)
|
||||
.pipe(map((entry) => this.mapEntryDetail(entry)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue statistics and summary.
|
||||
*/
|
||||
getStats(): Observable<DeadLetterStatsSummary> {
|
||||
return this.http.get<DeadLetterStatsSummary>(`${this.baseUrl}/stats`);
|
||||
return forkJoin({
|
||||
stats: this.http
|
||||
.get<ApiDeadLetterStatsResponse>(`${this.baseUrl}/stats`)
|
||||
.pipe(catchError(() => of({} as ApiDeadLetterStatsResponse))),
|
||||
summary: this.http
|
||||
.get<ApiDeadLetterSummaryResponse>(`${this.baseUrl}/summary`)
|
||||
.pipe(catchError(() => of({ summaries: [] }))),
|
||||
}).pipe(
|
||||
map(({ stats, summary }) => this.mapStatsSummary(stats, summary))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replay a single entry.
|
||||
*/
|
||||
replay(entryId: string, options?: ReplayRequest): Observable<ReplayResponse> {
|
||||
return this.http.post<ReplayResponse>(`${this.baseUrl}/${entryId}/replay`, options || {});
|
||||
return this.http
|
||||
.post<ApiReplayResponse>(`${this.baseUrl}/${entryId}/replay`, options || {})
|
||||
.pipe(
|
||||
map((response) => ({
|
||||
success: response.success ?? false,
|
||||
newJobId: response.newJobId ?? undefined,
|
||||
error: response.error ?? response.errorMessage ?? undefined,
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch replay by filter.
|
||||
*/
|
||||
batchReplay(request: BatchReplayRequest): Observable<BatchReplayResponse> {
|
||||
return this.http.post<BatchReplayResponse>(`${this.baseUrl}/replay/batch`, request);
|
||||
return this.list(request.filter, 200).pipe(
|
||||
switchMap((listResponse) => {
|
||||
const entryIds = listResponse.items
|
||||
.map((entry) => entry.id)
|
||||
.filter((id) => id.length > 0);
|
||||
|
||||
if (entryIds.length === 0) {
|
||||
return of(
|
||||
this.mapBatchReplayResponse({
|
||||
attempted: 0,
|
||||
succeeded: 0,
|
||||
failed: 0,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return this.http
|
||||
.post<ApiBatchResultResponse>(`${this.baseUrl}/replay/batch`, { entryIds })
|
||||
.pipe(map((response) => this.mapBatchReplayResponse(response)));
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replay all pending retryable entries.
|
||||
*/
|
||||
replayAllPending(options?: ReplayRequest): Observable<BatchReplayResponse> {
|
||||
return this.http.post<BatchReplayResponse>(`${this.baseUrl}/replay/pending`, options || {});
|
||||
replayAllPending(_options?: ReplayRequest): Observable<BatchReplayResponse> {
|
||||
return this.http
|
||||
.post<ApiBatchResultResponse>(`${this.baseUrl}/replay/pending`, {})
|
||||
.pipe(map((response) => this.mapBatchReplayResponse(response)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get batch replay progress.
|
||||
*/
|
||||
getBatchProgress(batchId: string): Observable<BatchReplayProgress> {
|
||||
return this.http.get<BatchReplayProgress>(`${this.baseUrl}/replay/batch/${batchId}`);
|
||||
const progress =
|
||||
this.batchProgressById.get(batchId) ??
|
||||
{
|
||||
batchId,
|
||||
total: 0,
|
||||
completed: 0,
|
||||
succeeded: 0,
|
||||
failed: 0,
|
||||
pending: 0,
|
||||
status: 'completed' as const,
|
||||
};
|
||||
return of(progress);
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually resolve an entry.
|
||||
*/
|
||||
resolve(entryId: string, request: ResolveRequest): Observable<DeadLetterEntry> {
|
||||
return this.http.post<DeadLetterEntry>(`${this.baseUrl}/${entryId}/resolve`, request);
|
||||
return this.http
|
||||
.post<ApiDeadLetterEntry>(`${this.baseUrl}/${entryId}/resolve`, {
|
||||
notes: request.notes ?? request.reason,
|
||||
})
|
||||
.pipe(map((entry) => this.mapEntryDetail(entry)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch resolve entries.
|
||||
*/
|
||||
batchResolve(entryIds: string[], request: ResolveRequest): Observable<{ resolved: number }> {
|
||||
return this.http.post<{ resolved: number }>(`${this.baseUrl}/resolve/batch`, {
|
||||
entryIds,
|
||||
...request,
|
||||
});
|
||||
return this.http
|
||||
.post<ApiResolveBatchResponse>(`${this.baseUrl}/resolve/batch`, {
|
||||
entryIds,
|
||||
notes: request.notes ?? request.reason,
|
||||
})
|
||||
.pipe(
|
||||
map((response) => ({
|
||||
resolved: response.resolvedCount ?? 0,
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get entry audit history.
|
||||
*/
|
||||
getAuditHistory(entryId: string): Observable<DeadLetterAuditEvent[]> {
|
||||
return this.http.get<DeadLetterAuditEvent[]>(`${this.baseUrl}/${entryId}/audit`);
|
||||
return this.http
|
||||
.get<ApiReplayAuditListResponse | DeadLetterAuditEvent[]>(
|
||||
`${this.baseUrl}/${entryId}/audit`
|
||||
)
|
||||
.pipe(
|
||||
map((response) => this.mapAuditEvents(response)),
|
||||
catchError(() => of([]))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export dead-letter entries as CSV.
|
||||
*/
|
||||
export(filter?: DeadLetterFilter): Observable<Blob> {
|
||||
let params = new HttpParams();
|
||||
if (filter?.state) params = params.set('state', filter.state);
|
||||
if (filter?.state) params = params.set('status', this.toApiState(filter.state));
|
||||
if (filter?.errorCode) params = params.set('errorCode', filter.errorCode);
|
||||
if (filter?.tenantId) params = params.set('tenantId', filter.tenantId);
|
||||
if (filter?.dateFrom) params = params.set('dateFrom', filter.dateFrom);
|
||||
if (filter?.dateTo) params = params.set('dateTo', filter.dateTo);
|
||||
if (filter?.dateFrom) params = params.set('createdAfter', filter.dateFrom);
|
||||
if (filter?.dateTo) params = params.set('createdBefore', filter.dateTo);
|
||||
|
||||
return this.http.get(`${this.baseUrl}/export`, {
|
||||
params,
|
||||
responseType: 'blob',
|
||||
});
|
||||
}
|
||||
|
||||
private buildListParams(filter?: DeadLetterFilter, limit = 50, cursor?: string): HttpParams {
|
||||
let params = new HttpParams().set('limit', limit.toString());
|
||||
|
||||
if (cursor) params = params.set('cursor', cursor);
|
||||
if (filter?.state) params = params.set('status', this.toApiState(filter.state));
|
||||
if (filter?.errorCode) params = params.set('errorCode', filter.errorCode);
|
||||
if (filter?.jobType) params = params.set('jobType', filter.jobType);
|
||||
if (filter?.dateFrom) params = params.set('createdAfter', filter.dateFrom);
|
||||
if (filter?.dateTo) params = params.set('createdBefore', filter.dateTo);
|
||||
if (filter?.olderThanHours && filter.olderThanHours > 0) {
|
||||
const before = new Date(Date.now() - filter.olderThanHours * 60 * 60 * 1000).toISOString();
|
||||
params = params.set('createdBefore', before);
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
private mapListResponse(response: ApiDeadLetterListResponse): DeadLetterListResponse {
|
||||
const rawItems = response.items ?? response.entries ?? [];
|
||||
const items = rawItems
|
||||
.map((entry) => this.mapEntrySummary(entry))
|
||||
.filter((entry): entry is DeadLetterEntrySummary => !!entry);
|
||||
|
||||
return {
|
||||
items,
|
||||
total: response.total ?? response.totalCount ?? items.length,
|
||||
cursor: response.cursor ?? response.nextCursor,
|
||||
};
|
||||
}
|
||||
|
||||
private mapStatsSummary(
|
||||
stats: ApiDeadLetterStatsResponse,
|
||||
summary: ApiDeadLetterSummaryResponse
|
||||
): DeadLetterStatsSummary {
|
||||
if (stats.stats) {
|
||||
return {
|
||||
stats: stats.stats,
|
||||
byErrorType: stats.byErrorType ?? [],
|
||||
byTenant: stats.byTenant ?? [],
|
||||
trend: stats.trend ?? [],
|
||||
};
|
||||
}
|
||||
|
||||
const topErrorCodes = stats.topErrorCodes ?? {};
|
||||
const summaryCounts = (summary.summaries ?? [])
|
||||
.filter((item) => !!item.errorCode)
|
||||
.map((item) => ({
|
||||
errorCode: this.toErrorCode(item.errorCode),
|
||||
count: item.entryCount ?? 0,
|
||||
}));
|
||||
|
||||
const fallbackCounts = Object.entries(topErrorCodes).map(([code, count]) => ({
|
||||
errorCode: this.toErrorCode(code),
|
||||
count,
|
||||
}));
|
||||
|
||||
const byErrorTypeSource = summaryCounts.length > 0 ? summaryCounts : fallbackCounts;
|
||||
const totalForPercentages = byErrorTypeSource.reduce((acc, item) => acc + item.count, 0);
|
||||
|
||||
const byErrorType = byErrorTypeSource.map((item) => ({
|
||||
errorCode: item.errorCode,
|
||||
count: item.count,
|
||||
percentage: totalForPercentages > 0 ? (item.count / totalForPercentages) * 100 : 0,
|
||||
}));
|
||||
|
||||
return {
|
||||
stats: {
|
||||
total: stats.totalEntries ?? 0,
|
||||
pending: stats.pendingEntries ?? 0,
|
||||
retrying: stats.replayingEntries ?? 0,
|
||||
resolved: stats.resolvedEntries ?? 0,
|
||||
replayed: stats.replayedEntries ?? 0,
|
||||
failed: (stats.exhaustedEntries ?? 0) + (stats.expiredEntries ?? 0),
|
||||
olderThan24h: 0,
|
||||
retryable: stats.retryableEntries ?? 0,
|
||||
},
|
||||
byErrorType,
|
||||
byTenant: [],
|
||||
trend: [],
|
||||
};
|
||||
}
|
||||
|
||||
private mapEntrySummary(entry: ApiDeadLetterEntry): DeadLetterEntrySummary | null {
|
||||
const id = entry.id ?? entry.entryId ?? '';
|
||||
if (!id) return null;
|
||||
|
||||
const createdAt = entry.createdAt ?? entry.failedAt ?? new Date().toISOString();
|
||||
|
||||
return {
|
||||
id,
|
||||
jobId: entry.jobId ?? entry.originalJobId ?? id,
|
||||
jobType: entry.jobType ?? 'unknown',
|
||||
tenantId: entry.tenantId ?? 'default',
|
||||
tenantName: entry.tenantName ?? entry.tenantId ?? 'default',
|
||||
state: this.toUiState(entry.state ?? entry.status),
|
||||
errorCode: this.toErrorCode(entry.errorCode),
|
||||
errorMessage: entry.errorMessage ?? entry.failureReason ?? 'Unknown error',
|
||||
retryCount: entry.retryCount ?? entry.replayAttempts ?? 0,
|
||||
maxRetries: entry.maxRetries ?? entry.maxReplayAttempts ?? 0,
|
||||
age: this.computeAgeSeconds(createdAt),
|
||||
createdAt,
|
||||
};
|
||||
}
|
||||
|
||||
private mapEntryDetail(entry: ApiDeadLetterEntry): DeadLetterEntry {
|
||||
const summary = this.mapEntrySummary(entry) ?? {
|
||||
id: entry.id ?? entry.entryId ?? '',
|
||||
jobId: entry.jobId ?? entry.originalJobId ?? '',
|
||||
jobType: entry.jobType ?? 'unknown',
|
||||
tenantId: entry.tenantId ?? 'default',
|
||||
tenantName: entry.tenantName ?? entry.tenantId ?? 'default',
|
||||
state: this.toUiState(entry.state ?? entry.status),
|
||||
errorCode: this.toErrorCode(entry.errorCode),
|
||||
errorMessage: entry.errorMessage ?? entry.failureReason ?? 'Unknown error',
|
||||
retryCount: entry.retryCount ?? entry.replayAttempts ?? 0,
|
||||
maxRetries: entry.maxRetries ?? entry.maxReplayAttempts ?? 0,
|
||||
age: 0,
|
||||
createdAt: entry.createdAt ?? entry.failedAt ?? new Date().toISOString(),
|
||||
};
|
||||
|
||||
const payload = this.parsePayload(entry.payload);
|
||||
|
||||
return {
|
||||
...summary,
|
||||
payload,
|
||||
errorCategory: this.toErrorCategory(entry.category),
|
||||
stackTrace: undefined,
|
||||
updatedAt: entry.updatedAt ?? summary.createdAt,
|
||||
resolvedAt: entry.resolvedAt ?? undefined,
|
||||
resolvedBy: entry.updatedBy ?? undefined,
|
||||
resolutionReason: undefined,
|
||||
resolutionNotes: entry.resolutionNotes ?? undefined,
|
||||
replayedJobId: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
private mapAuditEvents(
|
||||
response: ApiReplayAuditListResponse | DeadLetterAuditEvent[]
|
||||
): DeadLetterAuditEvent[] {
|
||||
if (Array.isArray(response)) {
|
||||
return response;
|
||||
}
|
||||
|
||||
return (response.audits ?? []).map((audit) => ({
|
||||
id: audit.auditId ?? '',
|
||||
entryId: audit.entryId ?? '',
|
||||
action: audit.success ? 'replayed' : 'retry_failed',
|
||||
timestamp: audit.triggeredAt ?? new Date().toISOString(),
|
||||
actor: audit.triggeredBy ?? undefined,
|
||||
details: {
|
||||
attemptNumber: audit.attemptNumber ?? 0,
|
||||
newJobId: audit.newJobId ?? null,
|
||||
errorMessage: audit.errorMessage ?? null,
|
||||
},
|
||||
}));
|
||||
}
|
||||
|
||||
private mapBatchReplayResponse(response: ApiBatchResultResponse): BatchReplayResponse {
|
||||
if (response.batchId) {
|
||||
return {
|
||||
queued: response.queued ?? response.succeeded ?? 0,
|
||||
skipped: response.skipped ?? response.failed ?? 0,
|
||||
batchId: response.batchId,
|
||||
};
|
||||
}
|
||||
|
||||
const attempted = response.attempted ?? 0;
|
||||
const succeeded = response.succeeded ?? 0;
|
||||
const failed = response.failed ?? 0;
|
||||
const batchId = this.createBatchId();
|
||||
|
||||
this.batchProgressById.set(batchId, {
|
||||
batchId,
|
||||
total: attempted,
|
||||
completed: attempted,
|
||||
succeeded,
|
||||
failed,
|
||||
pending: 0,
|
||||
status: 'completed',
|
||||
});
|
||||
|
||||
return {
|
||||
queued: succeeded,
|
||||
skipped: failed,
|
||||
batchId,
|
||||
};
|
||||
}
|
||||
|
||||
private toApiState(state: DeadLetterState): string {
|
||||
switch (state) {
|
||||
case 'retrying':
|
||||
return 'replaying';
|
||||
case 'failed':
|
||||
return 'exhausted';
|
||||
default:
|
||||
return state;
|
||||
}
|
||||
}
|
||||
|
||||
private toUiState(state: string | undefined): DeadLetterState {
|
||||
const normalized = (state ?? '').toLowerCase();
|
||||
switch (normalized) {
|
||||
case 'replaying':
|
||||
return 'retrying';
|
||||
case 'resolved':
|
||||
case 'replayed':
|
||||
case 'pending':
|
||||
case 'failed':
|
||||
case 'retrying':
|
||||
return normalized as DeadLetterState;
|
||||
case 'exhausted':
|
||||
case 'expired':
|
||||
return 'failed';
|
||||
default:
|
||||
return 'pending';
|
||||
}
|
||||
}
|
||||
|
||||
private toErrorCode(value: string | undefined): ErrorCode {
|
||||
const raw = (value ?? '').toUpperCase();
|
||||
const known: readonly ErrorCode[] = [
|
||||
'DLQ_TIMEOUT',
|
||||
'DLQ_RESOURCE',
|
||||
'DLQ_NETWORK',
|
||||
'DLQ_DEPENDENCY',
|
||||
'DLQ_VALIDATION',
|
||||
'DLQ_POLICY',
|
||||
'DLQ_AUTH',
|
||||
'DLQ_CONFLICT',
|
||||
'DLQ_UNKNOWN',
|
||||
];
|
||||
|
||||
if (known.includes(raw as ErrorCode)) {
|
||||
return raw as ErrorCode;
|
||||
}
|
||||
|
||||
if (raw.includes('TIMEOUT')) return 'DLQ_TIMEOUT';
|
||||
if (raw.includes('NETWORK') || raw.includes('CONNECTION') || raw.includes('DNS')) return 'DLQ_NETWORK';
|
||||
if (raw.includes('RESOURCE') || raw.includes('MEMORY') || raw.includes('CPU')) return 'DLQ_RESOURCE';
|
||||
if (raw.includes('DEPENDENCY') || raw.includes('SERVICE_UNAVAILABLE')) return 'DLQ_DEPENDENCY';
|
||||
if (raw.includes('VALIDATION')) return 'DLQ_VALIDATION';
|
||||
if (raw.includes('POLICY')) return 'DLQ_POLICY';
|
||||
if (raw.includes('AUTH') || raw.includes('TOKEN')) return 'DLQ_AUTH';
|
||||
if (raw.includes('CONFLICT') || raw.includes('DUPLICATE')) return 'DLQ_CONFLICT';
|
||||
return 'DLQ_UNKNOWN';
|
||||
}
|
||||
|
||||
private toErrorCategory(value: string | undefined): 'transient' | 'permanent' {
|
||||
const normalized = (value ?? '').toLowerCase();
|
||||
return normalized === 'transient' ? 'transient' : 'permanent';
|
||||
}
|
||||
|
||||
private parsePayload(payload: unknown): Record<string, unknown> {
|
||||
if (payload && typeof payload === 'object' && !Array.isArray(payload)) {
|
||||
return payload as Record<string, unknown>;
|
||||
}
|
||||
|
||||
if (typeof payload === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(payload) as unknown;
|
||||
if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
|
||||
return parsed as Record<string, unknown>;
|
||||
}
|
||||
} catch {
|
||||
return { raw: payload };
|
||||
}
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
private computeAgeSeconds(createdAt: string): number {
|
||||
const createdMillis = new Date(createdAt).getTime();
|
||||
if (Number.isNaN(createdMillis)) return 0;
|
||||
return Math.max(0, Math.floor((Date.now() - createdMillis) / 1000));
|
||||
}
|
||||
|
||||
private createBatchId(): string {
|
||||
if (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function') {
|
||||
return crypto.randomUUID();
|
||||
}
|
||||
|
||||
return `batch-${Date.now()}`;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
import { Injectable, InjectionToken, Inject } from '@angular/core';
|
||||
import { HttpClient, HttpHeaders } from '@angular/common/http';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { delay } from 'rxjs/operators';
|
||||
import { delay, map, switchMap } from 'rxjs/operators';
|
||||
import { AuthSessionStore } from '../auth/auth-session.store';
|
||||
import type {
|
||||
Schedule,
|
||||
@@ -33,6 +33,19 @@ export interface CreateScheduleDto {
|
||||
|
||||
export type UpdateScheduleDto = Partial<CreateScheduleDto>;
|
||||
|
||||
interface SchedulerScheduleEnvelope {
|
||||
readonly schedule?: Record<string, unknown>;
|
||||
readonly summary?: Record<string, unknown> | null;
|
||||
}
|
||||
|
||||
interface SchedulerScheduleCollectionResponse {
|
||||
readonly schedules?: readonly SchedulerScheduleEnvelope[];
|
||||
}
|
||||
|
||||
interface SchedulerRunsPreviewResponse {
|
||||
readonly total?: number;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// API Interface
|
||||
// ============================================================================
|
||||
@@ -65,31 +78,55 @@ export class SchedulerHttpClient implements SchedulerApi {
|
||||
) {}
|
||||
|
||||
listSchedules(): Observable<Schedule[]> {
|
||||
return this.http.get<Schedule[]>(`${this.baseUrl}/schedules/`, {
|
||||
return this.http.get<SchedulerScheduleCollectionResponse | Schedule[]>(`${this.baseUrl}/schedules/`, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}).pipe(
|
||||
map((response) => this.mapScheduleList(response)),
|
||||
);
|
||||
}
|
||||
|
||||
getSchedule(id: string): Observable<Schedule> {
|
||||
return this.http.get<Schedule>(`${this.baseUrl}/schedules/${id}`, {
|
||||
return this.http.get<SchedulerScheduleEnvelope | Schedule>(`${this.baseUrl}/schedules/${id}`, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}).pipe(
|
||||
map((response) => this.mapSchedule(response)),
|
||||
);
|
||||
}
|
||||
|
||||
createSchedule(schedule: CreateScheduleDto): Observable<Schedule> {
|
||||
return this.http.post<Schedule>(`${this.baseUrl}/schedules/`, schedule, {
|
||||
const payload = this.toCreateRequest(schedule);
|
||||
return this.http.post<SchedulerScheduleEnvelope | Schedule>(`${this.baseUrl}/schedules/`, payload, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}).pipe(
|
||||
map((response) => this.mapSchedule(response)),
|
||||
);
|
||||
}
|
||||
|
||||
updateSchedule(id: string, schedule: UpdateScheduleDto): Observable<Schedule> {
|
||||
return this.http.put<Schedule>(`${this.baseUrl}/schedules/${id}`, schedule, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
const headers = this.buildHeaders();
|
||||
const payload = this.toUpdateRequest(schedule);
|
||||
|
||||
return this.http.patch<SchedulerScheduleEnvelope | Schedule>(`${this.baseUrl}/schedules/${id}`, payload, {
|
||||
headers,
|
||||
}).pipe(
|
||||
switchMap((response) => {
|
||||
if (schedule.enabled === undefined) {
|
||||
return of(response);
|
||||
}
|
||||
|
||||
const toggle$ = schedule.enabled
|
||||
? this.http.post<void>(`${this.baseUrl}/schedules/${id}/resume`, {}, { headers })
|
||||
: this.http.post<void>(`${this.baseUrl}/schedules/${id}/pause`, {}, { headers });
|
||||
|
||||
return toggle$.pipe(map(() => response));
|
||||
}),
|
||||
map((response) => this.mapSchedule(response)),
|
||||
);
|
||||
}
|
||||
|
||||
deleteSchedule(id: string): Observable<void> {
|
||||
return this.http.delete<void>(`${this.baseUrl}/schedules/${id}`, {
|
||||
// Compatibility fallback: pausing removes the item from default list responses.
|
||||
return this.http.post<void>(`${this.baseUrl}/schedules/${id}/pause`, {}, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}
|
||||
@@ -107,15 +144,180 @@ export class SchedulerHttpClient implements SchedulerApi {
|
||||
}
|
||||
|
||||
triggerSchedule(id: string): Observable<void> {
|
||||
return this.http.post<void>(`${this.baseUrl}/schedules/${id}/trigger`, {}, {
|
||||
return this.http.post<void>(`${this.baseUrl}/runs/`, {
|
||||
scheduleId: id,
|
||||
trigger: 'manual',
|
||||
reason: {
|
||||
manualReason: 'Triggered from schedule management UI',
|
||||
},
|
||||
}, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}
|
||||
|
||||
previewImpact(schedule: CreateScheduleDto): Observable<ScheduleImpactPreview> {
|
||||
return this.http.post<ScheduleImpactPreview>(`${this.baseUrl}/schedules/preview-impact`, schedule, {
|
||||
previewImpact(_schedule: CreateScheduleDto): Observable<ScheduleImpactPreview> {
|
||||
return this.http.post<SchedulerRunsPreviewResponse>(`${this.baseUrl}/runs/preview`, {
|
||||
selector: {
|
||||
scope: 'all-images',
|
||||
},
|
||||
usageOnly: true,
|
||||
sampleSize: 10,
|
||||
}, {
|
||||
headers: this.buildHeaders(),
|
||||
});
|
||||
}).pipe(
|
||||
map((response) => {
|
||||
const total = Number.isFinite(response?.total) ? Number(response.total) : 0;
|
||||
const warnings = total > 1000
|
||||
? [`Preview includes ${total} impacted records; consider a narrower selector.`]
|
||||
: [];
|
||||
|
||||
return {
|
||||
scheduleId: 'preview',
|
||||
proposedChange: 'update',
|
||||
affectedRuns: total,
|
||||
nextRunTime: new Date(Date.now() + 60 * 60 * 1000).toISOString(),
|
||||
estimatedLoad: Math.min(100, Math.max(5, total > 0 ? Math.round(total / 20) : 5)),
|
||||
conflicts: [],
|
||||
warnings,
|
||||
} satisfies ScheduleImpactPreview;
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
private mapScheduleList(payload: SchedulerScheduleCollectionResponse | Schedule[]): Schedule[] {
|
||||
if (Array.isArray(payload)) {
|
||||
return payload.map((entry) => this.mapSchedule(entry));
|
||||
}
|
||||
|
||||
const entries = Array.isArray(payload?.schedules) ? payload.schedules : [];
|
||||
return entries.map((entry) => this.mapSchedule(entry));
|
||||
}
|
||||
|
||||
private mapSchedule(payload: SchedulerScheduleEnvelope | Schedule): Schedule {
|
||||
const envelope = payload as SchedulerScheduleEnvelope;
|
||||
const schedule = (envelope?.schedule ?? payload) as Record<string, unknown>;
|
||||
const summary = envelope?.summary as Record<string, unknown> | null | undefined;
|
||||
const limits = this.asRecord(schedule?.['limits']);
|
||||
|
||||
const recentRuns = Array.isArray(summary?.['recentRuns'])
|
||||
? summary['recentRuns'] as readonly Record<string, unknown>[]
|
||||
: [];
|
||||
const lastRunAt = recentRuns.length > 0
|
||||
? this.readString(recentRuns[0], 'completedAt')
|
||||
: undefined;
|
||||
|
||||
const maxJobs = this.readNumber(limits, 'maxJobs');
|
||||
const maxRetries = maxJobs > 0
|
||||
? Math.min(10, Math.max(1, Math.round(maxJobs / 10)))
|
||||
: 3;
|
||||
|
||||
return {
|
||||
id: this.readString(schedule, 'id') || `sch-${Date.now()}`,
|
||||
name: this.readString(schedule, 'name') || 'Unnamed schedule',
|
||||
description: this.readString(schedule, 'description') || '',
|
||||
cronExpression: this.readString(schedule, 'cronExpression') || '0 6 * * *',
|
||||
timezone: this.readString(schedule, 'timezone') || 'UTC',
|
||||
enabled: this.readBoolean(schedule, 'enabled', true),
|
||||
taskType: this.inferTaskType(this.readString(schedule, 'mode')),
|
||||
taskConfig: {},
|
||||
lastRunAt,
|
||||
nextRunAt: undefined,
|
||||
createdAt: this.readString(schedule, 'createdAt') || new Date().toISOString(),
|
||||
updatedAt: this.readString(schedule, 'updatedAt') || new Date().toISOString(),
|
||||
createdBy: this.readString(schedule, 'createdBy') || 'system',
|
||||
tags: [],
|
||||
retryPolicy: {
|
||||
maxRetries,
|
||||
backoffMultiplier: 2,
|
||||
initialDelayMs: 1000,
|
||||
maxDelayMs: 60000,
|
||||
},
|
||||
concurrencyLimit: Math.max(1, this.readNumber(limits, 'parallelism') || 1),
|
||||
};
|
||||
}
|
||||
|
||||
private toCreateRequest(schedule: CreateScheduleDto): Record<string, unknown> {
|
||||
return {
|
||||
name: schedule.name,
|
||||
cronExpression: schedule.cronExpression,
|
||||
timezone: schedule.timezone,
|
||||
enabled: schedule.enabled,
|
||||
mode: this.toSchedulerMode(schedule.taskType),
|
||||
selection: {
|
||||
scope: 'all-images',
|
||||
},
|
||||
limits: {
|
||||
parallelism: schedule.concurrencyLimit ?? 1,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
private toUpdateRequest(schedule: UpdateScheduleDto): Record<string, unknown> {
|
||||
const request: Record<string, unknown> = {};
|
||||
|
||||
if (schedule.name !== undefined) {
|
||||
request['name'] = schedule.name;
|
||||
}
|
||||
if (schedule.cronExpression !== undefined) {
|
||||
request['cronExpression'] = schedule.cronExpression;
|
||||
}
|
||||
if (schedule.timezone !== undefined) {
|
||||
request['timezone'] = schedule.timezone;
|
||||
}
|
||||
if (schedule.taskType !== undefined) {
|
||||
request['mode'] = this.toSchedulerMode(schedule.taskType);
|
||||
}
|
||||
if (schedule.concurrencyLimit !== undefined) {
|
||||
request['limits'] = { parallelism: schedule.concurrencyLimit };
|
||||
}
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
private toSchedulerMode(taskType: ScheduleTaskType): string {
|
||||
switch (taskType) {
|
||||
case 'scan':
|
||||
case 'cleanup':
|
||||
case 'custom':
|
||||
return 'analysis-only';
|
||||
default:
|
||||
return 'content-refresh';
|
||||
}
|
||||
}
|
||||
|
||||
private inferTaskType(mode: string): ScheduleTaskType {
|
||||
return mode.toLowerCase() === 'content-refresh'
|
||||
? 'vulnerability-sync'
|
||||
: 'scan';
|
||||
}
|
||||
|
||||
private readString(source: Record<string, unknown> | null | undefined, key: string): string {
|
||||
const value = source?.[key];
|
||||
return typeof value === 'string' ? value : '';
|
||||
}
|
||||
|
||||
private readNumber(source: Record<string, unknown> | null | undefined, key: string): number {
|
||||
const value = source?.[key];
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
const parsed = Number(value);
|
||||
return Number.isFinite(parsed) ? parsed : 0;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private readBoolean(source: Record<string, unknown> | null | undefined, key: string, fallback: boolean): boolean {
|
||||
const value = source?.[key];
|
||||
return typeof value === 'boolean' ? value : fallback;
|
||||
}
|
||||
|
||||
private asRecord(value: unknown): Record<string, unknown> | null {
|
||||
return value && typeof value === 'object' ? value as Record<string, unknown> : null;
|
||||
}
|
||||
|
||||
private buildHeaders(): HttpHeaders {
|
||||
|
||||
@@ -6,11 +6,13 @@ import {
|
||||
inject,
|
||||
Input,
|
||||
OnChanges,
|
||||
OnInit,
|
||||
Output,
|
||||
signal,
|
||||
SimpleChanges,
|
||||
} from '@angular/core';
|
||||
import { FormsModule } from '@angular/forms';
|
||||
import { ActivatedRoute } from '@angular/router';
|
||||
import {
|
||||
FeedMirror,
|
||||
FeedSnapshot,
|
||||
@@ -20,6 +22,25 @@ import {
|
||||
import { FEED_MIRROR_API } from '../../core/api/feed-mirror.client';
|
||||
import { SnapshotActionsComponent } from './snapshot-actions.component';
|
||||
|
||||
const EMPTY_FEED_MIRROR: FeedMirror = {
|
||||
mirrorId: '',
|
||||
name: 'Loading mirror...',
|
||||
feedType: 'custom',
|
||||
upstreamUrl: '',
|
||||
localPath: '',
|
||||
enabled: false,
|
||||
syncStatus: 'pending',
|
||||
lastSyncAt: null,
|
||||
nextSyncAt: null,
|
||||
syncIntervalMinutes: 60,
|
||||
snapshotCount: 0,
|
||||
totalSizeBytes: 0,
|
||||
latestSnapshotId: null,
|
||||
errorMessage: null,
|
||||
createdAt: new Date(0).toISOString(),
|
||||
updatedAt: new Date(0).toISOString(),
|
||||
};
|
||||
|
||||
@Component({
|
||||
selector: 'app-mirror-detail',
|
||||
imports: [CommonModule, FormsModule, SnapshotActionsComponent],
|
||||
@@ -781,27 +802,72 @@ import { SnapshotActionsComponent } from './snapshot-actions.component';
|
||||
`],
|
||||
changeDetection: ChangeDetectionStrategy.OnPush
|
||||
})
|
||||
export class MirrorDetailComponent implements OnChanges {
|
||||
export class MirrorDetailComponent implements OnInit, OnChanges {
|
||||
private readonly feedMirrorApi = inject(FEED_MIRROR_API);
|
||||
private readonly route = inject(ActivatedRoute);
|
||||
private mirrorState: FeedMirror = EMPTY_FEED_MIRROR;
|
||||
|
||||
@Input() set mirror(value: FeedMirror | null | undefined) {
|
||||
this.mirrorState = value ?? EMPTY_FEED_MIRROR;
|
||||
}
|
||||
|
||||
get mirror(): FeedMirror {
|
||||
return this.mirrorState;
|
||||
}
|
||||
|
||||
@Input({ required: true }) mirror!: FeedMirror;
|
||||
@Output() back = new EventEmitter<void>();
|
||||
|
||||
readonly snapshots = signal<readonly FeedSnapshot[]>([]);
|
||||
readonly retentionConfig = signal<SnapshotRetentionConfig | null>(null);
|
||||
readonly loadingSnapshots = signal(true);
|
||||
readonly loadingMirror = signal(false);
|
||||
readonly syncing = signal(false);
|
||||
readonly showSettings = signal(false);
|
||||
readonly settingsSyncInterval = signal(0);
|
||||
readonly settingsUpstreamUrl = signal('');
|
||||
|
||||
ngOnChanges(changes: SimpleChanges): void {
|
||||
if (changes['mirror']) {
|
||||
this.loadSnapshots();
|
||||
this.loadRetentionConfig();
|
||||
this.settingsSyncInterval.set(this.mirror.syncIntervalMinutes);
|
||||
this.settingsUpstreamUrl.set(this.mirror.upstreamUrl);
|
||||
ngOnInit(): void {
|
||||
const routeMirrorId = this.route.snapshot.paramMap.get('mirrorId');
|
||||
if (routeMirrorId && !this.hasMirrorData()) {
|
||||
this.loadMirrorById(routeMirrorId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.hasMirrorData()) {
|
||||
this.initializeMirrorState();
|
||||
}
|
||||
}
|
||||
|
||||
ngOnChanges(changes: SimpleChanges): void {
|
||||
if (changes['mirror'] && this.hasMirrorData()) {
|
||||
this.initializeMirrorState();
|
||||
}
|
||||
}
|
||||
|
||||
private initializeMirrorState(): void {
|
||||
this.settingsSyncInterval.set(this.mirror.syncIntervalMinutes);
|
||||
this.settingsUpstreamUrl.set(this.mirror.upstreamUrl);
|
||||
this.loadSnapshots();
|
||||
this.loadRetentionConfig();
|
||||
}
|
||||
|
||||
private hasMirrorData(): boolean {
|
||||
return !!this.mirror?.mirrorId;
|
||||
}
|
||||
|
||||
private loadMirrorById(mirrorId: string): void {
|
||||
this.loadingMirror.set(true);
|
||||
this.feedMirrorApi.getMirror(mirrorId).subscribe({
|
||||
next: (mirror) => {
|
||||
this.mirror = mirror;
|
||||
this.loadingMirror.set(false);
|
||||
this.initializeMirrorState();
|
||||
},
|
||||
error: (err) => {
|
||||
console.error('Failed to load mirror details:', err);
|
||||
this.loadingMirror.set(false);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
private loadSnapshots(): void {
|
||||
@@ -826,6 +892,10 @@ export class MirrorDetailComponent implements OnChanges {
|
||||
}
|
||||
|
||||
toggleEnabled(event: Event): void {
|
||||
if (!this.hasMirrorData()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const checked = (event.target as HTMLInputElement).checked;
|
||||
const update: MirrorConfigUpdate = { enabled: checked };
|
||||
this.feedMirrorApi.updateMirrorConfig(this.mirror.mirrorId, update).subscribe({
|
||||
@@ -835,6 +905,10 @@ export class MirrorDetailComponent implements OnChanges {
|
||||
}
|
||||
|
||||
triggerSync(): void {
|
||||
if (!this.hasMirrorData()) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.syncing.set(true);
|
||||
this.feedMirrorApi.triggerSync({ mirrorId: this.mirror.mirrorId }).subscribe({
|
||||
next: (result) => {
|
||||
@@ -850,6 +924,10 @@ export class MirrorDetailComponent implements OnChanges {
|
||||
}
|
||||
|
||||
saveSettings(): void {
|
||||
if (!this.hasMirrorData()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const update: MirrorConfigUpdate = {
|
||||
syncIntervalMinutes: this.settingsSyncInterval(),
|
||||
upstreamUrl: this.settingsUpstreamUrl(),
|
||||
|
||||
58
src/Web/StellaOps.Web/tmp-debug-errors.js
Normal file
58
src/Web/StellaOps.Web/tmp-debug-errors.js
Normal file
@@ -0,0 +1,58 @@
|
||||
// Headless smoke probe: logs in through the authorize flow and records
// console errors, failed requests, and HTTP >= 400 responses while visiting
// selected app routes, then prints the collected events as JSON on stdout.
const { chromium } = require('playwright');

// Overridable via env so credentials need not live in source.
// NOTE(review): hard-coded defaults kept for backward compatibility — rotate
// and drop them once callers set STELLA_USER / STELLA_PASS.
const BASE = process.env.STELLA_BASE ?? 'https://stella-ops.local';
const USER = process.env.STELLA_USER ?? 'admin';
const PASS = process.env.STELLA_PASS ?? 'Admin@Stella2026!';

// Static assets are noise for this probe; only app/API traffic is recorded.
const STATIC_ASSET = /\.(css|js|map|png|jpg|jpeg|svg|woff2?)($|\?)/i;

(async () => {
  const browser = await chromium.launch({ headless: true, args: ['--disable-dev-shm-usage'] });
  try {
    const context = await browser.newContext({ ignoreHTTPSErrors: true });
    const page = await context.newPage();
    const events = [];

    // Every event is stamped with its time and the page URL it occurred on.
    const push = (kind, payload) => events.push({ ts: new Date().toISOString(), kind, ...payload, page: page.url() });

    page.on('console', msg => {
      if (msg.type() === 'error') {
        push('console_error', { text: msg.text() });
      }
    });

    page.on('requestfailed', request => {
      const url = request.url();
      if (STATIC_ASSET.test(url)) return;
      push('request_failed', {
        method: request.method(),
        url,
        error: request.failure()?.errorText ?? 'unknown'
      });
    });

    page.on('response', response => {
      const url = response.url();
      if (STATIC_ASSET.test(url)) return;
      if (response.status() >= 400) {
        push('response_error', { status: response.status(), method: response.request().method(), url });
      }
    });

    await page.goto(`${BASE}/welcome`, { waitUntil: 'domcontentloaded' });
    await page.waitForTimeout(1200);
    const cta = page.locator('button.cta').first();
    if (await cta.count()) {
      await cta.click({ force: true, noWaitAfter: true });
      await page.waitForTimeout(1000);
    }

    // Complete the interactive login if the CTA bounced us to the IdP.
    if (page.url().includes('/connect/authorize')) {
      await page.locator('input[name="username"]').first().fill(USER);
      await page.locator('input[name="password"]').first().fill(PASS);
      await page.locator('button[type="submit"], button:has-text("Sign In")').first().click();
      await page.waitForURL(url => !url.toString().includes('/connect/authorize'), { timeout: 20000 });
      await page.waitForTimeout(1200);
    }

    for (const path of ['/evidence/proof-chains', '/policy/packs']) {
      await page.goto(`${BASE}${path}`, { waitUntil: 'domcontentloaded' });
      await page.waitForTimeout(6000);
    }

    console.log(JSON.stringify(events, null, 2));
  } finally {
    // Fix: previously a throw anywhere above leaked the browser process.
    await browser.close();
  }
})();
|
||||
49
src/Web/StellaOps.Web/tmp-debug-requestfailed.js
Normal file
49
src/Web/StellaOps.Web/tmp-debug-requestfailed.js
Normal file
@@ -0,0 +1,49 @@
|
||||
// Headless probe that records failed requests and HTTP >= 400 responses
// (excluding static assets) while logging in and visiting selected routes,
// then prints the findings as JSON on stdout.
const { chromium } = require('playwright');

// Overridable via env; hard-coded defaults kept for backward compatibility.
// NOTE(review): remove the default password once callers supply STELLA_PASS.
const BASE = process.env.STELLA_BASE || 'https://stella-ops.local';
const USER = process.env.STELLA_USER || 'admin';
const PASS = process.env.STELLA_PASS || 'Admin@Stella2026!';

// Static assets are excluded from the report.
const STATIC_ASSET = /\.(css|js|map|png|jpg|jpeg|svg|woff2?)($|\?)/i;

(async () => {
  const browser = await chromium.launch({ headless: true, args: ['--disable-dev-shm-usage'] });
  const failed = [];
  const responses = [];
  try {
    const ctx = await browser.newContext({ ignoreHTTPSErrors: true, viewport: { width: 1511, height: 864 } });
    const page = await ctx.newPage();

    page.on('requestfailed', req => {
      const url = req.url();
      if (STATIC_ASSET.test(url)) return;
      failed.push({ url, method: req.method(), error: req.failure()?.errorText || 'unknown', page: page.url() });
    });
    page.on('response', res => {
      const url = res.url();
      if (STATIC_ASSET.test(url)) return;
      if (res.status() >= 400) {
        responses.push({ status: res.status(), method: res.request().method(), url, page: page.url() });
      }
    });

    await page.goto(`${BASE}/welcome`, { waitUntil: 'domcontentloaded' });
    await page.waitForTimeout(1200);
    const cta = page.locator('button.cta').first();
    if (await cta.count()) {
      await cta.click({ force: true, noWaitAfter: true });
      await page.waitForTimeout(1200);
    }
    // Complete the interactive login when redirected to the IdP.
    if (page.url().includes('/connect/authorize')) {
      await page.locator('input[name="username"]').first().fill(USER);
      await page.locator('input[name="password"]').first().fill(PASS);
      await page.locator('button[type="submit"], button:has-text("Sign In")').first().click();
      await page.waitForURL(url => !url.toString().includes('/connect/authorize'), { timeout: 20000 });
      await page.waitForTimeout(1200);
    }

    for (const p of ['/security/exceptions', '/evidence/proof-chains']) {
      await page.goto(`${BASE}${p}`, { waitUntil: 'domcontentloaded' });
      await page.waitForTimeout(2200);
    }
  } finally {
    // Fix: previously a throw anywhere above leaked the browser process.
    await browser.close();
  }
  console.log(JSON.stringify({ failed, responses }, null, 2));
})();
|
||||
65
src/Web/StellaOps.Web/tmp-debug-requestfailed2.js
Normal file
65
src/Web/StellaOps.Web/tmp-debug-requestfailed2.js
Normal file
@@ -0,0 +1,65 @@
|
||||
// Headless diagnostic: captures non-asset request failures, websocket frame
// activity, and console errors while authenticating and visiting two routes;
// prints the failures (minus authorize redirects) plus websocket records as
// JSON, then shuts the browser down.
const { chromium } = require('playwright');

(async () => {
  const browser = await chromium.launch({ headless: true, args: ['--disable-dev-shm-usage'] });
  const context = await browser.newContext({ ignoreHTTPSErrors: true });
  const page = await context.newPage();

  const failedRequests = [];
  const socketRecords = [];
  const isStaticAsset = (url) => /\.(css|js|map|png|jpg|jpeg|svg|woff2?)($|\?)/i.test(url);

  // Record every non-asset request that never completed.
  page.on('requestfailed', request => {
    const url = request.url();
    if (isStaticAsset(url)) {
      return;
    }
    failedRequests.push({
      url,
      method: request.method(),
      error: request.failure()?.errorText ?? 'unknown',
      page: page.url(),
    });
  });

  // Track each websocket's lifecycle as a flat event list.
  page.on('websocket', socket => {
    const record = { url: socket.url(), events: [] };
    socketRecords.push(record);
    socket.on('framesent', () => record.events.push('sent'));
    socket.on('framereceived', () => record.events.push('recv'));
    socket.on('close', () => record.events.push('close'));
  });

  // Surface console errors immediately, tagged with the page they hit.
  page.on('console', msg => {
    if (msg.type() === 'error') {
      console.log('console-error', msg.text(), '@', page.url());
    }
  });

  await page.goto('https://stella-ops.local/welcome', { waitUntil: 'domcontentloaded' });
  await page.waitForTimeout(1200);

  const cta = page.locator('button.cta').first();
  if (await cta.count()) {
    await cta.click({ force: true, noWaitAfter: true });
    await page.waitForTimeout(1200);
  }

  // Walk the interactive login when the CTA bounced us to the IdP.
  if (page.url().includes('/connect/authorize')) {
    await page.locator('input[name="username"]').first().fill('admin');
    await page.locator('input[name="password"]').first().fill('Admin@Stella2026!');
    await page.locator('button[type="submit"], button:has-text("Sign In")').first().click();
    await page.waitForURL(url => !url.toString().includes('/connect/authorize'), { timeout: 20000 });
    await page.waitForTimeout(1200);
  }

  for (const path of ['/security/exceptions', '/evidence/proof-chains']) {
    await page.goto(`https://stella-ops.local${path}`, { waitUntil: 'domcontentloaded' });
    await page.waitForTimeout(3000);
  }

  const filteredFailed = failedRequests.filter(item => !item.url.includes('/connect/authorize?'));
  console.log(JSON.stringify({ filteredFailed, websockets: socketRecords }, null, 2));

  await browser.close();
})();
|
||||
Reference in New Issue
Block a user