consolidate the test locations
@@ -27,7 +27,7 @@
   <ItemGroup>
     <ProjectReference Include="..\StellaOps.AirGap.Storage.Postgres\StellaOps.AirGap.Storage.Postgres.csproj" />
     <ProjectReference Include="..\StellaOps.AirGap.Controller\StellaOps.AirGap.Controller.csproj" />
-    <ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
+    <ProjectReference Include="..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
   </ItemGroup>

 </Project>
@@ -29,7 +29,7 @@

   <ItemGroup>
     <ProjectReference Include="..\..\__Libraries\StellaOps.Authority.Storage.Postgres\StellaOps.Authority.Storage.Postgres.csproj" />
-    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
+    <ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
     <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
   </ItemGroup>

@@ -50,7 +50,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Canonicalization/StellaOps.Canonicalization.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.DeltaVerdict/StellaOps.DeltaVerdict.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj" />
+    <ProjectReference Include="../../__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj" />
     <ProjectReference Include="../../AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.csproj" />
     <ProjectReference Include="../../AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj" />
     <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />

218 src/Concelier/__Libraries/StellaOps.Concelier.Interest/README.md Normal file
@@ -0,0 +1,218 @@
# StellaOps.Concelier.Interest

Interest scoring service for canonical advisories. This module learns which advisories matter to your organization by analyzing SBOM intersections, reachability data, VEX statements, and runtime signals.

## Overview

Interest scoring helps prioritize advisories by computing a relevance score (0.0 to 1.0) as a weighted sum of the following factors (a worked example follows the list):

- **SBOM Intersection** (30%): Advisory affects packages in your SBOMs
- **Reachability** (25%): Vulnerable code is reachable from application entrypoints
- **Deployment** (20%): Affected component is deployed in production
- **VEX Status** (15%): No `not_affected` VEX statement exists
- **Recency** (10%): How recently the advisory was seen in builds (decays over 365 days)
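
As a quick sanity check, the weights combine additively. The snippet below is plain arithmetic (not the module's `InterestScoreCalculator` API) for an advisory that is present in an SBOM and reachable, has no `not_affected` VEX statement, but is neither deployed nor recently seen in a build:

```csharp
// Worked example using the default weights documented above.
// in_sbom (0.30) + reachable (0.25) + no_vex_na (0.15); deployed and recency do not apply.
double score = 0.30 + 0.25 + 0.15;   // 0.70 → "High" tier (score >= 0.7)
```
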
## Key Features

### Score Tiers

Scores map onto tiers as follows (a small mapping helper is sketched after the table):

| Tier | Score Range | Description |
|------|-------------|-------------|
| **High** | score ≥ 0.7 | Urgent attention required |
| **Medium** | 0.4 ≤ score < 0.7 | Should be reviewed |
| **Low** | 0.2 ≤ score < 0.4 | Lower priority |
| **None** | score < 0.2 | Can be ignored or degraded to stub |
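
A minimal helper that mirrors the table above might look like the following; the method name and placement are illustrative, not part of the module's public API:

```csharp
// Illustrative score-to-tier mapping matching the tier table above.
static string ToTier(double score) => score switch
{
    >= 0.7 => "High",
    >= 0.4 => "Medium",
    >= 0.2 => "Low",
    _ => "None"
};
```
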
### Stub Degradation

Low-interest advisories (score < 0.2) can be automatically degraded to lightweight stubs (see the sketch after this list):

- Only essential fields retained (ID, CVE, severity, title)
- Full details discarded to save storage
- Stubs auto-restore when interest score increases above threshold (0.4)
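
The decision is a simple hysteresis between the two thresholds, plus a minimum age before degradation (see `DegradationPolicy` in the configuration section). A rough sketch assuming the documented defaults; the helper itself is illustrative, not the module's API:

```csharp
// Illustrative degrade/restore decision using the documented defaults:
// degrade below 0.2 once the score is at least MinAgeDays (30) old, restore at or above 0.4.
static string NextAction(double score, TimeSpan scoreAge, bool isStub)
{
    if (!isStub && score < 0.2 && scoreAge >= TimeSpan.FromDays(30)) return "degrade-to-stub";
    if (isStub && score >= 0.4) return "restore-from-stub";
    return "no-change";
}
```
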
## Usage

### Computing Scores

```csharp
// Inject the service
var scoringService = serviceProvider.GetRequiredService<IInterestScoringService>();

// Compute score for a canonical advisory
var score = await scoringService.ComputeScoreAsync(canonicalId);

// Or compute from explicit signals
var input = new InterestScoreInput
{
    CanonicalId = canonicalId,
    SbomMatches = [
        new SbomMatch
        {
            SbomDigest = "sha256:...",
            Purl = "pkg:npm/lodash@4.17.21",
            IsReachable = true,
            IsDeployed = false
        }
    ],
    VexStatements = []
};
var explicitScore = await scoringService.ComputeScoreAsync(input);
```

### Recording Signals

```csharp
// Record an SBOM match
await scoringService.RecordSbomMatchAsync(
    canonicalId,
    sbomDigest: "sha256:abc123",
    purl: "pkg:npm/lodash@4.17.21",
    isReachable: true,
    isDeployed: false);

// Record a VEX statement
await scoringService.RecordVexStatementAsync(canonicalId, new VexStatement
{
    StatementId = "VEX-2025-001",
    Status = VexStatus.NotAffected,
    Justification = "Component not used in production"
});
```

### Batch Operations

```csharp
// Update scores for specific canonicals
await scoringService.BatchUpdateAsync(canonicalIds);

// Full recalculation (all active advisories)
await scoringService.RecalculateAllAsync();
```

### Degradation/Restoration

```csharp
// Degrade low-interest advisories to stubs
int degraded = await scoringService.DegradeToStubsAsync(threshold: 0.2);

// Restore stubs when interest increases
int restored = await scoringService.RestoreFromStubsAsync(threshold: 0.4);
```

## API Endpoints

The following endpoints are exposed (a client sketch follows the table):

| Endpoint | Method | Description |
|----------|--------|-------------|
| `/api/v1/canonical/{id}/score` | GET | Get interest score for a canonical |
| `/api/v1/canonical/{id}/score/compute` | POST | Compute and update score |
| `/api/v1/scores` | GET | Query scores with filtering |
| `/api/v1/scores/distribution` | GET | Get score distribution statistics |
| `/api/v1/scores/recalculate` | POST | Trigger batch/full recalculation |
| `/api/v1/scores/degrade` | POST | Run stub degradation |
| `/api/v1/scores/restore` | POST | Run stub restoration |
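
A minimal client sketch for the two per-canonical endpoints. The base address is a placeholder and authentication is omitted, so treat the setup as an assumption about your deployment rather than a prescribed configuration:

```csharp
using System.Net.Http.Json;
using System.Text.Json;

// Base address is a placeholder; substitute your Concelier WebService URL.
using var http = new HttpClient { BaseAddress = new Uri("https://concelier.example.internal/") };
var canonicalId = Guid.Parse("550e8400-e29b-41d4-a716-446655440000");

// Recompute the score, then read it back.
await http.PostAsync($"api/v1/canonical/{canonicalId}/score/compute", null);
var score = await http.GetFromJsonAsync<JsonElement>($"api/v1/canonical/{canonicalId}/score");
Console.WriteLine($"{score.GetProperty("tier").GetString()} ({score.GetProperty("score").GetDouble():0.00})");
```
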
### Example API Response

```json
{
  "canonicalId": "550e8400-e29b-41d4-a716-446655440000",
  "score": 0.75,
  "tier": "High",
  "reasons": ["in_sbom", "reachable", "deployed"],
  "lastSeenInBuild": "b5d2c400-e29b-41d4-a716-446655440000",
  "computedAt": "2025-12-26T10:30:00Z"
}
```

## Configuration

```json
{
  "InterestScore": {
    "EnableCache": true,
    "DegradationPolicy": {
      "Enabled": true,
      "DegradationThreshold": 0.2,
      "RestorationThreshold": 0.4,
      "MinAgeDays": 30,
      "BatchSize": 1000,
      "JobInterval": "06:00:00"
    },
    "Job": {
      "Enabled": true,
      "Interval": "01:00:00",
      "FullRecalculationHour": 3,
      "FullRecalculationBatchSize": 1000
    },
    "Weights": {
      "InSbom": 0.30,
      "Reachable": 0.25,
      "Deployed": 0.20,
      "NoVexNotAffected": 0.15,
      "Recent": 0.10
    }
  }
}
```
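
These settings bind to `InterestScoreOptions` (the options type used throughout the tests). Exactly how the module registers them may differ; a standard Options-pattern wiring, assuming the usual `services`/`configuration` host-builder variables, would look roughly like this:

```csharp
// Assumed startup wiring using the standard Options pattern; the module may
// expose its own registration extension that performs this binding for you.
services.Configure<InterestScoreOptions>(configuration.GetSection("InterestScore"));
```
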
## Background Jobs

### InterestScoreRecalculationJob

Runs periodically to keep scores up-to-date:

- **Incremental mode** (hourly): Updates scores for recently changed advisories
- **Full mode** (nightly at 3 AM UTC): Recalculates all active advisories

### StubDegradationJob

Runs periodically (default: every 6 hours) to:

1. Degrade advisories with scores below threshold
2. Restore stubs whose scores have increased

## Metrics

| Metric | Type | Description |
|--------|------|-------------|
| `concelier_interest_score_computed_total` | Counter | Total scores computed |
| `concelier_interest_score_distribution` | Histogram | Score value distribution |
| `concelier_stub_degradations_total` | Counter | Total stub degradations |
| `concelier_stub_restorations_total` | Counter | Total stub restorations |
| `concelier_scoring_job_duration_seconds` | Histogram | Job execution time |
| `concelier_scoring_job_errors_total` | Counter | Job execution errors |

## Database Schema

```sql
CREATE TABLE vuln.interest_score (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id),
    score NUMERIC(3,2) NOT NULL CHECK (score >= 0 AND score <= 1),
    reasons JSONB NOT NULL DEFAULT '[]',
    last_seen_in_build UUID,
    computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    CONSTRAINT uq_interest_score_canonical UNIQUE (canonical_id)
);

-- Indexes for common queries
CREATE INDEX idx_interest_score_score ON vuln.interest_score(score DESC);
CREATE INDEX idx_interest_score_computed ON vuln.interest_score(computed_at DESC);

-- Partial indexes for degradation queries
CREATE INDEX idx_interest_score_high ON vuln.interest_score(canonical_id) WHERE score >= 0.7;
CREATE INDEX idx_interest_score_low ON vuln.interest_score(canonical_id) WHERE score < 0.2;
```

## Testing

Run tests with:

```bash
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Interest.Tests/
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Storage.Postgres.Tests/ --filter "InterestScore"
```

## Sprint Reference

- Sprint: `SPRINT_8200_0013_0002_CONCEL_interest_scoring`
- Tasks: ISCORE-8200-000 through ISCORE-8200-033
@@ -9,7 +9,7 @@
   <ItemGroup>
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj" />
   </ItemGroup>

@@ -9,7 +9,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
     <ProjectReference Include="../../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
   </ItemGroup>
   <ItemGroup>

@@ -10,7 +10,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
   </ItemGroup>
   <ItemGroup>
     <None Update="Source\Distro\Alpine\Fixtures\**\*">

@@ -9,7 +9,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
     <ProjectReference Include="../../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
     <ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
   </ItemGroup>

@@ -9,7 +9,7 @@
   <ItemGroup>
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj" />
     <ProjectReference Include="../../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
   </ItemGroup>

@@ -9,7 +9,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
     <ProjectReference Include="../StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
   </ItemGroup>
   <ItemGroup>
     <PackageReference Include="FluentAssertions" Version="6.12.0" />

@@ -9,7 +9,7 @@
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
   </ItemGroup>
   <ItemGroup>
     <None Include="Apple/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Apple/Fixtures/%(Filename)%(Extension)" />

@@ -9,7 +9,7 @@
   <ItemGroup>
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj" />
     <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+    <ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
   </ItemGroup>

   <ItemGroup>

@@ -0,0 +1,384 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// InterestScoringServiceTests.cs
|
||||
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
|
||||
// Tasks: ISCORE-8200-018, ISCORE-8200-023, ISCORE-8200-028
|
||||
// Description: Integration tests for scoring service, job execution, and degradation
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Concelier.Interest.Models;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Interest.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="InterestScoringService"/>.
|
||||
/// Tests job execution, score consistency, and degradation/restoration cycles.
|
||||
/// </summary>
|
||||
public class InterestScoringServiceTests
|
||||
{
|
||||
private readonly Mock<IInterestScoreRepository> _repositoryMock;
|
||||
private readonly InterestScoringService _service;
|
||||
private readonly InterestScoreWeights _defaultWeights = new();
|
||||
|
||||
public InterestScoringServiceTests()
|
||||
{
|
||||
_repositoryMock = new Mock<IInterestScoreRepository>();
|
||||
|
||||
var options = Options.Create(new InterestScoreOptions
|
||||
{
|
||||
DegradationPolicy = new StubDegradationPolicy
|
||||
{
|
||||
DegradationThreshold = 0.2,
|
||||
RestorationThreshold = 0.4,
|
||||
MinAgeDays = 30,
|
||||
BatchSize = 1000,
|
||||
Enabled = true
|
||||
},
|
||||
Job = new ScoringJobOptions
|
||||
{
|
||||
Enabled = true,
|
||||
FullRecalculationBatchSize = 100
|
||||
}
|
||||
});
|
||||
|
||||
_service = new InterestScoringService(
|
||||
_repositoryMock.Object,
|
||||
new InterestScoreCalculator(_defaultWeights),
|
||||
options,
|
||||
advisoryStore: null,
|
||||
cacheService: null,
|
||||
logger: NullLogger<InterestScoringService>.Instance);
|
||||
}
|
||||
|
||||
#region Task 18: Integration Tests - Score Persistence
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_PersistsToRepository()
|
||||
{
|
||||
// Arrange
|
||||
var score = CreateTestScore(0.75, ["in_sbom", "reachable"]);
|
||||
|
||||
// Act
|
||||
await _service.UpdateScoreAsync(score);
|
||||
|
||||
// Assert
|
||||
_repositoryMock.Verify(
|
||||
r => r.SaveAsync(score, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetScoreAsync_RetrievesFromRepository()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
var expected = CreateTestScore(0.5, ["in_sbom"], canonicalId);
|
||||
|
||||
_repositoryMock
|
||||
.Setup(r => r.GetByCanonicalIdAsync(canonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(expected);
|
||||
|
||||
// Act
|
||||
var result = await _service.GetScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.CanonicalId.Should().Be(canonicalId);
|
||||
result.Score.Should().Be(0.5);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetScoreAsync_ReturnsNull_WhenNotFound()
|
||||
{
|
||||
// Arrange
|
||||
_repositoryMock
|
||||
.Setup(r => r.GetByCanonicalIdAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((InterestScore?)null);
|
||||
|
||||
// Act
|
||||
var result = await _service.GetScoreAsync(Guid.NewGuid());
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BatchUpdateAsync_UpdatesMultipleScores()
|
||||
{
|
||||
// Arrange
|
||||
var ids = new[] { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() };
|
||||
|
||||
// Act
|
||||
await _service.BatchUpdateAsync(ids);
|
||||
|
||||
// Assert
|
||||
_repositoryMock.Verify(
|
||||
r => r.SaveManyAsync(It.Is<IEnumerable<InterestScore>>(s => s.Count() == 3), It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BatchUpdateAsync_HandlesEmptyInput()
|
||||
{
|
||||
// Act
|
||||
await _service.BatchUpdateAsync([]);
|
||||
|
||||
// Assert
|
||||
_repositoryMock.Verify(
|
||||
r => r.SaveManyAsync(It.IsAny<IEnumerable<InterestScore>>(), It.IsAny<CancellationToken>()),
|
||||
Times.Never);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task 23: Job Execution and Score Consistency
|
||||
|
||||
[Fact]
|
||||
public async Task RecalculateAllAsync_ReturnsZero_WhenNoAdvisoryStore()
|
||||
{
|
||||
// The service is created without an ICanonicalAdvisoryStore,
|
||||
// so RecalculateAllAsync returns 0 immediately
|
||||
// (which is correct behavior for tests without full integration setup)
|
||||
|
||||
// Act
|
||||
var result = await _service.RecalculateAllAsync();
|
||||
|
||||
// Assert - returns 0 because advisory store is not available
|
||||
result.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_ProducesDeterministicResults()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
|
||||
// Act - compute twice with same input
|
||||
var result1 = await _service.ComputeScoreAsync(canonicalId);
|
||||
var result2 = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert - same inputs should produce same outputs
|
||||
result1.Score.Should().Be(result2.Score);
|
||||
result1.Reasons.Should().BeEquivalentTo(result2.Reasons);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_ReturnsValidScoreRange()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
|
||||
// Act
|
||||
var result = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
result.Score.Should().BeInRange(0.0, 1.0);
|
||||
result.CanonicalId.Should().Be(canonicalId);
|
||||
result.ComputedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_PreservesScoreConsistency()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
InterestScore? savedScore = null;
|
||||
|
||||
_repositoryMock
|
||||
.Setup(r => r.SaveAsync(It.IsAny<InterestScore>(), It.IsAny<CancellationToken>()))
|
||||
.Callback<InterestScore, CancellationToken>((s, _) => savedScore = s)
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
var score = CreateTestScore(0.75, ["in_sbom", "reachable"], canonicalId);
|
||||
|
||||
// Act
|
||||
await _service.UpdateScoreAsync(score);
|
||||
|
||||
// Assert
|
||||
savedScore.Should().NotBeNull();
|
||||
savedScore!.CanonicalId.Should().Be(canonicalId);
|
||||
savedScore.Score.Should().Be(0.75);
|
||||
savedScore.Reasons.Should().BeEquivalentTo(["in_sbom", "reachable"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BatchUpdateAsync_MaintainsScoreOrdering()
|
||||
{
|
||||
// Arrange
|
||||
var ids = new[] { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() };
|
||||
IEnumerable<InterestScore>? savedScores = null;
|
||||
|
||||
_repositoryMock
|
||||
.Setup(r => r.SaveManyAsync(It.IsAny<IEnumerable<InterestScore>>(), It.IsAny<CancellationToken>()))
|
||||
.Callback<IEnumerable<InterestScore>, CancellationToken>((s, _) => savedScores = s.ToList())
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
// Act
|
||||
await _service.BatchUpdateAsync(ids);
|
||||
|
||||
// Assert
|
||||
savedScores.Should().NotBeNull();
|
||||
var scoreList = savedScores!.ToList();
|
||||
scoreList.Should().HaveCount(3);
|
||||
scoreList.Select(s => s.CanonicalId).Should().BeEquivalentTo(ids);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task 28: Degradation/Restoration Cycle
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeToStubsAsync_ReturnsZero_WhenNoAdvisoryStore()
|
||||
{
|
||||
// The service is created without an ICanonicalAdvisoryStore,
|
||||
// so degradation operations should return 0 immediately
|
||||
// (which is correct behavior for tests without full integration setup)
|
||||
|
||||
// Act
|
||||
var result = await _service.DegradeToStubsAsync(0.2);
|
||||
|
||||
// Assert - returns 0 because advisory store is not available
|
||||
result.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RestoreFromStubsAsync_ReturnsZero_WhenNoAdvisoryStore()
|
||||
{
|
||||
// The service is created without an ICanonicalAdvisoryStore,
|
||||
// so restoration operations should return 0 immediately
|
||||
|
||||
// Act
|
||||
var result = await _service.RestoreFromStubsAsync(0.4);
|
||||
|
||||
// Assert - returns 0 because advisory store is not available
|
||||
result.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeRestoreCycle_MaintainsDataIntegrity()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
var scores = new Dictionary<Guid, InterestScore>();
|
||||
|
||||
_repositoryMock
|
||||
.Setup(r => r.SaveAsync(It.IsAny<InterestScore>(), It.IsAny<CancellationToken>()))
|
||||
.Callback<InterestScore, CancellationToken>((s, _) => scores[s.CanonicalId] = s)
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
_repositoryMock
|
||||
.Setup(r => r.GetByCanonicalIdAsync(canonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(() => scores.GetValueOrDefault(canonicalId));
|
||||
|
||||
// Initial low score
|
||||
var lowScore = CreateTestScore(0.1, [], canonicalId);
|
||||
await _service.UpdateScoreAsync(lowScore);
|
||||
|
||||
// Verify low score stored
|
||||
var stored = await _service.GetScoreAsync(canonicalId);
|
||||
stored!.Score.Should().Be(0.1);
|
||||
|
||||
// Update to high score (simulating new evidence)
|
||||
var highScore = CreateTestScore(0.8, ["in_sbom", "reachable", "deployed"], canonicalId);
|
||||
await _service.UpdateScoreAsync(highScore);
|
||||
|
||||
// Verify high score stored
|
||||
stored = await _service.GetScoreAsync(canonicalId);
|
||||
stored!.Score.Should().Be(0.8);
|
||||
stored.Reasons.Should().Contain("in_sbom");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeToStubsAsync_ReturnsZero_WhenNoLowScores()
|
||||
{
|
||||
// Arrange
|
||||
_repositoryMock
|
||||
.Setup(r => r.GetLowScoreCanonicalIdsAsync(
|
||||
It.IsAny<double>(),
|
||||
It.IsAny<TimeSpan>(),
|
||||
It.IsAny<int>(),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(Array.Empty<Guid>());
|
||||
|
||||
// Act
|
||||
var result = await _service.DegradeToStubsAsync(0.2);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RestoreFromStubsAsync_ReturnsZero_WhenNoHighScores()
|
||||
{
|
||||
// Arrange
|
||||
_repositoryMock
|
||||
.Setup(r => r.GetHighScoreCanonicalIdsAsync(
|
||||
It.IsAny<double>(),
|
||||
It.IsAny<int>(),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(Array.Empty<Guid>());
|
||||
|
||||
// Act
|
||||
var result = await _service.RestoreFromStubsAsync(0.4);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(0);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_HandlesBoundaryScores()
|
||||
{
|
||||
// Arrange
|
||||
var minScore = CreateTestScore(0.0, []);
|
||||
var maxScore = CreateTestScore(1.0, ["in_sbom", "reachable", "deployed", "no_vex_na", "recent"]);
|
||||
|
||||
// Act & Assert - should not throw
|
||||
await _service.UpdateScoreAsync(minScore);
|
||||
await _service.UpdateScoreAsync(maxScore);
|
||||
|
||||
_repositoryMock.Verify(
|
||||
r => r.SaveAsync(It.IsAny<InterestScore>(), It.IsAny<CancellationToken>()),
|
||||
Times.Exactly(2));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_HandlesNullInputGracefully()
|
||||
{
|
||||
// Act
|
||||
var result = await _service.ComputeScoreAsync(Guid.Empty);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result.CanonicalId.Should().Be(Guid.Empty);
|
||||
result.Score.Should().BeGreaterThanOrEqualTo(0);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Helpers
|
||||
|
||||
private static InterestScore CreateTestScore(
|
||||
double score,
|
||||
string[] reasons,
|
||||
Guid? canonicalId = null)
|
||||
{
|
||||
return new InterestScore
|
||||
{
|
||||
CanonicalId = canonicalId ?? Guid.NewGuid(),
|
||||
Score = score,
|
||||
Reasons = reasons,
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -9,11 +9,17 @@
     <IsPackable>false</IsPackable>
     <IsTestProject>true</IsTestProject>
     <RootNamespace>StellaOps.Concelier.Interest.Tests</RootNamespace>
     <!-- Unit tests use mocks, no need for Postgres test infrastructure -->
     <UseConcelierTestInfra>false</UseConcelierTestInfra>
   </PropertyGroup>

   <ItemGroup>
     <PackageReference Include="coverlet.collector" Version="6.0.4" />
     <PackageReference Include="FluentAssertions" Version="8.0.0" />
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
     <PackageReference Include="Moq" Version="4.20.72" />
     <PackageReference Include="xunit" Version="2.9.3" />
     <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
   </ItemGroup>

   <ItemGroup>

@@ -0,0 +1,666 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// InterestScoringServiceIntegrationTests.cs
|
||||
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
|
||||
// Task: ISCORE-8200-018
|
||||
// Description: Integration tests for InterestScoringService with Postgres + Valkey
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Concelier.Cache.Valkey;
|
||||
using StellaOps.Concelier.Core.Canonical;
|
||||
using StellaOps.Concelier.Interest;
|
||||
using StellaOps.Concelier.Interest.Models;
|
||||
using StellaOps.Concelier.Storage.Postgres.Repositories;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Postgres.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="InterestScoringService"/> with real PostgreSQL
|
||||
/// and mocked Valkey cache service.
|
||||
/// </summary>
|
||||
[Collection(ConcelierPostgresCollection.Name)]
|
||||
public sealed class InterestScoringServiceIntegrationTests : IAsyncLifetime
|
||||
{
|
||||
private readonly ConcelierPostgresFixture _fixture;
|
||||
private readonly ConcelierDataSource _dataSource;
|
||||
private readonly InterestScoreRepository _repository;
|
||||
private readonly Mock<IAdvisoryCacheService> _cacheServiceMock;
|
||||
private readonly Mock<ICanonicalAdvisoryStore> _advisoryStoreMock;
|
||||
private readonly InterestScoreCalculator _calculator;
|
||||
private readonly InterestScoreOptions _options;
|
||||
private InterestScoringService _service = null!;
|
||||
|
||||
public InterestScoringServiceIntegrationTests(ConcelierPostgresFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
|
||||
var options = fixture.Fixture.CreateOptions();
|
||||
_dataSource = new ConcelierDataSource(Options.Create(options), NullLogger<ConcelierDataSource>.Instance);
|
||||
_repository = new InterestScoreRepository(_dataSource, NullLogger<InterestScoreRepository>.Instance);
|
||||
|
||||
_cacheServiceMock = new Mock<IAdvisoryCacheService>();
|
||||
_advisoryStoreMock = new Mock<ICanonicalAdvisoryStore>();
|
||||
|
||||
var weights = new InterestScoreWeights();
|
||||
_calculator = new InterestScoreCalculator(weights);
|
||||
|
||||
_options = new InterestScoreOptions
|
||||
{
|
||||
EnableCache = true,
|
||||
DegradationPolicy = new StubDegradationPolicy
|
||||
{
|
||||
Enabled = true,
|
||||
DegradationThreshold = 0.2,
|
||||
RestorationThreshold = 0.4,
|
||||
MinAgeDays = 30,
|
||||
BatchSize = 100
|
||||
},
|
||||
Job = new ScoringJobOptions
|
||||
{
|
||||
Enabled = true,
|
||||
Interval = TimeSpan.FromMinutes(60),
|
||||
FullRecalculationBatchSize = 100
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public Task InitializeAsync()
|
||||
{
|
||||
_service = new InterestScoringService(
|
||||
_repository,
|
||||
_calculator,
|
||||
Options.Create(_options),
|
||||
_advisoryStoreMock.Object,
|
||||
_cacheServiceMock.Object,
|
||||
NullLogger<InterestScoringService>.Instance);
|
||||
|
||||
return _fixture.TruncateAllTablesAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
#region ComputeScoreAsync Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_WithNoSignals_ReturnsBaseScore()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
|
||||
// Act
|
||||
var score = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().Be(0.15); // Only no_vex_na
|
||||
score.CanonicalId.Should().Be(canonicalId);
|
||||
score.Reasons.Should().Contain("no_vex_na");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_WithSbomMatch_IncludesInSbomFactor()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
await _service.RecordSbomMatchAsync(
|
||||
canonicalId,
|
||||
sbomDigest: "sha256:test123",
|
||||
purl: "pkg:npm/lodash@4.17.21",
|
||||
isReachable: false,
|
||||
isDeployed: false);
|
||||
|
||||
// Act
|
||||
var score = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().Be(0.45); // in_sbom (0.30) + no_vex_na (0.15)
|
||||
score.Reasons.Should().Contain("in_sbom");
|
||||
score.Reasons.Should().Contain("no_vex_na");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_WithReachableAndDeployed_IncludesAllFactors()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
await _service.RecordSbomMatchAsync(
|
||||
canonicalId,
|
||||
sbomDigest: "sha256:test123",
|
||||
purl: "pkg:npm/lodash@4.17.21",
|
||||
isReachable: true,
|
||||
isDeployed: true);
|
||||
|
||||
// Act
|
||||
var score = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().Be(0.90); // in_sbom (0.30) + reachable (0.25) + deployed (0.20) + no_vex_na (0.15)
|
||||
score.Reasons.Should().Contain("in_sbom");
|
||||
score.Reasons.Should().Contain("reachable");
|
||||
score.Reasons.Should().Contain("deployed");
|
||||
score.Reasons.Should().Contain("no_vex_na");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeScoreAsync_WithVexNotAffected_ExcludesNoVexFactor()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
await _service.RecordSbomMatchAsync(
|
||||
canonicalId,
|
||||
sbomDigest: "sha256:test123",
|
||||
purl: "pkg:npm/lodash@4.17.21");
|
||||
|
||||
await _service.RecordVexStatementAsync(
|
||||
canonicalId,
|
||||
new VexStatement
|
||||
{
|
||||
StatementId = "VEX-001",
|
||||
Status = VexStatus.NotAffected,
|
||||
Justification = "Not applicable"
|
||||
});
|
||||
|
||||
// Act
|
||||
var score = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().Be(0.30); // Only in_sbom, no no_vex_na
|
||||
score.Reasons.Should().Contain("in_sbom");
|
||||
score.Reasons.Should().NotContain("no_vex_na");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region UpdateScoreAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_PersistsToPostgres()
|
||||
{
|
||||
// Arrange
|
||||
var score = new InterestScore
|
||||
{
|
||||
CanonicalId = Guid.NewGuid(),
|
||||
Score = 0.75,
|
||||
Reasons = ["in_sbom", "reachable", "deployed"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
await _service.UpdateScoreAsync(score);
|
||||
|
||||
// Assert - verify persisted to Postgres
|
||||
var retrieved = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
|
||||
retrieved.Should().NotBeNull();
|
||||
retrieved!.Score.Should().Be(0.75);
|
||||
retrieved.Reasons.Should().BeEquivalentTo(["in_sbom", "reachable", "deployed"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_UpdatesCacheWhenEnabled()
|
||||
{
|
||||
// Arrange
|
||||
var score = new InterestScore
|
||||
{
|
||||
CanonicalId = Guid.NewGuid(),
|
||||
Score = 0.85,
|
||||
Reasons = ["in_sbom"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
await _service.UpdateScoreAsync(score);
|
||||
|
||||
// Assert - verify cache was updated
|
||||
_cacheServiceMock.Verify(
|
||||
x => x.UpdateScoreAsync(
|
||||
score.CanonicalId.ToString(),
|
||||
0.85,
|
||||
It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_UpsertsBehavior()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
var initialScore = new InterestScore
|
||||
{
|
||||
CanonicalId = canonicalId,
|
||||
Score = 0.30,
|
||||
Reasons = ["in_sbom"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
await _service.UpdateScoreAsync(initialScore);
|
||||
|
||||
var updatedScore = new InterestScore
|
||||
{
|
||||
CanonicalId = canonicalId,
|
||||
Score = 0.90,
|
||||
Reasons = ["in_sbom", "reachable", "deployed", "no_vex_na"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
await _service.UpdateScoreAsync(updatedScore);
|
||||
|
||||
// Assert
|
||||
var retrieved = await _repository.GetByCanonicalIdAsync(canonicalId);
|
||||
retrieved!.Score.Should().Be(0.90);
|
||||
retrieved.Reasons.Should().HaveCount(4);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetScoreAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task GetScoreAsync_ReturnsPersistedScore()
|
||||
{
|
||||
// Arrange
|
||||
var score = new InterestScore
|
||||
{
|
||||
CanonicalId = Guid.NewGuid(),
|
||||
Score = 0.65,
|
||||
Reasons = ["in_sbom", "deployed"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
await _repository.SaveAsync(score);
|
||||
|
||||
// Act
|
||||
var result = await _service.GetScoreAsync(score.CanonicalId);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Score.Should().Be(0.65);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetScoreAsync_ReturnsNullForNonExistent()
|
||||
{
|
||||
// Act
|
||||
var result = await _service.GetScoreAsync(Guid.NewGuid());
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region BatchUpdateAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task BatchUpdateAsync_ComputesAndPersistsMultipleScores()
|
||||
{
|
||||
// Arrange
|
||||
var id1 = Guid.NewGuid();
|
||||
var id2 = Guid.NewGuid();
|
||||
var id3 = Guid.NewGuid();
|
||||
|
||||
// Setup signals for different scores
|
||||
await _service.RecordSbomMatchAsync(id1, "sha256:a", "pkg:npm/a@1.0.0");
|
||||
await _service.RecordSbomMatchAsync(id2, "sha256:b", "pkg:npm/b@1.0.0", isReachable: true);
|
||||
// id3 has no signals
|
||||
|
||||
// Act
|
||||
var updated = await _service.BatchUpdateAsync([id1, id2, id3]);
|
||||
|
||||
// Assert
|
||||
updated.Should().Be(3);
|
||||
|
||||
var score1 = await _repository.GetByCanonicalIdAsync(id1);
|
||||
var score2 = await _repository.GetByCanonicalIdAsync(id2);
|
||||
var score3 = await _repository.GetByCanonicalIdAsync(id3);
|
||||
|
||||
score1!.Score.Should().Be(0.45); // in_sbom + no_vex_na
|
||||
score2!.Score.Should().Be(0.70); // in_sbom + reachable + no_vex_na
|
||||
score3!.Score.Should().Be(0.15); // only no_vex_na
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BatchUpdateAsync_UpdatesCacheForEachScore()
|
||||
{
|
||||
// Arrange
|
||||
var id1 = Guid.NewGuid();
|
||||
var id2 = Guid.NewGuid();
|
||||
|
||||
await _service.RecordSbomMatchAsync(id1, "sha256:a", "pkg:npm/a@1.0.0");
|
||||
await _service.RecordSbomMatchAsync(id2, "sha256:b", "pkg:npm/b@1.0.0");
|
||||
|
||||
// Act
|
||||
await _service.BatchUpdateAsync([id1, id2]);
|
||||
|
||||
// Assert
|
||||
_cacheServiceMock.Verify(
|
||||
x => x.UpdateScoreAsync(id1.ToString(), It.IsAny<double>(), It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
_cacheServiceMock.Verify(
|
||||
x => x.UpdateScoreAsync(id2.ToString(), It.IsAny<double>(), It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetTopScoresAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task GetTopScoresAsync_ReturnsScoresInDescendingOrder()
|
||||
{
|
||||
// Arrange
|
||||
var scores = new[]
|
||||
{
|
||||
CreateScore(0.3),
|
||||
CreateScore(0.9),
|
||||
CreateScore(0.5),
|
||||
CreateScore(0.7)
|
||||
};
|
||||
|
||||
foreach (var score in scores)
|
||||
{
|
||||
await _repository.SaveAsync(score);
|
||||
}
|
||||
|
||||
// Act
|
||||
var topScores = await _service.GetTopScoresAsync(limit: 10);
|
||||
|
||||
// Assert
|
||||
topScores.Should().HaveCount(4);
|
||||
topScores[0].Score.Should().Be(0.9);
|
||||
topScores[1].Score.Should().Be(0.7);
|
||||
topScores[2].Score.Should().Be(0.5);
|
||||
topScores[3].Score.Should().Be(0.3);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetDistributionAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task GetDistributionAsync_ReturnsCorrectDistribution()
|
||||
{
|
||||
// Arrange
|
||||
// High tier
|
||||
await _repository.SaveAsync(CreateScore(0.9));
|
||||
await _repository.SaveAsync(CreateScore(0.8));
|
||||
// Medium tier
|
||||
await _repository.SaveAsync(CreateScore(0.5));
|
||||
// Low tier
|
||||
await _repository.SaveAsync(CreateScore(0.3));
|
||||
// None tier
|
||||
await _repository.SaveAsync(CreateScore(0.1));
|
||||
|
||||
// Act
|
||||
var distribution = await _service.GetDistributionAsync();
|
||||
|
||||
// Assert
|
||||
distribution.TotalCount.Should().Be(5);
|
||||
distribution.HighCount.Should().Be(2);
|
||||
distribution.MediumCount.Should().Be(1);
|
||||
distribution.LowCount.Should().Be(1);
|
||||
distribution.NoneCount.Should().Be(1);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DegradeToStubsAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeToStubsAsync_DelegatesToAdvisoryStore()
|
||||
{
|
||||
// Arrange
|
||||
var oldDate = DateTimeOffset.UtcNow.AddDays(-60);
|
||||
var lowScore1 = CreateScore(0.1, oldDate);
|
||||
var lowScore2 = CreateScore(0.15, oldDate);
|
||||
var highScore = CreateScore(0.8, oldDate);
|
||||
|
||||
await _repository.SaveAsync(lowScore1);
|
||||
await _repository.SaveAsync(lowScore2);
|
||||
await _repository.SaveAsync(highScore);
|
||||
|
||||
_advisoryStoreMock
|
||||
.Setup(x => x.UpdateStatusAsync(It.IsAny<Guid>(), CanonicalStatus.Stub, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
// Act
|
||||
var degraded = await _service.DegradeToStubsAsync(0.2);
|
||||
|
||||
// Assert
|
||||
degraded.Should().Be(2);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(lowScore1.CanonicalId, CanonicalStatus.Stub, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(lowScore2.CanonicalId, CanonicalStatus.Stub, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeToStubsAsync_RespectsMinAge()
|
||||
{
|
||||
// Arrange - one old, one recent
|
||||
var lowOld = CreateScore(0.1, DateTimeOffset.UtcNow.AddDays(-60));
|
||||
var lowRecent = CreateScore(0.1, DateTimeOffset.UtcNow.AddDays(-5));
|
||||
|
||||
await _repository.SaveAsync(lowOld);
|
||||
await _repository.SaveAsync(lowRecent);
|
||||
|
||||
_advisoryStoreMock
|
||||
.Setup(x => x.UpdateStatusAsync(It.IsAny<Guid>(), CanonicalStatus.Stub, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
// Act
|
||||
var degraded = await _service.DegradeToStubsAsync(0.2);
|
||||
|
||||
// Assert - only old one should be degraded
|
||||
degraded.Should().Be(1);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(lowOld.CanonicalId, CanonicalStatus.Stub, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(lowRecent.CanonicalId, CanonicalStatus.Stub, It.IsAny<CancellationToken>()),
|
||||
Times.Never);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region RestoreFromStubsAsync Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task RestoreFromStubsAsync_RestoresHighScoreStubs()
|
||||
{
|
||||
// Arrange
|
||||
var highScore = CreateScore(0.8);
|
||||
await _repository.SaveAsync(highScore);
|
||||
|
||||
var stubAdvisory = CreateMockCanonicalAdvisory(highScore.CanonicalId, CanonicalStatus.Stub);
|
||||
_advisoryStoreMock
|
||||
.Setup(x => x.GetByIdAsync(highScore.CanonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(stubAdvisory);
|
||||
_advisoryStoreMock
|
||||
.Setup(x => x.UpdateStatusAsync(highScore.CanonicalId, CanonicalStatus.Active, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
// Act
|
||||
var restored = await _service.RestoreFromStubsAsync(0.4);
|
||||
|
||||
// Assert
|
||||
restored.Should().Be(1);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(highScore.CanonicalId, CanonicalStatus.Active, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RestoreFromStubsAsync_SkipsNonStubs()
|
||||
{
|
||||
// Arrange
|
||||
var highScore = CreateScore(0.8);
|
||||
await _repository.SaveAsync(highScore);
|
||||
|
||||
var activeAdvisory = CreateMockCanonicalAdvisory(highScore.CanonicalId, CanonicalStatus.Active);
|
||||
_advisoryStoreMock
|
||||
.Setup(x => x.GetByIdAsync(highScore.CanonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(activeAdvisory);
|
||||
|
||||
// Act
|
||||
var restored = await _service.RestoreFromStubsAsync(0.4);
|
||||
|
||||
// Assert - should not restore already active
|
||||
restored.Should().Be(0);
|
||||
_advisoryStoreMock.Verify(
|
||||
x => x.UpdateStatusAsync(It.IsAny<Guid>(), CanonicalStatus.Active, It.IsAny<CancellationToken>()),
|
||||
Times.Never);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Full Flow Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task FullFlow_RecordSignals_ComputeScore_PersistAndCache()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
|
||||
// Act 1: Record SBOM match
|
||||
await _service.RecordSbomMatchAsync(
|
||||
canonicalId,
|
||||
sbomDigest: "sha256:prod123",
|
||||
purl: "pkg:npm/express@4.18.0",
|
||||
isReachable: true,
|
||||
isDeployed: true);
|
||||
|
||||
// Act 2: Compute score
|
||||
var computedScore = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Act 3: Persist score
|
||||
await _service.UpdateScoreAsync(computedScore);
|
||||
|
||||
// Assert: Verify in database
|
||||
var dbScore = await _repository.GetByCanonicalIdAsync(canonicalId);
|
||||
dbScore.Should().NotBeNull();
|
||||
dbScore!.Score.Should().Be(0.90);
|
||||
dbScore.Reasons.Should().Contain("in_sbom");
|
||||
dbScore.Reasons.Should().Contain("reachable");
|
||||
dbScore.Reasons.Should().Contain("deployed");
|
||||
dbScore.Reasons.Should().Contain("no_vex_na");
|
||||
|
||||
// Assert: Verify cache was updated
|
||||
_cacheServiceMock.Verify(
|
||||
x => x.UpdateScoreAsync(canonicalId.ToString(), 0.90, It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
|
||||
// Act 4: Retrieve via service
|
||||
var retrievedScore = await _service.GetScoreAsync(canonicalId);
|
||||
retrievedScore.Should().NotBeNull();
|
||||
retrievedScore!.Score.Should().Be(0.90);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FullFlow_VexStatementReducesScore()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = Guid.NewGuid();
|
||||
|
||||
// Record signals with high score potential
|
||||
await _service.RecordSbomMatchAsync(
|
||||
canonicalId,
|
||||
sbomDigest: "sha256:prod123",
|
||||
purl: "pkg:npm/express@4.18.0",
|
||||
isReachable: true,
|
||||
isDeployed: true);
|
||||
|
||||
// Compute initial score
|
||||
var initialScore = await _service.ComputeScoreAsync(canonicalId);
|
||||
initialScore.Score.Should().Be(0.90);
|
||||
|
||||
// Act: Add VEX not_affected statement
|
||||
await _service.RecordVexStatementAsync(
|
||||
canonicalId,
|
||||
new VexStatement
|
||||
{
|
||||
StatementId = "VEX-123",
|
||||
Status = VexStatus.NotAffected,
|
||||
Justification = "Component not used in production context"
|
||||
});
|
||||
|
||||
// Recompute score
|
||||
var reducedScore = await _service.ComputeScoreAsync(canonicalId);
|
||||
|
||||
// Assert: Score should be reduced (no no_vex_na factor)
|
||||
reducedScore.Score.Should().Be(0.75);
|
||||
reducedScore.Reasons.Should().NotContain("no_vex_na");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cache Disabled Tests
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateScoreAsync_SkipsCacheWhenDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var optionsWithCacheDisabled = new InterestScoreOptions { EnableCache = false };
|
||||
var serviceWithCacheDisabled = new InterestScoringService(
|
||||
_repository,
|
||||
_calculator,
|
||||
Options.Create(optionsWithCacheDisabled),
|
||||
_advisoryStoreMock.Object,
|
||||
_cacheServiceMock.Object,
|
||||
NullLogger<InterestScoringService>.Instance);
|
||||
|
||||
var score = new InterestScore
|
||||
{
|
||||
CanonicalId = Guid.NewGuid(),
|
||||
Score = 0.75,
|
||||
Reasons = ["in_sbom"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
await serviceWithCacheDisabled.UpdateScoreAsync(score);
|
||||
|
||||
// Assert - cache should not be called
|
||||
_cacheServiceMock.Verify(
|
||||
x => x.UpdateScoreAsync(It.IsAny<string>(), It.IsAny<double>(), It.IsAny<CancellationToken>()),
|
||||
Times.Never);
|
||||
|
||||
// But database should still be updated
|
||||
var retrieved = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
|
||||
retrieved.Should().NotBeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Helpers
|
||||
|
||||
private static InterestScore CreateScore(double score, DateTimeOffset? computedAt = null)
|
||||
{
|
||||
return new InterestScore
|
||||
{
|
||||
CanonicalId = Guid.NewGuid(),
|
||||
Score = score,
|
||||
Reasons = score >= 0.7 ? ["in_sbom", "reachable", "deployed"] : ["no_vex_na"],
|
||||
ComputedAt = computedAt ?? DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static CanonicalAdvisory CreateMockCanonicalAdvisory(Guid id, CanonicalStatus status)
|
||||
{
|
||||
return new CanonicalAdvisory
|
||||
{
|
||||
Id = id,
|
||||
MergeHash = $"sha256:{id:N}",
|
||||
Cve = $"CVE-2024-{id.ToString("N")[..5]}",
|
||||
AffectsKey = "pkg:npm/test@1.0.0",
|
||||
Status = status,
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -2,6 +2,8 @@
 <Project Sdk="Microsoft.NET.Sdk">

   <PropertyGroup>
+    <!-- Opt-out of shared test infra - this project has its own ConcelierPostgresFixture -->
+    <UseConcelierTestInfra>false</UseConcelierTestInfra>
     <TargetFramework>net10.0</TargetFramework>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>

@@ -20,7 +22,7 @@

   <ItemGroup>
     <ProjectReference Include="..\..\__Libraries\StellaOps.Concelier.Storage.Postgres\StellaOps.Concelier.Storage.Postgres.csproj" />
-    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
+    <ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
     <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
   </ItemGroup>

@@ -0,0 +1,424 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// InterestScoreEndpointTests.cs
|
||||
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
|
||||
// Task: ISCORE-8200-032
|
||||
// Description: End-to-end tests for interest score API endpoints
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Concelier.Interest;
|
||||
using StellaOps.Concelier.Interest.Models;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.WebService.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end tests for interest score endpoints.
|
||||
/// Tests the complete flow: ingest advisory, update SBOM, verify score change.
|
||||
/// </summary>
|
||||
public sealed class InterestScoreEndpointTests : IClassFixture<InterestScoreEndpointTests.InterestScoreTestFactory>
|
||||
{
|
||||
private readonly InterestScoreTestFactory _factory;
|
||||
private readonly HttpClient _client;
|
||||
|
||||
public InterestScoreEndpointTests(InterestScoreTestFactory factory)
|
||||
{
|
||||
_factory = factory;
|
||||
_client = factory.CreateClient();
|
||||
}
|
||||
|
||||
#region Task 32: E2E Test - Ingest Advisory, Update SBOM, Verify Score Change
|
||||
|
||||
[Fact]
|
||||
public async Task GetInterestScore_ReturnsNotFound_WhenScoreDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
var nonExistentId = Guid.NewGuid();
|
||||
|
||||
// Act
|
||||
var response = await _client.GetAsync($"/api/v1/canonical/{nonExistentId}/score");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetInterestScore_ReturnsScore_WhenExists()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = _factory.ExistingCanonicalId;
|
||||
|
||||
// Act
|
||||
var response = await _client.GetAsync($"/api/v1/canonical/{canonicalId}/score");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<InterestScoreResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.CanonicalId.Should().Be(canonicalId);
|
||||
result.Score.Should().BeGreaterThanOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeInterestScore_ComputesAndPersistsScore()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalId = _factory.ComputeCanonicalId;
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsync(
|
||||
$"/api/v1/canonical/{canonicalId}/score/compute",
|
||||
null);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<InterestScoreResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.CanonicalId.Should().Be(canonicalId);
|
||||
result.Score.Should().BeGreaterThanOrEqualTo(0);
|
||||
result.ComputedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromMinutes(1));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task QueryInterestScores_ReturnsFilteredResults()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/api/v1/scores?minScore=0.3&maxScore=0.9&limit=10");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<InterestScoreListResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.Items.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetScoreDistribution_ReturnsStatistics()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/api/v1/scores/distribution");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<ScoreDistributionResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.TotalCount.Should().BeGreaterThanOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RecalculateScores_AcceptsBatchRequest()
|
||||
{
|
||||
// Arrange
|
||||
var request = new RecalculateRequest
|
||||
{
|
||||
CanonicalIds = [Guid.NewGuid(), Guid.NewGuid()]
|
||||
};
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scores/recalculate", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.Accepted);
|
||||
var result = await response.Content.ReadFromJsonAsync<RecalculateResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.Mode.Should().Be("batch");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RecalculateScores_AcceptsFullRequest()
|
||||
{
|
||||
// Arrange - omitting CanonicalIds triggers a full recalculation
|
||||
var request = new RecalculateRequest();
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scores/recalculate", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.Accepted);
|
||||
var result = await response.Content.ReadFromJsonAsync<RecalculateResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.Mode.Should().Be("full");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DegradeToStubs_ExecutesDegradation()
|
||||
{
|
||||
// Arrange
|
||||
var request = new DegradeRequest { Threshold = 0.2 };
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scores/degrade", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<DegradeResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.Threshold.Should().Be(0.2);
|
||||
result.Degraded.Should().BeGreaterThanOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RestoreFromStubs_ExecutesRestoration()
|
||||
{
|
||||
// Arrange
|
||||
var request = new RestoreRequest { Threshold = 0.4 };
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scores/restore", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
var result = await response.Content.ReadFromJsonAsync<RestoreResponse>();
|
||||
result.Should().NotBeNull();
|
||||
result!.Threshold.Should().Be(0.4);
|
||||
result.Restored.Should().BeGreaterThanOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task E2E_IngestAdvisoryUpdateSbomVerifyScoreChange()
|
||||
{
|
||||
// This tests the full workflow:
|
||||
// 1. Advisory exists with no SBOM match → low score
|
||||
// 2. Record SBOM match → score increases
|
||||
// 3. Record reachability → score increases further
|
||||
|
||||
var canonicalId = _factory.E2ECanonicalId;
|
||||
|
||||
// Step 1: Compute initial score (no SBOM matches)
|
||||
var computeResponse = await _client.PostAsync(
|
||||
$"/api/v1/canonical/{canonicalId}/score/compute", null);
|
||||
computeResponse.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
|
||||
var initialScore = await computeResponse.Content.ReadFromJsonAsync<InterestScoreResponse>();
|
||||
initialScore.Should().NotBeNull();
|
||||
var initialValue = initialScore!.Score;
|
||||
|
||||
// Step 2: Record SBOM match via service (simulated by mock)
|
||||
// The mock is set up to include SBOM signals for this ID
|
||||
_factory.AddSbomMatchForCanonical(canonicalId);
|
||||
|
||||
// Recompute score
|
||||
computeResponse = await _client.PostAsync(
|
||||
$"/api/v1/canonical/{canonicalId}/score/compute", null);
|
||||
var updatedScore = await computeResponse.Content.ReadFromJsonAsync<InterestScoreResponse>();
|
||||
|
||||
// Step 3: Verify score increased
|
||||
updatedScore.Should().NotBeNull();
|
||||
updatedScore!.Reasons.Should().Contain("in_sbom");
|
||||
// Score should be at least as high once the SBOM match is recorded
|
||||
updatedScore.Score.Should().BeGreaterThanOrEqualTo(initialValue);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Response DTOs (matching endpoint responses)
|
||||
|
||||
public record InterestScoreResponse
|
||||
{
|
||||
public Guid CanonicalId { get; init; }
|
||||
public double Score { get; init; }
|
||||
public string Tier { get; init; } = string.Empty;
|
||||
public IReadOnlyList<string> Reasons { get; init; } = [];
|
||||
public Guid? LastSeenInBuild { get; init; }
|
||||
public DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
public record InterestScoreListResponse
|
||||
{
|
||||
public IReadOnlyList<InterestScoreResponse> Items { get; init; } = [];
|
||||
public int TotalCount { get; init; }
|
||||
public int Offset { get; init; }
|
||||
public int Limit { get; init; }
|
||||
}
|
||||
|
||||
public record ScoreDistributionResponse
|
||||
{
|
||||
public long HighCount { get; init; }
|
||||
public long MediumCount { get; init; }
|
||||
public long LowCount { get; init; }
|
||||
public long NoneCount { get; init; }
|
||||
public long TotalCount { get; init; }
|
||||
public double AverageScore { get; init; }
|
||||
public double MedianScore { get; init; }
|
||||
}
|
||||
|
||||
public record RecalculateRequest
|
||||
{
|
||||
public IReadOnlyList<Guid>? CanonicalIds { get; init; }
|
||||
}
|
||||
|
||||
public record RecalculateResponse
|
||||
{
|
||||
public int Updated { get; init; }
|
||||
public string Mode { get; init; } = string.Empty;
|
||||
public DateTimeOffset StartedAt { get; init; }
|
||||
}
|
||||
|
||||
public record DegradeRequest
|
||||
{
|
||||
public double? Threshold { get; init; }
|
||||
}
|
||||
|
||||
public record DegradeResponse
|
||||
{
|
||||
public int Degraded { get; init; }
|
||||
public double Threshold { get; init; }
|
||||
public DateTimeOffset ExecutedAt { get; init; }
|
||||
}
|
||||
|
||||
public record RestoreRequest
|
||||
{
|
||||
public double? Threshold { get; init; }
|
||||
}
|
||||
|
||||
public record RestoreResponse
|
||||
{
|
||||
public int Restored { get; init; }
|
||||
public double Threshold { get; init; }
|
||||
public DateTimeOffset ExecutedAt { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Factory
|
||||
|
||||
/// <summary>
|
||||
/// Test factory that sets up mocked dependencies for interest score testing.
|
||||
/// </summary>
|
||||
public sealed class InterestScoreTestFactory : WebApplicationFactory<Program>
|
||||
{
|
||||
public Guid ExistingCanonicalId { get; } = Guid.NewGuid();
|
||||
public Guid ComputeCanonicalId { get; } = Guid.NewGuid();
|
||||
public Guid E2ECanonicalId { get; } = Guid.NewGuid();
|
||||
|
||||
private readonly Dictionary<Guid, List<SbomMatch>> _sbomMatches = new();
|
||||
|
||||
public void AddSbomMatchForCanonical(Guid canonicalId)
|
||||
{
|
||||
if (!_sbomMatches.ContainsKey(canonicalId))
|
||||
{
|
||||
_sbomMatches[canonicalId] = [];
|
||||
}
|
||||
_sbomMatches[canonicalId].Add(new SbomMatch
|
||||
{
|
||||
SbomDigest = "sha256:test123",
|
||||
Purl = "pkg:npm/lodash@4.17.21",
|
||||
IsReachable = true,
|
||||
ScannedAt = DateTimeOffset.UtcNow
|
||||
});
|
||||
}
|
||||
|
||||
protected override void ConfigureWebHost(IWebHostBuilder builder)
|
||||
{
|
||||
Environment.SetEnvironmentVariable("CONCELIER__STORAGE__DSN", "Host=localhost;Port=5432;Database=test-interest");
|
||||
Environment.SetEnvironmentVariable("CONCELIER__STORAGE__DRIVER", "postgres");
|
||||
Environment.SetEnvironmentVariable("CONCELIER_SKIP_OPTIONS_VALIDATION", "1");
|
||||
Environment.SetEnvironmentVariable("DOTNET_ENVIRONMENT", "Testing");
|
||||
Environment.SetEnvironmentVariable("ASPNETCORE_ENVIRONMENT", "Testing");
|
||||
|
||||
builder.UseEnvironment("Testing");
|
||||
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
// Remove existing registrations
|
||||
var scoringServiceDescriptor = services
|
||||
.SingleOrDefault(d => d.ServiceType == typeof(IInterestScoringService));
|
||||
if (scoringServiceDescriptor != null)
|
||||
{
|
||||
services.Remove(scoringServiceDescriptor);
|
||||
}
|
||||
|
||||
var repositoryDescriptor = services
|
||||
.SingleOrDefault(d => d.ServiceType == typeof(IInterestScoreRepository));
|
||||
if (repositoryDescriptor != null)
|
||||
{
|
||||
services.Remove(repositoryDescriptor);
|
||||
}
|
||||
|
||||
// Create mock repository
|
||||
var mockRepository = new Mock<IInterestScoreRepository>();
|
||||
|
||||
// Set up existing score
|
||||
var existingScore = new InterestScore
|
||||
{
|
||||
CanonicalId = ExistingCanonicalId,
|
||||
Score = 0.75,
|
||||
Reasons = ["in_sbom", "reachable"],
|
||||
ComputedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetByCanonicalIdAsync(ExistingCanonicalId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(existingScore);
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetByCanonicalIdAsync(It.Is<Guid>(g => g != ExistingCanonicalId), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((InterestScore?)null);
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetAllAsync(It.IsAny<int>(), It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new List<InterestScore> { existingScore });
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetScoreDistributionAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new ScoreDistribution
|
||||
{
|
||||
TotalCount = 100,
|
||||
HighCount = 25,
|
||||
MediumCount = 35,
|
||||
LowCount = 25,
|
||||
NoneCount = 15,
|
||||
AverageScore = 0.52,
|
||||
MedianScore = 0.48
|
||||
});
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetLowScoreCanonicalIdsAsync(
|
||||
It.IsAny<double>(), It.IsAny<TimeSpan>(), It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new List<Guid>());
|
||||
|
||||
mockRepository
|
||||
.Setup(r => r.GetHighScoreCanonicalIdsAsync(
|
||||
It.IsAny<double>(), It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new List<Guid>());
|
||||
|
||||
services.AddSingleton(mockRepository.Object);
|
||||
|
||||
// Add scoring service with mock repository
|
||||
var options = Options.Create(new InterestScoreOptions
|
||||
{
|
||||
EnableCache = false,
|
||||
DegradationPolicy = new DegradationPolicyOptions
|
||||
{
|
||||
Enabled = true,
|
||||
DegradationThreshold = 0.2,
|
||||
RestorationThreshold = 0.4,
|
||||
MinAgeDays = 30,
|
||||
BatchSize = 100
|
||||
},
|
||||
Job = new InterestScoreJobOptions
|
||||
{
|
||||
Enabled = false
|
||||
}
|
||||
});
|
||||
|
||||
var calculator = new InterestScoreCalculator(new InterestScoreWeights());
|
||||
|
||||
services.AddSingleton<IInterestScoringService>(sp =>
|
||||
new InterestScoringService(
|
||||
mockRepository.Object,
|
||||
calculator,
|
||||
options));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -16,6 +16,7 @@
|
||||
<PackageReference Include="Moq" Version="4.20.72" />
|
||||
<PackageReference Update="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Interest/StellaOps.Concelier.Interest.csproj" />
|
||||
<ProjectReference Include="../../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Graph.Indexer.Storage.Postgres\StellaOps.Graph.Indexer.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.IssuerDirectory.Storage.Postgres\StellaOps.IssuerDirectory.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\StellaOps.IssuerDirectory.Core\StellaOps.IssuerDirectory.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\\..\\StellaOps.IssuerDirectory\\StellaOps.IssuerDirectory.Storage.Postgres\\StellaOps.IssuerDirectory.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Tests\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\\..\\StellaOps.IssuerDirectory\\StellaOps.IssuerDirectory.Core\\StellaOps.IssuerDirectory.Core.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Notify.Storage.Postgres\StellaOps.Notify.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.PacksRegistry.Storage.Postgres\StellaOps.PacksRegistry.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Storage.Postgres\StellaOps.Policy.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Exceptions\StellaOps.Policy.Exceptions.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Unknowns\StellaOps.Policy.Unknowns.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
<ProjectReference Include="..\..\StellaOps.Policy.Scoring\StellaOps.Policy.Scoring.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.SbomService.Storage.Postgres\StellaOps.SbomService.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Tests\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Tests\\__Libraries\\StellaOps.Infrastructure.Postgres.Testing\\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<!-- NOTE: TestKit reference removed due to package version conflict (Microsoft.AspNetCore.Mvc.Testing 10.0.0 vs 10.0.0-rc.2) -->
|
||||
<!-- TestKit-dependent tests excluded from compilation until resolved -->
|
||||
</ItemGroup>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<ProjectReference Include="../../StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Moq" Version="4.20.72" />
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Scheduler.Storage.Postgres\StellaOps.Scheduler.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Signals.Storage.Postgres\StellaOps.Signals.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -27,7 +27,7 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.TaskRunner.Storage.Postgres\StellaOps.TaskRunner.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.TaskRunner\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\__Tests\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<ProjectReference Include="../../Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" />
|
||||
<ProjectReference Include="../../Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
|
||||
<ProjectReference Include="../../__Tests/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -11,6 +11,6 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.VexHub.Storage.Postgres/StellaOps.VexHub.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Infrastructure.Postgres.Testing/StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="../../../__Tests/__Libraries/StellaOps.Infrastructure.Postgres.Testing/StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
184
src/__Tests/AGENTS.md
Normal file
@@ -0,0 +1,184 @@
|
||||
# src/__Tests/AGENTS.md
|
||||
|
||||
## Purpose & Scope
|
||||
|
||||
This directory contains all global test infrastructure, benchmarks, datasets, and shared testing libraries for the StellaOps platform.
|
||||
|
||||
- **Working directory:** `src/__Tests/`
|
||||
- **Roles:** QA engineer, performance/bench engineer, integration test developer, docs contributor
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
src/__Tests/
|
||||
├── __Libraries/ # Shared testing libraries
|
||||
│ ├── StellaOps.Infrastructure.Postgres.Testing/
|
||||
│ ├── StellaOps.Messaging.Testing/
|
||||
│ ├── StellaOps.Testing.AirGap/
|
||||
│ ├── StellaOps.Testing.Determinism/
|
||||
│ ├── StellaOps.Testing.Manifests/
|
||||
│ ├── StellaOps.Concelier.Testing/
|
||||
│ └── StellaOps.Router.Testing/
|
||||
├── __Benchmarks/ # Golden corpus, CVE findings, determinism fixtures
|
||||
│ ├── golden-corpus/ # Canonical test cases (severity, VEX, reachability)
|
||||
│ ├── findings/ # CVE bundles with reachability evidence
|
||||
│ ├── reachability-benchmark/ # Public multi-language benchmark
|
||||
│ ├── determinism/ # Determinism test fixtures
|
||||
│ └── tools/ # Verification utilities
|
||||
├── __Datasets/ # Ground truth samples, schemas
|
||||
│ └── reachability/ # Reachability ground truth
|
||||
├── Integration/ # Cross-module integration tests
|
||||
├── acceptance/ # Acceptance test packs
|
||||
├── load/ # k6 load tests
|
||||
├── security/ # OWASP security tests
|
||||
├── chaos/ # Chaos engineering tests
|
||||
├── AirGap/ # Offline operation tests
|
||||
├── reachability/ # Reachability analysis tests
|
||||
├── fixtures/ # Shared test fixtures (offline-bundle, images, sboms)
|
||||
└── ... # Other test categories
|
||||
```
|
||||
|
||||
## Required Reading
|
||||
|
||||
Before working in this directory:
|
||||
- `docs/README.md`
|
||||
- `docs/19_TEST_SUITE_OVERVIEW.md`
|
||||
- `src/__Tests/__Benchmarks/README.md`
|
||||
- Sprint-specific guidance for corpus/bench artifacts
|
||||
|
||||
## Test Categories
|
||||
|
||||
When writing tests, use appropriate xUnit traits:
|
||||
|
||||
```csharp
|
||||
[Trait("Category", "Unit")] // Fast, isolated unit tests
|
||||
[Trait("Category", "Integration")] // Tests requiring infrastructure
|
||||
[Trait("Category", "E2E")] // Full end-to-end workflows
|
||||
[Trait("Category", "AirGap")] // Must work without network
|
||||
[Trait("Category", "Interop")] // Third-party tool compatibility
|
||||
[Trait("Category", "Performance")] // Performance benchmarks
|
||||
[Trait("Category", "Chaos")] // Failure injection tests
|
||||
[Trait("Category", "Security")] // Security-focused tests
|
||||
```
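
Categories are consumed through the runner's trait filter (for example, a CI lane that runs only `Category=Unit`); the sketch below shows how a tagged test looks in practice. The test body and the filter expression in the comment are illustrative assumptions, not an existing StellaOps test or pipeline step.

```csharp
using Xunit;

public sealed class PurlParsingTests
{
    // Assumed CI usage: dotnet test --filter "Category=Unit" picks up this test.
    [Fact]
    [Trait("Category", "Unit")]
    public void Split_ReturnsTypeNameAndVersion()
    {
        var parts = "pkg:npm/lodash@4.17.21".Split('/', '@');

        Assert.Equal("pkg:npm", parts[0]);
        Assert.Equal("lodash", parts[1]);
        Assert.Equal("4.17.21", parts[2]);
    }
}
```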
|
||||
|
||||
## Key Patterns
|
||||
|
||||
### 1. PostgreSQL Integration Tests
|
||||
|
||||
Use the shared fixture from `__Libraries/StellaOps.Infrastructure.Postgres.Testing`:
|
||||
|
||||
```csharp
|
||||
public class MyIntegrationTests : IClassFixture<MyPostgresFixture>
|
||||
{
|
||||
private readonly MyPostgresFixture _fixture;
|
||||
|
||||
public MyIntegrationTests(MyPostgresFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MyTest()
|
||||
{
|
||||
// _fixture.ConnectionString is available
|
||||
// _fixture.TruncateAllTablesAsync() for cleanup
|
||||
}
|
||||
}
|
||||
```
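
The fixture type in the example above is a placeholder. A minimal sketch of such a fixture built directly on Testcontainers is shown below; the shared `StellaOps.Infrastructure.Postgres.Testing` library presumably wraps similar mechanics, so treat the class shape here as an illustrative assumption rather than the library's actual API.

```csharp
using Testcontainers.PostgreSql;
using Xunit;

// Minimal sketch of a per-class PostgreSQL fixture using Testcontainers directly.
public sealed class MyPostgresFixture : IAsyncLifetime
{
    private readonly PostgreSqlContainer _container = new PostgreSqlBuilder()
        .WithImage("postgres:16-alpine")
        .Build();

    public string ConnectionString => _container.GetConnectionString();

    public Task InitializeAsync() => _container.StartAsync();

    public Task DisposeAsync() => _container.DisposeAsync().AsTask();
}
```

This starts one container per test class; if startup cost becomes noticeable, share it across classes with an xUnit collection fixture instead.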
|
||||
|
||||
### 2. Air-Gap Tests
|
||||
|
||||
Inherit from `NetworkIsolatedTestBase` for network-free tests:
|
||||
|
||||
```csharp
|
||||
[Trait("Category", "AirGap")]
|
||||
public class OfflineTests : NetworkIsolatedTestBase
|
||||
{
|
||||
[Fact]
|
||||
public async Task Test_WorksOffline()
|
||||
{
|
||||
AssertNoNetworkCalls(); // Fails if network accessed
|
||||
}
|
||||
|
||||
protected string GetOfflineBundlePath() =>
|
||||
Path.Combine(AppContext.BaseDirectory, "fixtures", "offline-bundle");
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Determinism Tests
|
||||
|
||||
Use `DeterminismVerifier` to ensure reproducibility:
|
||||
|
||||
```csharp
|
||||
[Fact]
|
||||
public void Output_IsDeterministic()
|
||||
{
|
||||
var verifier = new DeterminismVerifier();
|
||||
var result = verifier.Verify(myObject, iterations: 10);
|
||||
result.IsDeterministic.Should().BeTrue();
|
||||
}
|
||||
```
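
`DeterminismVerifier` ships with `StellaOps.Testing.Determinism`. If you need the same check without the helper, the underlying idea is just repeated serialization plus digest comparison, roughly as sketched below (the JSON settings are an assumption, not the library's canonical options).

```csharp
using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

public static class NaiveDeterminismCheck
{
    // Serializes the value several times and confirms every pass produces the same SHA-256.
    public static bool IsDeterministic<T>(T value, int iterations = 10)
    {
        var options = new JsonSerializerOptions { WriteIndented = false };
        string? firstHash = null;

        for (var i = 0; i < iterations; i++)
        {
            var json = JsonSerializer.Serialize(value, options);
            var hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json)));

            firstHash ??= hash;
            if (hash != firstHash)
            {
                return false;
            }
        }

        return true;
    }
}
```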
|
||||
|
||||
### 4. Golden Corpus Tests
|
||||
|
||||
Reference cases from `__Benchmarks/golden-corpus/`:
|
||||
|
||||
```csharp
|
||||
[Theory]
|
||||
[MemberData(nameof(GetCorpusCases))]
|
||||
public async Task Corpus_Case_Passes(string caseId)
|
||||
{
|
||||
var testCase = CorpusLoader.Load(caseId);
|
||||
var result = await ProcessAsync(testCase.Input);
|
||||
result.Should().BeEquivalentTo(testCase.Expected);
|
||||
}
|
||||
```
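
`CorpusLoader` and `GetCorpusCases` are helpers assumed by the snippet above; a minimal `MemberData` source that enumerates case directories could look like the sketch below (the exact corpus layout under the test output directory is an assumption).

```csharp
using System;
using System.Collections.Generic;
using System.IO;

public static class CorpusCases
{
    // Yields one theory row per case directory, e.g. __Benchmarks/golden-corpus/CASE-0001/.
    public static IEnumerable<object[]> GetCorpusCases()
    {
        var root = Path.Combine(AppContext.BaseDirectory, "__Benchmarks", "golden-corpus");

        foreach (var caseDir in Directory.EnumerateDirectories(root))
        {
            yield return new object[] { Path.GetFileName(caseDir) };
        }
    }
}
```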
|
||||
|
||||
## Working Agreements
|
||||
|
||||
1. **Determinism:** Stable ordering, fixed seeds, UTC timestamps
|
||||
2. **Offline-first:** No network dependencies unless explicitly required
|
||||
3. **Testcontainers:** Use PostgreSQL fixtures from `__Libraries/`
|
||||
4. **Air-gap validation:** Inherit from `NetworkIsolatedTestBase`
|
||||
5. **Golden corpus:** Reference cases from `__Benchmarks/golden-corpus/`
|
||||
6. **Fixtures:** Keep ASCII and reproducible; avoid oversized binaries
|
||||
|
||||
## Module Tests vs Global Tests
|
||||
|
||||
- **Module tests:** Stay in `src/<Module>/__Tests/` - component-specific testing
|
||||
- **Global tests:** Go in `src/__Tests/` - cross-cutting, infrastructure, benchmarks, integration
|
||||
|
||||
## Rules for Test Development
|
||||
|
||||
### DO:
|
||||
1. Tag tests with appropriate categories for filtering
|
||||
2. Use Testcontainers for infrastructure dependencies
|
||||
3. Inherit from shared fixtures to avoid duplication
|
||||
4. Assert no network calls in air-gap tests
|
||||
5. Verify determinism for any serialization output
|
||||
6. Use property-based tests (FsCheck) for invariants
|
||||
7. Document test purpose in method names
|
||||
|
||||
### DON'T:
|
||||
1. Don't skip tests without documenting why
|
||||
2. Don't use `Thread.Sleep` - use proper async waits (see the sketch after this list)
|
||||
3. Don't hardcode paths - use `AppContext.BaseDirectory`
|
||||
4. Don't make network calls in non-interop tests
|
||||
5. Don't depend on test execution order
|
||||
6. Don't leave test data in shared databases
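
For item 2 above, a small async polling helper is usually enough to replace `Thread.Sleep`; the sketch below is illustrative and not an existing TestKit API.

```csharp
using System;
using System.Threading.Tasks;

public static class AsyncWait
{
    // Polls a condition instead of sleeping for a fixed interval.
    public static async Task UntilAsync(
        Func<Task<bool>> condition,
        TimeSpan? timeout = null,
        TimeSpan? pollInterval = null)
    {
        var deadline = DateTimeOffset.UtcNow + (timeout ?? TimeSpan.FromSeconds(10));
        var interval = pollInterval ?? TimeSpan.FromMilliseconds(100);

        while (DateTimeOffset.UtcNow < deadline)
        {
            if (await condition())
            {
                return;
            }

            await Task.Delay(interval);
        }

        throw new TimeoutException("Condition was not met before the timeout elapsed.");
    }
}
```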
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Purpose | Default |
|
||||
|----------|---------|---------|
|
||||
| `STELLAOPS_OFFLINE_MODE` | Enable offline mode | `false` |
|
||||
| `STELLAOPS_OFFLINE_BUNDLE` | Path to offline bundle | - |
|
||||
| `STELLAOPS_TEST_POSTGRES` | PostgreSQL connection | Testcontainers |
|
||||
| `STELLAOPS_TEST_VALKEY` | Valkey connection | Testcontainers |
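
The connection-string variables in the table override the Testcontainers default; a typical resolution pattern is sketched below (the method is illustrative, built on the Testcontainers PostgreSQL client).

```csharp
using System;
using System.Threading.Tasks;
using Testcontainers.PostgreSql;

public static class TestPostgres
{
    // Prefer an externally supplied instance (STELLAOPS_TEST_POSTGRES); otherwise start a container.
    public static async Task<string> ResolveConnectionStringAsync(PostgreSqlContainer fallback)
    {
        var external = Environment.GetEnvironmentVariable("STELLAOPS_TEST_POSTGRES");
        if (!string.IsNullOrWhiteSpace(external))
        {
            return external;
        }

        await fallback.StartAsync();
        return fallback.GetConnectionString();
    }
}
```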
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- `docs/19_TEST_SUITE_OVERVIEW.md` - Comprehensive test taxonomy
|
||||
- `docs/testing/webservice-test-discipline.md` - WebService test patterns
|
||||
- `docs/testing/SPRINT_EXECUTION_PLAYBOOK.md` - Sprint execution guide
|
||||
- `docs/dev/fixtures.md` - Fixture maintenance patterns
|
||||
6
src/__Tests/AirGap/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# AirGap Tests
|
||||
|
||||
## Notes
|
||||
- Tests now run entirely against in-memory stores (no MongoDB or external services required).
|
||||
- Keep fixtures deterministic: stable ordering, UTC timestamps, fixed seeds where applicable (see the sketch below).
|
||||
- Sealed-mode and staleness tests rely on local fixture bundles only; no network access is needed.
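
A minimal sketch of the determinism note above: pin the seed and the clock instead of relying on `Random.Shared` or `DateTimeOffset.UtcNow` (the values shown are arbitrary placeholders).

```csharp
using System;

// Deterministic inputs for sealed-mode and staleness fixtures: fixed UTC anchor, fixed seed.
public static class DeterministicFixture
{
    public static readonly DateTimeOffset Anchor =
        new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); // always UTC, never "now"

    public static Random CreateRng() => new(1337); // stable seed across runs
}
```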
|
||||
@@ -0,0 +1,163 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AirGap.Controller.Domain;
|
||||
using StellaOps.AirGap.Controller.Options;
|
||||
using StellaOps.AirGap.Controller.Services;
|
||||
using StellaOps.AirGap.Controller.Stores;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.AirGap.Time.Models;
|
||||
using StellaOps.AirGap.Time.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Controller.Tests;
|
||||
|
||||
public class AirGapStartupDiagnosticsHostedServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task Blocks_when_allowlist_missing_for_sealed_state()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var store = new InMemoryAirGapStateStore();
|
||||
await store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = "default",
|
||||
Sealed = true,
|
||||
PolicyHash = "policy-x",
|
||||
TimeAnchor = new TimeAnchor(now, "rough", "rough", "fp", "digest"),
|
||||
StalenessBudget = new StalenessBudget(60, 120)
|
||||
});
|
||||
|
||||
var trustDir = CreateTrustMaterial();
|
||||
var options = BuildOptions(trustDir);
|
||||
options.EgressAllowlist = null; // simulate missing config section
|
||||
|
||||
var service = CreateService(store, options, now);
|
||||
|
||||
var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => service.StartAsync(CancellationToken.None));
|
||||
Assert.Contains("egress-allowlist-missing", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Passes_when_materials_present_and_anchor_fresh()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var store = new InMemoryAirGapStateStore();
|
||||
await store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = "default",
|
||||
Sealed = true,
|
||||
PolicyHash = "policy-ok",
|
||||
TimeAnchor = new TimeAnchor(now.AddMinutes(-1), "rough", "rough", "fp", "digest"),
|
||||
StalenessBudget = new StalenessBudget(300, 600)
|
||||
});
|
||||
|
||||
var trustDir = CreateTrustMaterial();
|
||||
var options = BuildOptions(trustDir, new[] { "127.0.0.1/32" });
|
||||
|
||||
var service = CreateService(store, options, now);
|
||||
|
||||
await service.StartAsync(CancellationToken.None); // should not throw
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Blocks_when_anchor_is_stale()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var store = new InMemoryAirGapStateStore();
|
||||
await store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = "default",
|
||||
Sealed = true,
|
||||
PolicyHash = "policy-stale",
|
||||
TimeAnchor = new TimeAnchor(now.AddHours(-2), "rough", "rough", "fp", "digest"),
|
||||
StalenessBudget = new StalenessBudget(60, 90)
|
||||
});
|
||||
|
||||
var trustDir = CreateTrustMaterial();
|
||||
var options = BuildOptions(trustDir, new[] { "10.0.0.0/24" });
|
||||
|
||||
var service = CreateService(store, options, now);
|
||||
|
||||
var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => service.StartAsync(CancellationToken.None));
|
||||
Assert.Contains("time-anchor-stale", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Blocks_when_rotation_pending_without_dual_approval()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var store = new InMemoryAirGapStateStore();
|
||||
await store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = "default",
|
||||
Sealed = true,
|
||||
PolicyHash = "policy-rot",
|
||||
TimeAnchor = new TimeAnchor(now, "rough", "rough", "fp", "digest"),
|
||||
StalenessBudget = new StalenessBudget(120, 240)
|
||||
});
|
||||
|
||||
var trustDir = CreateTrustMaterial();
|
||||
var options = BuildOptions(trustDir, new[] { "10.10.0.0/16" });
|
||||
options.Rotation.PendingKeys["k-new"] = Convert.ToBase64String(new byte[] { 1, 2, 3 });
|
||||
options.Rotation.ActiveKeys["k-old"] = Convert.ToBase64String(new byte[] { 9, 9, 9 });
|
||||
options.Rotation.ApproverIds.Add("approver-1");
|
||||
|
||||
var service = CreateService(store, options, now);
|
||||
|
||||
var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => service.StartAsync(CancellationToken.None));
|
||||
Assert.Contains("rotation:rotation-dual-approval-required", ex.Message);
|
||||
}
|
||||
|
||||
private static AirGapStartupOptions BuildOptions(string trustDir, string[]? allowlist = null)
|
||||
{
|
||||
return new AirGapStartupOptions
|
||||
{
|
||||
TenantId = "default",
|
||||
EgressAllowlist = allowlist,
|
||||
Trust = new TrustMaterialOptions
|
||||
{
|
||||
RootJsonPath = Path.Combine(trustDir, "root.json"),
|
||||
SnapshotJsonPath = Path.Combine(trustDir, "snapshot.json"),
|
||||
TimestampJsonPath = Path.Combine(trustDir, "timestamp.json")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static AirGapStartupDiagnosticsHostedService CreateService(IAirGapStateStore store, AirGapStartupOptions options, DateTimeOffset now)
|
||||
{
|
||||
return new AirGapStartupDiagnosticsHostedService(
|
||||
store,
|
||||
new StalenessCalculator(),
|
||||
new FixedTimeProvider(now),
|
||||
Microsoft.Extensions.Options.Options.Create(options),
|
||||
NullLogger<AirGapStartupDiagnosticsHostedService>.Instance,
|
||||
new AirGapTelemetry(NullLogger<AirGapTelemetry>.Instance),
|
||||
new TufMetadataValidator(),
|
||||
new RootRotationPolicy());
|
||||
}
|
||||
|
||||
private static string CreateTrustMaterial()
|
||||
{
|
||||
var dir = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), "airgap-trust-" + Guid.NewGuid().ToString("N"))).FullName;
|
||||
var expires = DateTimeOffset.UtcNow.AddDays(1).ToString("O");
|
||||
const string hash = "abc123";
|
||||
|
||||
File.WriteAllText(Path.Combine(dir, "root.json"), $"{{\"version\":1,\"expiresUtc\":\"{expires}\"}}");
|
||||
File.WriteAllText(Path.Combine(dir, "snapshot.json"), $"{{\"version\":1,\"expiresUtc\":\"{expires}\",\"meta\":{{\"snapshot\":{{\"hashes\":{{\"sha256\":\"{hash}\"}}}}}}}}");
|
||||
File.WriteAllText(Path.Combine(dir, "timestamp.json"), $"{{\"version\":1,\"expiresUtc\":\"{expires}\",\"snapshot\":{{\"meta\":{{\"hashes\":{{\"sha256\":\"{hash}\"}}}}}}}}");
|
||||
|
||||
return dir;
|
||||
}
|
||||
|
||||
private sealed class FixedTimeProvider : TimeProvider
|
||||
{
|
||||
private readonly DateTimeOffset _now;
|
||||
|
||||
public FixedTimeProvider(DateTimeOffset now)
|
||||
{
|
||||
_now = now;
|
||||
}
|
||||
|
||||
public override DateTimeOffset GetUtcNow() => _now;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
using StellaOps.AirGap.Controller.Services;
|
||||
using StellaOps.AirGap.Controller.Stores;
|
||||
using StellaOps.AirGap.Time.Models;
|
||||
using StellaOps.AirGap.Time.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Controller.Tests;
|
||||
|
||||
public class AirGapStateServiceTests
|
||||
{
|
||||
private readonly AirGapStateService _service;
|
||||
private readonly InMemoryAirGapStateStore _store = new();
|
||||
private readonly StalenessCalculator _calculator = new();
|
||||
|
||||
public AirGapStateServiceTests()
|
||||
{
|
||||
_service = new AirGapStateService(_store, _calculator);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Seal_sets_state_and_computes_staleness()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchor = new TimeAnchor(now.AddMinutes(-2), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = new StalenessBudget(60, 120);
|
||||
|
||||
await _service.SealAsync("tenant-a", "policy-1", anchor, budget, now);
|
||||
var status = await _service.GetStatusAsync("tenant-a", now);
|
||||
|
||||
Assert.True(status.State.Sealed);
|
||||
Assert.Equal("policy-1", status.State.PolicyHash);
|
||||
Assert.Equal("tenant-a", status.State.TenantId);
|
||||
Assert.True(status.Staleness.AgeSeconds > 0);
|
||||
Assert.True(status.Staleness.IsWarning);
|
||||
Assert.Equal(120 - status.Staleness.AgeSeconds, status.Staleness.SecondsRemaining);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Unseal_clears_sealed_flag_and_updates_timestamp()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
await _service.SealAsync("default", "hash", TimeAnchor.Unknown, StalenessBudget.Default, now);
|
||||
|
||||
var later = now.AddMinutes(1);
|
||||
await _service.UnsealAsync("default", later);
|
||||
var status = await _service.GetStatusAsync("default", later);
|
||||
|
||||
Assert.False(status.State.Sealed);
|
||||
Assert.Equal(later, status.State.LastTransitionAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Seal_persists_drift_baseline_seconds()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchor = new TimeAnchor(now.AddMinutes(-5), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = StalenessBudget.Default;
|
||||
|
||||
var state = await _service.SealAsync("tenant-drift", "policy-drift", anchor, budget, now);
|
||||
|
||||
Assert.Equal(300, state.DriftBaselineSeconds); // 5 minutes = 300 seconds
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Seal_creates_default_content_budgets_when_not_provided()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchor = new TimeAnchor(now.AddMinutes(-1), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = new StalenessBudget(120, 240);
|
||||
|
||||
var state = await _service.SealAsync("tenant-content", "policy-content", anchor, budget, now);
|
||||
|
||||
Assert.Contains("advisories", state.ContentBudgets.Keys);
|
||||
Assert.Contains("vex", state.ContentBudgets.Keys);
|
||||
Assert.Contains("policy", state.ContentBudgets.Keys);
|
||||
Assert.Equal(budget, state.ContentBudgets["advisories"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Seal_uses_provided_content_budgets()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchor = new TimeAnchor(now.AddMinutes(-1), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = StalenessBudget.Default;
|
||||
var contentBudgets = new Dictionary<string, StalenessBudget>
|
||||
{
|
||||
{ "advisories", new StalenessBudget(30, 60) },
|
||||
{ "vex", new StalenessBudget(60, 120) }
|
||||
};
|
||||
|
||||
var state = await _service.SealAsync("tenant-custom", "policy-custom", anchor, budget, now, contentBudgets);
|
||||
|
||||
Assert.Equal(new StalenessBudget(30, 60), state.ContentBudgets["advisories"]);
|
||||
Assert.Equal(new StalenessBudget(60, 120), state.ContentBudgets["vex"]);
|
||||
Assert.Equal(budget, state.ContentBudgets["policy"]); // Falls back to default
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetStatus_returns_per_content_staleness()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchor = new TimeAnchor(now.AddSeconds(-45), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = StalenessBudget.Default;
|
||||
var contentBudgets = new Dictionary<string, StalenessBudget>
|
||||
{
|
||||
{ "advisories", new StalenessBudget(30, 60) },
|
||||
{ "vex", new StalenessBudget(60, 120) },
|
||||
{ "policy", new StalenessBudget(100, 200) }
|
||||
};
|
||||
|
||||
await _service.SealAsync("tenant-content-status", "policy-content-status", anchor, budget, now, contentBudgets);
|
||||
var status = await _service.GetStatusAsync("tenant-content-status", now);
|
||||
|
||||
Assert.NotEmpty(status.ContentStaleness);
|
||||
Assert.True(status.ContentStaleness["advisories"].IsWarning); // 45s >= 30s warning
|
||||
Assert.False(status.ContentStaleness["advisories"].IsBreach); // 45s < 60s breach
|
||||
Assert.False(status.ContentStaleness["vex"].IsWarning); // 45s < 60s warning
|
||||
Assert.False(status.ContentStaleness["policy"].IsWarning); // 45s < 100s warning
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,143 @@
|
||||
using StellaOps.AirGap.Controller.Domain;
|
||||
using StellaOps.AirGap.Controller.Stores;
|
||||
using StellaOps.AirGap.Time.Models;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Controller.Tests;
|
||||
|
||||
public class InMemoryAirGapStateStoreTests
|
||||
{
|
||||
private readonly InMemoryAirGapStateStore _store = new();
|
||||
|
||||
[Fact]
|
||||
public async Task Upsert_and_read_state_by_tenant()
|
||||
{
|
||||
var state = new AirGapState
|
||||
{
|
||||
TenantId = "tenant-x",
|
||||
Sealed = true,
|
||||
PolicyHash = "hash-1",
|
||||
TimeAnchor = new TimeAnchor(DateTimeOffset.UtcNow, "roughtime", "roughtime", "fp", "digest"),
|
||||
StalenessBudget = new StalenessBudget(10, 20),
|
||||
LastTransitionAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
await _store.SetAsync(state);
|
||||
|
||||
var stored = await _store.GetAsync("tenant-x");
|
||||
Assert.True(stored.Sealed);
|
||||
Assert.Equal("hash-1", stored.PolicyHash);
|
||||
Assert.Equal("tenant-x", stored.TenantId);
|
||||
Assert.Equal(state.TimeAnchor.TokenDigest, stored.TimeAnchor.TokenDigest);
|
||||
Assert.Equal(10, stored.StalenessBudget.WarningSeconds);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Enforces_singleton_per_tenant()
|
||||
{
|
||||
var first = new AirGapState { TenantId = "tenant-y", Sealed = true, PolicyHash = "h1" };
|
||||
var second = new AirGapState { TenantId = "tenant-y", Sealed = false, PolicyHash = "h2" };
|
||||
|
||||
await _store.SetAsync(first);
|
||||
await _store.SetAsync(second);
|
||||
|
||||
var stored = await _store.GetAsync("tenant-y");
|
||||
Assert.Equal("h2", stored.PolicyHash);
|
||||
Assert.False(stored.Sealed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Defaults_to_unknown_when_missing()
|
||||
{
|
||||
var stored = await _store.GetAsync("absent");
|
||||
Assert.False(stored.Sealed);
|
||||
Assert.Equal("absent", stored.TenantId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Parallel_upserts_keep_single_document()
|
||||
{
|
||||
var tasks = Enumerable.Range(0, 20).Select(i =>
|
||||
{
|
||||
var state = new AirGapState
|
||||
{
|
||||
TenantId = "tenant-parallel",
|
||||
Sealed = i % 2 == 0,
|
||||
PolicyHash = $"hash-{i}"
|
||||
};
|
||||
return _store.SetAsync(state);
|
||||
});
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
var stored = await _store.GetAsync("tenant-parallel");
|
||||
Assert.StartsWith("hash-", stored.PolicyHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Multi_tenant_updates_do_not_collide()
|
||||
{
|
||||
var tenants = Enumerable.Range(0, 5).Select(i => $"t-{i}").ToArray();
|
||||
|
||||
var tasks = tenants.Select(t => _store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = t,
|
||||
Sealed = true,
|
||||
PolicyHash = $"hash-{t}"
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
foreach (var t in tenants)
|
||||
{
|
||||
var stored = await _store.GetAsync(t);
|
||||
Assert.Equal($"hash-{t}", stored.PolicyHash);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Staleness_round_trip_matches_budget()
|
||||
{
|
||||
var anchor = new TimeAnchor(DateTimeOffset.UtcNow.AddMinutes(-3), "roughtime", "roughtime", "fp", "digest");
|
||||
var budget = new StalenessBudget(60, 600);
|
||||
await _store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = "tenant-staleness",
|
||||
Sealed = true,
|
||||
PolicyHash = "hash-s",
|
||||
TimeAnchor = anchor,
|
||||
StalenessBudget = budget,
|
||||
LastTransitionAt = DateTimeOffset.UtcNow
|
||||
});
|
||||
|
||||
var stored = await _store.GetAsync("tenant-staleness");
|
||||
Assert.Equal(anchor.TokenDigest, stored.TimeAnchor.TokenDigest);
|
||||
Assert.Equal(budget.WarningSeconds, stored.StalenessBudget.WarningSeconds);
|
||||
Assert.Equal(budget.BreachSeconds, stored.StalenessBudget.BreachSeconds);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Multi_tenant_states_preserve_transition_times()
|
||||
{
|
||||
var tenants = new[] { "a", "b", "c" };
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
foreach (var t in tenants)
|
||||
{
|
||||
await _store.SetAsync(new AirGapState
|
||||
{
|
||||
TenantId = t,
|
||||
Sealed = true,
|
||||
PolicyHash = $"ph-{t}",
|
||||
LastTransitionAt = now
|
||||
});
|
||||
}
|
||||
|
||||
foreach (var t in tenants)
|
||||
{
|
||||
var state = await _store.GetAsync(t);
|
||||
Assert.Equal(now, state.LastTransitionAt);
|
||||
Assert.Equal($"ph-{t}", state.PolicyHash);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
using StellaOps.AirGap.Controller.Endpoints.Contracts;
|
||||
using StellaOps.AirGap.Controller.Services;
|
||||
using StellaOps.AirGap.Controller.Stores;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.AirGap.Time.Models;
|
||||
using StellaOps.AirGap.Time.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AirGap.Controller.Tests;
|
||||
|
||||
public class ReplayVerificationServiceTests
|
||||
{
|
||||
private readonly ReplayVerificationService _service;
|
||||
private readonly AirGapStateService _stateService;
|
||||
private readonly StalenessCalculator _staleness = new();
|
||||
private readonly InMemoryAirGapStateStore _store = new();
|
||||
|
||||
public ReplayVerificationServiceTests()
|
||||
{
|
||||
_stateService = new AirGapStateService(_store, _staleness);
|
||||
_service = new ReplayVerificationService(_stateService, new ReplayVerifier());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Passes_full_recompute_when_hashes_match()
|
||||
{
|
||||
var now = DateTimeOffset.Parse("2025-12-02T01:00:00Z");
|
||||
await _stateService.SealAsync("tenant-a", "policy-x", TimeAnchor.Unknown, StalenessBudget.Default, now);
|
||||
|
||||
var request = new VerifyRequest
|
||||
{
|
||||
Depth = ReplayDepth.FullRecompute,
|
||||
ManifestSha256 = new string('a', 64),
|
||||
BundleSha256 = new string('b', 64),
|
||||
ComputedManifestSha256 = new string('a', 64),
|
||||
ComputedBundleSha256 = new string('b', 64),
|
||||
ManifestCreatedAt = now.AddHours(-2),
|
||||
StalenessWindowHours = 24,
|
||||
BundlePolicyHash = "policy-x"
|
||||
};
|
||||
|
||||
var result = await _service.VerifyAsync("tenant-a", request, now);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal("full-recompute-passed", result.Reason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Detects_stale_manifest()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var request = new VerifyRequest
|
||||
{
|
||||
Depth = ReplayDepth.HashOnly,
|
||||
ManifestSha256 = new string('a', 64),
|
||||
BundleSha256 = new string('b', 64),
|
||||
ComputedManifestSha256 = new string('a', 64),
|
||||
ComputedBundleSha256 = new string('b', 64),
|
||||
ManifestCreatedAt = now.AddHours(-30),
|
||||
StalenessWindowHours = 12
|
||||
};
|
||||
|
||||
var result = await _service.VerifyAsync("default", request, now);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal("manifest-stale", result.Reason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Policy_freeze_requires_matching_policy()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
await _stateService.SealAsync("tenant-b", "sealed-policy", TimeAnchor.Unknown, StalenessBudget.Default, now);
|
||||
|
||||
var request = new VerifyRequest
|
||||
{
|
||||
Depth = ReplayDepth.PolicyFreeze,
|
||||
ManifestSha256 = new string('a', 64),
|
||||
BundleSha256 = new string('b', 64),
|
||||
ComputedManifestSha256 = new string('a', 64),
|
||||
ComputedBundleSha256 = new string('b', 64),
|
||||
ManifestCreatedAt = now,
|
||||
StalenessWindowHours = 48,
|
||||
BundlePolicyHash = "bundle-policy"
|
||||
};
|
||||
|
||||
var result = await _service.VerifyAsync("tenant-b", request, now);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal("policy-hash-drift", result.Reason);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<IsPackable>false</IsPackable>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj" />
|
||||
<Compile Include="../../shared/*.cs" Link="Shared/%(Filename)%(Extension)" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,40 @@
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Planning;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public class BundleImportPlannerTests
|
||||
{
|
||||
[Fact]
|
||||
public void ReturnsFailureWhenBundlePathMissing()
|
||||
{
|
||||
var planner = new BundleImportPlanner();
|
||||
var result = planner.CreatePlan(string.Empty, TrustRootConfig.Empty("/tmp"));
|
||||
|
||||
Assert.False(result.InitialState.IsValid);
|
||||
Assert.Equal("bundle-path-required", result.InitialState.Reason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReturnsFailureWhenTrustRootsMissing()
|
||||
{
|
||||
var planner = new BundleImportPlanner();
|
||||
var result = planner.CreatePlan("bundle.tar", TrustRootConfig.Empty("/tmp"));
|
||||
|
||||
Assert.False(result.InitialState.IsValid);
|
||||
Assert.Equal("trust-roots-required", result.InitialState.Reason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReturnsDefaultPlanWhenInputsProvided()
|
||||
{
|
||||
var planner = new BundleImportPlanner();
|
||||
var trust = new TrustRootConfig("/tmp/trust.json", new[] { "abc" }, new[] { "ed25519" }, null, null, new Dictionary<string, byte[]>());
|
||||
|
||||
var result = planner.CreatePlan("bundle.tar", trust);
|
||||
|
||||
Assert.True(result.InitialState.IsValid);
|
||||
Assert.Contains("verify-dsse-signature", result.Steps);
|
||||
Assert.Equal("bundle.tar", result.Inputs["bundlePath"]);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
using System.Security.Cryptography;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public class DsseVerifierTests
|
||||
{
|
||||
[Fact]
|
||||
public void FailsWhenUntrustedKey()
|
||||
{
|
||||
var verifier = new DsseVerifier();
|
||||
var envelope = new DsseEnvelope("text/plain", Convert.ToBase64String("hi"u8), new[] { new DsseSignature("k1", "sig") });
|
||||
var trust = TrustRootConfig.Empty("/tmp");
|
||||
|
||||
var result = verifier.Verify(envelope, trust);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifiesRsaPssSignature()
|
||||
{
|
||||
using var rsa = RSA.Create(2048);
|
||||
var pub = rsa.ExportSubjectPublicKeyInfo();
|
||||
var payload = "hello-world";
|
||||
var payloadType = "application/vnd.stella.bundle";
|
||||
var pae = BuildPae(payloadType, payload);
|
||||
var sig = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
|
||||
|
||||
var envelope = new DsseEnvelope(payloadType, Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload)), new[]
|
||||
{
|
||||
new DsseSignature("k1", Convert.ToBase64String(sig))
|
||||
});
|
||||
|
||||
var trust = new TrustRootConfig(
|
||||
"/tmp/root.json",
|
||||
new[] { Fingerprint(pub) },
|
||||
new[] { "rsassa-pss-sha256" },
|
||||
null,
|
||||
null,
|
||||
new Dictionary<string, byte[]> { ["k1"] = pub });
|
||||
|
||||
var result = new DsseVerifier().Verify(envelope, trust);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal("dsse-signature-verified", result.Reason);
|
||||
}
|
||||
|
||||
private static byte[] BuildPae(string payloadType, string payload)
|
||||
{
|
||||
var parts = new[] { "DSSEv1", payloadType, payload };
|
||||
var paeBuilder = new System.Text.StringBuilder();
|
||||
paeBuilder.Append("PAE:");
|
||||
paeBuilder.Append(parts.Length);
|
||||
foreach (var part in parts)
|
||||
{
|
||||
paeBuilder.Append(' ');
|
||||
paeBuilder.Append(part.Length);
|
||||
paeBuilder.Append(' ');
|
||||
paeBuilder.Append(part);
|
||||
}
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(paeBuilder.ToString());
|
||||
}
|
||||
|
||||
private static string Fingerprint(byte[] pub)
|
||||
{
|
||||
return Convert.ToHexString(SHA256.HashData(pub)).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
global using Xunit;
|
||||
@@ -0,0 +1,238 @@
|
||||
using System.Security.Cryptography;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Quarantine;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.AirGap.Importer.Versioning;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public sealed class ImportValidatorTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenTufInvalid_ShouldFailAndQuarantine()
|
||||
{
|
||||
var quarantine = new CapturingQuarantineService();
|
||||
var monotonicity = new CapturingMonotonicityChecker();
|
||||
|
||||
var validator = new ImportValidator(
|
||||
new DsseVerifier(),
|
||||
new TufMetadataValidator(),
|
||||
new MerkleRootCalculator(),
|
||||
new RootRotationPolicy(),
|
||||
monotonicity,
|
||||
quarantine,
|
||||
NullLogger<ImportValidator>.Instance);
|
||||
|
||||
var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(tempRoot);
|
||||
var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst");
|
||||
await File.WriteAllTextAsync(bundlePath, "bundle-bytes");
|
||||
|
||||
try
|
||||
{
|
||||
var request = BuildRequest(bundlePath, rootJson: "{}", snapshotJson: "{}", timestampJson: "{}");
|
||||
var result = await validator.ValidateAsync(request);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Reason.Should().StartWith("tuf:");
|
||||
|
||||
quarantine.Requests.Should().HaveCount(1);
|
||||
quarantine.Requests[0].TenantId.Should().Be("tenant-a");
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
Directory.Delete(tempRoot, recursive: true);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// best-effort cleanup
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenAllChecksPass_ShouldSucceedAndRecordActivation()
|
||||
{
|
||||
var root = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\"}";
|
||||
var snapshot = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"meta\":{\"snapshot\":{\"hashes\":{\"sha256\":\"abc\"}}}}";
|
||||
var timestamp = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"snapshot\":{\"meta\":{\"hashes\":{\"sha256\":\"abc\"}}}}";
|
||||
|
||||
using var rsa = RSA.Create(2048);
|
||||
var pub = rsa.ExportSubjectPublicKeyInfo();
|
||||
|
||||
var payload = "bundle-body";
|
||||
var payloadType = "application/vnd.stella.bundle";
|
||||
var pae = BuildPae(payloadType, payload);
|
||||
var sig = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
|
||||
|
||||
var envelope = new DsseEnvelope(payloadType, Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload)), new[]
|
||||
{
|
||||
new DsseSignature("k1", Convert.ToBase64String(sig))
|
||||
});
|
||||
|
||||
var trustStore = new TrustStore();
|
||||
trustStore.LoadActive(new Dictionary<string, byte[]> { ["k1"] = pub });
|
||||
trustStore.StagePending(new Dictionary<string, byte[]> { ["k2"] = pub });
|
||||
|
||||
var quarantine = new CapturingQuarantineService();
|
||||
var monotonicity = new CapturingMonotonicityChecker();
|
||||
|
||||
var validator = new ImportValidator(
|
||||
new DsseVerifier(),
|
||||
new TufMetadataValidator(),
|
||||
new MerkleRootCalculator(),
|
||||
new RootRotationPolicy(),
|
||||
monotonicity,
|
||||
quarantine,
|
||||
NullLogger<ImportValidator>.Instance);
|
||||
|
||||
var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(tempRoot);
|
||||
var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst");
|
||||
await File.WriteAllTextAsync(bundlePath, "bundle-bytes");
|
||||
|
||||
try
|
||||
{
|
||||
var request = new ImportValidationRequest(
|
||||
TenantId: "tenant-a",
|
||||
BundleType: "offline-kit",
|
||||
BundleDigest: "sha256:bundle",
|
||||
BundlePath: bundlePath,
|
||||
ManifestJson: "{\"version\":\"1.0.0\"}",
|
||||
ManifestVersion: "1.0.0",
|
||||
ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"),
|
||||
ForceActivate: false,
|
||||
ForceActivateReason: null,
|
||||
Envelope: envelope,
|
||||
TrustRoots: new TrustRootConfig("/tmp/root.json", new[] { Fingerprint(pub) }, new[] { "rsassa-pss-sha256" }, null, null, new Dictionary<string, byte[]> { ["k1"] = pub }),
|
||||
RootJson: root,
|
||||
SnapshotJson: snapshot,
|
||||
TimestampJson: timestamp,
|
||||
PayloadEntries: new List<NamedStream> { new("a.txt", new MemoryStream("data"u8.ToArray())) },
|
||||
TrustStore: trustStore,
|
||||
ApproverIds: new[] { "approver-1", "approver-2" });
|
||||
|
||||
var result = await validator.ValidateAsync(request);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Reason.Should().Be("import-validated");
|
||||
|
||||
monotonicity.RecordedActivations.Should().HaveCount(1);
|
||||
monotonicity.RecordedActivations[0].BundleDigest.Should().Be("sha256:bundle");
|
||||
monotonicity.RecordedActivations[0].Version.SemVer.Should().Be("1.0.0");
|
||||
|
||||
quarantine.Requests.Should().BeEmpty();
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
Directory.Delete(tempRoot, recursive: true);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// best-effort cleanup
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] BuildPae(string payloadType, string payload)
|
||||
{
|
||||
var parts = new[] { "DSSEv1", payloadType, payload };
|
||||
var paeBuilder = new System.Text.StringBuilder();
|
||||
paeBuilder.Append("PAE:");
|
||||
paeBuilder.Append(parts.Length);
|
||||
foreach (var part in parts)
|
||||
{
|
||||
paeBuilder.Append(' ');
|
||||
paeBuilder.Append(part.Length);
|
||||
paeBuilder.Append(' ');
|
||||
paeBuilder.Append(part);
|
||||
}
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(paeBuilder.ToString());
|
||||
}
|
||||
|
||||
private static string Fingerprint(byte[] pub) => Convert.ToHexString(SHA256.HashData(pub)).ToLowerInvariant();
|
||||
|
||||
private static ImportValidationRequest BuildRequest(string bundlePath, string rootJson, string snapshotJson, string timestampJson)
|
||||
{
|
||||
var envelope = new DsseEnvelope("text/plain", Convert.ToBase64String("hi"u8), Array.Empty<DsseSignature>());
|
||||
var trustRoot = TrustRootConfig.Empty("/tmp");
|
||||
var trustStore = new TrustStore();
|
||||
return new ImportValidationRequest(
|
||||
TenantId: "tenant-a",
|
||||
BundleType: "offline-kit",
|
||||
BundleDigest: "sha256:bundle",
|
||||
BundlePath: bundlePath,
|
||||
ManifestJson: null,
|
||||
ManifestVersion: "1.0.0",
|
||||
ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"),
|
||||
ForceActivate: false,
|
||||
ForceActivateReason: null,
|
||||
Envelope: envelope,
|
||||
TrustRoots: trustRoot,
|
||||
RootJson: rootJson,
|
||||
SnapshotJson: snapshotJson,
|
||||
TimestampJson: timestampJson,
|
||||
PayloadEntries: Array.Empty<NamedStream>(),
|
||||
TrustStore: trustStore,
|
||||
ApproverIds: Array.Empty<string>());
|
||||
}
|
||||
|
||||
private sealed class CapturingMonotonicityChecker : IVersionMonotonicityChecker
|
||||
{
|
||||
public List<(BundleVersion Version, string BundleDigest)> RecordedActivations { get; } = new();
|
||||
|
||||
public Task<MonotonicityCheckResult> CheckAsync(string tenantId, string bundleType, BundleVersion incomingVersion, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult(new MonotonicityCheckResult(
|
||||
IsMonotonic: true,
|
||||
CurrentVersion: null,
|
||||
CurrentBundleDigest: null,
|
||||
CurrentActivatedAt: null,
|
||||
ReasonCode: "FIRST_ACTIVATION"));
|
||||
}
|
||||
|
||||
public Task RecordActivationAsync(
|
||||
string tenantId,
|
||||
string bundleType,
|
||||
BundleVersion version,
|
||||
string bundleDigest,
|
||||
bool wasForceActivated = false,
|
||||
string? forceActivateReason = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
RecordedActivations.Add((version, bundleDigest));
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class CapturingQuarantineService : IQuarantineService
|
||||
{
|
||||
public List<QuarantineRequest> Requests { get; } = new();
|
||||
|
||||
public Task<QuarantineResult> QuarantineAsync(QuarantineRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
Requests.Add(request);
|
||||
return Task.FromResult(new QuarantineResult(
|
||||
Success: true,
|
||||
QuarantineId: "test",
|
||||
QuarantinePath: "(memory)",
|
||||
QuarantinedAt: DateTimeOffset.UnixEpoch));
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<QuarantineEntry>> ListAsync(string tenantId, QuarantineListOptions? options = null, CancellationToken cancellationToken = default) =>
|
||||
Task.FromResult<IReadOnlyList<QuarantineEntry>>(Array.Empty<QuarantineEntry>());
|
||||
|
||||
public Task<bool> RemoveAsync(string tenantId, string quarantineId, string removalReason, CancellationToken cancellationToken = default) =>
|
||||
Task.FromResult(false);
|
||||
|
||||
public Task<int> CleanupExpiredAsync(TimeSpan retentionPeriod, CancellationToken cancellationToken = default) =>
|
||||
Task.FromResult(0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
using StellaOps.AirGap.Importer.Models;
|
||||
using StellaOps.AirGap.Importer.Repositories;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public class InMemoryBundleRepositoriesTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task CatalogUpsertOverwritesPerTenant()
|
||||
{
|
||||
var repo = new InMemoryBundleCatalogRepository();
|
||||
var entry1 = new BundleCatalogEntry("t1", "b1", "d1", DateTimeOffset.UnixEpoch, new[] { "a" });
|
||||
var entry2 = new BundleCatalogEntry("t1", "b1", "d2", DateTimeOffset.UnixEpoch.AddMinutes(1), new[] { "b" });
|
||||
|
||||
await repo.UpsertAsync(entry1, default);
|
||||
await repo.UpsertAsync(entry2, default);
|
||||
|
||||
var list = await repo.ListAsync("t1", default);
|
||||
Assert.Single(list);
|
||||
Assert.Equal("d2", list[0].Digest);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CatalogIsTenantIsolated()
|
||||
{
|
||||
var repo = new InMemoryBundleCatalogRepository();
|
||||
await repo.UpsertAsync(new BundleCatalogEntry("t1", "b1", "d1", DateTimeOffset.UnixEpoch, Array.Empty<string>()), default);
|
||||
await repo.UpsertAsync(new BundleCatalogEntry("t2", "b1", "d2", DateTimeOffset.UnixEpoch, Array.Empty<string>()), default);
|
||||
|
||||
var t1 = await repo.ListAsync("t1", default);
|
||||
Assert.Single(t1);
|
||||
Assert.Equal("d1", t1[0].Digest);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ItemsOrderedByPath()
|
||||
{
|
||||
var repo = new InMemoryBundleItemRepository();
|
||||
await repo.UpsertManyAsync(new[]
|
||||
{
|
||||
new BundleItem("t1", "b1", "b.txt", "d2", 10),
|
||||
new BundleItem("t1", "b1", "a.txt", "d1", 5)
|
||||
}, default);
|
||||
|
||||
var list = await repo.ListByBundleAsync("t1", "b1", default);
|
||||
Assert.Equal(new[] { "a.txt", "b.txt" }, list.Select(i => i.Path).ToArray());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ItemsTenantIsolated()
|
||||
{
|
||||
var repo = new InMemoryBundleItemRepository();
|
||||
await repo.UpsertManyAsync(new[]
|
||||
{
|
||||
new BundleItem("t1", "b1", "a.txt", "d1", 1),
|
||||
new BundleItem("t2", "b1", "a.txt", "d2", 1)
|
||||
}, default);
|
||||
|
||||
var list = await repo.ListByBundleAsync("t1", "b1", default);
|
||||
Assert.Single(list);
|
||||
Assert.Equal("d1", list[0].Digest);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public class MerkleRootCalculatorTests
|
||||
{
|
||||
[Fact]
|
||||
public void EmptySetProducesEmptyRoot()
|
||||
{
|
||||
var calc = new MerkleRootCalculator();
|
||||
var root = calc.ComputeRoot(Array.Empty<NamedStream>());
|
||||
Assert.Equal(string.Empty, root);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DeterministicAcrossOrder()
|
||||
{
|
||||
var calc = new MerkleRootCalculator();
|
||||
var a = new NamedStream("b.txt", new MemoryStream("two"u8.ToArray()));
|
||||
var b = new NamedStream("a.txt", new MemoryStream("one"u8.ToArray()));
|
||||
|
||||
var root1 = calc.ComputeRoot(new[] { a, b });
|
||||
var root2 = calc.ComputeRoot(new[] { b, a });
|
||||
|
||||
Assert.Equal(root1, root2);
|
||||
Assert.NotEqual(string.Empty, root1);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
using StellaOps.AirGap.Importer.Telemetry;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests;
|
||||
|
||||
public sealed class OfflineKitMetricsTests : IDisposable
|
||||
{
|
||||
private readonly MeterListener _listener;
|
||||
private readonly List<RecordedMeasurement> _measurements = [];
|
||||
|
||||
public OfflineKitMetricsTests()
|
||||
{
|
||||
_listener = new MeterListener();
|
||||
_listener.InstrumentPublished = (instrument, listener) =>
|
||||
{
|
||||
if (instrument.Meter.Name == OfflineKitMetrics.MeterName)
|
||||
{
|
||||
listener.EnableMeasurementEvents(instrument);
|
||||
}
|
||||
};
|
||||
|
||||
_listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
|
||||
{
|
||||
_measurements.Add(new RecordedMeasurement(instrument.Name, measurement, tags.ToArray()));
|
||||
});
|
||||
_listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
|
||||
{
|
||||
_measurements.Add(new RecordedMeasurement(instrument.Name, measurement, tags.ToArray()));
|
||||
});
|
||||
_listener.Start();
|
||||
}
|
||||
|
||||
public void Dispose() => _listener.Dispose();
|
||||
|
||||
[Fact]
|
||||
public void RecordImport_EmitsCounterWithLabels()
|
||||
{
|
||||
using var metrics = new OfflineKitMetrics();
|
||||
|
||||
metrics.RecordImport(status: "success", tenantId: "tenant-a");
|
||||
|
||||
Assert.Contains(_measurements, m =>
|
||||
m.Name == "offlinekit_import_total" &&
|
||||
m.Value is long v &&
|
||||
v == 1 &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.Status, "success") &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.TenantId, "tenant-a"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordAttestationVerifyLatency_EmitsHistogramWithLabels()
|
||||
{
|
||||
using var metrics = new OfflineKitMetrics();
|
||||
|
||||
metrics.RecordAttestationVerifyLatency(attestationType: "dsse", seconds: 1.234, success: true);
|
||||
|
||||
Assert.Contains(_measurements, m =>
|
||||
m.Name == "offlinekit_attestation_verify_latency_seconds" &&
|
||||
m.Value is double v &&
|
||||
Math.Abs(v - 1.234) < 0.000_001 &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.AttestationType, "dsse") &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.Success, "true"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordRekorSuccess_EmitsCounterWithLabels()
|
||||
{
|
||||
using var metrics = new OfflineKitMetrics();
|
||||
|
||||
metrics.RecordRekorSuccess(mode: "offline");
|
||||
|
||||
Assert.Contains(_measurements, m =>
|
||||
m.Name == "attestor_rekor_success_total" &&
|
||||
m.Value is long v &&
|
||||
v == 1 &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.Mode, "offline"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordRekorRetry_EmitsCounterWithLabels()
|
||||
{
|
||||
using var metrics = new OfflineKitMetrics();
|
||||
|
||||
metrics.RecordRekorRetry(reason: "stale_snapshot");
|
||||
|
||||
Assert.Contains(_measurements, m =>
|
||||
m.Name == "attestor_rekor_retry_total" &&
|
||||
m.Value is long v &&
|
||||
v == 1 &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.Reason, "stale_snapshot"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordRekorInclusionLatency_EmitsHistogramWithLabels()
|
||||
{
|
||||
using var metrics = new OfflineKitMetrics();
|
||||
|
||||
metrics.RecordRekorInclusionLatency(seconds: 0.5, success: false);
|
||||
|
||||
Assert.Contains(_measurements, m =>
|
||||
m.Name == "rekor_inclusion_latency" &&
|
||||
m.Value is double v &&
|
||||
Math.Abs(v - 0.5) < 0.000_001 &&
|
||||
m.HasTag(OfflineKitMetrics.TagNames.Success, "false"));
|
||||
}
|
||||
|
||||
private sealed record RecordedMeasurement(string Name, object Value, IReadOnlyList<KeyValuePair<string, object?>> Tags)
|
||||
{
|
||||
public bool HasTag(string key, string expectedValue) =>
|
||||
Tags.Any(t => t.Key == key && string.Equals(t.Value?.ToString(), expectedValue, StringComparison.Ordinal));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,155 @@
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AirGap.Importer.Quarantine;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Quarantine;
|
||||
|
||||
public sealed class FileSystemQuarantineServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task QuarantineAsync_ShouldCreateExpectedFiles_AndListAsyncShouldReturnEntry()
|
||||
{
|
||||
var root = CreateTempDirectory();
|
||||
try
|
||||
{
|
||||
var bundlePath = Path.Combine(root, "bundle.tar.zst");
|
||||
await File.WriteAllTextAsync(bundlePath, "bundle-bytes");
|
||||
|
||||
var options = Options.Create(new QuarantineOptions
|
||||
{
|
||||
QuarantineRoot = Path.Combine(root, "quarantine"),
|
||||
RetentionPeriod = TimeSpan.FromDays(30),
|
||||
MaxQuarantineSizeBytes = 1024 * 1024,
|
||||
EnableAutomaticCleanup = true
|
||||
});
|
||||
|
||||
var svc = new FileSystemQuarantineService(
|
||||
options,
|
||||
NullLogger<FileSystemQuarantineService>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
var result = await svc.QuarantineAsync(new QuarantineRequest(
|
||||
TenantId: "tenant-a",
|
||||
BundlePath: bundlePath,
|
||||
ManifestJson: "{\"version\":\"1.0.0\"}",
|
||||
ReasonCode: "dsse:invalid",
|
||||
ReasonMessage: "dsse:invalid",
|
||||
VerificationLog: new[] { "tuf:ok", "dsse:invalid" },
|
||||
Metadata: new Dictionary<string, string> { ["k"] = "v" }));
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
Directory.Exists(result.QuarantinePath).Should().BeTrue();
|
||||
|
||||
File.Exists(Path.Combine(result.QuarantinePath, "bundle.tar.zst")).Should().BeTrue();
|
||||
File.Exists(Path.Combine(result.QuarantinePath, "manifest.json")).Should().BeTrue();
|
||||
File.Exists(Path.Combine(result.QuarantinePath, "verification.log")).Should().BeTrue();
|
||||
File.Exists(Path.Combine(result.QuarantinePath, "failure-reason.txt")).Should().BeTrue();
|
||||
File.Exists(Path.Combine(result.QuarantinePath, "quarantine.json")).Should().BeTrue();
|
||||
|
||||
var listed = await svc.ListAsync("tenant-a");
|
||||
listed.Should().ContainSingle(e => e.QuarantineId == result.QuarantineId);
|
||||
}
|
||||
finally
|
||||
{
|
||||
SafeDeleteDirectory(root);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveAsync_ShouldMoveToRemovedFolder()
|
||||
{
|
||||
var root = CreateTempDirectory();
|
||||
try
|
||||
{
|
||||
var bundlePath = Path.Combine(root, "bundle.tar.zst");
|
||||
await File.WriteAllTextAsync(bundlePath, "bundle-bytes");
|
||||
|
||||
var quarantineRoot = Path.Combine(root, "quarantine");
|
||||
var options = Options.Create(new QuarantineOptions { QuarantineRoot = quarantineRoot, MaxQuarantineSizeBytes = 1024 * 1024 });
|
||||
var svc = new FileSystemQuarantineService(options, NullLogger<FileSystemQuarantineService>.Instance, TimeProvider.System);
|
||||
|
||||
var result = await svc.QuarantineAsync(new QuarantineRequest(
|
||||
TenantId: "tenant-a",
|
||||
BundlePath: bundlePath,
|
||||
ManifestJson: null,
|
||||
ReasonCode: "tuf:invalid",
|
||||
ReasonMessage: "tuf:invalid",
|
||||
VerificationLog: new[] { "tuf:invalid" }));
|
||||
|
||||
var removed = await svc.RemoveAsync("tenant-a", result.QuarantineId, "investigated");
|
||||
removed.Should().BeTrue();
|
||||
|
||||
Directory.Exists(result.QuarantinePath).Should().BeFalse();
|
||||
Directory.Exists(Path.Combine(quarantineRoot, "tenant-a", ".removed", result.QuarantineId)).Should().BeTrue();
|
||||
}
|
||||
finally
|
||||
{
|
||||
SafeDeleteDirectory(root);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CleanupExpiredAsync_ShouldDeleteOldEntries()
|
||||
{
|
||||
var root = CreateTempDirectory();
|
||||
try
|
||||
{
|
||||
var bundlePath = Path.Combine(root, "bundle.tar.zst");
|
||||
await File.WriteAllTextAsync(bundlePath, "bundle-bytes");
|
||||
|
||||
var quarantineRoot = Path.Combine(root, "quarantine");
|
||||
var options = Options.Create(new QuarantineOptions { QuarantineRoot = quarantineRoot, MaxQuarantineSizeBytes = 1024 * 1024 });
|
||||
var svc = new FileSystemQuarantineService(options, NullLogger<FileSystemQuarantineService>.Instance, TimeProvider.System);
|
||||
|
||||
var result = await svc.QuarantineAsync(new QuarantineRequest(
|
||||
TenantId: "tenant-a",
|
||||
BundlePath: bundlePath,
|
||||
ManifestJson: null,
|
||||
ReasonCode: "tuf:invalid",
|
||||
ReasonMessage: "tuf:invalid",
|
||||
VerificationLog: new[] { "tuf:invalid" }));
|
||||
|
||||
var jsonPath = Path.Combine(result.QuarantinePath, "quarantine.json");
|
||||
var json = await File.ReadAllTextAsync(jsonPath);
|
||||
var jsonOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true };
|
||||
var entry = JsonSerializer.Deserialize<QuarantineEntry>(json, jsonOptions);
|
||||
entry.Should().NotBeNull();
|
||||
|
||||
var oldEntry = entry! with { QuarantinedAt = DateTimeOffset.Parse("1900-01-01T00:00:00Z") };
|
||||
await File.WriteAllTextAsync(jsonPath, JsonSerializer.Serialize(oldEntry, jsonOptions));
|
||||
|
||||
var removed = await svc.CleanupExpiredAsync(TimeSpan.FromDays(30));
|
||||
removed.Should().BeGreaterThanOrEqualTo(1);
|
||||
Directory.Exists(result.QuarantinePath).Should().BeFalse();
|
||||
}
|
||||
finally
|
||||
{
|
||||
SafeDeleteDirectory(root);
|
||||
}
|
||||
}
|
||||
|
||||
private static string CreateTempDirectory()
|
||||
{
|
||||
var dir = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(dir);
|
||||
return dir;
|
||||
}
|
||||
|
||||
private static void SafeDeleteDirectory(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(path))
|
||||
{
|
||||
Directory.Delete(path, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// best-effort cleanup
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,65 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.AirGap.Importer.Reconciliation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
public sealed class ArtifactIndexTests
|
||||
{
|
||||
[Fact]
|
||||
public void NormalizeDigest_BareHex_AddsPrefixAndLowercases()
|
||||
{
|
||||
var hex = new string('A', 64);
|
||||
ArtifactIndex.NormalizeDigest(hex).Should().Be("sha256:" + new string('a', 64));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NormalizeDigest_WithSha256Prefix_IsCanonical()
|
||||
{
|
||||
var hex = new string('B', 64);
|
||||
ArtifactIndex.NormalizeDigest("sha256:" + hex).Should().Be("sha256:" + new string('b', 64));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NormalizeDigest_WithOtherAlgorithm_Throws()
|
||||
{
|
||||
var ex = Assert.Throws<FormatException>(() => ArtifactIndex.NormalizeDigest("sha512:" + new string('a', 64)));
|
||||
ex.Message.Should().Contain("Only sha256");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddOrUpdate_MergesEntries_DeduplicatesAndSorts()
|
||||
{
|
||||
var digest = new string('c', 64);
|
||||
|
||||
var entryA = ArtifactEntry.Empty(digest) with
|
||||
{
|
||||
Sboms = new[]
|
||||
{
|
||||
new SbomReference("b", "b.json", SbomFormat.CycloneDx, null),
|
||||
new SbomReference("a", "a.json", SbomFormat.Spdx, null),
|
||||
}
|
||||
};
|
||||
|
||||
var entryB = ArtifactEntry.Empty("sha256:" + digest.ToUpperInvariant()) with
|
||||
{
|
||||
Sboms = new[]
|
||||
{
|
||||
new SbomReference("a", "a2.json", SbomFormat.CycloneDx, null),
|
||||
new SbomReference("c", "c.json", SbomFormat.Spdx, null),
|
||||
}
|
||||
};
|
||||
|
||||
var index = new ArtifactIndex();
|
||||
index.AddOrUpdate(entryA);
|
||||
index.AddOrUpdate(entryB);
|
||||
|
||||
var stored = index.Get("sha256:" + digest);
|
||||
stored.Should().NotBeNull();
|
||||
stored!.Digest.Should().Be("sha256:" + digest);
|
||||
|
||||
stored.Sboms.Select(s => (s.ContentHash, s.FilePath)).Should().Equal(
|
||||
("a", "a.json"),
|
||||
("b", "b.json"),
|
||||
("c", "c.json"));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,136 @@
|
||||
// =============================================================================
|
||||
// CycloneDxParserTests.cs
|
||||
// Golden-file tests for CycloneDX SBOM parsing
|
||||
// Part of Task T24: Golden-file tests for determinism
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.AirGap.Importer.Reconciliation;
|
||||
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
public sealed class CycloneDxParserTests
|
||||
{
|
||||
private static readonly string FixturesPath = Path.Combine(
|
||||
AppDomain.CurrentDomain.BaseDirectory,
|
||||
"Reconciliation", "Fixtures");
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ValidCycloneDx_ExtractsAllSubjects()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new CycloneDxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.cdx.json");
|
||||
|
||||
// Skip if fixtures not available
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue();
|
||||
result.Format.Should().Be(SbomFormat.CycloneDx);
|
||||
result.SpecVersion.Should().Be("1.6");
|
||||
result.SerialNumber.Should().Be("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79");
|
||||
result.GeneratorTool.Should().Contain("syft");
|
||||
|
||||
// Should have 3 subjects with SHA-256 hashes (primary + 2 components)
|
||||
result.Subjects.Should().HaveCount(3);
|
||||
|
||||
// Verify subjects are sorted by digest
|
||||
result.Subjects.Should().BeInAscendingOrder(s => s.Digest, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ExtractsPrimarySubject()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new CycloneDxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.cdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.PrimarySubject.Should().NotBeNull();
|
||||
result.PrimarySubject!.Name.Should().Be("test-app");
|
||||
result.PrimarySubject.Version.Should().Be("1.0.0");
|
||||
result.PrimarySubject.Digest.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_SubjectDigestsAreNormalized()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new CycloneDxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.cdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - all digests should be normalized sha256:lowercase format
|
||||
foreach (var subject in result.Subjects)
|
||||
{
|
||||
subject.Digest.Should().StartWith("sha256:");
|
||||
subject.Digest[7..].Should().MatchRegex("^[a-f0-9]{64}$");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DetectFormat_CycloneDxFile_ReturnsCycloneDx()
|
||||
{
|
||||
var parser = new CycloneDxParser();
|
||||
parser.DetectFormat("test.cdx.json").Should().Be(SbomFormat.CycloneDx);
|
||||
parser.DetectFormat("test.bom.json").Should().Be(SbomFormat.CycloneDx);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DetectFormat_NonCycloneDxFile_ReturnsUnknown()
|
||||
{
|
||||
var parser = new CycloneDxParser();
|
||||
parser.DetectFormat("test.spdx.json").Should().Be(SbomFormat.Unknown);
|
||||
parser.DetectFormat("test.json").Should().Be(SbomFormat.Unknown);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_Deterministic_SameOutputForSameInput()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new CycloneDxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.cdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act - parse twice
|
||||
var result1 = await parser.ParseAsync(filePath);
|
||||
var result2 = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - results should be identical
|
||||
result1.Subjects.Select(s => s.Digest)
|
||||
.Should().BeEquivalentTo(result2.Subjects.Select(s => s.Digest));
|
||||
|
||||
result1.Subjects.Select(s => s.Name)
|
||||
.Should().BeEquivalentTo(result2.Subjects.Select(s => s.Name));
|
||||
|
||||
// Order should be the same
|
||||
result1.Subjects.Select(s => s.Digest).Should().Equal(result2.Subjects.Select(s => s.Digest));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,141 @@
|
||||
// =============================================================================
|
||||
// DsseAttestationParserTests.cs
|
||||
// Golden-file tests for DSSE attestation parsing
|
||||
// Part of Task T24: Golden-file tests for determinism
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
public sealed class DsseAttestationParserTests
|
||||
{
|
||||
private static readonly string FixturesPath = Path.Combine(
|
||||
AppDomain.CurrentDomain.BaseDirectory,
|
||||
"Reconciliation", "Fixtures");
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ValidDsse_ExtractsEnvelope()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new DsseAttestationParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.intoto.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue();
|
||||
result.Envelope.Should().NotBeNull();
|
||||
result.Envelope!.PayloadType.Should().Be("application/vnd.in-toto+json");
|
||||
result.Envelope.Signatures.Should().HaveCount(1);
|
||||
result.Envelope.Signatures[0].KeyId.Should().Be("test-key-id");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ValidDsse_ExtractsStatement()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new DsseAttestationParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.intoto.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.Statement.Should().NotBeNull();
|
||||
result.Statement!.Type.Should().Be("https://in-toto.io/Statement/v1");
|
||||
result.Statement.PredicateType.Should().Be("https://slsa.dev/provenance/v1");
|
||||
result.Statement.Subjects.Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ExtractsSubjectDigests()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new DsseAttestationParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.intoto.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
var subject = result.Statement!.Subjects[0];
|
||||
subject.Name.Should().Be("test-app");
|
||||
subject.GetSha256Digest().Should().Be("sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsAttestation_DsseFile_ReturnsTrue()
|
||||
{
|
||||
var parser = new DsseAttestationParser();
|
||||
parser.IsAttestation("test.intoto.json").Should().BeTrue();
|
||||
parser.IsAttestation("test.intoto.jsonl").Should().BeTrue();
|
||||
parser.IsAttestation("test.dsig").Should().BeTrue();
|
||||
parser.IsAttestation("test.dsse").Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsAttestation_NonDsseFile_ReturnsFalse()
|
||||
{
|
||||
var parser = new DsseAttestationParser();
|
||||
parser.IsAttestation("test.json").Should().BeFalse();
|
||||
parser.IsAttestation("test.cdx.json").Should().BeFalse();
|
||||
parser.IsAttestation("test.spdx.json").Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_Deterministic_SameOutputForSameInput()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new DsseAttestationParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.intoto.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act - parse twice
|
||||
var result1 = await parser.ParseAsync(filePath);
|
||||
var result2 = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - results should be identical
|
||||
result1.Statement!.PredicateType.Should().Be(result2.Statement!.PredicateType);
|
||||
result1.Statement.Subjects.Count.Should().Be(result2.Statement.Subjects.Count);
|
||||
result1.Statement.Subjects[0].GetSha256Digest()
|
||||
.Should().Be(result2.Statement.Subjects[0].GetSha256Digest());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_InvalidJson_ReturnsFailure()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new DsseAttestationParser();
|
||||
var json = "not valid json";
|
||||
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(json));
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(stream);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeFalse();
|
||||
result.ErrorMessage.Should().Contain("parsing error");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.AirGap.Importer.Reconciliation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
public sealed class EvidenceDirectoryDiscoveryTests
|
||||
{
|
||||
[Fact]
|
||||
public void Discover_ReturnsDeterministicRelativePathsAndHashes()
|
||||
{
|
||||
var root = Path.Combine(Path.GetTempPath(), "stellaops-evidence-" + Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(root);
|
||||
|
||||
try
|
||||
{
|
||||
WriteUtf8(Path.Combine(root, "sboms", "a.cdx.json"), "{\"bom\":1}");
|
||||
WriteUtf8(Path.Combine(root, "attestations", "z.intoto.jsonl.dsig"), "dsse");
|
||||
WriteUtf8(Path.Combine(root, "vex", "v.openvex.json"), "{\"vex\":true}");
|
||||
|
||||
var discovered = EvidenceDirectoryDiscovery.Discover(root);
|
||||
discovered.Should().HaveCount(3);
|
||||
|
||||
discovered.Select(d => d.RelativePath).Should().Equal(
|
||||
"attestations/z.intoto.jsonl.dsig",
|
||||
"sboms/a.cdx.json",
|
||||
"vex/v.openvex.json");
|
||||
|
||||
discovered[0].Kind.Should().Be(EvidenceFileKind.Attestation);
|
||||
discovered[1].Kind.Should().Be(EvidenceFileKind.Sbom);
|
||||
discovered[2].Kind.Should().Be(EvidenceFileKind.Vex);
|
||||
|
||||
discovered[0].ContentSha256.Should().Be(HashUtf8("dsse"));
|
||||
discovered[1].ContentSha256.Should().Be(HashUtf8("{\"bom\":1}"));
|
||||
discovered[2].ContentSha256.Should().Be(HashUtf8("{\"vex\":true}"));
|
||||
}
|
||||
finally
|
||||
{
|
||||
Directory.Delete(root, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Discover_WhenDirectoryMissing_Throws()
|
||||
{
|
||||
var missing = Path.Combine(Path.GetTempPath(), "stellaops-missing-" + Guid.NewGuid().ToString("N"));
|
||||
Action act = () => EvidenceDirectoryDiscovery.Discover(missing);
|
||||
act.Should().Throw<DirectoryNotFoundException>();
|
||||
}
|
||||
|
||||
private static void WriteUtf8(string path, string content)
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(path)!);
|
||||
File.WriteAllText(path, content, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false));
|
||||
}
|
||||
|
||||
private static string HashUtf8(string content)
|
||||
{
|
||||
using var sha256 = SHA256.Create();
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
var hash = sha256.ComputeHash(bytes);
|
||||
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.6",
|
||||
"version": 1,
|
||||
"serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79",
|
||||
"metadata": {
|
||||
"timestamp": "2025-01-15T10:00:00Z",
|
||||
"component": {
|
||||
"type": "application",
|
||||
"name": "test-app",
|
||||
"version": "1.0.0",
|
||||
"hashes": [
|
||||
{
|
||||
"alg": "SHA-256",
|
||||
"content": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tools": {
|
||||
"components": [
|
||||
{
|
||||
"name": "syft",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"name": "zlib",
|
||||
"version": "1.2.11",
|
||||
"bom-ref": "pkg:generic/zlib@1.2.11",
|
||||
"purl": "pkg:generic/zlib@1.2.11",
|
||||
"hashes": [
|
||||
{
|
||||
"alg": "SHA-256",
|
||||
"content": "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"name": "openssl",
|
||||
"version": "3.0.0",
|
||||
"bom-ref": "pkg:generic/openssl@3.0.0",
|
||||
"purl": "pkg:generic/openssl@3.0.0",
|
||||
"hashes": [
|
||||
{
|
||||
"alg": "SHA-256",
|
||||
"content": "919b4a3e65a8deade6b3c94dd44cb98e0f65a1785a787689c23e6b5c0b4edfea"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"payloadType": "application/vnd.in-toto+json",
|
||||
"payload": "eyJfdHlwZSI6Imh0dHBzOi8vaW4tdG90by5pby9TdGF0ZW1lbnQvdjEiLCJwcmVkaWNhdGVUeXBlIjoiaHR0cHM6Ly9zbHNhLmRldi9wcm92ZW5hbmNlL3YxIiwic3ViamVjdCI6W3sibmFtZSI6InRlc3QtYXBwIiwiZGlnZXN0Ijp7InNoYTI1NiI6ImUzYjBjNDQyOThmYzFjMTQ5YWZiZjRjODk5NmZiOTI0MjdhZTQxZTQ2NDliOTM0Y2E0OTU5OTFiNzg1MmI4NTUifX1dLCJwcmVkaWNhdGUiOnsiYnVpbGRlcklkIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9idWlsZGVyIiwiYnVpbGRUeXBlIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9idWlsZC10eXBlIn19",
|
||||
"signatures": [
|
||||
{
|
||||
"keyid": "test-key-id",
|
||||
"sig": "MEUCIQDFmJRQSwWMbQGiS8X5mY9CvZxVbVmXJ7JQVGEYIhXEBQIgbqDBJxP2P9N2kGPXDlX7Qx8KPVQjN3P1Y5Z9A8B2C3D="
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,88 @@
|
||||
{
|
||||
"spdxVersion": "SPDX-2.3",
|
||||
"dataLicense": "CC0-1.0",
|
||||
"SPDXID": "SPDXRef-DOCUMENT",
|
||||
"name": "test-app-sbom",
|
||||
"documentNamespace": "https://example.com/test-app/1.0.0",
|
||||
"creationInfo": {
|
||||
"created": "2025-01-15T10:00:00Z",
|
||||
"creators": [
|
||||
"Tool: syft-1.0.0"
|
||||
]
|
||||
},
|
||||
"documentDescribes": [
|
||||
"SPDXRef-Package-test-app"
|
||||
],
|
||||
"packages": [
|
||||
{
|
||||
"SPDXID": "SPDXRef-Package-test-app",
|
||||
"name": "test-app",
|
||||
"versionInfo": "1.0.0",
|
||||
"downloadLocation": "NOASSERTION",
|
||||
"filesAnalyzed": false,
|
||||
"checksums": [
|
||||
{
|
||||
"algorithm": "SHA256",
|
||||
"checksumValue": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"SPDXID": "SPDXRef-Package-zlib",
|
||||
"name": "zlib",
|
||||
"versionInfo": "1.2.11",
|
||||
"downloadLocation": "NOASSERTION",
|
||||
"filesAnalyzed": false,
|
||||
"checksums": [
|
||||
{
|
||||
"algorithm": "SHA256",
|
||||
"checksumValue": "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1"
|
||||
}
|
||||
],
|
||||
"externalRefs": [
|
||||
{
|
||||
"referenceCategory": "PACKAGE-MANAGER",
|
||||
"referenceType": "purl",
|
||||
"referenceLocator": "pkg:generic/zlib@1.2.11"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"SPDXID": "SPDXRef-Package-openssl",
|
||||
"name": "openssl",
|
||||
"versionInfo": "3.0.0",
|
||||
"downloadLocation": "NOASSERTION",
|
||||
"filesAnalyzed": false,
|
||||
"checksums": [
|
||||
{
|
||||
"algorithm": "SHA256",
|
||||
"checksumValue": "919b4a3e65a8deade6b3c94dd44cb98e0f65a1785a787689c23e6b5c0b4edfea"
|
||||
}
|
||||
],
|
||||
"externalRefs": [
|
||||
{
|
||||
"referenceCategory": "PACKAGE-MANAGER",
|
||||
"referenceType": "purl",
|
||||
"referenceLocator": "pkg:generic/openssl@3.0.0"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"relationships": [
|
||||
{
|
||||
"spdxElementId": "SPDXRef-DOCUMENT",
|
||||
"relatedSpdxElement": "SPDXRef-Package-test-app",
|
||||
"relationshipType": "DESCRIBES"
|
||||
},
|
||||
{
|
||||
"spdxElementId": "SPDXRef-Package-test-app",
|
||||
"relatedSpdxElement": "SPDXRef-Package-zlib",
|
||||
"relationshipType": "DEPENDS_ON"
|
||||
},
|
||||
{
|
||||
"spdxElementId": "SPDXRef-Package-test-app",
|
||||
"relatedSpdxElement": "SPDXRef-Package-openssl",
|
||||
"relationshipType": "DEPENDS_ON"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,453 @@
|
||||
// =============================================================================
|
||||
// SourcePrecedenceLatticePropertyTests.cs
|
||||
// Property-based tests for lattice properties
|
||||
// Part of Task T25: Write property-based tests
|
||||
// =============================================================================
|
||||
|
||||
using StellaOps.AirGap.Importer.Reconciliation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
/// <summary>
|
||||
/// Property-based tests verifying lattice algebraic properties.
|
||||
/// A lattice must satisfy: associativity, commutativity, idempotence, and absorption.
|
||||
/// </summary>
|
||||
public sealed class SourcePrecedenceLatticePropertyTests
|
||||
{
|
||||
private static readonly SourcePrecedence[] AllPrecedences =
|
||||
[
|
||||
SourcePrecedence.Unknown,
|
||||
SourcePrecedence.ThirdParty,
|
||||
SourcePrecedence.Maintainer,
|
||||
SourcePrecedence.Vendor
|
||||
];
|
||||
|
||||
#region Lattice Algebraic Properties
|
||||
|
||||
/// <summary>
|
||||
/// Property: Join is commutative - Join(a, b) = Join(b, a)
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Join_IsCommutative()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var joinAB = SourcePrecedenceLattice.Join(a, b);
|
||||
var joinBA = SourcePrecedenceLattice.Join(b, a);
|
||||
|
||||
Assert.Equal(joinAB, joinBA);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Meet is commutative - Meet(a, b) = Meet(b, a)
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Meet_IsCommutative()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var meetAB = SourcePrecedenceLattice.Meet(a, b);
|
||||
var meetBA = SourcePrecedenceLattice.Meet(b, a);
|
||||
|
||||
Assert.Equal(meetAB, meetBA);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Join is associative - Join(Join(a, b), c) = Join(a, Join(b, c))
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Join_IsAssociative()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
foreach (var c in AllPrecedences)
|
||||
{
|
||||
var left = SourcePrecedenceLattice.Join(SourcePrecedenceLattice.Join(a, b), c);
|
||||
var right = SourcePrecedenceLattice.Join(a, SourcePrecedenceLattice.Join(b, c));
|
||||
|
||||
Assert.Equal(left, right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Meet is associative - Meet(Meet(a, b), c) = Meet(a, Meet(b, c))
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Meet_IsAssociative()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
foreach (var c in AllPrecedences)
|
||||
{
|
||||
var left = SourcePrecedenceLattice.Meet(SourcePrecedenceLattice.Meet(a, b), c);
|
||||
var right = SourcePrecedenceLattice.Meet(a, SourcePrecedenceLattice.Meet(b, c));
|
||||
|
||||
Assert.Equal(left, right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Join is idempotent - Join(a, a) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Join_IsIdempotent()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
var result = SourcePrecedenceLattice.Join(a, a);
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Meet is idempotent - Meet(a, a) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Meet_IsIdempotent()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
var result = SourcePrecedenceLattice.Meet(a, a);
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Absorption law 1 - Join(a, Meet(a, b)) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Absorption_JoinMeet_ReturnsFirst()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var meet = SourcePrecedenceLattice.Meet(a, b);
|
||||
var result = SourcePrecedenceLattice.Join(a, meet);
|
||||
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Absorption law 2 - Meet(a, Join(a, b)) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Absorption_MeetJoin_ReturnsFirst()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var join = SourcePrecedenceLattice.Join(a, b);
|
||||
var result = SourcePrecedenceLattice.Meet(a, join);
|
||||
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Ordering Properties
|
||||
|
||||
/// <summary>
|
||||
/// Property: Compare is antisymmetric - if Compare(a,b) > 0 then Compare(b,a) < 0
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Compare_IsAntisymmetric()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var compareAB = SourcePrecedenceLattice.Compare(a, b);
|
||||
var compareBA = SourcePrecedenceLattice.Compare(b, a);
|
||||
|
||||
if (compareAB > 0)
|
||||
{
|
||||
Assert.True(compareBA < 0);
|
||||
}
|
||||
else if (compareAB < 0)
|
||||
{
|
||||
Assert.True(compareBA > 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal(0, compareBA);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Compare is transitive - if Compare(a,b) > 0 and Compare(b,c) > 0 then Compare(a,c) > 0
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Compare_IsTransitive()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
foreach (var c in AllPrecedences)
|
||||
{
|
||||
var ab = SourcePrecedenceLattice.Compare(a, b);
|
||||
var bc = SourcePrecedenceLattice.Compare(b, c);
|
||||
var ac = SourcePrecedenceLattice.Compare(a, c);
|
||||
|
||||
if (ab > 0 && bc > 0)
|
||||
{
|
||||
Assert.True(ac > 0);
|
||||
}
|
||||
|
||||
if (ab < 0 && bc < 0)
|
||||
{
|
||||
Assert.True(ac < 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Compare is reflexive - Compare(a, a) = 0
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Compare_IsReflexive()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
Assert.Equal(0, SourcePrecedenceLattice.Compare(a, a));
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Join/Meet Bound Properties
|
||||
|
||||
/// <summary>
|
||||
/// Property: Join returns an upper bound - Join(a, b) >= a AND Join(a, b) >= b
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Join_ReturnsUpperBound()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var join = SourcePrecedenceLattice.Join(a, b);
|
||||
|
||||
Assert.True(SourcePrecedenceLattice.Compare(join, a) >= 0);
|
||||
Assert.True(SourcePrecedenceLattice.Compare(join, b) >= 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Meet returns a lower bound - Meet(a, b) <= a AND Meet(a, b) <= b
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Meet_ReturnsLowerBound()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var meet = SourcePrecedenceLattice.Meet(a, b);
|
||||
|
||||
Assert.True(SourcePrecedenceLattice.Compare(meet, a) <= 0);
|
||||
Assert.True(SourcePrecedenceLattice.Compare(meet, b) <= 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Join is least upper bound - for all c, if c >= a and c >= b then c >= Join(a,b)
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Join_IsLeastUpperBound()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var join = SourcePrecedenceLattice.Join(a, b);
|
||||
|
||||
foreach (var c in AllPrecedences)
|
||||
{
|
||||
var cGeA = SourcePrecedenceLattice.Compare(c, a) >= 0;
|
||||
var cGeB = SourcePrecedenceLattice.Compare(c, b) >= 0;
|
||||
|
||||
if (cGeA && cGeB)
|
||||
{
|
||||
Assert.True(SourcePrecedenceLattice.Compare(c, join) >= 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Meet is greatest lower bound - for all c, if c <= a and c <= b then c <= Meet(a,b)
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Meet_IsGreatestLowerBound()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
foreach (var b in AllPrecedences)
|
||||
{
|
||||
var meet = SourcePrecedenceLattice.Meet(a, b);
|
||||
|
||||
foreach (var c in AllPrecedences)
|
||||
{
|
||||
var cLeA = SourcePrecedenceLattice.Compare(c, a) <= 0;
|
||||
var cLeB = SourcePrecedenceLattice.Compare(c, b) <= 0;
|
||||
|
||||
if (cLeA && cLeB)
|
||||
{
|
||||
Assert.True(SourcePrecedenceLattice.Compare(c, meet) <= 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Bounded Lattice Properties
|
||||
|
||||
/// <summary>
|
||||
/// Property: Unknown is the bottom element - Join(Unknown, a) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Unknown_IsBottomElement()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
var result = SourcePrecedenceLattice.Join(SourcePrecedence.Unknown, a);
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Vendor is the top element - Meet(Vendor, a) = a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Vendor_IsTopElement()
|
||||
{
|
||||
foreach (var a in AllPrecedences)
|
||||
{
|
||||
var result = SourcePrecedenceLattice.Meet(SourcePrecedence.Vendor, a);
|
||||
Assert.Equal(a, result);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merge Determinism
|
||||
|
||||
/// <summary>
|
||||
/// Property: Merge is deterministic - same inputs always produce same output
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Merge_IsDeterministic()
|
||||
{
|
||||
var lattice = new SourcePrecedenceLattice();
|
||||
var timestamp = new DateTimeOffset(2025, 12, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
var statements = new[]
|
||||
{
|
||||
CreateStatement("CVE-2024-001", "product-1", VexStatus.Affected, SourcePrecedence.ThirdParty, timestamp),
|
||||
CreateStatement("CVE-2024-001", "product-1", VexStatus.NotAffected, SourcePrecedence.Vendor, timestamp),
|
||||
CreateStatement("CVE-2024-001", "product-1", VexStatus.Fixed, SourcePrecedence.Maintainer, timestamp)
|
||||
};
|
||||
|
||||
// Run merge 100 times and verify same result
|
||||
var firstResult = lattice.Merge(statements);
|
||||
|
||||
for (int i = 0; i < 100; i++)
|
||||
{
|
||||
var result = lattice.Merge(statements);
|
||||
|
||||
Assert.Equal(firstResult.Status, result.Status);
|
||||
Assert.Equal(firstResult.Source, result.Source);
|
||||
Assert.Equal(firstResult.VulnerabilityId, result.VulnerabilityId);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: Higher precedence always wins in merge
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Merge_HigherPrecedenceWins()
|
||||
{
|
||||
var lattice = new SourcePrecedenceLattice();
|
||||
var timestamp = new DateTimeOffset(2025, 12, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
// Vendor should win over ThirdParty
|
||||
var vendorStatement = CreateStatement("CVE-2024-001", "product-1", VexStatus.NotAffected, SourcePrecedence.Vendor, timestamp);
|
||||
var thirdPartyStatement = CreateStatement("CVE-2024-001", "product-1", VexStatus.Affected, SourcePrecedence.ThirdParty, timestamp);
|
||||
|
||||
var result = lattice.Merge(vendorStatement, thirdPartyStatement);
|
||||
|
||||
Assert.Equal(SourcePrecedence.Vendor, result.Source);
|
||||
Assert.Equal(VexStatus.NotAffected, result.Status);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Property: More recent timestamp wins when precedence is equal
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Merge_MoreRecentTimestampWins_WhenPrecedenceEqual()
|
||||
{
|
||||
var lattice = new SourcePrecedenceLattice();
|
||||
var olderTimestamp = new DateTimeOffset(2025, 12, 1, 12, 0, 0, TimeSpan.Zero);
|
||||
var newerTimestamp = new DateTimeOffset(2025, 12, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
var olderStatement = CreateStatement("CVE-2024-001", "product-1", VexStatus.Affected, SourcePrecedence.Maintainer, olderTimestamp);
|
||||
var newerStatement = CreateStatement("CVE-2024-001", "product-1", VexStatus.Fixed, SourcePrecedence.Maintainer, newerTimestamp);
|
||||
|
||||
var result = lattice.Merge(olderStatement, newerStatement);
|
||||
|
||||
Assert.Equal(VexStatus.Fixed, result.Status);
|
||||
Assert.Equal(newerTimestamp, result.Timestamp);
|
||||
}
|
||||
|
||||
private static VexStatement CreateStatement(
|
||||
string vulnId,
|
||||
string productId,
|
||||
VexStatus status,
|
||||
SourcePrecedence source,
|
||||
DateTimeOffset? timestamp)
|
||||
{
|
||||
return new VexStatement
|
||||
{
|
||||
VulnerabilityId = vulnId,
|
||||
ProductId = productId,
|
||||
Status = status,
|
||||
Source = source,
|
||||
Timestamp = timestamp
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,149 @@
|
||||
// =============================================================================
|
||||
// SpdxParserTests.cs
|
||||
// Golden-file tests for SPDX SBOM parsing
|
||||
// Part of Task T24: Golden-file tests for determinism
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.AirGap.Importer.Reconciliation;
|
||||
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
|
||||
|
||||
public sealed class SpdxParserTests
|
||||
{
|
||||
private static readonly string FixturesPath = Path.Combine(
|
||||
AppDomain.CurrentDomain.BaseDirectory,
|
||||
"Reconciliation", "Fixtures");
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ValidSpdx_ExtractsAllSubjects()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new SpdxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.spdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue();
|
||||
result.Format.Should().Be(SbomFormat.Spdx);
|
||||
result.SpecVersion.Should().Be("2.3");
|
||||
result.SerialNumber.Should().Be("https://example.com/test-app/1.0.0");
|
||||
result.GeneratorTool.Should().Contain("syft");
|
||||
|
||||
// Should have 3 packages with SHA256 checksums
|
||||
result.Subjects.Should().HaveCount(3);
|
||||
|
||||
// Verify subjects are sorted by digest
|
||||
result.Subjects.Should().BeInAscendingOrder(s => s.Digest, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ExtractsPrimarySubject()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new SpdxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.spdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert
|
||||
result.PrimarySubject.Should().NotBeNull();
|
||||
result.PrimarySubject!.Name.Should().Be("test-app");
|
||||
result.PrimarySubject.Version.Should().Be("1.0.0");
|
||||
result.PrimarySubject.SpdxId.Should().Be("SPDXRef-Package-test-app");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_ExtractsPurls()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new SpdxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.spdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - check for components with purls
|
||||
var zlib = result.Subjects.FirstOrDefault(s => s.Name == "zlib");
|
||||
zlib.Should().NotBeNull();
|
||||
zlib!.Purl.Should().Be("pkg:generic/zlib@1.2.11");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_SubjectDigestsAreNormalized()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new SpdxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.spdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var result = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - all digests should be normalized sha256:lowercase format
|
||||
foreach (var subject in result.Subjects)
|
||||
{
|
||||
subject.Digest.Should().StartWith("sha256:");
|
||||
subject.Digest[7..].Should().MatchRegex("^[a-f0-9]{64}$");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DetectFormat_SpdxFile_ReturnsSpdx()
|
||||
{
|
||||
var parser = new SpdxParser();
|
||||
parser.DetectFormat("test.spdx.json").Should().Be(SbomFormat.Spdx);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DetectFormat_NonSpdxFile_ReturnsUnknown()
|
||||
{
|
||||
var parser = new SpdxParser();
|
||||
parser.DetectFormat("test.cdx.json").Should().Be(SbomFormat.Unknown);
|
||||
parser.DetectFormat("test.json").Should().Be(SbomFormat.Unknown);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParseAsync_Deterministic_SameOutputForSameInput()
|
||||
{
|
||||
// Arrange
|
||||
var parser = new SpdxParser();
|
||||
var filePath = Path.Combine(FixturesPath, "sample.spdx.json");
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Act - parse twice
|
||||
var result1 = await parser.ParseAsync(filePath);
|
||||
var result2 = await parser.ParseAsync(filePath);
|
||||
|
||||
// Assert - results should be identical and in same order
|
||||
result1.Subjects.Select(s => s.Digest).Should().Equal(result2.Subjects.Select(s => s.Digest));
|
||||
result1.Subjects.Select(s => s.Name).Should().Equal(result2.Subjects.Select(s => s.Name));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,72 @@
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Validation;

namespace StellaOps.AirGap.Importer.Tests;

public class ReplayVerifierTests
{
    private readonly ReplayVerifier _verifier = new();

    [Fact]
    public void FullRecompute_succeeds_when_hashes_match_and_fresh()
    {
        var now = DateTimeOffset.Parse("2025-12-02T01:00:00Z");
        var request = new ReplayVerificationRequest(
            "aa".PadRight(64, 'a'),
            "bb".PadRight(64, 'b'),
            "aa".PadRight(64, 'a'),
            "bb".PadRight(64, 'b'),
            now.AddHours(-4),
            24,
            "cc".PadRight(64, 'c'),
            "cc".PadRight(64, 'c'),
            ReplayDepth.FullRecompute);

        var result = _verifier.Verify(request, now);

        Assert.True(result.IsValid);
        Assert.Equal("full-recompute-passed", result.Reason);
    }

    [Fact]
    public void Detects_hash_drift()
    {
        var now = DateTimeOffset.UtcNow;
        var request = new ReplayVerificationRequest(
            "aa".PadRight(64, 'a'),
            "bb".PadRight(64, 'b'),
            "00".PadRight(64, '0'),
            "bb".PadRight(64, 'b'),
            now,
            1,
            null,
            null,
            ReplayDepth.HashOnly);

        var result = _verifier.Verify(request, now);

        Assert.False(result.IsValid);
        Assert.Equal("manifest-hash-drift", result.Reason);
    }

    [Fact]
    public void PolicyFreeze_requires_matching_policy_hash()
    {
        var now = DateTimeOffset.UtcNow;
        var request = new ReplayVerificationRequest(
            "aa".PadRight(64, 'a'),
            "bb".PadRight(64, 'b'),
            "aa".PadRight(64, 'a'),
            "bb".PadRight(64, 'b'),
            now,
            12,
            "bundle-policy",
            "sealed-policy-other",
            ReplayDepth.PolicyFreeze);

        var result = _verifier.Verify(request, now);

        Assert.False(result.IsValid);
        Assert.Equal("policy-hash-drift", result.Reason);
    }
}
@@ -0,0 +1,40 @@
using StellaOps.AirGap.Importer.Validation;

namespace StellaOps.AirGap.Importer.Tests;

public class RootRotationPolicyTests
{
    [Fact]
    public void RequiresTwoApprovers()
    {
        var policy = new RootRotationPolicy();
        var result = policy.Validate(new Dictionary<string, byte[]>(), new Dictionary<string, byte[]> { ["k1"] = new byte[] { 1 } }, new[] { "a" });
        Assert.False(result.IsValid);
        Assert.Equal("rotation-dual-approval-required", result.Reason);
    }

    [Fact]
    public void RejectsNoChange()
    {
        var policy = new RootRotationPolicy();
        var result = policy.Validate(
            new Dictionary<string, byte[]> { ["k1"] = new byte[] { 1 } },
            new Dictionary<string, byte[]> { ["k1"] = new byte[] { 1 } },
            new[] { "a", "b" });
        Assert.False(result.IsValid);
        Assert.Equal("rotation-no-change", result.Reason);
    }

    [Fact]
    public void AcceptsRotationWithDualApproval()
    {
        var policy = new RootRotationPolicy();
        var result = policy.Validate(
            new Dictionary<string, byte[]> { ["old"] = new byte[] { 1 } },
            new Dictionary<string, byte[]> { ["new"] = new byte[] { 2 } },
            new[] { "a", "b" });

        Assert.True(result.IsValid);
        Assert.Equal("rotation-approved", result.Reason);
    }
}
@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <IsPackable>false</IsPackable>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj" />
  </ItemGroup>
  <ItemGroup>
    <None Update="Reconciliation/Fixtures/**/*">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>
@@ -0,0 +1,42 @@
using StellaOps.AirGap.Importer.Validation;

namespace StellaOps.AirGap.Importer.Tests;

public class TufMetadataValidatorTests
{
    [Fact]
    public void RejectsInvalidJson()
    {
        var validator = new TufMetadataValidator();
        var result = validator.Validate("{}", "{}", "{}");
        Assert.False(result.IsValid);
    }

    [Fact]
    public void AcceptsConsistentSnapshotHash()
    {
        var validator = new TufMetadataValidator();
        var root = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\"}";
        var snapshot = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"meta\":{\"snapshot\":{\"hashes\":{\"sha256\":\"abc\"}}}}";
        var timestamp = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"snapshot\":{\"meta\":{\"hashes\":{\"sha256\":\"abc\"}}}}";

        var result = validator.Validate(root, snapshot, timestamp);

        Assert.True(result.IsValid);
        Assert.Equal("tuf-metadata-valid", result.Reason);
    }

    [Fact]
    public void DetectsHashMismatch()
    {
        var validator = new TufMetadataValidator();
        var root = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\"}";
        var snapshot = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"meta\":{\"snapshot\":{\"hashes\":{\"sha256\":\"abc\"}}}}";
        var timestamp = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"snapshot\":{\"meta\":{\"hashes\":{\"sha256\":\"def\"}}}}";

        var result = validator.Validate(root, snapshot, timestamp);

        Assert.False(result.IsValid);
        Assert.Equal("tuf-snapshot-hash-mismatch", result.Reason);
    }
}
@@ -0,0 +1,204 @@
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Quarantine;
using StellaOps.AirGap.Importer.Validation;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Validation;

public sealed class ImportValidatorIntegrationTests
{
    [Fact]
    public async Task ValidateAsync_WhenNonMonotonic_ShouldFailAndQuarantine()
    {
        var quarantine = new CapturingQuarantineService();
        var monotonicity = new FixedMonotonicityChecker(isMonotonic: false);

        var validator = new ImportValidator(
            new DsseVerifier(),
            new TufMetadataValidator(),
            new MerkleRootCalculator(),
            new RootRotationPolicy(),
            monotonicity,
            quarantine,
            NullLogger<ImportValidator>.Instance);

        var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(tempRoot);
        var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst");
        await File.WriteAllTextAsync(bundlePath, "bundle-bytes");

        try
        {
            var (envelope, trustRoots) = CreateValidDsse();

            var trustStore = new TrustStore();
            trustStore.LoadActive(new Dictionary<string, byte[]>());
            trustStore.StagePending(new Dictionary<string, byte[]> { ["pending-key"] = new byte[] { 1, 2, 3 } });

            var request = new ImportValidationRequest(
                TenantId: "tenant-a",
                BundleType: "offline-kit",
                BundleDigest: "sha256:bundle",
                BundlePath: bundlePath,
                ManifestJson: "{\"version\":\"1.0.0\"}",
                ManifestVersion: "1.0.0",
                ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"),
                ForceActivate: false,
                ForceActivateReason: null,
                Envelope: envelope,
                TrustRoots: trustRoots,
                RootJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z"}
                """,
                SnapshotJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z","meta":{"snapshot":{"hashes":{"sha256":"abc"}}}}
                """,
                TimestampJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z","snapshot":{"meta":{"hashes":{"sha256":"abc"}}}}
                """,
                PayloadEntries: new[] { new NamedStream("payload.txt", new MemoryStream(Encoding.UTF8.GetBytes("hello"))) },
                TrustStore: trustStore,
                ApproverIds: new[] { "approver-a", "approver-b" });

            var result = await validator.ValidateAsync(request);

            result.IsValid.Should().BeFalse();
            result.Reason.Should().Contain("version-non-monotonic");

            quarantine.Requests.Should().HaveCount(1);
            quarantine.Requests[0].TenantId.Should().Be("tenant-a");
            quarantine.Requests[0].ReasonCode.Should().Contain("version-non-monotonic");
        }
        finally
        {
            try
            {
                Directory.Delete(tempRoot, recursive: true);
            }
            catch
            {
                // best-effort cleanup
            }
        }
    }

    private static (DsseEnvelope envelope, TrustRootConfig trustRoots) CreateValidDsse()
    {
        using var rsa = RSA.Create(2048);
        var publicKey = rsa.ExportSubjectPublicKeyInfo();

        var fingerprint = Convert.ToHexString(SHA256.HashData(publicKey)).ToLowerInvariant();
        var payloadType = "application/vnd.in-toto+json";
        var payloadBytes = Encoding.UTF8.GetBytes("{\"hello\":\"world\"}");
        var payloadBase64 = Convert.ToBase64String(payloadBytes);

        var pae = BuildPae(payloadType, payloadBytes);
        var signature = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);

        var envelope = new DsseEnvelope(
            PayloadType: payloadType,
            Payload: payloadBase64,
            Signatures: new[] { new DsseSignature("key-1", Convert.ToBase64String(signature)) });

        var trustRoots = new TrustRootConfig(
            RootBundlePath: "(memory)",
            TrustedKeyFingerprints: new[] { fingerprint },
            AllowedSignatureAlgorithms: new[] { "rsa-pss-sha256" },
            NotBeforeUtc: null,
            NotAfterUtc: null,
            PublicKeys: new Dictionary<string, byte[]> { ["key-1"] = publicKey });

        return (envelope, trustRoots);
    }

    private static byte[] BuildPae(string payloadType, byte[] payloadBytes)
    {
        const string paePrefix = "DSSEv1";
        var payload = Encoding.UTF8.GetString(payloadBytes);

        var parts = new[]
        {
            paePrefix,
            payloadType,
            payload
        };

        var paeBuilder = new StringBuilder();
        paeBuilder.Append("PAE:");
        paeBuilder.Append(parts.Length);
        foreach (var part in parts)
        {
            paeBuilder.Append(' ');
            paeBuilder.Append(part.Length);
            paeBuilder.Append(' ');
            paeBuilder.Append(part);
        }

        return Encoding.UTF8.GetBytes(paeBuilder.ToString());
    }

    private sealed class FixedMonotonicityChecker : IVersionMonotonicityChecker
    {
        private readonly bool _isMonotonic;

        public FixedMonotonicityChecker(bool isMonotonic)
        {
            _isMonotonic = isMonotonic;
        }

        public Task<MonotonicityCheckResult> CheckAsync(
            string tenantId,
            string bundleType,
            BundleVersion incomingVersion,
            CancellationToken cancellationToken = default)
        {
            return Task.FromResult(new MonotonicityCheckResult(
                IsMonotonic: _isMonotonic,
                CurrentVersion: new BundleVersion(2, 0, 0, DateTimeOffset.Parse("2025-12-14T00:00:00Z")),
                CurrentBundleDigest: "sha256:current",
                CurrentActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
                ReasonCode: _isMonotonic ? "MONOTONIC_OK" : "VERSION_NON_MONOTONIC"));
        }

        public Task RecordActivationAsync(
            string tenantId,
            string bundleType,
            BundleVersion version,
            string bundleDigest,
            bool wasForceActivated = false,
            string? forceActivateReason = null,
            CancellationToken cancellationToken = default)
        {
            return Task.CompletedTask;
        }
    }

    private sealed class CapturingQuarantineService : IQuarantineService
    {
        public List<QuarantineRequest> Requests { get; } = new();

        public Task<QuarantineResult> QuarantineAsync(QuarantineRequest request, CancellationToken cancellationToken = default)
        {
            Requests.Add(request);
            return Task.FromResult(new QuarantineResult(
                Success: true,
                QuarantineId: "test",
                QuarantinePath: "(memory)",
                QuarantinedAt: DateTimeOffset.UnixEpoch));
        }

        public Task<IReadOnlyList<QuarantineEntry>> ListAsync(string tenantId, QuarantineListOptions? options = null, CancellationToken cancellationToken = default) =>
            Task.FromResult<IReadOnlyList<QuarantineEntry>>(Array.Empty<QuarantineEntry>());

        public Task<bool> RemoveAsync(string tenantId, string quarantineId, string removalReason, CancellationToken cancellationToken = default) =>
            Task.FromResult(false);

        public Task<int> CleanupExpiredAsync(TimeSpan retentionPeriod, CancellationToken cancellationToken = default) =>
            Task.FromResult(0);
    }
}
@@ -0,0 +1,165 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using StellaOps.AirGap.Importer.Validation;

namespace StellaOps.AirGap.Importer.Tests.Validation;

public sealed class RekorOfflineReceiptVerifierTests
{
    [Fact]
    public async Task VerifyAsync_ValidReceiptAndCheckpoint_Succeeds()
    {
        var temp = Path.Combine(Path.GetTempPath(), "stellaops-rekor-" + Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(temp);

        try
        {
            // Leaf 0 is the DSSE digest we verify for inclusion.
            var dsseSha256 = SHA256.HashData(Encoding.UTF8.GetBytes("dsse-envelope"));
            var otherDsseSha256 = SHA256.HashData(Encoding.UTF8.GetBytes("other-envelope"));

            var leaf0 = HashLeaf(dsseSha256);
            var leaf1 = HashLeaf(otherDsseSha256);
            var root = HashInterior(leaf0, leaf1);

            var rootBase64 = Convert.ToBase64String(root);
            var treeSize = 2L;
            var origin = "rekor.sigstore.dev - 2605736670972794746";
            var timestamp = "1700000000";
            var canonicalBody = $"{origin}\n{treeSize}\n{rootBase64}\n{timestamp}\n";

            using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
            var signature = ecdsa.SignData(Encoding.UTF8.GetBytes(canonicalBody), HashAlgorithmName.SHA256);
            var signatureBase64 = Convert.ToBase64String(signature);

            var checkpointPath = Path.Combine(temp, "checkpoint.sig");
            await File.WriteAllTextAsync(
                checkpointPath,
                canonicalBody + $"sig {signatureBase64}\n",
                new UTF8Encoding(encoderShouldEmitUTF8Identifier: false));

            var publicKeyPath = Path.Combine(temp, "rekor-pub.pem");
            await File.WriteAllTextAsync(
                publicKeyPath,
                WrapPem("PUBLIC KEY", ecdsa.ExportSubjectPublicKeyInfo()),
                new UTF8Encoding(encoderShouldEmitUTF8Identifier: false));

            var receiptPath = Path.Combine(temp, "rekor-receipt.json");
            var receiptJson = JsonSerializer.Serialize(new
            {
                uuid = "uuid-1",
                logIndex = 0,
                rootHash = Convert.ToHexString(root).ToLowerInvariant(),
                hashes = new[] { Convert.ToHexString(leaf1).ToLowerInvariant() },
                checkpoint = "checkpoint.sig"
            }, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
            await File.WriteAllTextAsync(receiptPath, receiptJson, new UTF8Encoding(false));

            var result = await RekorOfflineReceiptVerifier.VerifyAsync(receiptPath, dsseSha256, publicKeyPath, CancellationToken.None);

            result.Verified.Should().BeTrue();
            result.CheckpointSignatureVerified.Should().BeTrue();
            result.RekorUuid.Should().Be("uuid-1");
            result.LogIndex.Should().Be(0);
            result.TreeSize.Should().Be(2);
            result.ExpectedRootHash.Should().Be(Convert.ToHexString(root).ToLowerInvariant());
            result.ComputedRootHash.Should().Be(Convert.ToHexString(root).ToLowerInvariant());
        }
        finally
        {
            Directory.Delete(temp, recursive: true);
        }
    }

    [Fact]
    public async Task VerifyAsync_TamperedCheckpointSignature_Fails()
    {
        var temp = Path.Combine(Path.GetTempPath(), "stellaops-rekor-" + Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(temp);

        try
        {
            var dsseSha256 = SHA256.HashData(Encoding.UTF8.GetBytes("dsse-envelope"));
            var otherDsseSha256 = SHA256.HashData(Encoding.UTF8.GetBytes("other-envelope"));

            var leaf0 = HashLeaf(dsseSha256);
            var leaf1 = HashLeaf(otherDsseSha256);
            var root = HashInterior(leaf0, leaf1);

            var rootBase64 = Convert.ToBase64String(root);
            var treeSize = 2L;
            var origin = "rekor.sigstore.dev - 2605736670972794746";
            var timestamp = "1700000000";
            var canonicalBody = $"{origin}\n{treeSize}\n{rootBase64}\n{timestamp}\n";

            using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
            var signature = ecdsa.SignData(Encoding.UTF8.GetBytes(canonicalBody), HashAlgorithmName.SHA256);
            signature[0] ^= 0xFF; // tamper

            var checkpointPath = Path.Combine(temp, "checkpoint.sig");
            await File.WriteAllTextAsync(
                checkpointPath,
                canonicalBody + $"sig {Convert.ToBase64String(signature)}\n",
                new UTF8Encoding(false));

            var publicKeyPath = Path.Combine(temp, "rekor-pub.pem");
            await File.WriteAllTextAsync(
                publicKeyPath,
                WrapPem("PUBLIC KEY", ecdsa.ExportSubjectPublicKeyInfo()),
                new UTF8Encoding(false));

            var receiptPath = Path.Combine(temp, "rekor-receipt.json");
            var receiptJson = JsonSerializer.Serialize(new
            {
                uuid = "uuid-1",
                logIndex = 0,
                rootHash = Convert.ToHexString(root).ToLowerInvariant(),
                hashes = new[] { Convert.ToHexString(leaf1).ToLowerInvariant() },
                checkpoint = "checkpoint.sig"
            }, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
            await File.WriteAllTextAsync(receiptPath, receiptJson, new UTF8Encoding(false));

            var result = await RekorOfflineReceiptVerifier.VerifyAsync(receiptPath, dsseSha256, publicKeyPath, CancellationToken.None);

            result.Verified.Should().BeFalse();
            result.FailureReason.Should().Contain("checkpoint signature", because: result.FailureReason);
        }
        finally
        {
            Directory.Delete(temp, recursive: true);
        }
    }

    private static byte[] HashLeaf(byte[] leafData)
    {
        var buffer = new byte[1 + leafData.Length];
        buffer[0] = 0x00;
        leafData.CopyTo(buffer, 1);
        return SHA256.HashData(buffer);
    }

    private static byte[] HashInterior(byte[] left, byte[] right)
    {
        var buffer = new byte[1 + left.Length + right.Length];
        buffer[0] = 0x01;
        left.CopyTo(buffer, 1);
        right.CopyTo(buffer, 1 + left.Length);
        return SHA256.HashData(buffer);
    }

    private static string WrapPem(string label, byte[] derBytes)
    {
        var base64 = Convert.ToBase64String(derBytes);
        var sb = new StringBuilder();
        sb.AppendLine($"-----BEGIN {label}-----");
        for (var i = 0; i < base64.Length; i += 64)
        {
            sb.AppendLine(base64.Substring(i, Math.Min(64, base64.Length - i)));
        }
        sb.AppendLine($"-----END {label}-----");
        return sb.ToString();
    }
}
@@ -0,0 +1,79 @@
using FluentAssertions;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Versioning;

public sealed class BundleVersionTests
{
    [Fact]
    public void Parse_ShouldParseSemVer()
    {
        var createdAt = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var version = BundleVersion.Parse("1.2.3", createdAt);

        version.Major.Should().Be(1);
        version.Minor.Should().Be(2);
        version.Patch.Should().Be(3);
        version.Prerelease.Should().BeNull();
        version.CreatedAt.Should().Be(createdAt);
        version.SemVer.Should().Be("1.2.3");
    }

    [Fact]
    public void Parse_ShouldParsePrerelease()
    {
        var createdAt = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var version = BundleVersion.Parse("1.2.3-edge.1", createdAt);

        version.SemVer.Should().Be("1.2.3-edge.1");
        version.Prerelease.Should().Be("edge.1");
    }

    [Fact]
    public void IsNewerThan_ShouldCompareMajorMinorPatch()
    {
        var a = new BundleVersion(1, 2, 3, DateTimeOffset.UnixEpoch);
        var b = new BundleVersion(2, 0, 0, DateTimeOffset.UnixEpoch);
        b.IsNewerThan(a).Should().BeTrue();
        a.IsNewerThan(b).Should().BeFalse();
    }

    [Fact]
    public void IsNewerThan_ShouldTreatReleaseAsNewerThanPrerelease()
    {
        var now = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var prerelease = new BundleVersion(1, 2, 3, now, "alpha");
        var release = new BundleVersion(1, 2, 3, now, null);

        release.IsNewerThan(prerelease).Should().BeTrue();
        prerelease.IsNewerThan(release).Should().BeFalse();
    }

    [Fact]
    public void IsNewerThan_ShouldOrderPrereleaseIdentifiers()
    {
        var now = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var alpha = new BundleVersion(1, 2, 3, now, "alpha");
        var beta = new BundleVersion(1, 2, 3, now, "beta");
        var rc1 = new BundleVersion(1, 2, 3, now, "rc.1");
        var rc2 = new BundleVersion(1, 2, 3, now, "rc.2");

        beta.IsNewerThan(alpha).Should().BeTrue();
        rc1.IsNewerThan(beta).Should().BeTrue();
        rc2.IsNewerThan(rc1).Should().BeTrue();
    }

    [Fact]
    public void IsNewerThan_ShouldUseCreatedAtAsTiebreaker()
    {
        var earlier = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var later = earlier.AddMinutes(1);

        var a = new BundleVersion(1, 2, 3, earlier, "edge");
        var b = new BundleVersion(1, 2, 3, later, "edge");

        b.IsNewerThan(a).Should().BeTrue();
        a.IsNewerThan(b).Should().BeFalse();
    }
}
@@ -0,0 +1,157 @@
using FluentAssertions;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Versioning;

public sealed class VersionMonotonicityCheckerTests
{
    [Fact]
    public async Task CheckAsync_WhenNoCurrent_ShouldBeFirstActivation()
    {
        var store = new InMemoryBundleVersionStore();
        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-14T00:00:00Z")));

        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-14T00:00:00Z"));
        var result = await checker.CheckAsync("tenant-a", "offline-kit", incoming);

        result.IsMonotonic.Should().BeTrue();
        result.ReasonCode.Should().Be("FIRST_ACTIVATION");
        result.CurrentVersion.Should().BeNull();
        result.CurrentBundleDigest.Should().BeNull();
    }

    [Fact]
    public async Task CheckAsync_WhenOlder_ShouldBeNonMonotonic()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-14T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-14T00:00:00Z"));

        var result = await checker.CheckAsync("tenant-a", "offline-kit", incoming);

        result.IsMonotonic.Should().BeFalse();
        result.ReasonCode.Should().Be("VERSION_NON_MONOTONIC");
        result.CurrentVersion.Should().NotBeNull();
        result.CurrentVersion!.SemVer.Should().Be("2.0.0");
    }

    [Fact]
    public async Task RecordActivationAsync_WhenNonMonotonicWithoutForce_ShouldThrow()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-15T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-15T00:00:00Z"));

        var act = () => checker.RecordActivationAsync("tenant-a", "offline-kit", incoming, "sha256:new");
        await act.Should().ThrowAsync<InvalidOperationException>();
    }

    [Fact]
    public async Task RecordActivationAsync_WhenForced_ShouldWriteForceFields()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-15T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-15T00:00:00Z"));

        await checker.RecordActivationAsync(
            "tenant-a",
            "offline-kit",
            incoming,
            "sha256:new",
            wasForceActivated: true,
            forceActivateReason: "manual rollback permitted");

        var current = await store.GetCurrentAsync("tenant-a", "offline-kit");
        current.Should().NotBeNull();
        current!.WasForceActivated.Should().BeTrue();
        current.ForceActivateReason.Should().Be("manual rollback permitted");
        current.BundleDigest.Should().Be("sha256:new");
    }

    private sealed class InMemoryBundleVersionStore : IBundleVersionStore
    {
        private BundleVersionRecord? _current;
        private readonly List<BundleVersionRecord> _history = new();

        public Task<BundleVersionRecord?> GetCurrentAsync(string tenantId, string bundleType, CancellationToken ct = default)
        {
            return Task.FromResult(_current is not null &&
                _current.TenantId.Equals(tenantId, StringComparison.Ordinal) &&
                _current.BundleType.Equals(bundleType, StringComparison.Ordinal)
                ? _current
                : null);
        }

        public Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default)
        {
            _current = record;
            _history.Insert(0, record);
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<BundleVersionRecord>> GetHistoryAsync(string tenantId, string bundleType, int limit = 10, CancellationToken ct = default)
        {
            var items = _history
                .Where(r => r.TenantId.Equals(tenantId, StringComparison.Ordinal) && r.BundleType.Equals(bundleType, StringComparison.Ordinal))
                .Take(limit)
                .ToArray();

            return Task.FromResult<IReadOnlyList<BundleVersionRecord>>(items);
        }
    }

    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _utcNow;

        public FixedTimeProvider(DateTimeOffset utcNow)
        {
            _utcNow = utcNow;
        }

        public override DateTimeOffset GetUtcNow() => _utcNow;
    }
}
@@ -0,0 +1,35 @@
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Time.Config;
using StellaOps.AirGap.Time.Models;

namespace StellaOps.AirGap.Time.Tests;

public class AirGapOptionsValidatorTests
{
    [Fact]
    public void FailsWhenTenantMissing()
    {
        var opts = new AirGapOptions { TenantId = "" };
        var validator = new AirGapOptionsValidator();
        var result = validator.Validate(null, opts);
        Assert.True(result.Failed);
    }

    [Fact]
    public void FailsWhenWarningExceedsBreach()
    {
        var opts = new AirGapOptions { TenantId = "t", Staleness = new StalenessOptions { WarningSeconds = 20, BreachSeconds = 10 } };
        var validator = new AirGapOptionsValidator();
        var result = validator.Validate(null, opts);
        Assert.True(result.Failed);
    }

    [Fact]
    public void SucceedsForValidOptions()
    {
        var opts = new AirGapOptions { TenantId = "t", Staleness = new StalenessOptions { WarningSeconds = 10, BreachSeconds = 20 } };
        var validator = new AirGapOptionsValidator();
        var result = validator.Validate(null, opts);
        Assert.True(result.Succeeded);
    }
}
@@ -0,0 +1 @@
global using Xunit;
@@ -0,0 +1,93 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

/// <summary>
/// Tests for Rfc3161Verifier with real SignedCms verification.
/// Per AIRGAP-TIME-57-001: Trusted time-anchor service.
/// </summary>
public class Rfc3161VerifierTests
{
    private readonly Rfc3161Verifier _verifier = new();

    [Fact]
    public void Verify_ReturnsFailure_WhenTrustRootsEmpty()
    {
        var token = new byte[] { 0x01, 0x02, 0x03 };

        var result = _verifier.Verify(token, Array.Empty<TimeTrustRoot>(), out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("rfc3161-trust-roots-required", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenTokenEmpty()
    {
        var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") };

        var result = _verifier.Verify(ReadOnlySpan<byte>.Empty, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("rfc3161-token-empty", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenInvalidAsn1Structure()
    {
        var token = new byte[] { 0x01, 0x02, 0x03 }; // Invalid ASN.1
        var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") };

        var result = _verifier.Verify(token, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Contains("rfc3161-", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ProducesTokenDigest()
    {
        var token = new byte[] { 0x30, 0x00 }; // Empty SEQUENCE (minimal valid ASN.1)
        var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") };

        var result = _verifier.Verify(token, trust, out _);

        // Should fail on CMS decode but attempt was made
        Assert.False(result.IsValid);
        Assert.Contains("rfc3161-", result.Reason);
    }

    [Fact]
    public void Verify_HandlesExceptionsGracefully()
    {
        // Create bytes that might cause internal exceptions
        var token = new byte[256];
        new Random(42).NextBytes(token);
        var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") };

        var result = _verifier.Verify(token, trust, out var anchor);

        // Should not throw, should return failure result
        Assert.False(result.IsValid);
        Assert.Contains("rfc3161-", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ReportsDecodeErrorForMalformedCms()
    {
        // Create something that looks like CMS but isn't valid
        var token = new byte[] { 0x30, 0x82, 0x00, 0x10, 0x06, 0x09 };
        var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") };

        var result = _verifier.Verify(token, trust, out _);

        Assert.False(result.IsValid);
        // Should report either decode or error
        Assert.True(result.Reason?.Contains("rfc3161-") ?? false);
    }
}
@@ -0,0 +1,150 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

/// <summary>
/// Tests for RoughtimeVerifier with real Ed25519 signature verification.
/// Per AIRGAP-TIME-57-001: Trusted time-anchor service.
/// </summary>
public class RoughtimeVerifierTests
{
    private readonly RoughtimeVerifier _verifier = new();

    [Fact]
    public void Verify_ReturnsFailure_WhenTrustRootsEmpty()
    {
        var token = new byte[] { 0x01, 0x02, 0x03, 0x04 };

        var result = _verifier.Verify(token, Array.Empty<TimeTrustRoot>(), out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("roughtime-trust-roots-required", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenTokenEmpty()
    {
        var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") };

        var result = _verifier.Verify(ReadOnlySpan<byte>.Empty, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("roughtime-token-empty", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenTokenTooShort()
    {
        var token = new byte[] { 0x01, 0x02, 0x03 };
        var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") };

        var result = _verifier.Verify(token, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("roughtime-message-too-short", result.Reason);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenInvalidTagCount()
    {
        // Create a minimal wire format with invalid tag count
        var token = new byte[8];
        // Set num_tags to 0 (invalid)
        BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)0);

        var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") };

        var result = _verifier.Verify(token, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("roughtime-invalid-tag-count", result.Reason);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenNonEd25519Algorithm()
    {
        // Create a minimal valid-looking wire format
        var token = CreateMinimalRoughtimeToken();
        var trust = new[] { new TimeTrustRoot("root1", new byte[32], "rsa") }; // Wrong algorithm

        var result = _verifier.Verify(token, trust, out var anchor);

        Assert.False(result.IsValid);
        // Should fail either on parsing or signature verification
        Assert.Contains("roughtime-", result.Reason);
    }

    [Fact]
    public void Verify_ReturnsFailure_WhenKeyLengthWrong()
    {
        var token = CreateMinimalRoughtimeToken();
        var trust = new[] { new TimeTrustRoot("root1", new byte[16], "ed25519") }; // Wrong key length

        var result = _verifier.Verify(token, trust, out var anchor);

        Assert.False(result.IsValid);
        Assert.Contains("roughtime-", result.Reason);
    }

    [Fact]
    public void Verify_ProducesTokenDigest()
    {
        var token = new byte[] { 0xAA, 0xBB, 0xCC, 0xDD };
        var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") };

        var result = _verifier.Verify(token, trust, out _);

        // Even on failure, we should get a deterministic result
        Assert.False(result.IsValid);
    }

    /// <summary>
    /// Creates a minimal Roughtime wire format token for testing parsing paths.
    /// Note: This will fail signature verification but tests the parsing logic.
    /// </summary>
    private static byte[] CreateMinimalRoughtimeToken()
    {
        // Roughtime wire format:
        // [num_tags:u32] [offsets:u32[n-1]] [tags:u32[n]] [values...]
        // We'll create 2 tags: SIG and SREP

        const uint TagSig = 0x00474953; // "SIG\0"
        const uint TagSrep = 0x50455253; // "SREP"

        var sigValue = new byte[64]; // Ed25519 signature
        var srepValue = CreateMinimalSrep();

        // Header: num_tags=2, offset[0]=64 (sig length), tags=[SIG, SREP]
        var headerSize = 4 + 4 + 8; // num_tags + 1 offset + 2 tags = 16 bytes
        var token = new byte[headerSize + sigValue.Length + srepValue.Length];

        BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)2); // num_tags = 2
        BitConverter.TryWriteBytes(token.AsSpan(4, 4), (uint)64); // offset[0] = 64 (sig length)
        BitConverter.TryWriteBytes(token.AsSpan(8, 4), TagSig);
        BitConverter.TryWriteBytes(token.AsSpan(12, 4), TagSrep);
        sigValue.CopyTo(token.AsSpan(16));
        srepValue.CopyTo(token.AsSpan(16 + 64));

        return token;
    }

    private static byte[] CreateMinimalSrep()
    {
        // SREP with MIDP tag containing 8-byte timestamp
        const uint TagMidp = 0x5044494D; // "MIDP"

        // Header: num_tags=1, tags=[MIDP]
        var headerSize = 4 + 4; // num_tags + 1 tag = 8 bytes
        var srepValue = new byte[headerSize + 8]; // + 8 bytes for MIDP value

        BitConverter.TryWriteBytes(srepValue.AsSpan(0, 4), (uint)1); // num_tags = 1
        BitConverter.TryWriteBytes(srepValue.AsSpan(4, 4), TagMidp);
        // MIDP value: microseconds since Unix epoch (example: 2025-01-01 00:00:00 UTC)
        BitConverter.TryWriteBytes(srepValue.AsSpan(8, 8), 1735689600000000L);

        return srepValue;
    }
}
@@ -0,0 +1,63 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;
using StellaOps.AirGap.Time.Stores;

namespace StellaOps.AirGap.Time.Tests;

public class SealedStartupValidatorTests
{
    [Fact]
    public async Task FailsWhenAnchorMissing()
    {
        var validator = Build(out var statusService);
        var result = await validator.ValidateAsync("t1", StalenessBudget.Default, default);
        Assert.False(result.IsValid);
        Assert.Equal("time-anchor-missing", result.Reason);
    }

    [Fact]
    public async Task FailsWhenBreach()
    {
        var validator = Build(out var statusService);
        var anchor = new TimeAnchor(DateTimeOffset.UnixEpoch, "src", "fmt", "fp", "digest");
        await statusService.SetAnchorAsync("t1", anchor, new StalenessBudget(10, 20));
        var now = DateTimeOffset.UnixEpoch.AddSeconds(25);
        var status = await statusService.GetStatusAsync("t1", now);
        var result = status.Staleness.IsBreach;
        Assert.True(result);
        var validation = await validator.ValidateAsync("t1", new StalenessBudget(10, 20), default);
        Assert.False(validation.IsValid);
        Assert.Equal("time-anchor-stale", validation.Reason);
    }

    [Fact]
    public async Task SucceedsWhenFresh()
    {
        var validator = Build(out var statusService);
        var now = DateTimeOffset.UtcNow;
        var anchor = new TimeAnchor(now, "src", "fmt", "fp", "digest");
        await statusService.SetAnchorAsync("t1", anchor, new StalenessBudget(10, 20));
        var validation = await validator.ValidateAsync("t1", new StalenessBudget(10, 20), default);
        Assert.True(validation.IsValid);
    }

    [Fact]
    public async Task FailsOnBudgetMismatch()
    {
        var validator = Build(out var statusService);
        var anchor = new TimeAnchor(DateTimeOffset.UtcNow, "src", "fmt", "fp", "digest");
        await statusService.SetAnchorAsync("t1", anchor, new StalenessBudget(10, 20));

        var validation = await validator.ValidateAsync("t1", new StalenessBudget(5, 15), default);

        Assert.False(validation.IsValid);
        Assert.Equal("time-anchor-budget-mismatch", validation.Reason);
    }

    private static SealedStartupValidator Build(out TimeStatusService statusService)
    {
        var store = new InMemoryTimeAnchorStore();
        statusService = new TimeStatusService(store, new StalenessCalculator(), new TimeTelemetry(), Microsoft.Extensions.Options.Options.Create(new AirGapOptions()));
        return new SealedStartupValidator(statusService);
    }
}
@@ -0,0 +1,43 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

public class StalenessCalculatorTests
{
    [Fact]
    public void UnknownWhenNoAnchor()
    {
        var calc = new StalenessCalculator();
        var result = calc.Evaluate(TimeAnchor.Unknown, StalenessBudget.Default, DateTimeOffset.UnixEpoch);
        Assert.False(result.IsWarning);
        Assert.False(result.IsBreach);
    }

    [Fact]
    public void BreachWhenBeyondBudget()
    {
        var anchor = new TimeAnchor(DateTimeOffset.UnixEpoch, "source", "fmt", "fp", "digest");
        var budget = new StalenessBudget(10, 20);
        var calc = new StalenessCalculator();

        var result = calc.Evaluate(anchor, budget, DateTimeOffset.UnixEpoch.AddSeconds(25));

        Assert.True(result.IsBreach);
        Assert.True(result.IsWarning);
        Assert.Equal(25, result.AgeSeconds);
    }

    [Fact]
    public void WarningWhenBetweenWarningAndBreach()
    {
        var anchor = new TimeAnchor(DateTimeOffset.UnixEpoch, "source", "fmt", "fp", "digest");
        var budget = new StalenessBudget(10, 20);
        var calc = new StalenessCalculator();

        var result = calc.Evaluate(anchor, budget, DateTimeOffset.UnixEpoch.AddSeconds(15));

        Assert.True(result.IsWarning);
        Assert.False(result.IsBreach);
    }
}
@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <IsPackable>false</IsPackable>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,61 @@
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Parsing;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

public class TimeAnchorLoaderTests
{
    [Fact]
    public void RejectsInvalidHex()
    {
        var loader = Build();
        var trust = new[] { new TimeTrustRoot("k1", new byte[32], "ed25519") };
        var result = loader.TryLoadHex("not-hex", TimeTokenFormat.Roughtime, trust, out _);
        Assert.False(result.IsValid);
        Assert.Equal("token-hex-invalid", result.Reason);
    }

    [Fact]
    public void LoadsHexToken()
    {
        var loader = Build();
        var hex = "01020304";
        var trust = new[] { new TimeTrustRoot("k1", new byte[32], "ed25519") };
        var result = loader.TryLoadHex(hex, TimeTokenFormat.Roughtime, trust, out var anchor);

        Assert.True(result.IsValid);
        Assert.Equal("Roughtime", anchor.Format);
    }

    [Fact]
    public void RejectsIncompatibleTrustRoots()
    {
        var loader = Build();
        var hex = "010203";
        var rsaKey = new byte[128];
        var trust = new[] { new TimeTrustRoot("k1", rsaKey, "rsa") };

        var result = loader.TryLoadHex(hex, TimeTokenFormat.Roughtime, trust, out _);

        Assert.False(result.IsValid);
        Assert.Equal("trust-roots-incompatible-format", result.Reason);
    }

    [Fact]
    public void RejectsWhenTrustRootsMissing()
    {
        var loader = Build();
        var result = loader.TryLoadHex("010203", TimeTokenFormat.Roughtime, Array.Empty<TimeTrustRoot>(), out _);

        Assert.False(result.IsValid);
        Assert.Equal("trust-roots-required", result.Reason);
    }

    private static TimeAnchorLoader Build()
    {
        var options = Options.Create(new AirGapOptions { AllowUntrustedAnchors = false });
        return new TimeAnchorLoader(new TimeVerificationService(), new TimeTokenParser(), options);
    }
}
@@ -0,0 +1,261 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;
using StellaOps.AirGap.Time.Stores;

namespace StellaOps.AirGap.Time.Tests;

/// <summary>
/// Tests for TimeAnchorPolicyService.
/// Per AIRGAP-TIME-57-001: Time-anchor policy enforcement.
/// </summary>
public class TimeAnchorPolicyServiceTests
{
    private readonly TimeProvider _fixedTimeProvider;
    private readonly InMemoryTimeAnchorStore _store;
    private readonly StalenessCalculator _calculator;
    private readonly TimeTelemetry _telemetry;
    private readonly TimeStatusService _statusService;
    private readonly AirGapOptions _airGapOptions;

    public TimeAnchorPolicyServiceTests()
    {
        _fixedTimeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));
        _store = new InMemoryTimeAnchorStore();
        _calculator = new StalenessCalculator();
        _telemetry = new TimeTelemetry();
        _airGapOptions = new AirGapOptions
        {
            Staleness = new AirGapOptions.StalenessOptions { WarningSeconds = 3600, BreachSeconds = 7200 },
            ContentBudgets = new Dictionary<string, AirGapOptions.StalenessOptions>()
        };
        _statusService = new TimeStatusService(_store, _calculator, _telemetry, Options.Create(_airGapOptions));
    }

    private TimeAnchorPolicyService CreateService(TimeAnchorPolicyOptions? options = null)
    {
        return new TimeAnchorPolicyService(
            _statusService,
            Options.Create(options ?? new TimeAnchorPolicyOptions()),
            NullLogger<TimeAnchorPolicyService>.Instance,
            _fixedTimeProvider);
    }

    [Fact]
    public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenNoAnchor()
    {
        var service = CreateService();

        var result = await service.ValidateTimeAnchorAsync("tenant-1");

        Assert.False(result.Allowed);
        Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode);
        Assert.NotNull(result.Remediation);
    }

    [Fact]
    public async Task ValidateTimeAnchorAsync_ReturnsSuccess_WhenAnchorValid()
    {
        var service = CreateService();
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow().AddMinutes(-30),
            "test-source",
            "Roughtime",
            "fingerprint",
            "digest123");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var result = await service.ValidateTimeAnchorAsync("tenant-1");

        Assert.True(result.Allowed);
        Assert.Null(result.ErrorCode);
        Assert.NotNull(result.Staleness);
        Assert.False(result.Staleness.IsBreach);
    }

    [Fact]
    public async Task ValidateTimeAnchorAsync_ReturnsWarning_WhenAnchorStale()
    {
        var service = CreateService();
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow().AddSeconds(-5000), // Past warning threshold
            "test-source",
            "Roughtime",
            "fingerprint",
            "digest123");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var result = await service.ValidateTimeAnchorAsync("tenant-1");

        Assert.True(result.Allowed); // Allowed but with warning
        Assert.NotNull(result.Staleness);
        Assert.True(result.Staleness.IsWarning);
        Assert.Contains("warning", result.Reason, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenAnchorBreached()
    {
        var service = CreateService();
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow().AddSeconds(-8000), // Past breach threshold
            "test-source",
            "Roughtime",
            "fingerprint",
            "digest123");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var result = await service.ValidateTimeAnchorAsync("tenant-1");

        Assert.False(result.Allowed);
        Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorBreached, result.ErrorCode);
        Assert.NotNull(result.Staleness);
        Assert.True(result.Staleness.IsBreach);
    }

    [Fact]
    public async Task EnforceBundleImportPolicyAsync_AllowsImport_WhenAnchorValid()
    {
        var service = CreateService();
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow().AddMinutes(-30),
            "test-source",
            "Roughtime",
            "fingerprint",
            "digest123");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var result = await service.EnforceBundleImportPolicyAsync(
            "tenant-1",
            "bundle-123",
            _fixedTimeProvider.GetUtcNow().AddMinutes(-15));

        Assert.True(result.Allowed);
    }

    [Fact]
    public async Task EnforceBundleImportPolicyAsync_BlocksImport_WhenDriftExceeded()
    {
        var options = new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 }; // 1 hour max
        var service = CreateService(options);
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow().AddMinutes(-30),
            "test-source",
            "Roughtime",
            "fingerprint",
            "digest123");
        var budget = new StalenessBudget(86400, 172800); // Large budget

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var bundleTimestamp = _fixedTimeProvider.GetUtcNow().AddDays(-2); // 2 days ago

        var result = await service.EnforceBundleImportPolicyAsync(
            "tenant-1",
            "bundle-123",
            bundleTimestamp);

        Assert.False(result.Allowed);
        Assert.Equal(TimeAnchorPolicyErrorCodes.DriftExceeded, result.ErrorCode);
    }

    [Fact]
    public async Task EnforceOperationPolicyAsync_BlocksStrictOperations_WhenNoAnchor()
    {
        var options = new TimeAnchorPolicyOptions
        {
            StrictOperations = new[] { "attestation.sign" }
        };
        var service = CreateService(options);

        var result = await service.EnforceOperationPolicyAsync("tenant-1", "attestation.sign");

        Assert.False(result.Allowed);
        Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode);
    }

    [Fact]
    public async Task EnforceOperationPolicyAsync_AllowsNonStrictOperations_InNonStrictMode()
    {
        var options = new TimeAnchorPolicyOptions
        {
            StrictEnforcement = false,
            StrictOperations = new[] { "attestation.sign" }
        };
        var service = CreateService(options);

        var result = await service.EnforceOperationPolicyAsync("tenant-1", "some.other.operation");

        Assert.True(result.Allowed);
    }

    [Fact]
    public async Task CalculateDriftAsync_ReturnsNoDrift_WhenNoAnchor()
    {
        var service = CreateService();

        var result = await service.CalculateDriftAsync("tenant-1", _fixedTimeProvider.GetUtcNow());

        Assert.False(result.HasAnchor);
        Assert.Equal(TimeSpan.Zero, result.Drift);
        Assert.Null(result.AnchorTime);
    }

    [Fact]
    public async Task CalculateDriftAsync_ReturnsDrift_WhenAnchorExists()
    {
        var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 });
        var anchorTime = _fixedTimeProvider.GetUtcNow().AddMinutes(-30);
        var anchor = new TimeAnchor(anchorTime, "test", "Roughtime", "fp", "digest");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(15);
        var result = await service.CalculateDriftAsync("tenant-1", targetTime);

        Assert.True(result.HasAnchor);
        Assert.Equal(anchorTime, result.AnchorTime);
        Assert.Equal(45, (int)result.Drift.TotalMinutes); // 30 min + 15 min
        Assert.False(result.DriftExceedsThreshold);
    }

    [Fact]
    public async Task CalculateDriftAsync_DetectsExcessiveDrift()
    {
        var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 60 }); // 1 minute max
        var anchor = new TimeAnchor(
            _fixedTimeProvider.GetUtcNow(),
            "test",
            "Roughtime",
            "fp",
            "digest");
        var budget = new StalenessBudget(3600, 7200);

        await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None);

        var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(5); // 5 minutes drift
        var result = await service.CalculateDriftAsync("tenant-1", targetTime);

        Assert.True(result.HasAnchor);
        Assert.True(result.DriftExceedsThreshold);
    }

    private sealed class FakeTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;

        public FakeTimeProvider(DateTimeOffset now) => _now = now;

        public override DateTimeOffset GetUtcNow() => _now;
    }
}
@@ -0,0 +1,24 @@
using StellaOps.AirGap.Time.Models;

namespace StellaOps.AirGap.Time.Tests;

public class TimeStatusDtoTests
{
    [Fact]
    public void SerializesDeterministically()
    {
        var status = new TimeStatus(
            new TimeAnchor(DateTimeOffset.Parse("2025-01-01T00:00:00Z"), "source", "fmt", "fp", "digest"),
            new StalenessEvaluation(42, 10, 20, true, false),
            new StalenessBudget(10, 20),
            new Dictionary<string, StalenessEvaluation>
            {
                { "advisories", new StalenessEvaluation(42, 10, 20, true, false) }
            },
            DateTimeOffset.Parse("2025-01-02T00:00:00Z"));

        var json = TimeStatusDto.FromStatus(status).ToJson();
        Assert.Contains("\"contentStaleness\":{\"advisories\":{", json);
        Assert.Contains("\"ageSeconds\":42", json);
    }
}
@@ -0,0 +1,45 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;
using StellaOps.AirGap.Time.Stores;

namespace StellaOps.AirGap.Time.Tests;

public class TimeStatusServiceTests
{
    [Fact]
    public async Task ReturnsUnknownWhenNoAnchor()
    {
        var svc = Build(out var telemetry);
        var status = await svc.GetStatusAsync("t1", DateTimeOffset.UnixEpoch);
        Assert.Equal(TimeAnchor.Unknown, status.Anchor);
        Assert.False(status.Staleness.IsWarning);
        Assert.Equal(0, telemetry.GetLatest("t1")?.AgeSeconds ?? 0);
    }

    [Fact]
    public async Task PersistsAnchorAndBudget()
    {
        var svc = Build(out var telemetry);
        var anchor = new TimeAnchor(DateTimeOffset.UnixEpoch, "source", "fmt", "fp", "digest");
        var budget = new StalenessBudget(10, 20);

        await svc.SetAnchorAsync("t1", anchor, budget);
        var status = await svc.GetStatusAsync("t1", DateTimeOffset.UnixEpoch.AddSeconds(15));

        Assert.Equal(anchor, status.Anchor);
        Assert.True(status.Staleness.IsWarning);
        Assert.False(status.Staleness.IsBreach);
        Assert.Equal(15, status.Staleness.AgeSeconds);
        var snap = telemetry.GetLatest("t1");
        Assert.NotNull(snap);
        Assert.Equal(status.Staleness.AgeSeconds, snap!.AgeSeconds);
        Assert.True(snap.IsWarning);
    }

    private static TimeStatusService Build(out TimeTelemetry telemetry)
    {
        telemetry = new TimeTelemetry();
        var options = Microsoft.Extensions.Options.Options.Create(new AirGapOptions());
        return new TimeStatusService(new InMemoryTimeAnchorStore(), new StalenessCalculator(), telemetry, options);
    }
}

@@ -0,0 +1,27 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

public class TimeTelemetryTests
{
    [Fact]
    public void Records_latest_snapshot_per_tenant()
    {
        var telemetry = new TimeTelemetry();
        var status = new TimeStatus(
            new TimeAnchor(DateTimeOffset.UnixEpoch, "src", "fmt", "fp", "digest"),
            new StalenessEvaluation(90, 60, 120, true, false),
            StalenessBudget.Default,
            new Dictionary<string, StalenessEvaluation> { { "advisories", new StalenessEvaluation(90, 60, 120, true, false) } },
            DateTimeOffset.UtcNow);

        telemetry.Record("t1", status);

        var snap = telemetry.GetLatest("t1");
        Assert.NotNull(snap);
        Assert.Equal(90, snap!.AgeSeconds);
        Assert.True(snap.IsWarning);
        Assert.False(snap.IsBreach);
    }
}

@@ -0,0 +1,34 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Parsing;

namespace StellaOps.AirGap.Time.Tests;

public class TimeTokenParserTests
{
    [Fact]
    public void EmptyTokenFails()
    {
        var parser = new TimeTokenParser();
        var result = parser.TryParse(Array.Empty<byte>(), TimeTokenFormat.Roughtime, out var anchor);

        Assert.False(result.IsValid);
        Assert.Equal("token-empty", result.Reason);
        Assert.Equal(TimeAnchor.Unknown, anchor);
    }

    [Fact]
    public void RoughtimeTokenProducesDigest()
    {
        var parser = new TimeTokenParser();
        var token = new byte[] { 0x01, 0x02, 0x03 };

        var result = parser.TryParse(token, TimeTokenFormat.Roughtime, out var anchor);

        Assert.True(result.IsValid);
        Assert.Equal("Roughtime", anchor.Format);
        Assert.Equal("roughtime-token", anchor.Source);
        Assert.Equal("structure-stubbed", result.Reason);
        Assert.Matches("^[0-9a-f]{64}$", anchor.TokenDigest);
        Assert.NotEqual(DateTimeOffset.UnixEpoch, anchor.AnchorTime); // deterministic derivation
    }
}

@@ -0,0 +1,28 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Parsing;
using StellaOps.AirGap.Time.Services;

namespace StellaOps.AirGap.Time.Tests;

public class TimeVerificationServiceTests
{
    [Fact]
    public void FailsWithoutTrustRoots()
    {
        var svc = new TimeVerificationService();
        var result = svc.Verify(new byte[] { 0x01 }, TimeTokenFormat.Roughtime, Array.Empty<TimeTrustRoot>(), out _);
        Assert.False(result.IsValid);
        Assert.Equal("trust-roots-required", result.Reason);
    }

    [Fact]
    public void SucceedsForRoughtimeWithTrustRoot()
    {
        var svc = new TimeVerificationService();
        var trust = new[] { new TimeTrustRoot("k1", new byte[] { 0x01 }, "rsassa-pss-sha256") };
        var result = svc.Verify(new byte[] { 0x01, 0x02 }, TimeTokenFormat.Roughtime, trust, out var anchor);
        Assert.True(result.IsValid);
        Assert.Equal("Roughtime", anchor.Format);
        Assert.Equal("k1", anchor.SignatureFingerprint);
    }
}

20
src/__Tests/EvidenceLocker/Bundles/Golden/README.md
Normal file
@@ -0,0 +1,20 @@
# Evidence Locker Golden Fixtures (EB10)

Purpose: reference bundles and replay records used by CI to prove deterministic packaging, DSSE subject stability, and portable redaction behaviour.

## Layout
- `sealed/` – sealed bundle ingredients (`manifest.json`, `checksums.txt`, DSSE `signature.json`, `bundle.json`, evidence ndjson) plus `expected.json`.
- `portable/` – redacted bundle ingredients and `expected.json` noting masked fields and tenant token.
- `replay/` – `replay.ndjson` with `expected.json` (recordDigest, sequence, ledger URI); ordering is canonical (recordedAtUtc, scanId).

## Expectations
- Gzip timestamp pinned to `2025-01-01T00:00:00Z`; tar entries use `0644` perms and fixed mtime.
- `checksums.txt` sorted lexicographically by `canonicalPath`; Merkle root equals `sha256sum checksums.txt`.
- DSSE subject ties to the Merkle root; manifest validates against `schemas/bundle.manifest.schema.json`. See the sketch after this list.
- Portable bundles must exclude tenant identifiers and include redaction metadata in the manifest.
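
A minimal verification sketch (illustrative, not the shipped CI check): it recomputes the Merkle root as the SHA-256 of `checksums.txt` and compares it with the `merkleRoot` and `subject` fields recorded in `expected.json`. The directory name is an assumption based on the layout above.

```csharp
// Sketch only: verify Merkle root == sha256(checksums.txt) and that it matches the DSSE subject.
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text.Json;

var fixtureDir = "sealed"; // or "portable"

// Equivalent of `sha256sum checksums.txt`.
using var checksums = File.OpenRead(Path.Combine(fixtureDir, "checksums.txt"));
var computedRoot = Convert.ToHexString(SHA256.HashData(checksums)).ToLowerInvariant();

// expected.json pins the Merkle root and the DSSE subject ("sha256:<root>").
using var expected = JsonDocument.Parse(File.ReadAllText(Path.Combine(fixtureDir, "expected.json")));
var expectedRoot = expected.RootElement.GetProperty("merkleRoot").GetString();
var subject = expected.RootElement.GetProperty("subject").GetString();

if (computedRoot != expectedRoot || subject != $"sha256:{computedRoot}")
{
    throw new InvalidOperationException("Merkle root drifted from the golden fixture.");
}
```

This mirrors the `sha256sum` steps captured in `instructions.txt` and `instructions-portable.txt`.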

## How to (re)generate
1. Set `TZ=UTC` and ensure deterministic tool versions.
2. Run the EvidenceLocker pipeline to produce the sealed bundle; copy the outputs here together with the expected hash values.
3. Produce the portable bundle and replay records from the same input set; write `expected.json` capturing root hashes and replay digests.
4. Update the xUnit tests in `StellaOps.EvidenceLocker.Tests` to consume these fixtures without network calls; a fixture-driven sketch follows.
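
For step 4, a fixture-driven test might look like the sketch below (class, method, and the copy-to-output path are illustrative; the field names come from `replay/expected.json` and `replay.ndjson`). It checks the canonical `(recordedAtUtc, scanId)` ordering and that the NDJSON digest still matches `recordDigest`.

```csharp
// Sketch only: verify replay fixture ordering and digest stability without network calls.
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;
using Xunit;

public class GoldenReplayFixtureTests
{
    // Assumed location after the fixtures are copied to the test output directory.
    private static readonly string ReplayDir = Path.Combine("Bundles", "Golden", "replay");

    [Fact]
    public void ReplayRecordsAreCanonicallyOrderedAndDigestStable()
    {
        var replayPath = Path.Combine(ReplayDir, "replay.ndjson");

        // recordDigest in expected.json is "sha256:<hex of replay.ndjson>".
        var digest = "sha256:" + Convert.ToHexString(SHA256.HashData(File.ReadAllBytes(replayPath))).ToLowerInvariant();
        using var expected = JsonDocument.Parse(File.ReadAllText(Path.Combine(ReplayDir, "expected.json")));
        Assert.Equal(expected.RootElement.GetProperty("recordDigest").GetString(), digest);

        // Canonical ordering key: (recordedAtUtc, scanId), read from each NDJSON record.
        var keys = File.ReadLines(replayPath)
            .Select(line =>
            {
                using var doc = JsonDocument.Parse(line);
                return (RecordedAt: doc.RootElement.GetProperty("recordedAtUtc").GetString()!,
                        ScanId: doc.RootElement.GetProperty("scanId").GetString()!);
            })
            .ToList();

        Assert.Equal(
            keys.OrderBy(k => k.RecordedAt, StringComparer.Ordinal)
                .ThenBy(k => k.ScanId, StringComparer.Ordinal),
            keys);
    }
}
```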

@@ -0,0 +1,7 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenant": "redacted",
  "kind": "evaluation",
  "createdAt": "2025-12-04T00:00:00Z",
  "portable": true
}

@@ -0,0 +1,14 @@
{
  "algorithm": "sha256",
  "root": "72c82a7a3d114164d491e2ecd7098bc015b115ee1ec7c42d648f0348e573cfcf",
  "generatedAt": "2025-12-04T00:00:00Z",
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "entries": [
    { "canonicalPath": "bundle.json", "sha256": "10695174db1b549d77be583e529a249713e9bd23e46cc5e73250db5dfc92c4a9", "sizeBytes": 160 },
    { "canonicalPath": "instructions-portable.txt", "sha256": "dd2a3b62857cf331b423e7dc3b869ad2dc9bfa852109a20bcbecc7bcef9bdcb7", "sizeBytes": 180 },
    { "canonicalPath": "linksets.ndjson", "sha256": "a4d84bbc3262190fd3e1f5dbc15915c97e464326a56534483ce810c905288b9d", "sizeBytes": 151 },
    { "canonicalPath": "observations.ndjson", "sha256": "c523f82e71c8a1bd9be0650883faf00ec39a792023066105d7cda544ad6ef5fd", "sizeBytes": 149 }
  ],
  "chunking": { "strategy": "none" }
}

@@ -0,0 +1,18 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenantRedacted": true,
  "merkleRoot": "72c82a7a3d114164d491e2ecd7098bc015b115ee1ec7c42d648f0348e573cfcf",
  "subject": "sha256:72c82a7a3d114164d491e2ecd7098bc015b115ee1ec7c42d648f0348e573cfcf",
  "entries": [
    "bundle.json",
    "instructions-portable.txt",
    "linksets.ndjson",
    "observations.ndjson"
  ],
  "dsseKeyId": "demo-ed25519",
  "logPolicy": "skip-offline",
  "redaction": {
    "maskedFields": ["tenantId"],
    "tenantToken": "portable-tenant-01"
  }
}

@@ -0,0 +1,4 @@
Portable bundle verification:
1) sha256sum -c checksums.txt
2) expect no tenant identifiers in manifest or bundle.json
3) merkle_root=$(sha256sum checksums.txt | awk '{print $1}')

@@ -0,0 +1 @@
{"linksetId":"lnk-demo-001","advisoryId":"CVE-2025-0001","components":["pkg:deb/openssl@1.1.1w"],"normalized":true,"createdAt":"2025-11-30T00:05:00Z"}

@@ -0,0 +1,58 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "kind": "evaluation",
  "createdAt": "2025-12-04T00:00:00Z",
  "metadata": {
    "scope": "demo",
    "portable": "true"
  },
  "redaction": {
    "portable": true,
    "maskedFields": ["tenantId"],
    "tenantToken": "portable-tenant-01"
  },
  "entries": [
    {
      "section": "manifest",
      "canonicalPath": "bundle.json",
      "sha256": "10695174db1b549d77be583e529a249713e9bd23e46cc5e73250db5dfc92c4a9",
      "sizeBytes": 160,
      "mediaType": "application/json",
      "attributes": {
        "role": "bundle",
        "portable": "true"
      }
    },
    {
      "section": "evidence",
      "canonicalPath": "observations.ndjson",
      "sha256": "c523f82e71c8a1bd9be0650883faf00ec39a792023066105d7cda544ad6ef5fd",
      "sizeBytes": 149,
      "mediaType": "application/x-ndjson",
      "attributes": {
        "dataset": "observations"
      }
    },
    {
      "section": "evidence",
      "canonicalPath": "linksets.ndjson",
      "sha256": "a4d84bbc3262190fd3e1f5dbc15915c97e464326a56534483ce810c905288b9d",
      "sizeBytes": 151,
      "mediaType": "application/x-ndjson",
      "attributes": {
        "dataset": "linksets"
      }
    },
    {
      "section": "docs",
      "canonicalPath": "instructions-portable.txt",
      "sha256": "dd2a3b62857cf331b423e7dc3b869ad2dc9bfa852109a20bcbecc7bcef9bdcb7",
      "sizeBytes": 180,
      "mediaType": "text/plain",
      "attributes": {
        "purpose": "verification"
      }
    }
  ]
}

@@ -0,0 +1 @@
{"observationId":"obs-demo-001","advisoryId":"CVE-2025-0001","component":"pkg:deb/openssl@1.1.1w","source":"nvd","fetchedAt":"2025-11-30T00:00:00Z"}

@@ -0,0 +1,15 @@
{
  "payloadType": "application/vnd.stellaops.evidence+json",
"payload": "ewogICJidW5kbGVJZCI6ICIxMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMSIsCiAgInRlbmFudElkIjogImFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhIiwKICAia2luZCI6ICJldmFsdWF0aW9uIiwKICAiY3JlYXRlZEF0IjogIjIwMjUtMTItMDRUMDA6MDA6MDBaIiwKICAibWV0YWRhdGEiOiB7CiAgICAic2NvcGUiOiAiZGVtbyIsCiAgICAicG9ydGFibGUiOiAidHJ1ZSIKICB9LAogICJyZWRhY3Rpb24iOiB7CiAgICAicG9ydGFibGUiOiB0cnVlLAogICAgIm1hc2tlZEZpZWxkcyI6IFsidGVuYW50SWQiXSwKICAgICJ0ZW5hbnRUb2tlbiI6ICJwb3J0YWJsZS10ZW5hbnQtMDEiCiAgfSwKICAiZW50cmllcyI6IFsKICAgIHsKICAgICAgInNlY3Rpb24iOiAibWFuaWZlc3QiLAogICAgICAiY2Fub25pY2FsUGF0aCI6ICJidW5kbGUuanNvbiIsCiAgICAgICJzaGEyNTYiOiAiMTA2OTUxNzRkYjFiNTQ5ZDc3YmU1ODNlNTI5YTI0OTcxM2U5YmQyM2U0NmNjNWU3MzI1MGRiNWRmYzkyYzRhOSIsCiAgICAgICJzaXplQnl0ZXMiOiAxNjAsCiAgICAgICJtZWRpYVR5cGUiOiAiYXBwbGljYXRpb24vanNvbiIsCiAgICAgICJhdHRyaWJ1dGVzIjogewogICAgICAgICJyb2xlIjogImJ1bmRsZSIsCiAgICAgICAgInBvcnRhYmxlIjogInRydWUiCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJzZWN0aW9uIjogImV2aWRlbmNlIiwKICAgICAgImNhbm9uaWNhbFBhdGgiOiAib2JzZXJ2YXRpb25zLm5kanNvbiIsCiAgICAgICJzaGEyNTYiOiAiYzUyM2Y4MmU3MWM4YTFiZDliZTA2NTA4ODNmYWYwMGVjMzlhNzkyMDIzMDY2MTA1ZDdjZGE1NDRhZDZlZjVmZCIsCiAgICAgICJzaXplQnl0ZXMiOiAxNDksCiAgICAgICJtZWRpYVR5cGUiOiAiYXBwbGljYXRpb24veC1uZGpzb24iLAogICAgICAiYXR0cmlidXRlcyI6IHsKICAgICAgICAiZGF0YXNldCI6ICJvYnNlcnZhdGlvbnMiCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJzZWN0aW9uIjogImV2aWRlbmNlIiwKICAgICAgImNhbm9uaWNhbFBhdGgiOiAibGlua3NldHMubmRqc29uIiwKICAgICAgInNoYTI1NiI6ICJhNGQ4NGJiYzMyNjIxOTBmZDNlMWY1ZGJjMTU5MTVjOTdlNDY0MzI2YTU2NTM0NDgzY2U4MTBjOTA1Mjg4YjlkIiwKICAgICAgInNpemVCeXRlcyI6IDE1MSwKICAgICAgIm1lZGlhVHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgICAgICJhdHRyaWJ1dGVzIjogewogICAgICAgICJkYXRhc2V0IjogImxpbmtzZXRzIgogICAgICB9CiAgICB9LAogICAgewogICAgICAic2VjdGlvbiI6ICJkb2NzIiwKICAgICAgImNhbm9uaWNhbFBhdGgiOiAiaW5zdHJ1Y3Rpb25zLXBvcnRhYmxlLnR4dCIsCiAgICAgICJzaGEyNTYiOiAiZGQyYTNiNjI4NTdjZjMzMWI0MjNlN2RjM2I4NjlhZDJkYzliZmE4NTIxMDlhMjBiY2JlY2M3YmNlZjliZGNiNyIsCiAgICAgICJzaXplQnl0ZXMiOiAxODAsCiAgICAgICJtZWRpYVR5cGUiOiAidGV4dC9wbGFpbiIsCiAgICAgICJhdHRyaWJ1dGVzIjogewogICAgICAgICJwdXJwb3NlIjogInZlcmlmaWNhdGlvbiIKICAgICAgfQogICAgfQogIF0KfQo=",
  "signatures": [
    {
      "keyid": "demo-ed25519",
      "sig": "MEQCIGZkZGVtb3NpZw==",
      "algorithm": "ed25519",
      "provider": "sovereign-default",
      "subjectMerkleRoot": "72c82a7a3d114164d491e2ecd7098bc015b115ee1ec7c42d648f0348e573cfcf",
      "transparency": null,
      "log_policy": "skip-offline"
    }
  ]
}

@@ -0,0 +1,7 @@
{
  "recordDigest": "sha256:8765b4a8411e76b36a2d2d43eba4c2197b4dcf0c5c0a11685ce46780a7c54222",
  "sequence": 0,
  "ledgerUri": "offline://demo-ledger",
  "dsseEnvelope": "ZHNzZV9lbmNfZGVtbyIs",
  "ordering": "recordedAtUtc, scanId"
}

@@ -0,0 +1 @@
{"scanId":"22222222-2222-4222-8222-222222222222","tenantId":"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","subjectDigest":"sha256:c15ab4d1348da9e5000a5d3da50790ea120d865cafb0961845ed6f1e96927596","scanKind":"sbom","startedAtUtc":"2025-12-03T00:00:00Z","completedAtUtc":"2025-12-03T00:10:00Z","recordedAtUtc":"2025-12-03T00:10:01Z","artifacts":[{"type":"sbom","digest":"sha256:aaaa","uri":"s3://demo/sbom"}],"provenance":{"dsseEnvelope":"ZHNzZV9lbmNfZGVtbyIs"},"summary":{"findings":1,"advisories":1,"policies":0}}

@@ -0,0 +1 @@
8765b4a8411e76b36a2d2d43eba4c2197b4dcf0c5c0a11685ce46780a7c54222 replay.ndjson

@@ -0,0 +1,7 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "kind": "evaluation",
  "createdAt": "2025-12-04T00:00:00Z",
  "portable": false
}

@@ -0,0 +1,14 @@
{
  "algorithm": "sha256",
  "root": "c15ab4d1348da9e5000a5d3da50790ea120d865cafb0961845ed6f1e96927596",
  "generatedAt": "2025-12-04T00:00:00Z",
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "entries": [
    { "canonicalPath": "bundle.json", "sha256": "86872809b585f9b43f53b12a8fb27dbb0a3b9c4f74e41c38118877ebcff1c273", "sizeBytes": 187 },
    { "canonicalPath": "instructions.txt", "sha256": "39a5880af850121919a540dd4528e49a3b5687cb922195b07db2c56f9e90dd1b", "sizeBytes": 160 },
    { "canonicalPath": "linksets.ndjson", "sha256": "a4d84bbc3262190fd3e1f5dbc15915c97e464326a56534483ce810c905288b9d", "sizeBytes": 151 },
    { "canonicalPath": "observations.ndjson", "sha256": "c523f82e71c8a1bd9be0650883faf00ec39a792023066105d7cda544ad6ef5fd", "sizeBytes": 149 }
  ],
  "chunking": { "strategy": "none" }
}

@@ -0,0 +1,14 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "merkleRoot": "c15ab4d1348da9e5000a5d3da50790ea120d865cafb0961845ed6f1e96927596",
  "subject": "sha256:c15ab4d1348da9e5000a5d3da50790ea120d865cafb0961845ed6f1e96927596",
  "entries": [
    "bundle.json",
    "instructions.txt",
    "linksets.ndjson",
    "observations.ndjson"
  ],
  "dsseKeyId": "demo-ed25519",
  "logPolicy": "skip-offline"
}

@@ -0,0 +1,4 @@
Offline verification steps:
1) sha256sum -c checksums.txt
2) merkle_root=$(sha256sum checksums.txt | awk '{print $1}')
3) compare merkle_root with DSSE subject

@@ -0,0 +1 @@
{"linksetId":"lnk-demo-001","advisoryId":"CVE-2025-0001","components":["pkg:deb/openssl@1.1.1w"],"normalized":true,"createdAt":"2025-11-30T00:05:00Z"}

@@ -0,0 +1,52 @@
{
  "bundleId": "11111111111111111111111111111111",
  "tenantId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
  "kind": "evaluation",
  "createdAt": "2025-12-04T00:00:00Z",
  "metadata": {
    "scope": "demo",
    "advisory": "CVE-2025-0001"
  },
  "entries": [
    {
      "section": "manifest",
      "canonicalPath": "bundle.json",
      "sha256": "86872809b585f9b43f53b12a8fb27dbb0a3b9c4f74e41c38118877ebcff1c273",
      "sizeBytes": 187,
      "mediaType": "application/json",
      "attributes": {
        "role": "bundle"
      }
    },
    {
      "section": "evidence",
      "canonicalPath": "observations.ndjson",
      "sha256": "c523f82e71c8a1bd9be0650883faf00ec39a792023066105d7cda544ad6ef5fd",
      "sizeBytes": 149,
      "mediaType": "application/x-ndjson",
      "attributes": {
        "dataset": "observations"
      }
    },
    {
      "section": "evidence",
      "canonicalPath": "linksets.ndjson",
      "sha256": "a4d84bbc3262190fd3e1f5dbc15915c97e464326a56534483ce810c905288b9d",
      "sizeBytes": 151,
      "mediaType": "application/x-ndjson",
      "attributes": {
        "dataset": "linksets"
      }
    },
    {
      "section": "docs",
      "canonicalPath": "instructions.txt",
      "sha256": "39a5880af850121919a540dd4528e49a3b5687cb922195b07db2c56f9e90dd1b",
      "sizeBytes": 160,
      "mediaType": "text/plain",
      "attributes": {
        "purpose": "verification"
      }
    }
  ]
}

@@ -0,0 +1 @@
{"observationId":"obs-demo-001","advisoryId":"CVE-2025-0001","component":"pkg:deb/openssl@1.1.1w","source":"nvd","fetchedAt":"2025-11-30T00:00:00Z"}

Some files were not shown because too many files have changed in this diff