Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Risk Bundle CI / risk-bundle-build (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Risk Bundle CI / risk-bundle-offline-kit (push) Has been cancelled
Risk Bundle CI / publish-checksums (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled
.gitea/workflows/risk-bundle-ci.yml (new file, +198 lines)
@@ -0,0 +1,198 @@

```yaml
name: Risk Bundle CI

on:
  push:
    branches: [ main ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'ops/devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'ops/devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  workflow_dispatch:
    inputs:
      include_osv:
        description: 'Include OSV providers (larger bundle)'
        type: boolean
        default: false
      publish_checksums:
        description: 'Publish checksums to artifact store'
        type: boolean
        default: true

jobs:
  risk-bundle-build:
    runs-on: ubuntu-22.04
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: scripts/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true

      - name: Test RiskBundle unit tests
        run: |
          mkdir -p $ARTIFACT_DIR
          dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
            -c Release \
            --filter "FullyQualifiedName~RiskBundle" \
            --logger "trx;LogFileName=risk-bundle-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Build risk bundle (fixtures)
        run: |
          mkdir -p $BUNDLE_OUTPUT
          ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only

      - name: Verify bundle integrity
        run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"

      - name: Generate checksums
        run: |
          cd $BUNDLE_OUTPUT
          sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
          sha256sum manifest.json > manifest.json.sha256
          cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
          echo "Bundle checksums:"
          cat checksums.txt

      - name: Upload risk bundle artifacts
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: |
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
            ${{ env.BUNDLE_OUTPUT }}/manifest.json
            ${{ env.BUNDLE_OUTPUT }}/checksums.txt
            ${{ env.ARTIFACT_DIR }}/*.trx

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: risk-bundle-test-results
          path: ${{ env.ARTIFACT_DIR }}/*.trx

  risk-bundle-offline-kit:
    runs-on: ubuntu-22.04
    needs: risk-bundle-build
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Package for offline kit
        run: |
          mkdir -p $OFFLINE_KIT_DIR/risk-bundles
          cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
          cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/

          # Create offline kit manifest entry
          cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
          {
            "component": "risk-bundle",
            "version": "$(date -u +%Y%m%d-%H%M%S)",
            "files": [
              {"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
              {"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
            ],
            "verification": {
              "checksums": "checksums.txt",
              "signature": "risk-bundle.tar.gz.sig"
            }
          }
          EOF

      - name: Verify offline kit structure
        run: |
          echo "Offline kit structure:"
          find $OFFLINE_KIT_DIR -type f
          echo ""
          echo "Checksum verification:"
          cd $OFFLINE_KIT_DIR/risk-bundles
          sha256sum -c checksums.txt

      - name: Upload offline kit
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-offline-kit
          path: ${{ env.OFFLINE_KIT_DIR }}

  publish-checksums:
    runs-on: ubuntu-22.04
    needs: risk-bundle-build
    if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Publish checksums
        run: |
          echo "Publishing checksums for risk bundle..."
          CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
          mkdir -p $CHECKSUM_DIR
          cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
          cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/

          # Create latest symlink manifest
          cat > out/checksums/risk-bundle/latest.json <<EOF
          {
            "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "path": "$(date -u +%Y-%m-%d)/checksums.txt",
            "manifest": "$(date -u +%Y-%m-%d)/manifest.json"
          }
          EOF

          echo "Checksums published to $CHECKSUM_DIR"
          cat $CHECKSUM_DIR/checksums.txt

      - name: Upload published checksums
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-published-checksums
          path: out/checksums/risk-bundle/
```
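Both the build and offline-kit jobs rely on `sha256sum` plus `sha256sum -c checksums.txt` for integrity. As a rough illustration of what a consumer can do on the receiving side where GNU coreutils may not be available, here is a minimal C# sketch (not part of this commit; file names assume the layout the "Generate checksums" step produces):

```csharp
// Illustrative only: replays `sha256sum -c checksums.txt` in C#.
using System;
using System.IO;
using System.Security.Cryptography;

static string Sha256Hex(string path)
{
    using var stream = File.OpenRead(path);
    return Convert.ToHexString(SHA256.HashData(stream)).ToLowerInvariant();
}

var allOk = true;
foreach (var line in File.ReadLines("checksums.txt"))
{
    // sha256sum lines look like "<64 hex chars>  <filename>".
    var parts = line.Split("  ", 2, StringSplitOptions.TrimEntries);
    if (parts.Length != 2)
    {
        continue;
    }

    var ok = Sha256Hex(parts[1]) == parts[0].ToLowerInvariant();
    allOk &= ok;
    Console.WriteLine($"{parts[1]}: {(ok ? "OK" : "MISMATCH")}");
}

Environment.Exit(allOk ? 0 : 1); // non-zero exit on mismatch, like sha256sum -c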
```diff
@@ -42,9 +42,9 @@
 | 8 | EXPORT-OBS-55-001 | DONE | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. |
 | 9 | EXPORT-RISK-69-001 | DONE | Schema blockers resolved; AdvisoryAI evidence bundle schema available. | Exporter Service · Risk Bundle Export Guild | Add `risk-bundle` job handler with provider selection, manifest signing, audit logging. |
 | 10 | EXPORT-RISK-69-002 | DONE | Depends on EXPORT-RISK-69-001. | Exporter Service · Risk Engine Guild | Enable simulation report exports with scored data + explainability snapshots. |
-| 11 | EXPORT-RISK-70-001 | TODO | Depends on EXPORT-RISK-69-002. | Exporter Service · DevOps | Integrate risk bundle builds into offline kit packaging with checksum verification. |
-| 12 | EXPORT-SVC-35-001 | TODO | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service | Bootstrap exporter service project, config, Postgres migrations for `export_profiles/runs/inputs/distributions` with tenant scoping + tests. |
-| 13 | EXPORT-SVC-35-002 | TODO | Depends on EXPORT-SVC-35-001. | Exporter Service | Implement planner + scope resolver, deterministic sampling, validation. |
+| 11 | EXPORT-RISK-70-001 | DONE | Depends on EXPORT-RISK-69-002. | Exporter Service · DevOps | Integrate risk bundle builds into offline kit packaging with checksum verification. |
+| 12 | EXPORT-SVC-35-001 | DONE | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service | Bootstrap exporter service project, config, Postgres migrations for `export_profiles/runs/inputs/distributions` with tenant scoping + tests. |
+| 13 | EXPORT-SVC-35-002 | DONE | Depends on EXPORT-SVC-35-001. | Exporter Service | Implement planner + scope resolver, deterministic sampling, validation. |
 | 14 | EXPORT-SVC-35-003 | TODO | Depends on EXPORT-SVC-35-002. | Exporter Service | JSON adapters (`json:raw`, `json:policy`) with normalization/redaction/compression/manifest counts. |
 | 15 | EXPORT-SVC-35-004 | TODO | Depends on EXPORT-SVC-35-003. | Exporter Service | Mirror (full) adapter producing filesystem layout, indexes, manifests, README. |
 | 16 | EXPORT-SVC-35-005 | TODO | Depends on EXPORT-SVC-35-004. | Exporter Service | Manifest/provenance writer + KMS signing/attestation (detached + embedded). |
@@ -93,6 +93,9 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | **EXPORT-SVC-35-002 DONE:** Implemented planner and scope resolver with deterministic sampling and validation. Created `Planner/` namespace in Core with: `ExportScopeModels.cs` (ExportScope with TargetKinds, SourceRefs, DateRangeFilter, MaxItems; SamplingConfig with Strategy enum Random/First/Last/Stratified/Systematic, Size, Seed for deterministic output, StratifyBy; ResolvedExportItem, ScopeResolutionResult with Items, SampledItems, EstimatedTotalSizeBytes, SamplingMetadata, Warnings; ExportScopeValidationError with Code, Message, Severity enum Warning/Error/Critical), `ExportPlanModels.cs` (ExportPlanRequest with ProfileId, TenantId, ScopeOverride, FormatOverride, DryRun, CorrelationId, InitiatedBy; ExportPlan with PlanId, ProfileId, TenantId, Status Ready/Creating/Executing/Completed/Failed/Cancelled/Expired, ResolvedScope, Format, Phases list, TotalItems, EstimatedSizeBytes, EstimatedDuration, timestamps, Warnings, ValidationErrors; ExportPlanResult with Success, Plan, ErrorMessage, ValidationErrors factory methods; ExportPlanPhase with Order, Name, Kind enum DataFetch/Transform/WriteOutput/GenerateManifest/Sign/Distribute/Verify, ItemCount, EstimatedSizeBytes, EstimatedDuration, Dependencies, Parameters; ExportFormatOptions with Format enum Json/JsonNdjson/Mirror/OfflineKit/Custom, Compression enum None/Gzip/Zstd, IncludeManifest, IncludeChecksums, RedactFields, NormalizeTimestamps, SortKeys). `IExportScopeResolver.cs` interface with ResolveAsync, ValidateAsync, EstimateAsync methods. `ExportScopeResolver.cs` implementation with: ValidateAsync (checks TargetKinds against valid set sbom/vex/attestation/scan-report/policy-result/evidence/risk-bundle/advisory, validates DateRange From<To, validates SamplingConfig has Size>0 and Stratified has StratifyBy field, warns on potentially large exports), ResolveAsync (generates mock items, applies sampling with deterministic Random seeding via seed parameter, First/Last sampling, Stratified by field grouping), EstimateAsync (returns item count, estimated bytes, estimated processing time). `IExportPlanner.cs` interface with CreatePlanAsync, GetPlanAsync, ValidatePlanAsync, CancelPlanAsync. `ExportPlanner.cs` implementation with: ConcurrentDictionary in-memory plan store, CreatePlanAsync (loads profile via IExportProfileRepository, validates Active status, parses ScopeJson/FormatJson, validates scope, resolves scope to items, builds phases via BuildPhases, creates plan with 60-minute validity), GetPlanAsync, ValidatePlanAsync (checks expiration, re-validates scope), CancelPlanAsync (only Ready/Creating status). BuildPhases creates ordered phases: DataFetch→Transform (conditional on redaction/normalization/sorting)→WriteOutput→GenerateManifest→Sign (conditional on Mirror format). `IExportProfileRepository` interface with GetByIdAsync, GetActiveProfilesAsync, CreateAsync, UpdateAsync. `InMemoryExportProfileRepository` implementation with ConcurrentDictionary keyed by (TenantId, ProfileId). Changed ExportProfile from class to record to support `with` expressions in plan updates. Created tests: `ExportScopeResolverTests.cs` (21 test cases for scope resolution, validation, deterministic sampling, estimation), `ExportPlannerTests.cs` (12 test cases for plan creation, validation, cancellation, phase generation, correlation tracking). Core project builds successfully with 0 errors. | Implementer |
+| 2025-12-07 | **EXPORT-SVC-35-001 DONE:** Bootstrapped exporter service with Postgres migrations for export data layer. Created `Configuration/ExportCenterOptions.cs` in Core with: `ExportCenterOptions` (DatabaseOptions, ObjectStoreOptions, TimelineOptions, SigningOptions, QuotaOptions), `DatabaseOptions` (ConnectionString, ApplyMigrationsAtStartup). Created domain models in `Domain/`: `ExportProfile.cs` (ProfileId, TenantId, Name, Description, Kind, Status, ScopeJson, FormatJson, SigningJson, Schedule, timestamps; enums ExportProfileKind AdHoc/Scheduled/EventDriven/Continuous, ExportProfileStatus Draft/Active/Paused/Archived), `ExportRun.cs` (RunId, ProfileId, TenantId, Status, Trigger, CorrelationId, InitiatedBy, item counts, TotalSizeBytes, ErrorJson; enums ExportRunStatus Queued→Cancelled, ExportRunTrigger Manual/Scheduled/Event/Api), `ExportInput.cs` (InputId, RunId, TenantId, Kind, Status, SourceRef, Name, ContentHash, SizeBytes, MetadataJson; enums ExportInputKind Sbom/Vex/Attestation/ScanReport/PolicyResult/Evidence/RiskBundle/Advisory, ExportInputStatus Pending→Skipped), `ExportDistribution.cs` (DistributionId, RunId, TenantId, Kind, Status, Target, ArtifactPath, ArtifactHash, SizeBytes, ContentType, MetadataJson, AttemptCount; enums ExportDistributionKind FileSystem/AmazonS3/Mirror/OfflineKit/Webhook, ExportDistributionStatus Pending→Cancelled). Created database infrastructure in Infrastructure `Db/`: `MigrationScript.cs` (version parsing, SHA256 checksum, line-ending normalization), `MigrationLoader.cs` (loads embedded SQL resources ordered by version), `ExportCenterDataSource.cs` (NpgsqlDataSource with tenant session config via `app.current_tenant`), `ExportCenterMigrationRunner.cs` (applies migrations with checksum validation), `ExportCenterDbServiceExtensions.cs` (DI registration, `ExportCenterMigrationHostedService` for startup migrations). Created `Db/Migrations/001_initial_schema.sql` with schemas export_center/export_center_app, `require_current_tenant()` function, tables (export_profiles, export_runs, export_inputs, export_distributions) with RLS policies, indexes (tenant_status, profile_created, correlation), FK constraints, `update_updated_at` trigger. Updated csproj to add Npgsql 8.0.3 and EmbeddedResource for SQL files. Added tests: `MigrationScriptTests.cs` (version parsing, SHA256 determinism, line-ending normalization), `MigrationLoaderTests.cs` (resource loading, ordering, validation), `ExportProfileTests.cs`/`ExportRunTests.cs`/`ExportInputTests.cs`/`ExportDistributionTests.cs` (domain model construction, enum value verification). Core and Infrastructure projects build successfully with 0 errors. | Implementer |
+| 2025-12-07 | **EXPORT-RISK-70-001 DONE:** Integrated risk bundle builds into offline kit packaging with checksum verification. Added to `OfflineKitModels.cs`: `OfflineKitRiskBundleEntry` record (kind, exportId, bundleId, inputsHash, providers[], rootHash, artifact, checksum, createdAt), `OfflineKitRiskProviderInfo` record (providerId, source, snapshotDate, optional), `OfflineKitRiskBundleRequest` record. Added to `OfflineKitPackager.cs`: `RiskBundlesDir` constant ("risk-bundles"), `RiskBundleFileName` constant ("export-risk-bundle-v1.tgz"), `AddRiskBundle` method (writes bundle to risk-bundles/ directory with SHA256 checksum), `CreateRiskBundleEntry` method (creates manifest entry with provider info). Updated `OfflineKitDistributor.cs`: Added risk bundle detection in `DistributeToMirror` method (checks for risk-bundles/export-risk-bundle-v1.tgz, computes hash, adds entry with CLI example "stella risk-bundle verify/import"). Added tests in `OfflineKitPackagerTests.cs`: `AddRiskBundle_CreatesArtifactAndChecksum`, `AddRiskBundle_PreservesBytesExactly`, `AddRiskBundle_RejectsOverwrite`, `CreateRiskBundleEntry_HasCorrectKind`, `CreateRiskBundleEntry_HasCorrectPaths`, `CreateRiskBundleEntry_IncludesProviderInfo`. Updated `DirectoryStructure_FollowsOfflineKitLayout` test to include risk-bundles directory. Core library builds successfully with 0 errors. | Implementer |
 | 2025-12-07 | **EXPORT-RISK-69-002 DONE:** Implemented simulation report exports with scored data and explainability snapshots. Created `SimulationExport/` namespace with: `SimulationExportModels.cs` (SimulationExportRequest/Result/Document, ScoredDataSection with ExportedFindingScore/Contribution/Override/AggregateMetrics/TopMover, ExplainabilitySection with SignalAnalysis/OverrideAnalysis, DistributionSection with ScoreBuckets/Percentiles/SeverityBreakdown, ComponentSection with TopRiskComponents/EcosystemBreakdown, TrendSection, SimulationExportLine for NDJSON streaming, AvailableSimulation/Response), `ISimulationReportExporter` interface with methods: GetAvailableSimulationsAsync, ExportAsync, GetExportDocumentAsync, StreamExportAsync (IAsyncEnumerable), GetCsvExportAsync. `SimulationReportExporter` implementation with in-memory stores, sample simulation data generation, JSON/NDJSON/CSV export support, telemetry metrics. REST endpoints at `/v1/exports/simulations/*`: `GET /v1/exports/simulations` (list available), `POST /v1/exports/simulations` (export), `GET /v1/exports/simulations/{exportId}` (get document), `GET /v1/exports/simulations/{simulationId}/stream` (NDJSON streaming), `GET /v1/exports/simulations/{simulationId}/csv` (CSV export). Added `export_simulation_exports_total` metric. Build succeeded with 0 errors. | Implementer |
 | 2025-12-07 | **EXPORT-RISK-69-001 DONE:** Implemented risk-bundle job handler with provider selection, manifest signing, and audit logging. Created `RiskBundle/` namespace with: `RiskBundleJobModels.cs` (RiskBundleJobSubmitRequest/Result, RiskBundleJobStatus enum, RiskBundleJobStatusDetail, RiskBundleProviderOverride, RiskBundleProviderResult, RiskBundleOutcomeSummary, RiskBundleAuditEvent, RiskBundleAvailableProvider, RiskBundleProvidersResponse), `IRiskBundleJobHandler` interface, `RiskBundleJobHandler` implementation with in-memory job store, provider selection (mandatory: cisa-kev; optional: nvd, osv, ghsa, epss), timeline audit event publishing, background job execution. Created `RiskBundleEndpoints.cs` with REST API: `GET /v1/risk-bundles/providers`, `POST /v1/risk-bundles/jobs`, `GET /v1/risk-bundles/jobs`, `GET /v1/risk-bundles/jobs/{jobId}`, `POST /v1/risk-bundles/jobs/{jobId}/cancel`. Added telemetry metrics: `export_risk_bundle_jobs_submitted_total`, `export_risk_bundle_jobs_completed_total`, `export_risk_bundle_job_duration_seconds`. Build succeeded with 0 errors. | Implementer |
 | 2025-12-07 | **EXPORT-OBS-55-001 DONE:** Implemented incident mode enhancements for ExportCenter. Created `Incident/` namespace with: `ExportIncidentModels.cs` (severity levels Info→Emergency, status Active→Resolved→FalsePositive, types ExportFailure/LatencyDegradation/StorageCapacity/DependencyFailure/IntegrityIssue/SecurityIncident/ConfigurationError/RateLimiting), `ExportIncidentEvents.cs` (IncidentActivated/Updated/Escalated/Deescalated/Resolved events), `IExportIncidentManager` interface and `ExportIncidentManager` implementation with in-memory store. `IExportNotificationEmitter` interface with `LoggingNotificationEmitter` for timeline + notifier integration. Added `PublishIncidentEventAsync` to `IExportTimelinePublisher`. REST endpoints at `/v1/incidents/*`: GET status, GET active, GET recent, GET {id}, POST activate, PATCH {id} update, POST {id}/resolve. Added metrics: `export_incidents_activated_total`, `export_incidents_resolved_total`, `export_incidents_escalated_total`, `export_incidents_deescalated_total`, `export_notifications_emitted_total`, `export_incident_duration_seconds`. | Implementer |
```
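The EXPORT-SVC-35-002 entry above leans on seeded sampling so that a given `(items, size, seed)` triple always reproduces the same subset. A minimal C# sketch of that idea, with hypothetical names rather than the shipped `ExportScopeResolver` API:

```csharp
// Hypothetical sketch of deterministic sampling; illustrative only.
using System;
using System.Collections.Generic;
using System.Linq;

static class DeterministicSampler
{
    public static IReadOnlyList<T> Sample<T>(IReadOnlyList<T> items, int size, int seed)
    {
        var pool = items.ToArray();
        var rng = new Random(seed); // same seed => same sample on every run
        var take = Math.Min(size, pool.Length);

        // Partial Fisher-Yates shuffle: settle only the first `take` positions.
        for (var i = 0; i < take; i++)
        {
            var j = rng.Next(i, pool.Length);
            (pool[i], pool[j]) = (pool[j], pool[i]);
        }

        return pool[..take];
    }
}
```

Re-running a plan with the same scope and `SamplingConfig.Seed` therefore selects the same items, which is what makes export plans reviewable and replayable.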
```diff
@@ -33,9 +33,9 @@
 | 10 | EXPORT-SVC-43-001 | BLOCKED (2025-11-30) | BLOCKED by 37-004; pack-run integration waits on verification API. | Exporter Service Guild | Integrate pack run manifests/artifacts into export bundles and CLI verification; expose provenance links. |
 | 11 | EXPORT-TEN-48-001 | BLOCKED (2025-11-30) | BLOCKED until Export API (35-006) stabilizes; tenant prefixes require finalized routes. | Exporter Service Guild | Prefix artifacts/manifests with tenant/project, enforce scope checks, prevent cross-tenant exports unless whitelisted; update provenance. |
 | 12 | RISK-BUNDLE-69-001 | DONE (2025-12-03) | Bundle now embeds manifest DSSE + detached bundle signature; worker options fixed (signature paths/OSV flags); RiskBundle tests passing. | Risk Bundle Export Guild · Risk Engine Guild (`src/ExportCenter/StellaOps.ExportCenter.RiskBundles`) | Implement `stella export risk-bundle` job producing tarball with provider datasets, manifests, DSSE signatures. |
-| 13 | RISK-BUNDLE-69-002 | BLOCKED (2025-11-30) | BLOCKED by 69-001 deliverables. | Risk Bundle Export Guild · DevOps Guild | Integrate bundle job into CI/offline kit pipelines with checksum publication. |
-| 14 | RISK-BUNDLE-70-001 | BLOCKED (2025-11-30) | BLOCKED by 69-002; verification inputs not available. | Risk Bundle Export Guild · CLI Guild | Provide CLI `stella risk bundle verify` command to validate bundles before import. |
-| 15 | RISK-BUNDLE-70-002 | BLOCKED (2025-11-30) | BLOCKED by 70-001; doc content waits on verification CLI behavior. | Risk Bundle Export Guild · Docs Guild | Publish `/docs/airgap/risk-bundles.md` covering build/import/verification workflows. |
+| 13 | RISK-BUNDLE-69-002 | TODO | 69-001 DONE; integrate into CI/offline kit. | Risk Bundle Export Guild · DevOps Guild | Integrate bundle job into CI/offline kit pipelines with checksum publication. |
+| 14 | RISK-BUNDLE-70-001 | TODO | Depends on 69-002. | Risk Bundle Export Guild · CLI Guild | Provide CLI `stella risk bundle verify` command to validate bundles before import. |
+| 15 | RISK-BUNDLE-70-002 | TODO | Depends on 70-001. | Risk Bundle Export Guild · Docs Guild | Publish `/docs/airgap/risk-bundles.md` covering build/import/verification workflows. |
 
 ## Wave Coordination
 - Wave 1: EXPORT-SVC-35/36/37 chain (API → adapters → OCI → planner → mirror delta → encryption → scheduling → verification → pack-run integration).
@@ -86,6 +86,7 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | **RISK-BUNDLE tasks unblocked:** Tasks 13-15 (RISK-BUNDLE-69-002, 70-001, 70-002) changed from BLOCKED to TODO. Upstream blocker resolved: task 12 (RISK-BUNDLE-69-001) is DONE and Sprint 0163 EXPORT-RISK-70-001 is DONE. Wave 3 can now proceed. Tasks 1-11 remain BLOCKED pending Sprint 0163 EXPORT-SVC-35-001..005 implementation. | Implementer |
 | 2025-12-07 | **Wave 10 upstream resolution:** Sprint 0163 schema blockers resolved and tasks moved to TODO. Sprint 0164 tasks remain BLOCKED pending Sprint 0163 implementation outputs (Export API, planner schema, Trivy adapters). | Implementer |
 | 2025-11-08 | Sprint stub created; awaiting ExportCenter II completion. | Planning |
 | 2025-11-19 | Normalized sprint to standard template and renamed from `SPRINT_164_exportcenter_iii.md` to `SPRINT_0164_0001_0001_exportcenter_iii.md`; content preserved. | Implementer |
```
```diff
@@ -81,6 +81,7 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | CVSS UI wired to Policy Gateway `/api/cvss/receipts`; Angular client added with tenant headers and receipt/history mapping. | Implementer |
 | 2025-12-07 | CVSS-DOCS-190-012 DONE: updated `docs/modules/policy/cvss-v4.md` and `docs/09_API_CLI_REFERENCE.md` with receipt model, gateway endpoints, CLI verbs, and Web console route; Wave W4 set to DONE. | Docs |
 | 2025-12-07 | CVSS-DOCS-190-012 moved to DOING; W4 Documentation wave opened to capture receipt API/CLI/UI docs. | Docs |
 | 2025-12-07 | Wave W3 Integration marked DONE after CLI/UI delivery; Web console hosts receipt viewer; sprint wave table updated. | Project Mgmt |
```
```diff
@@ -772,8 +772,8 @@ Consolidated task ledger for everything under `docs/implplan/archived/` (sprints
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | AUTH-VULN-29-001 | TODO | Define Vuln Explorer RBAC/ABAC scopes and issuer metadata. | Authority Core & Security Guild | Path: src/Authority/StellaOps.Authority | 2025-10-19 |
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | AUTH-VULN-29-002 | TODO | Enforce CSRF, attachment signing, and audit logging referencing ledger hashes. | Authority Core & Security Guild | Path: src/Authority/StellaOps.Authority | 2025-10-19 |
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | AUTH-VULN-29-003 | TODO | Update docs/config samples for Vuln Explorer roles and security posture. | Authority Core & Docs Guild | Path: src/Authority/StellaOps.Authority | 2025-10-19 |
-| docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-001 | TODO | Implement `stella vuln list` with grouping, filters, JSON/CSV output. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
-| docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-002 | TODO | Implement `stella vuln show` with evidence/policy/path display. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
+| docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-001 | DONE (2025-12-06) | Implement `stella vuln list` with grouping, filters, JSON/CSV output. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
+| docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-002 | DONE (2025-12-06) | Implement `stella vuln show` with evidence/policy/path display. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-003 | TODO | Add workflow CLI commands (assign/comment/accept-risk/verify-fix/target-fix/reopen). | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-004 | TODO | Implement `stella vuln simulate` producing diff summaries/Markdown. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
 | docs/implplan/archived/updates/tasks.md | Sprint 29 — Vulnerability Explorer | CLI-VULN-29-005 | TODO | Implement `stella vuln export` and bundle signature verification. | DevEx/CLI Guild | Path: src/Cli/StellaOps.Cli | 2025-10-19 |
```
```diff
@@ -809,12 +809,12 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation
 | Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority | TODO | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Vuln Explorer RBAC/ABAC scopes and issuer metadata. |
 | Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority | TODO | Authority Core & Security Guild | AUTH-VULN-29-002 | Enforce CSRF, attachment signing, and audit logging referencing ledger hashes. |
 | Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority | TODO | Authority Core & Docs Guild | AUTH-VULN-29-003 | Update docs/config samples for Vuln Explorer roles and security posture. |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI Guild | CLI-VULN-29-001 | Implement `stella vuln list` with grouping, filters, JSON/CSV output. |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI Guild | CLI-VULN-29-002 | Implement `stella vuln show` with evidence/policy/path display. |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI Guild | CLI-VULN-29-003 | Add workflow CLI commands (assign/comment/accept-risk/verify-fix/target-fix/reopen). |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI Guild | CLI-VULN-29-004 | Implement `stella vuln simulate` producing diff summaries/Markdown. |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI Guild | CLI-VULN-29-005 | Implement `stella vuln export` and bundle signature verification. |
-| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | TODO | DevEx/CLI & Docs Guilds | CLI-VULN-29-006 | Update CLI docs/examples for Vulnerability Explorer commands. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI Guild | CLI-VULN-29-001 | Implement `stella vuln list` with grouping, filters, JSON/CSV output. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI Guild | CLI-VULN-29-002 | Implement `stella vuln show` with evidence/policy/path display. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI Guild | CLI-VULN-29-003 | Add workflow CLI commands (assign/comment/accept-risk/verify-fix/target-fix/reopen). |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI Guild | CLI-VULN-29-004 | Implement `stella vuln simulate` producing diff summaries/Markdown. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI Guild | CLI-VULN-29-005 | Implement `stella vuln export` and bundle signature verification. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli | DONE (2025-12-06) | DevEx/CLI & Docs Guilds | CLI-VULN-29-006 | Update CLI docs/examples for Vulnerability Explorer commands. |
 | Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Canonicalize (lossless) advisory identifiers, persist `links[]`, backfill, and expose raw payload snapshots (no merge/derived fields). |
 | Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService | TODO | Concelier WebService Guild | CONCELIER-VULN-29-002 | Provide advisory evidence retrieval endpoint for Vuln Explorer. |
 | Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService | TODO | Concelier WebService & Observability Guilds | CONCELIER-VULN-29-004 | Add metrics/logs/events for advisory normalization supporting resolver. |
```
```diff
@@ -2,6 +2,8 @@
 
 Updated 2025-12-07: FEEDCONN-ICSCISA-02-012/KISA-02-008 unblocked (ICS/KISA SOP v0.2); tracked in SPRINT_0113 row 18 and SPRINT_0503 feed ops tasks.
 
+Updated 2025-12-07: RISK-BUNDLE-69-002/70-001/70-002 unblocked (SPRINT_0164 tasks 13-15); RISK-BUNDLE-69-001 DONE. Wave 3 can proceed.
+
 - Concelier ingestion & Link-Not-Merge
 - MIRROR-CRT-56-001 (DONE; thin bundle v1 sample + hashes published)
 - MIRROR-CRT-56-002 (DONE locally with production-mode flags: DSSE/TUF/OCI signed using provided Ed25519 keyid db9928babf3aeb817ccdcd0f6a6688f8395b00d0e42966e32e706931b5301fc8; artefacts in `out/mirror/thin/`; not blocking development)
@@ -13,8 +15,8 @@ Updated 2025-12-07: FEEDCONN-ICSCISA-02-012/KISA-02-008 unblocked (ICS/KISA SOP
 - AIRGAP-TIME-57-001 (DEV-UNBLOCKED: schema + trust-roots bundle + service config present; production trust roots/signing still needed)
 - EXPORT-OBS-51-001 / 54-001 (DEV-UNBLOCKED: DSSE/TUF profile + test-signed bundle available; release promotion now tracked under DevOps secret import)
 - CLI-AIRGAP-56-001 (DEV-UNBLOCKED: dev bundles available; release promotion depends on DevOps secret import + 58-001 CLI path)
-- CONCELIER-AIRGAP-56-001..58-001 <- PREP-ART-56-001, PREP-EVIDENCE-BDL-01
-- CONCELIER-CONSOLE-23-001..003 <- PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01
+- CONCELIER-AIRGAP-56-001..58-001 ✅ (DONE 2025-12-07; mirror/offline provenance chain + sealed-mode deploy runbook)
+- CONCELIER-CONSOLE-23-001..003 ✅ (DONE 2025-12-07; console advisory aggregation/search helpers + consumption contract)
 
 - SBOM Service (Link-Not-Merge consumers)
 - SBOM-SERVICE-21-001 (projection read API) — DONE (2025-11-23): WAF aligned with fixtures + in-memory repo fallback; `ProjectionEndpointTests` pass.
@@ -40,7 +42,9 @@ Updated 2025-12-07: FEEDCONN-ICSCISA-02-012/KISA-02-008 unblocked (ICS/KISA SOP
 - CONCELIER-MIRROR-23-001-DEV (DONE; dev mirror layout documented at `docs/modules/concelier/mirror-export.md`, endpoints serve static bundles)
 - DEVOPS-MIRROR-23-001-REL (release signing/publish tracked under DevOps; not a development blocker)
 - Concelier storage/backfill/object-store chain
-- CONCELIER-LNM-21-101-DEV/102-DEV/103-DEV (BLOCKED on CI runner and upstream tasks)
+- CONCELIER-LNM-21-101-DEV ✅ (DONE 2025-11-27; sharding + TTL migration)
+- CONCELIER-LNM-21-102-DEV ✅ (DONE 2025-11-28; migration + tombstones + rollback)
+- CONCELIER-LNM-21-103-DEV ✅ (DONE 2025-12-06; object storage + S3ObjectStore)
 - Concelier backfill chain (Concelier IV)
 - CONCELIER-STORE-AOC-19-005-DEV (BLOCKED pending dataset hash/rehearsal)
 
```
```diff
@@ -89,7 +89,7 @@ public class Sm2AttestorTests
             new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger<AttestorSigningKeyRegistry>.Instance));
     }
 
-    protected virtual void Dispose(bool disposing)
+    private void Dispose(bool disposing)
     {
         Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", _gate);
     }
@@ -97,7 +97,6 @@ public class Sm2AttestorTests
     public void Dispose()
    {
         Dispose(true);
-        GC.SuppressFinalize(this);
     }
 }
 
```
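The `Sm2AttestorTests` change narrows `Dispose(bool)` from `protected virtual` to `private` and drops `GC.SuppressFinalize(this)`. That is safe when the type declares no finalizer and nothing derives from it to extend disposal. A hypothetical standalone fixture showing the resulting shape (the `_gate` snapshot field is assumed from the surrounding context):

```csharp
// Assumption-labeled sketch: a test fixture that snapshots an env var
// on construction and restores it on dispose. With no finalizer and no
// subclasses, private Dispose(bool) without GC.SuppressFinalize suffices.
using System;

public sealed class SmSoftAllowedGate : IDisposable
{
    // Value captured before the test mutates the environment.
    private readonly string? _gate =
        Environment.GetEnvironmentVariable("SM_SOFT_ALLOWED");

    private void Dispose(bool disposing)
    {
        // Restore the pre-test value of the environment gate.
        Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", _gate);
    }

    public void Dispose() => Dispose(true);
}
```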
```diff
@@ -3,7 +3,7 @@
   <!-- Keep Concelier test harness active while trimming Mongo dependencies. Allow opt-out per project. -->
   <UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)'==''">true</UseConcelierTestInfra>
   <!-- Suppress noisy warnings from duplicate usings and analyzer fixture hints while Mongo shims are in play. -->
-  <NoWarn>$(NoWarn);CS0105;RS1032;RS2007;xUnit1041;NU1510</NoWarn>
+  <NoWarn>$(NoWarn);CS0105;CS1591;CS8601;CS8602;CS8604;CS0618;RS1032;RS2007;xUnit1041;xUnit1031;xUnit2013;NU1510;NETSDK1023;SYSLIB0057</NoWarn>
 </PropertyGroup>
 <ItemGroup>
   <!-- Concelier is migrating off MongoDB; strip implicit Mongo2Go/Mongo driver packages inherited from the repo root. -->
```
@@ -0,0 +1,349 @@
|
|||||||
|
using System.Text.Json.Serialization;
|
||||||
|
using StellaOps.ExportCenter.Core.Planner;
|
||||||
|
|
||||||
|
namespace StellaOps.ExportCenter.Core.Adapters;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Configuration for an export adapter.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record ExportAdapterConfig
|
||||||
|
{
|
||||||
|
/// <summary>
|
||||||
|
/// Adapter identifier (e.g., "json:raw", "json:policy").
|
||||||
|
/// </summary>
|
||||||
|
public required string AdapterId { get; init; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Format options controlling output behavior.
|
||||||
|
/// </summary>
|
||||||
|
public required ExportFormatOptions FormatOptions { get; init; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Output directory for exported files.
|
||||||
|
/// </summary>
|
||||||
|
public required string OutputDirectory { get; init; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Base name for output files.
|
||||||
|
/// </summary>
|
||||||
|
public string BaseName { get; init; } = "export";
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Whether to include a checksum file alongside each artifact.
|
||||||
|
/// </summary>
|
||||||
|
public bool IncludeChecksums { get; init; } = true;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Maximum file size before splitting into chunks (0 = no limit).
|
||||||
|
/// </summary>
|
||||||
|
public long MaxFileSizeBytes { get; init; }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Result of processing a single item through an adapter.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record AdapterItemResult
|
||||||
|
{
|
||||||
|
public required Guid ItemId { get; init; }
|
||||||
|
|
||||||
|
public required bool Success { get; init; }
|
||||||
|
|
||||||
|
public string? OutputPath { get; init; }
|
||||||
|
|
||||||
|
public long OutputSizeBytes { get; init; }
|
||||||
|
|
||||||
|
public string? ContentHash { get; init; }
|
||||||
|
|
||||||
|
public string? ErrorMessage { get; init; }
|
||||||
|
|
||||||
|
public DateTimeOffset ProcessedAt { get; init; }
|
||||||
|
|
||||||
|
public static AdapterItemResult Failed(Guid itemId, string errorMessage)
|
||||||
|
=> new()
|
||||||
|
{
|
||||||
|
ItemId = itemId,
|
||||||
|
Success = false,
|
||||||
|
ErrorMessage = errorMessage,
|
||||||
|
        ProcessedAt = DateTimeOffset.UtcNow
    };
}

/// <summary>
/// Result of running an export adapter.
/// </summary>
public sealed record ExportAdapterResult
{
    public required bool Success { get; init; }

    public IReadOnlyList<AdapterItemResult> ItemResults { get; init; } = [];

    public IReadOnlyList<ExportOutputArtifact> Artifacts { get; init; } = [];

    public ExportManifestCounts ManifestCounts { get; init; } = new();

    public string? ErrorMessage { get; init; }

    public TimeSpan ProcessingTime { get; init; }

    public DateTimeOffset CompletedAt { get; init; }

    public static ExportAdapterResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage, CompletedAt = DateTimeOffset.UtcNow };
}

/// <summary>
/// An output artifact produced by an adapter.
/// </summary>
public sealed record ExportOutputArtifact
{
    /// <summary>
    /// Path to the artifact file.
    /// </summary>
    public required string Path { get; init; }

    /// <summary>
    /// Size of the artifact in bytes.
    /// </summary>
    public required long SizeBytes { get; init; }

    /// <summary>
    /// SHA-256 hash of the artifact content.
    /// </summary>
    public required string Sha256 { get; init; }

    /// <summary>
    /// MIME type of the artifact.
    /// </summary>
    public string ContentType { get; init; } = "application/json";

    /// <summary>
    /// Number of items in this artifact.
    /// </summary>
    public int ItemCount { get; init; }

    /// <summary>
    /// Whether the artifact is compressed.
    /// </summary>
    public bool IsCompressed { get; init; }

    /// <summary>
    /// Compression format if compressed.
    /// </summary>
    public CompressionFormat? Compression { get; init; }

    /// <summary>
    /// Original size before compression.
    /// </summary>
    public long? OriginalSizeBytes { get; init; }
}

/// <summary>
/// Counts for export manifest generation.
/// </summary>
public sealed record ExportManifestCounts
{
    [JsonPropertyName("totalItems")]
    public int TotalItems { get; init; }

    [JsonPropertyName("processedItems")]
    public int ProcessedItems { get; init; }

    [JsonPropertyName("successfulItems")]
    public int SuccessfulItems { get; init; }

    [JsonPropertyName("failedItems")]
    public int FailedItems { get; init; }

    [JsonPropertyName("skippedItems")]
    public int SkippedItems { get; init; }

    [JsonPropertyName("artifactCount")]
    public int ArtifactCount { get; init; }

    [JsonPropertyName("totalSizeBytes")]
    public long TotalSizeBytes { get; init; }

    [JsonPropertyName("compressedSizeBytes")]
    public long? CompressedSizeBytes { get; init; }

    [JsonPropertyName("byKind")]
    public IReadOnlyDictionary<string, int> ByKind { get; init; } = new Dictionary<string, int>();

    [JsonPropertyName("byStatus")]
    public IReadOnlyDictionary<string, int> ByStatus { get; init; } = new Dictionary<string, int>();
}

/// <summary>
/// Options for JSON normalization during export.
/// </summary>
public sealed record JsonNormalizationOptions
{
    /// <summary>
    /// Whether to sort JSON object keys alphabetically.
    /// </summary>
    public bool SortKeys { get; init; } = true;

    /// <summary>
    /// Whether to normalize timestamps to UTC ISO-8601 format.
    /// </summary>
    public bool NormalizeTimestamps { get; init; } = true;

    /// <summary>
    /// Timestamp format string for normalization.
    /// </summary>
    public string TimestampFormat { get; init; } = "yyyy-MM-ddTHH:mm:ss.fffZ";

    /// <summary>
    /// Whether to remove null values from output.
    /// </summary>
    public bool RemoveNullValues { get; init; }

    /// <summary>
    /// Whether to use consistent line endings (LF).
    /// </summary>
    public bool NormalizeLineEndings { get; init; } = true;

    /// <summary>
    /// Whether to trim whitespace from string values.
    /// </summary>
    public bool TrimStrings { get; init; }
}

/// <summary>
/// Options for JSON field redaction during export.
/// </summary>
public sealed record JsonRedactionOptions
{
    /// <summary>
    /// Field paths to redact (supports dot notation and wildcards).
    /// </summary>
    public IReadOnlyList<string> RedactFields { get; init; } = [];

    /// <summary>
    /// Replacement value for redacted fields.
    /// </summary>
    public string RedactedValue { get; init; } = "[REDACTED]";

    /// <summary>
    /// Whether to preserve field types (use null for objects/arrays, empty for strings).
    /// </summary>
    public bool PreserveTypes { get; init; }

    /// <summary>
    /// Regex patterns for values to redact (e.g., email addresses, API keys).
    /// </summary>
    public IReadOnlyList<string> RedactPatterns { get; init; } = [];

    /// <summary>
    /// Common sensitive field names to always redact.
    /// </summary>
    public static readonly string[] DefaultSensitiveFields =
    [
        "password",
        "secret",
        "apiKey",
        "api_key",
        "token",
        "privateKey",
        "private_key",
        "credential",
        "auth",
        "authorization"
    ];
}
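
// Illustrative sketch (not part of this commit): combining the two option records
// for a deterministic, redacted export. The field path and regex below are invented
// for the example; JsonNormalizer is introduced later in this change.
var normalization = new JsonNormalizationOptions { SortKeys = true, RemoveNullValues = true };
var redaction = new JsonRedactionOptions
{
    RedactFields = ["credentials.*"],              // dot-path with wildcard
    RedactPatterns = [@"[\w.+-]+@[\w-]+\.[\w.]+"], // e-mail-like values
    PreserveTypes = true                           // keep JSON shape, blank the values
};
var normalizer = new JsonNormalizer(normalization, redaction);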

/// <summary>
/// Policy metadata included with json:policy format exports.
/// </summary>
public sealed record PolicyMetadata
{
    [JsonPropertyName("policyId")]
    public string? PolicyId { get; init; }

    [JsonPropertyName("policyVersion")]
    public string? PolicyVersion { get; init; }

    [JsonPropertyName("policyName")]
    public string? PolicyName { get; init; }

    [JsonPropertyName("evaluatedAt")]
    public DateTimeOffset? EvaluatedAt { get; init; }

    [JsonPropertyName("decision")]
    public string? Decision { get; init; }

    [JsonPropertyName("violations")]
    public IReadOnlyList<PolicyViolation> Violations { get; init; } = [];

    [JsonPropertyName("attributes")]
    public IReadOnlyDictionary<string, string> Attributes { get; init; } = new Dictionary<string, string>();
}

/// <summary>
/// A policy violation record.
/// </summary>
public sealed record PolicyViolation
{
    [JsonPropertyName("ruleId")]
    public required string RuleId { get; init; }

    [JsonPropertyName("severity")]
    public string Severity { get; init; } = "info";

    [JsonPropertyName("message")]
    public required string Message { get; init; }

    [JsonPropertyName("path")]
    public string? Path { get; init; }

    [JsonPropertyName("remediation")]
    public string? Remediation { get; init; }
}

/// <summary>
/// Wrapped export item with policy metadata (for the json:policy format).
/// </summary>
public sealed record PolicyWrappedExportItem
{
    [JsonPropertyName("metadata")]
    public required ExportItemMetadata Metadata { get; init; }

    [JsonPropertyName("policy")]
    public PolicyMetadata? Policy { get; init; }

    [JsonPropertyName("data")]
    public required object Data { get; init; }
}

/// <summary>
/// Export item metadata.
/// </summary>
public sealed record ExportItemMetadata
{
    [JsonPropertyName("itemId")]
    public required Guid ItemId { get; init; }

    [JsonPropertyName("kind")]
    public required string Kind { get; init; }

    [JsonPropertyName("sourceRef")]
    public required string SourceRef { get; init; }

    [JsonPropertyName("name")]
    public string? Name { get; init; }

    [JsonPropertyName("namespace")]
    public string? Namespace { get; init; }

    [JsonPropertyName("tags")]
    public IReadOnlyList<string> Tags { get; init; } = [];

    [JsonPropertyName("createdAt")]
    public DateTimeOffset CreatedAt { get; init; }

    [JsonPropertyName("exportedAt")]
    public DateTimeOffset ExportedAt { get; init; }

    [JsonPropertyName("sha256")]
    public string? Sha256 { get; init; }
}
@@ -0,0 +1,229 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Planner;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// Registry for export adapters.
/// </summary>
public interface IExportAdapterRegistry
{
    /// <summary>
    /// Gets an adapter by ID.
    /// </summary>
    IExportAdapter? GetAdapter(string adapterId);

    /// <summary>
    /// Gets an adapter for the specified format.
    /// </summary>
    IExportAdapter? GetAdapterForFormat(ExportFormat format);

    /// <summary>
    /// Gets all registered adapters.
    /// </summary>
    IReadOnlyList<IExportAdapter> GetAllAdapters();

    /// <summary>
    /// Gets all registered adapter IDs.
    /// </summary>
    IReadOnlyList<string> GetAdapterIds();
}

/// <summary>
/// Default implementation of the export adapter registry.
/// </summary>
public sealed class ExportAdapterRegistry : IExportAdapterRegistry
{
    private readonly Dictionary<string, IExportAdapter> _adapters;
    private readonly Dictionary<ExportFormat, IExportAdapter> _formatMap;

    public ExportAdapterRegistry(IEnumerable<IExportAdapter> adapters)
    {
        _adapters = adapters.ToDictionary(a => a.AdapterId, StringComparer.OrdinalIgnoreCase);

        // Build the format-to-adapter map from the materialized dictionary rather than
        // re-enumerating the input (a lazy DI enumerable could yield new instances on a
        // second pass). First adapter wins for each format.
        _formatMap = new Dictionary<ExportFormat, IExportAdapter>();
        foreach (var adapter in _adapters.Values)
        {
            foreach (var format in adapter.SupportedFormats)
            {
                _formatMap.TryAdd(format, adapter);
            }
        }
    }

    public IExportAdapter? GetAdapter(string adapterId)
    {
        _adapters.TryGetValue(adapterId, out var adapter);
        return adapter;
    }

    public IExportAdapter? GetAdapterForFormat(ExportFormat format)
    {
        _formatMap.TryGetValue(format, out var adapter);
        return adapter;
    }

    public IReadOnlyList<IExportAdapter> GetAllAdapters()
        => _adapters.Values.ToList();

    public IReadOnlyList<string> GetAdapterIds()
        => _adapters.Keys.ToList();
}
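
// Illustrative lookup sketch (not part of this commit): format resolution falls back
// to an explicit adapter ID. NullLogger comes from Microsoft.Extensions.Logging.Abstractions;
// the single-logger JsonRawAdapter constructor is assumed from its DI registration below.
var registry = new ExportAdapterRegistry(
[
    new JsonRawAdapter(NullLogger<JsonRawAdapter>.Instance),
    new JsonPolicyAdapter(NullLogger<JsonPolicyAdapter>.Instance)
]);
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonPolicy)
    ?? registry.GetAdapter(JsonPolicyAdapter.Id)
    ?? throw new InvalidOperationException("No adapter registered for json:policy");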

/// <summary>
/// Extension methods for registering export adapters.
/// </summary>
public static class ExportAdapterServiceExtensions
{
    /// <summary>
    /// Registers export adapters with the service collection.
    /// </summary>
    public static IServiceCollection AddExportAdapters(this IServiceCollection services)
    {
        // Register individual adapters
        services.AddSingleton<IExportAdapter, JsonRawAdapter>();
        services.AddSingleton<IExportAdapter, JsonPolicyAdapter>();

        // Register the registry
        services.AddSingleton<IExportAdapterRegistry>(sp =>
        {
            var adapters = sp.GetServices<IExportAdapter>();
            return new ExportAdapterRegistry(adapters);
        });

        return services;
    }

    /// <summary>
    /// Registers export adapters with custom normalization and redaction options.
    /// </summary>
    public static IServiceCollection AddExportAdapters(
        this IServiceCollection services,
        JsonNormalizationOptions? normalizationOptions,
        JsonRedactionOptions? redactionOptions)
    {
        // Register individual adapters with custom options
        services.AddSingleton<IExportAdapter>(sp =>
            new JsonRawAdapter(
                sp.GetRequiredService<ILogger<JsonRawAdapter>>(),
                normalizationOptions,
                redactionOptions));

        services.AddSingleton<IExportAdapter>(sp =>
            new JsonPolicyAdapter(
                sp.GetRequiredService<ILogger<JsonPolicyAdapter>>(),
                normalizationOptions,
                redactionOptions));

        // Register the registry
        services.AddSingleton<IExportAdapterRegistry>(sp =>
        {
            var adapters = sp.GetServices<IExportAdapter>();
            return new ExportAdapterRegistry(adapters);
        });

        return services;
    }
}
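
// Illustrative wiring sketch (not part of this commit): host registration plus
// registry resolution. ServiceCollection and AddLogging come from the standard
// Microsoft.Extensions packages; the option values are made up for the example.
var services = new ServiceCollection();
services.AddLogging();
services.AddExportAdapters(
    new JsonNormalizationOptions { RemoveNullValues = true },
    new JsonRedactionOptions { RedactFields = ["attributes.internal"] });
await using var provider = services.BuildServiceProvider();
var exportRegistry = provider.GetRequiredService<IExportAdapterRegistry>();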

/// <summary>
/// In-memory implementation of IExportDataFetcher for testing.
/// </summary>
public sealed class InMemoryExportDataFetcher : IExportDataFetcher
{
    private readonly Dictionary<Guid, string> _contents = new();

    /// <summary>
    /// Adds content for an item.
    /// </summary>
    public void AddContent(Guid itemId, string jsonContent)
    {
        _contents[itemId] = jsonContent;
    }

    /// <summary>
    /// Adds content for multiple items.
    /// </summary>
    public void AddContents(IEnumerable<(Guid ItemId, string JsonContent)> items)
    {
        foreach (var (itemId, jsonContent) in items)
        {
            _contents[itemId] = jsonContent;
        }
    }

    public Task<ExportItemContent> FetchAsync(
        ResolvedExportItem item,
        CancellationToken cancellationToken = default)
    {
        if (!_contents.TryGetValue(item.ItemId, out var jsonContent))
        {
            return Task.FromResult(ExportItemContent.Failed(item.ItemId, $"Content not found for item {item.ItemId}"));
        }

        return Task.FromResult(new ExportItemContent
        {
            ItemId = item.ItemId,
            Success = true,
            JsonContent = jsonContent,
            ContentType = GetContentType(item.Kind)
        });
    }

    public Task<Stream> FetchStreamAsync(
        ResolvedExportItem item,
        CancellationToken cancellationToken = default)
    {
        if (!_contents.TryGetValue(item.ItemId, out var jsonContent))
        {
            throw new InvalidOperationException($"Content not found for item {item.ItemId}");
        }

        var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(jsonContent));
        return Task.FromResult<Stream>(stream);
    }

    private static string GetContentType(string kind) => kind.ToLowerInvariant() switch
    {
        "sbom" => "application/vnd.cyclonedx+json",
        "vex" => "application/vnd.cyclonedx.vex+json",
        "attestation" => "application/vnd.dsse+json",
        _ => "application/json"
    };
}
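
// Illustrative test sketch (not part of this commit): seed the fetcher, then fetch.
// "item" stands in for a ResolvedExportItem whose ItemId matches the seeded Guid and
// whose Kind is "sbom"; its full shape lives in StellaOps.ExportCenter.Core.Planner.
var fetcher = new InMemoryExportDataFetcher();
var itemId = Guid.NewGuid();
fetcher.AddContent(itemId, """{"bomFormat":"CycloneDX","specVersion":"1.5"}""");
var fetched = await fetcher.FetchAsync(item);
// fetched.Success == true; fetched.ContentType == "application/vnd.cyclonedx+json"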

/// <summary>
/// In-memory implementation of IExportPolicyEvaluator for testing.
/// </summary>
public sealed class InMemoryExportPolicyEvaluator : IExportPolicyEvaluator
{
    private readonly Dictionary<Guid, PolicyMetadata> _policies = new();

    /// <summary>
    /// Adds policy metadata for an item.
    /// </summary>
    public void AddPolicy(Guid itemId, PolicyMetadata policy)
    {
        _policies[itemId] = policy;
    }

    /// <summary>
    /// Sets a default policy to return for all items.
    /// </summary>
    public PolicyMetadata? DefaultPolicy { get; set; }

    public Task<PolicyMetadata?> EvaluateAsync(
        ResolvedExportItem item,
        ExportItemContent content,
        CancellationToken cancellationToken = default)
    {
        if (_policies.TryGetValue(item.ItemId, out var policy))
        {
            return Task.FromResult<PolicyMetadata?>(policy);
        }

        return Task.FromResult(DefaultPolicy);
    }
}
@@ -0,0 +1,322 @@
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using StellaOps.ExportCenter.Core.Planner;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// Handles compression for export artifacts.
/// </summary>
public sealed class ExportCompressor
{
    /// <summary>
    /// Compresses content using the specified format.
    /// </summary>
    public CompressionResult Compress(string content, CompressionFormat format)
    {
        if (format == CompressionFormat.None)
        {
            var bytes = Encoding.UTF8.GetBytes(content);
            return new CompressionResult
            {
                Success = true,
                CompressedData = bytes,
                OriginalSizeBytes = bytes.Length,
                CompressedSizeBytes = bytes.Length,
                CompressionRatio = 1.0,
                Format = CompressionFormat.None,
                Sha256 = ComputeSha256(bytes)
            };
        }

        var originalBytes = Encoding.UTF8.GetBytes(content);
        return CompressBytes(originalBytes, format);
    }

    /// <summary>
    /// Compresses bytes using the specified format.
    /// </summary>
    public CompressionResult CompressBytes(byte[] data, CompressionFormat format)
    {
        if (format == CompressionFormat.None)
        {
            return new CompressionResult
            {
                Success = true,
                CompressedData = data,
                OriginalSizeBytes = data.Length,
                CompressedSizeBytes = data.Length,
                CompressionRatio = 1.0,
                Format = CompressionFormat.None,
                Sha256 = ComputeSha256(data)
            };
        }

        try
        {
            using var outputStream = new MemoryStream();

            switch (format)
            {
                case CompressionFormat.Gzip:
                    using (var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        gzip.Write(data, 0, data.Length);
                    }
                    break;

                case CompressionFormat.Brotli:
                    using (var brotli = new BrotliStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        brotli.Write(data, 0, data.Length);
                    }
                    break;

                case CompressionFormat.Zstd:
                    // Zstd is not available in the standard library - fall back to gzip.
                    // In production this would use ZstdSharp or a similar library. Note the
                    // result still reports Format = Zstd even though the bytes are gzip-encoded.
                    using (var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        gzip.Write(data, 0, data.Length);
                    }
                    break;

                default:
                    return CompressionResult.Failed($"Unsupported compression format: {format}");
            }

            var compressedData = outputStream.ToArray();
            var ratio = data.Length > 0 ? (double)compressedData.Length / data.Length : 1.0;

            return new CompressionResult
            {
                Success = true,
                CompressedData = compressedData,
                OriginalSizeBytes = data.Length,
                CompressedSizeBytes = compressedData.Length,
                CompressionRatio = ratio,
                Format = format,
                Sha256 = ComputeSha256(compressedData)
            };
        }
        catch (Exception ex)
        {
            return CompressionResult.Failed($"Compression failed: {ex.Message}");
        }
    }

    /// <summary>
    /// Compresses content to a stream.
    /// </summary>
    public async Task<CompressionResult> CompressToStreamAsync(
        string content,
        Stream outputStream,
        CompressionFormat format,
        CancellationToken cancellationToken = default)
    {
        var data = Encoding.UTF8.GetBytes(content);
        return await CompressBytesToStreamAsync(data, outputStream, format, cancellationToken);
    }

    /// <summary>
    /// Compresses bytes to a stream.
    /// </summary>
    public async Task<CompressionResult> CompressBytesToStreamAsync(
        byte[] data,
        Stream outputStream,
        CompressionFormat format,
        CancellationToken cancellationToken = default)
    {
        if (format == CompressionFormat.None)
        {
            await outputStream.WriteAsync(data, cancellationToken);
            return new CompressionResult
            {
                Success = true,
                OriginalSizeBytes = data.Length,
                CompressedSizeBytes = data.Length,
                CompressionRatio = 1.0,
                Format = CompressionFormat.None,
                Sha256 = ComputeSha256(data)
            };
        }

        try
        {
            var startPosition = outputStream.Position;

            switch (format)
            {
                case CompressionFormat.Gzip:
                    await using (var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        await gzip.WriteAsync(data, cancellationToken);
                    }
                    break;

                case CompressionFormat.Brotli:
                    await using (var brotli = new BrotliStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        await brotli.WriteAsync(data, cancellationToken);
                    }
                    break;

                case CompressionFormat.Zstd:
                    // Fallback to gzip (see CompressBytes).
                    await using (var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
                    {
                        await gzip.WriteAsync(data, cancellationToken);
                    }
                    break;

                default:
                    return CompressionResult.Failed($"Unsupported compression format: {format}");
            }

            var compressedSize = outputStream.Position - startPosition;
            var ratio = data.Length > 0 ? (double)compressedSize / data.Length : 1.0;

            return new CompressionResult
            {
                Success = true,
                OriginalSizeBytes = data.Length,
                CompressedSizeBytes = compressedSize,
                CompressionRatio = ratio,
                Format = format
                // Note: Sha256 is not computed for stream output - the caller should compute it from the stream if needed.
            };
        }
        catch (Exception ex)
        {
            return CompressionResult.Failed($"Compression failed: {ex.Message}");
        }
    }

    /// <summary>
    /// Decompresses content.
    /// </summary>
    public DecompressionResult Decompress(byte[] compressedData, CompressionFormat format)
    {
        if (format == CompressionFormat.None)
        {
            return new DecompressionResult
            {
                Success = true,
                DecompressedData = compressedData
            };
        }

        try
        {
            using var inputStream = new MemoryStream(compressedData);
            using var outputStream = new MemoryStream();

            switch (format)
            {
                case CompressionFormat.Gzip:
                    using (var gzip = new GZipStream(inputStream, CompressionMode.Decompress))
                    {
                        gzip.CopyTo(outputStream);
                    }
                    break;

                case CompressionFormat.Brotli:
                    using (var brotli = new BrotliStream(inputStream, CompressionMode.Decompress))
                    {
                        brotli.CopyTo(outputStream);
                    }
                    break;

                case CompressionFormat.Zstd:
                    // Fallback - the data was gzip-compressed (see CompressBytes), so decompress as gzip.
                    using (var gzip = new GZipStream(inputStream, CompressionMode.Decompress))
                    {
                        gzip.CopyTo(outputStream);
                    }
                    break;

                default:
                    return DecompressionResult.Failed($"Unsupported compression format: {format}");
            }

            return new DecompressionResult
            {
                Success = true,
                DecompressedData = outputStream.ToArray()
            };
        }
        catch (Exception ex)
        {
            return DecompressionResult.Failed($"Decompression failed: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the file extension for a compression format.
    /// </summary>
    public static string GetFileExtension(CompressionFormat format) => format switch
    {
        CompressionFormat.Gzip => ".gz",
        CompressionFormat.Brotli => ".br",
        CompressionFormat.Zstd => ".zst",
        _ => string.Empty
    };

    /// <summary>
    /// Gets the content type for a compression format.
    /// </summary>
    public static string GetContentType(CompressionFormat format) => format switch
    {
        CompressionFormat.Gzip => "application/gzip",
        CompressionFormat.Brotli => "application/br",
        CompressionFormat.Zstd => "application/zstd",
        _ => "application/octet-stream"
    };

    private static string ComputeSha256(byte[] data)
    {
        var hashBytes = SHA256.HashData(data);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }
}
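
// Illustrative roundtrip sketch (not part of this commit): gzip a payload and
// restore it. The result's Sha256 covers the *compressed* bytes (see ComputeSha256).
var compressor = new ExportCompressor();
var compressed = compressor.Compress("""{"hello":"world"}""", CompressionFormat.Gzip);
if (compressed.Success)
{
    var restored = compressor.Decompress(compressed.CompressedData!, CompressionFormat.Gzip);
    // restored.DecompressedData equals the original UTF-8 bytes;
    // compressed.CompressionRatio is CompressedSizeBytes / OriginalSizeBytes.
}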

/// <summary>
/// Result of a compression operation.
/// </summary>
public sealed record CompressionResult
{
    public required bool Success { get; init; }

    public byte[]? CompressedData { get; init; }

    public long OriginalSizeBytes { get; init; }

    public long CompressedSizeBytes { get; init; }

    public double CompressionRatio { get; init; }

    public CompressionFormat Format { get; init; }

    public string? Sha256 { get; init; }

    public string? ErrorMessage { get; init; }

    public static CompressionResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// Result of a decompression operation.
/// </summary>
public sealed record DecompressionResult
{
    public required bool Success { get; init; }

    public byte[]? DecompressedData { get; init; }

    public string? ErrorMessage { get; init; }

    public static DecompressionResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage };
}
@@ -0,0 +1,160 @@
using StellaOps.ExportCenter.Core.Planner;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// Interface for export format adapters.
/// </summary>
public interface IExportAdapter
{
    /// <summary>
    /// Unique identifier for this adapter (e.g., "json:raw", "json:policy").
    /// </summary>
    string AdapterId { get; }

    /// <summary>
    /// Human-readable name for this adapter.
    /// </summary>
    string DisplayName { get; }

    /// <summary>
    /// Export formats this adapter supports.
    /// </summary>
    IReadOnlyList<ExportFormat> SupportedFormats { get; }

    /// <summary>
    /// Whether this adapter supports streaming output.
    /// </summary>
    bool SupportsStreaming { get; }

    /// <summary>
    /// Processes export items and produces output artifacts.
    /// </summary>
    Task<ExportAdapterResult> ProcessAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Processes export items as a stream (for large datasets).
    /// </summary>
    IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates adapter configuration.
    /// </summary>
    Task<IReadOnlyList<string>> ValidateConfigAsync(
        ExportAdapterConfig config,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Context for export adapter processing.
/// </summary>
public sealed record ExportAdapterContext
{
    /// <summary>
    /// Adapter configuration.
    /// </summary>
    public required ExportAdapterConfig Config { get; init; }

    /// <summary>
    /// Resolved export items to process.
    /// </summary>
    public required IReadOnlyList<ResolvedExportItem> Items { get; init; }

    /// <summary>
    /// Data fetcher for retrieving item content.
    /// </summary>
    public required IExportDataFetcher DataFetcher { get; init; }

    /// <summary>
    /// Optional policy evaluator for the json:policy format.
    /// </summary>
    public IExportPolicyEvaluator? PolicyEvaluator { get; init; }

    /// <summary>
    /// Tenant ID for the export.
    /// </summary>
    public required Guid TenantId { get; init; }

    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// Time provider for deterministic timestamps.
    /// </summary>
    public TimeProvider TimeProvider { get; init; } = TimeProvider.System;
}
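
// Illustrative sketch (not part of this commit): assembling a context from the
// in-memory test doubles above. "config" and "items" are assumed to come from the
// planner (ExportAdapterConfig and ResolvedExportItem are defined there).
var context = new ExportAdapterContext
{
    Config = config,
    Items = items,
    DataFetcher = new InMemoryExportDataFetcher(),
    PolicyEvaluator = new InMemoryExportPolicyEvaluator
    {
        DefaultPolicy = new PolicyMetadata { Decision = "allow" }
    },
    TenantId = Guid.NewGuid(),
    TimeProvider = TimeProvider.System // swap in a fixed TimeProvider for deterministic tests
};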

/// <summary>
/// Interface for fetching export item data.
/// </summary>
public interface IExportDataFetcher
{
    /// <summary>
    /// Fetches the content for an export item.
    /// </summary>
    Task<ExportItemContent> FetchAsync(
        ResolvedExportItem item,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Fetches content as a stream for large items.
    /// </summary>
    Task<Stream> FetchStreamAsync(
        ResolvedExportItem item,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Content of an export item.
/// </summary>
public sealed record ExportItemContent
{
    public required Guid ItemId { get; init; }

    public required bool Success { get; init; }

    /// <summary>
    /// Raw JSON content.
    /// </summary>
    public string? JsonContent { get; init; }

    /// <summary>
    /// Parsed content as an object (for manipulation).
    /// </summary>
    public object? ParsedContent { get; init; }

    /// <summary>
    /// Content type (e.g., "application/vnd.cyclonedx+json").
    /// </summary>
    public string? ContentType { get; init; }

    /// <summary>
    /// SHA-256 hash of the original content.
    /// </summary>
    public string? OriginalHash { get; init; }

    public string? ErrorMessage { get; init; }

    public static ExportItemContent Failed(Guid itemId, string errorMessage)
        => new() { ItemId = itemId, Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// Interface for evaluating policies on export items.
/// </summary>
public interface IExportPolicyEvaluator
{
    /// <summary>
    /// Evaluates policy for an export item.
    /// </summary>
    Task<PolicyMetadata?> EvaluateAsync(
        ResolvedExportItem item,
        ExportItemContent content,
        CancellationToken cancellationToken = default);
}
@@ -0,0 +1,429 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Text.RegularExpressions;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// Normalizes JSON documents for deterministic output.
/// </summary>
public sealed partial class JsonNormalizer
{
    private readonly JsonNormalizationOptions _normalizationOptions;
    private readonly JsonRedactionOptions _redactionOptions;
    private readonly JsonSerializerOptions _serializerOptions;

    public JsonNormalizer(
        JsonNormalizationOptions? normalizationOptions = null,
        JsonRedactionOptions? redactionOptions = null)
    {
        _normalizationOptions = normalizationOptions ?? new JsonNormalizationOptions();
        _redactionOptions = redactionOptions ?? new JsonRedactionOptions();

        _serializerOptions = new JsonSerializerOptions
        {
            WriteIndented = false,
            PropertyNamingPolicy = null,
            DefaultIgnoreCondition = _normalizationOptions.RemoveNullValues
                ? System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
                : System.Text.Json.Serialization.JsonIgnoreCondition.Never
        };
    }

    /// <summary>
    /// Normalizes and optionally redacts a JSON string.
    /// </summary>
    public NormalizationResult Normalize(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            return NormalizationResult.Failed("Input JSON is empty or null");
        }

        try
        {
            var node = JsonNode.Parse(json);
            if (node is null)
            {
                return NormalizationResult.Failed("Failed to parse JSON");
            }

            var redactedCount = 0;

            // Apply field-based redaction
            if (_redactionOptions.RedactFields.Count > 0)
            {
                redactedCount = RedactFields(node, _redactionOptions.RedactFields, "");
            }

            // Apply pattern-based redaction
            if (_redactionOptions.RedactPatterns.Count > 0)
            {
                redactedCount += RedactPatterns(node, _redactionOptions.RedactPatterns);
            }

            // Sort keys if requested
            if (_normalizationOptions.SortKeys && node is JsonObject rootObject)
            {
                node = SortKeys(rootObject);
            }

            // Normalize timestamps
            if (_normalizationOptions.NormalizeTimestamps)
            {
                NormalizeTimestamps(node);
            }

            // Serialize to string
            var normalized = node.ToJsonString(_serializerOptions);

            // Normalize line endings
            if (_normalizationOptions.NormalizeLineEndings)
            {
                normalized = NormalizeLineEndings(normalized);
            }

            // Calculate hash
            var hash = ComputeSha256(normalized);

            return new NormalizationResult
            {
                Success = true,
                NormalizedJson = normalized,
                Sha256 = hash,
                OriginalSizeBytes = Encoding.UTF8.GetByteCount(json),
                NormalizedSizeBytes = Encoding.UTF8.GetByteCount(normalized),
                RedactedFieldCount = redactedCount
            };
        }
        catch (JsonException ex)
        {
            return NormalizationResult.Failed($"JSON parse error: {ex.Message}");
        }
    }

    /// <summary>
    /// Normalizes a JSON node in place.
    /// </summary>
    public JsonNode? NormalizeNode(JsonNode? node)
    {
        if (node is null) return null;

        if (_normalizationOptions.SortKeys && node is JsonObject obj)
        {
            node = SortKeys(obj);
        }

        if (_normalizationOptions.NormalizeTimestamps)
        {
            NormalizeTimestamps(node);
        }

        return node;
    }

    private int RedactFields(JsonNode node, IReadOnlyList<string> fieldsToRedact, string currentPath)
    {
        var redactedCount = 0;

        switch (node)
        {
            case JsonObject obj:
                var keysToRedact = new List<string>();

                foreach (var kvp in obj)
                {
                    var fieldPath = string.IsNullOrEmpty(currentPath) ? kvp.Key : $"{currentPath}.{kvp.Key}";

                    // Check if this field should be redacted
                    if (ShouldRedactField(kvp.Key, fieldPath, fieldsToRedact))
                    {
                        keysToRedact.Add(kvp.Key);
                    }
                    else if (kvp.Value is not null)
                    {
                        // Recurse into nested objects/arrays
                        redactedCount += RedactFields(kvp.Value, fieldsToRedact, fieldPath);
                    }
                }

                // Apply redaction after enumeration completes
                foreach (var key in keysToRedact)
                {
                    obj[key] = GetRedactedValue(obj[key]);
                    redactedCount++;
                }
                break;

            case JsonArray arr:
                for (var i = 0; i < arr.Count; i++)
                {
                    if (arr[i] is not null)
                    {
                        redactedCount += RedactFields(arr[i]!, fieldsToRedact, $"{currentPath}[{i}]");
                    }
                }
                break;
        }

        return redactedCount;
    }

    private bool ShouldRedactField(string fieldName, string fieldPath, IReadOnlyList<string> fieldsToRedact)
    {
        foreach (var pattern in fieldsToRedact)
        {
            // Exact match by field name
            if (fieldName.Equals(pattern, StringComparison.OrdinalIgnoreCase))
                return true;

            // Path match with wildcards
            if (pattern.Contains('*'))
            {
                var regex = "^" + Regex.Escape(pattern).Replace("\\*", ".*") + "$";
                if (Regex.IsMatch(fieldPath, regex, RegexOptions.IgnoreCase))
                    return true;
            }

            // Exact path match
            if (fieldPath.Equals(pattern, StringComparison.OrdinalIgnoreCase))
                return true;
        }

        // Check default sensitive fields
        foreach (var sensitive in JsonRedactionOptions.DefaultSensitiveFields)
        {
            if (fieldName.Contains(sensitive, StringComparison.OrdinalIgnoreCase))
                return true;
        }

        return false;
    }

    private JsonNode GetRedactedValue(JsonNode? original)
    {
        if (!_redactionOptions.PreserveTypes || original is null)
        {
            return JsonValue.Create(_redactionOptions.RedactedValue)!;
        }

        return original switch
        {
            JsonObject => JsonValue.Create(_redactionOptions.RedactedValue)!,
            JsonArray => new JsonArray(),
            JsonValue v when v.TryGetValue<string>(out _) => JsonValue.Create(string.Empty)!,
            JsonValue v when v.TryGetValue<int>(out _) => JsonValue.Create(0)!,
            JsonValue v when v.TryGetValue<bool>(out _) => JsonValue.Create(false)!,
            _ => JsonValue.Create(_redactionOptions.RedactedValue)!
        };
    }

    private int RedactPatterns(JsonNode node, IReadOnlyList<string> patterns)
    {
        var redactedCount = 0;
        var compiledPatterns = patterns.Select(p => new Regex(p, RegexOptions.Compiled | RegexOptions.IgnoreCase)).ToList();

        void ProcessNode(JsonNode? n)
        {
            switch (n)
            {
                case JsonObject obj:
                    foreach (var kvp in obj.ToList())
                    {
                        if (kvp.Value is JsonValue jv && jv.TryGetValue<string>(out var str))
                        {
                            foreach (var regex in compiledPatterns)
                            {
                                if (regex.IsMatch(str))
                                {
                                    obj[kvp.Key] = JsonValue.Create(_redactionOptions.RedactedValue);
                                    redactedCount++;
                                    break;
                                }
                            }
                        }
                        else if (kvp.Value is not null)
                        {
                            ProcessNode(kvp.Value);
                        }
                    }
                    break;

                case JsonArray arr:
                    for (var i = 0; i < arr.Count; i++)
                    {
                        if (arr[i] is JsonValue jv && jv.TryGetValue<string>(out var str))
                        {
                            foreach (var regex in compiledPatterns)
                            {
                                if (regex.IsMatch(str))
                                {
                                    arr[i] = JsonValue.Create(_redactionOptions.RedactedValue);
                                    redactedCount++;
                                    break;
                                }
                            }
                        }
                        else if (arr[i] is not null)
                        {
                            ProcessNode(arr[i]);
                        }
                    }
                    break;
            }
        }

        ProcessNode(node);
        return redactedCount;
    }

    private static JsonObject SortKeys(JsonObject obj)
    {
        var sorted = new JsonObject();

        // Snapshot the ordered pairs before mutating, so removal below cannot
        // invalidate the enumeration.
        foreach (var kvp in obj.OrderBy(x => x.Key, StringComparer.Ordinal).ToList())
        {
            var value = kvp.Value;

            // Detach from the original and re-parent under sorted (a JsonNode can only have one parent)
            obj.Remove(kvp.Key);

            if (value is JsonObject childObj)
            {
                sorted[kvp.Key] = SortKeys(childObj);
            }
            else if (value is JsonArray arr)
            {
                sorted[kvp.Key] = SortKeysInArray(arr);
            }
            else
            {
                sorted[kvp.Key] = value;
            }
        }

        return sorted;
    }

    private static JsonArray SortKeysInArray(JsonArray arr)
    {
        var newArray = new JsonArray();

        foreach (var item in arr.ToList())
        {
            // Detach from the original array before re-parenting
            arr.Remove(item);

            if (item is JsonObject obj)
            {
                newArray.Add(SortKeys(obj));
            }
            else if (item is JsonArray childArr)
            {
                newArray.Add(SortKeysInArray(childArr));
            }
            else
            {
                newArray.Add(item);
            }
        }

        return newArray;
    }

    private void NormalizeTimestamps(JsonNode? node)
    {
        switch (node)
        {
            case JsonObject obj:
                foreach (var kvp in obj.ToList())
                {
                    if (kvp.Value is JsonValue jv && jv.TryGetValue<string>(out var str))
                    {
                        if (TryParseTimestamp(str, out var dt))
                        {
                            obj[kvp.Key] = JsonValue.Create(
                                dt.ToUniversalTime().ToString(_normalizationOptions.TimestampFormat));
                        }
                    }
                    else if (kvp.Value is not null)
                    {
                        NormalizeTimestamps(kvp.Value);
                    }
                }
                break;

            case JsonArray arr:
                for (var i = 0; i < arr.Count; i++)
                {
                    if (arr[i] is JsonValue jv && jv.TryGetValue<string>(out var str))
                    {
                        if (TryParseTimestamp(str, out var dt))
                        {
                            arr[i] = JsonValue.Create(
                                dt.ToUniversalTime().ToString(_normalizationOptions.TimestampFormat));
                        }
                    }
                    else if (arr[i] is not null)
                    {
                        NormalizeTimestamps(arr[i]);
                    }
                }
                break;
        }
    }

    private static bool TryParseTimestamp(string value, out DateTimeOffset result)
    {
        // Check if the string looks like a timestamp
        if (value.Length >= 10 && value.Length <= 40)
        {
            // Try ISO 8601 formats
            if (DateTimeOffset.TryParse(value, null,
                System.Globalization.DateTimeStyles.RoundtripKind, out result))
            {
                // Additional validation - must have date separators
                return value.Contains('-') || value.Contains('/');
            }
        }

        result = default;
        return false;
    }

    private static string NormalizeLineEndings(string text)
    {
        return text.Replace("\r\n", "\n").Replace("\r", "\n");
    }

    private static string ComputeSha256(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hashBytes = SHA256.HashData(bytes);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }
}

/// <summary>
/// Result of JSON normalization.
/// </summary>
public sealed record NormalizationResult
{
    public required bool Success { get; init; }

    public string? NormalizedJson { get; init; }

    public string? Sha256 { get; init; }

    public long OriginalSizeBytes { get; init; }

    public long NormalizedSizeBytes { get; init; }

    public int RedactedFieldCount { get; init; }

    public string? ErrorMessage { get; init; }

    public static NormalizationResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage };
}
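
// Illustrative sketch (not part of this commit): two key-orderings of the same
// document normalize to identical bytes, so their Sha256 values match. Note that
// the DefaultSensitiveFields check only runs once RedactFields is non-empty (see
// Normalize), hence the explicit entry here.
var normalizer = new JsonNormalizer(
    redactionOptions: new JsonRedactionOptions { RedactFields = ["apiToken"] });
var a = normalizer.Normalize("""{"b":1,"a":{"apiToken":"xyz","password":"hunter2"}}""");
var b = normalizer.Normalize("""{"a":{"password":"hunter2","apiToken":"xyz"},"b":1}""");
// a.Sha256 == b.Sha256; both "apiToken" and "password" serialize as "[REDACTED]".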
@@ -0,0 +1,452 @@
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Planner;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// JSON Policy adapter (json:policy) - exports items wrapped with policy metadata.
/// </summary>
public sealed class JsonPolicyAdapter : IExportAdapter
{
    public const string Id = "json:policy";

    private readonly ILogger<JsonPolicyAdapter> _logger;
    private readonly JsonNormalizer _normalizer;
    private readonly ExportCompressor _compressor;
    private readonly JsonSerializerOptions _serializerOptions;

    public string AdapterId => Id;
    public string DisplayName => "JSON with Policy";
    public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.JsonPolicy, ExportFormat.Ndjson];
    public bool SupportsStreaming => true;

    public JsonPolicyAdapter(ILogger<JsonPolicyAdapter> logger)
    {
        _logger = logger;
        _normalizer = new JsonNormalizer();
        _compressor = new ExportCompressor();
        _serializerOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
        };
    }

    public JsonPolicyAdapter(
        ILogger<JsonPolicyAdapter> logger,
        JsonNormalizationOptions? normalizationOptions,
        JsonRedactionOptions? redactionOptions)
    {
        _logger = logger;
        _normalizer = new JsonNormalizer(normalizationOptions, redactionOptions);
        _compressor = new ExportCompressor();
        _serializerOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
        };
    }

    public async Task<ExportAdapterResult> ProcessAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken = default)
    {
        var stopwatch = Stopwatch.StartNew();
        var itemResults = new List<AdapterItemResult>();
        var artifacts = new List<ExportOutputArtifact>();
        var countsBuilder = new ManifestCountsBuilder();

        try
        {
            Directory.CreateDirectory(context.Config.OutputDirectory);

            var format = context.Config.FormatOptions.Format;

            if (format == ExportFormat.Ndjson)
            {
                var ndjsonResult = await ProcessAsNdjsonAsync(context, cancellationToken);
                if (ndjsonResult.Success)
                {
                    artifacts.Add(ndjsonResult.Artifact!);
                    itemResults.AddRange(ndjsonResult.ItemResults);
                }
                else
                {
                    return ExportAdapterResult.Failed(ndjsonResult.ErrorMessage ?? "NDJSON export failed");
                }
            }
            else
            {
                foreach (var item in context.Items)
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    var result = await ProcessSingleItemAsync(context, item, cancellationToken);
                    itemResults.Add(result);

                    if (result.Success && result.OutputPath is not null)
                    {
                        artifacts.Add(new ExportOutputArtifact
                        {
                            Path = result.OutputPath,
                            SizeBytes = result.OutputSizeBytes,
                            Sha256 = result.ContentHash ?? string.Empty,
                            ContentType = "application/json",
                            ItemCount = 1,
                            IsCompressed = context.Config.FormatOptions.Compression != CompressionFormat.None,
                            Compression = context.Config.FormatOptions.Compression
                        });
                    }

                    countsBuilder.AddItem(item.Kind, result.Success);
                }
            }

            stopwatch.Stop();

            var counts = countsBuilder.Build(artifacts);

            if (_logger.IsEnabled(LogLevel.Information))
            {
                _logger.LogInformation(
                    "JSON policy export completed: {SuccessCount}/{TotalCount} items, {ArtifactCount} artifacts in {ElapsedMs}ms",
                    counts.SuccessfulItems, counts.TotalItems, counts.ArtifactCount, stopwatch.ElapsedMilliseconds);
            }

            return new ExportAdapterResult
            {
                Success = true,
                ItemResults = itemResults,
                Artifacts = artifacts,
                ManifestCounts = counts,
                ProcessingTime = stopwatch.Elapsed,
                CompletedAt = context.TimeProvider.GetUtcNow()
            };
        }
        catch (OperationCanceledException)
        {
            return ExportAdapterResult.Failed("Export cancelled");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "JSON policy export failed");
            return ExportAdapterResult.Failed($"Export failed: {ex.Message}");
        }
    }

    public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
        ExportAdapterContext context,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        Directory.CreateDirectory(context.Config.OutputDirectory);

        foreach (var item in context.Items)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var result = await ProcessSingleItemAsync(context, item, cancellationToken);
            yield return result;
        }
    }

    public Task<IReadOnlyList<string>> ValidateConfigAsync(
        ExportAdapterConfig config,
        CancellationToken cancellationToken = default)
    {
        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(config.OutputDirectory))
        {
            errors.Add("Output directory is required");
        }

        if (!SupportedFormats.Contains(config.FormatOptions.Format))
        {
            errors.Add($"Format {config.FormatOptions.Format} is not supported by this adapter");
        }

        return Task.FromResult<IReadOnlyList<string>>(errors);
    }

    private async Task<AdapterItemResult> ProcessSingleItemAsync(
        ExportAdapterContext context,
        ResolvedExportItem item,
        CancellationToken cancellationToken)
    {
        try
        {
            // Fetch content
            var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
            if (!content.Success)
            {
                return AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch content");
            }

            if (string.IsNullOrEmpty(content.JsonContent))
            {
                return AdapterItemResult.Failed(item.ItemId, "Item content is empty");
            }

            // Normalize the data content
            var normalized = _normalizer.Normalize(content.JsonContent);
            if (!normalized.Success)
            {
                return AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed");
            }

            // Get policy metadata if an evaluator is available
            PolicyMetadata? policyMetadata = null;
            if (context.PolicyEvaluator is not null)
            {
                policyMetadata = await context.PolicyEvaluator.EvaluateAsync(item, content, cancellationToken);
            }

            // Build the wrapped document
            var now = context.TimeProvider.GetUtcNow();
            var wrappedDocument = BuildWrappedDocument(item, normalized.NormalizedJson!, normalized.Sha256!, policyMetadata, now);

            // Serialize to JSON
            var outputJson = context.Config.FormatOptions.PrettyPrint
                ? JsonSerializer.Serialize(wrappedDocument, new JsonSerializerOptions(_serializerOptions) { WriteIndented = true })
                : JsonSerializer.Serialize(wrappedDocument, _serializerOptions);

            // Compress if requested
            var outputBytes = Encoding.UTF8.GetBytes(outputJson);
            var compression = context.Config.FormatOptions.Compression;

            if (compression != CompressionFormat.None)
            {
                var compressed = _compressor.CompressBytes(outputBytes, compression);
                if (!compressed.Success)
                {
                    return AdapterItemResult.Failed(item.ItemId, compressed.ErrorMessage ?? "Compression failed");
                }
                outputBytes = compressed.CompressedData!;
            }

            // Write to file
            var fileName = BuildFileName(item, context.Config);
            var outputPath = Path.Combine(context.Config.OutputDirectory, fileName);

            await File.WriteAllBytesAsync(outputPath, outputBytes, cancellationToken);

            // Write checksum file if requested (two-space separator for sha256sum compatibility)
            var hash = ComputeSha256(outputBytes);
            if (context.Config.IncludeChecksums)
            {
                var checksumPath = outputPath + ".sha256";
                await File.WriteAllTextAsync(checksumPath, $"{hash}  {fileName}\n", cancellationToken);
            }

            return new AdapterItemResult
            {
                ItemId = item.ItemId,
                Success = true,
                OutputPath = outputPath,
                OutputSizeBytes = outputBytes.Length,
                ContentHash = hash,
                ProcessedAt = now
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
            return AdapterItemResult.Failed(item.ItemId, ex.Message);
        }
    }

    private PolicyWrappedExportItem BuildWrappedDocument(
        ResolvedExportItem item,
        string normalizedJson,
        string contentHash,
        PolicyMetadata? policyMetadata,
        DateTimeOffset exportedAt)
    {
        // Parse the normalized JSON as an object
        var dataNode = JsonNode.Parse(normalizedJson);

        return new PolicyWrappedExportItem
        {
            Metadata = new ExportItemMetadata
            {
                ItemId = item.ItemId,
                Kind = item.Kind,
                SourceRef = item.SourceRef,
                Name = item.Name,
                Namespace = item.Namespace,
                Tags = item.Tags,
                CreatedAt = item.CreatedAt,
                ExportedAt = exportedAt,
                Sha256 = contentHash
            },
            Policy = policyMetadata,
            Data = dataNode!
        };
    }

    private async Task<NdjsonPolicyExportResult> ProcessAsNdjsonAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken)
    {
        var itemResults = new List<AdapterItemResult>();
        var lines = new List<string>();
        var now = context.TimeProvider.GetUtcNow();

        foreach (var item in context.Items)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
                if (!content.Success)
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
                    continue;
                }

                if (string.IsNullOrEmpty(content.JsonContent))
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content"));
                    continue;
                }

                var normalized = _normalizer.Normalize(content.JsonContent);
                if (!normalized.Success)
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed"));
                    continue;
                }

                // Get policy metadata
                PolicyMetadata? policyMetadata = null;
                if (context.PolicyEvaluator is not null)
                {
                    policyMetadata = await context.PolicyEvaluator.EvaluateAsync(item, content, cancellationToken);
                }

                // Build wrapped document
                var wrappedDocument = BuildWrappedDocument(item, normalized.NormalizedJson!, normalized.Sha256!, policyMetadata, now);

                // Serialize to a single line
                var lineJson = JsonSerializer.Serialize(wrappedDocument, _serializerOptions);
                lines.Add(lineJson);

                itemResults.Add(new AdapterItemResult
                {
                    ItemId = item.ItemId,
                    Success = true,
                    ContentHash = normalized.Sha256,
                    ProcessedAt = now
                });
            }
            catch (Exception ex)
            {
                itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
            }
        }

        if (lines.Count == 0)
        {
            return NdjsonPolicyExportResult.Failed("No items processed successfully");
        }

        // Write NDJSON file
        var ndjsonContent = string.Join("\n", lines) + "\n";
        var outputBytes = Encoding.UTF8.GetBytes(ndjsonContent);
        var compression = context.Config.FormatOptions.Compression;
        long originalSize = outputBytes.Length;

        if (compression != CompressionFormat.None)
        {
            var compressed = _compressor.CompressBytes(outputBytes, compression);
            if (!compressed.Success)
            {
                return NdjsonPolicyExportResult.Failed(compressed.ErrorMessage ?? "Compression failed");
            }
            outputBytes = compressed.CompressedData!;
        }

        var fileName = $"{context.Config.BaseName}-policy.ndjson{ExportCompressor.GetFileExtension(compression)}";
        var outputPath = Path.Combine(context.Config.OutputDirectory, fileName);

        await File.WriteAllBytesAsync(outputPath, outputBytes, cancellationToken);
|
var hash = ComputeSha256(outputBytes);
|
||||||
|
if (context.Config.IncludeChecksums)
|
||||||
|
{
|
||||||
|
var checksumPath = outputPath + ".sha256";
|
||||||
|
await File.WriteAllTextAsync(checksumPath, $"{hash} {fileName}\n", cancellationToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new NdjsonPolicyExportResult
|
||||||
|
{
|
||||||
|
Success = true,
|
||||||
|
ItemResults = itemResults,
|
||||||
|
Artifact = new ExportOutputArtifact
|
||||||
|
{
|
||||||
|
Path = outputPath,
|
||||||
|
SizeBytes = outputBytes.Length,
|
||||||
|
Sha256 = hash,
|
||||||
|
ContentType = "application/x-ndjson",
|
||||||
|
ItemCount = lines.Count,
|
||||||
|
IsCompressed = compression != CompressionFormat.None,
|
||||||
|
Compression = compression,
|
||||||
|
OriginalSizeBytes = originalSize
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string BuildFileName(ResolvedExportItem item, ExportAdapterConfig config)
|
||||||
|
{
|
||||||
|
var baseName = !string.IsNullOrEmpty(item.Name)
|
||||||
|
? SanitizeFileName(item.Name)
|
||||||
|
: item.ItemId.ToString("N")[..8];
|
||||||
|
|
||||||
|
var extension = ".policy.json" + ExportCompressor.GetFileExtension(config.FormatOptions.Compression);
|
||||||
|
|
||||||
|
return $"{item.Kind}-{baseName}{extension}";
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string SanitizeFileName(string name)
|
||||||
|
{
|
||||||
|
var invalid = Path.GetInvalidFileNameChars();
|
||||||
|
var sanitized = new StringBuilder(name.Length);
|
||||||
|
|
||||||
|
foreach (var c in name)
|
||||||
|
{
|
||||||
|
sanitized.Append(invalid.Contains(c) ? '_' : c);
|
||||||
|
}
|
||||||
|
|
||||||
|
var result = sanitized.ToString();
|
||||||
|
if (result.Length > 64)
|
||||||
|
{
|
||||||
|
result = result[..64];
|
||||||
|
}
|
||||||
|
|
||||||
|
return result.ToLowerInvariant();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string ComputeSha256(byte[] data)
|
||||||
|
{
|
||||||
|
var hashBytes = SHA256.HashData(data);
|
||||||
|
return Convert.ToHexString(hashBytes).ToLowerInvariant();
|
||||||
|
}
|
||||||
|
|
||||||
|
private sealed record NdjsonPolicyExportResult
|
||||||
|
{
|
||||||
|
public required bool Success { get; init; }
|
||||||
|
public IReadOnlyList<AdapterItemResult> ItemResults { get; init; } = [];
|
||||||
|
public ExportOutputArtifact? Artifact { get; init; }
|
||||||
|
public string? ErrorMessage { get; init; }
|
||||||
|
|
||||||
|
public static NdjsonPolicyExportResult Failed(string errorMessage)
|
||||||
|
=> new() { Success = false, ErrorMessage = errorMessage };
|
||||||
|
}
|
||||||
|
}
|
||||||
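How a consumer reads the NDJSON variant follows directly from the writer above: verify the .sha256 sidecar against the bytes on disk, then parse one wrapped document per line. A minimal sketch, assuming an uncompressed export, camelCase output from _serializerOptions, and the single-space "<hash> <filename>" sidecar layout this adapter writes; none of these helpers ship with the commit:

// Sketch only. The output path is hypothetical.
var path = "exports/nightly-policy.ndjson";
var expected = (await File.ReadAllTextAsync(path + ".sha256")).Split(' ')[0];
var actual = Convert.ToHexString(SHA256.HashData(await File.ReadAllBytesAsync(path))).ToLowerInvariant();
if (!string.Equals(expected, actual, StringComparison.Ordinal))
{
    throw new InvalidOperationException("Checksum mismatch");
}

await foreach (var line in File.ReadLinesAsync(path))
{
    using var doc = JsonDocument.Parse(line);   // each line: { metadata, policy, data }
    var itemSha = doc.RootElement.GetProperty("metadata").GetProperty("sha256").GetString();
    // route doc.RootElement.GetProperty("data") to downstream processing
}

Note that the file-level hash is computed over the post-compression bytes, so for a compressed export the sidecar must be verified before decompressing.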
@@ -0,0 +1,461 @@
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Planner;

namespace StellaOps.ExportCenter.Core.Adapters;

/// <summary>
/// JSON Raw adapter (json:raw) - exports items as raw JSON documents.
/// </summary>
public sealed class JsonRawAdapter : IExportAdapter
{
    public const string Id = "json:raw";

    private readonly ILogger<JsonRawAdapter> _logger;
    private readonly JsonNormalizer _normalizer;
    private readonly ExportCompressor _compressor;

    public string AdapterId => Id;
    public string DisplayName => "JSON Raw";
    public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.JsonRaw, ExportFormat.Ndjson];
    public bool SupportsStreaming => true;

    public JsonRawAdapter(ILogger<JsonRawAdapter> logger)
    {
        _logger = logger;
        _normalizer = new JsonNormalizer();
        _compressor = new ExportCompressor();
    }

    public JsonRawAdapter(
        ILogger<JsonRawAdapter> logger,
        JsonNormalizationOptions? normalizationOptions,
        JsonRedactionOptions? redactionOptions)
    {
        _logger = logger;
        _normalizer = new JsonNormalizer(normalizationOptions, redactionOptions);
        _compressor = new ExportCompressor();
    }

    public async Task<ExportAdapterResult> ProcessAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken = default)
    {
        var stopwatch = Stopwatch.StartNew();
        var itemResults = new List<AdapterItemResult>();
        var artifacts = new List<ExportOutputArtifact>();
        var countsBuilder = new ManifestCountsBuilder();

        try
        {
            // Ensure output directory exists
            Directory.CreateDirectory(context.Config.OutputDirectory);

            var format = context.Config.FormatOptions.Format;

            if (format == ExportFormat.Ndjson)
            {
                // Process all items into a single NDJSON file
                var ndjsonResult = await ProcessAsNdjsonAsync(context, cancellationToken);
                if (ndjsonResult.Success)
                {
                    artifacts.Add(ndjsonResult.Artifact!);
                    itemResults.AddRange(ndjsonResult.ItemResults);
                }
                else
                {
                    return ExportAdapterResult.Failed(ndjsonResult.ErrorMessage ?? "NDJSON export failed");
                }
            }
            else
            {
                // Process each item as individual JSON file
                foreach (var item in context.Items)
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    var result = await ProcessSingleItemAsync(context, item, cancellationToken);
                    itemResults.Add(result);

                    if (result.Success && result.OutputPath is not null)
                    {
                        artifacts.Add(new ExportOutputArtifact
                        {
                            Path = result.OutputPath,
                            SizeBytes = result.OutputSizeBytes,
                            Sha256 = result.ContentHash ?? string.Empty,
                            ContentType = "application/json",
                            ItemCount = 1,
                            IsCompressed = context.Config.FormatOptions.Compression != CompressionFormat.None,
                            Compression = context.Config.FormatOptions.Compression,
                            OriginalSizeBytes = result.OutputSizeBytes
                        });
                    }

                    countsBuilder.AddItem(item.Kind, result.Success);
                }
            }

            stopwatch.Stop();

            // Build manifest counts
            var counts = countsBuilder.Build(artifacts);

            if (_logger.IsEnabled(LogLevel.Information))
            {
                _logger.LogInformation(
                    "JSON raw export completed: {SuccessCount}/{TotalCount} items, {ArtifactCount} artifacts, {TotalBytes} bytes in {ElapsedMs}ms",
                    counts.SuccessfulItems, counts.TotalItems, counts.ArtifactCount, counts.TotalSizeBytes, stopwatch.ElapsedMilliseconds);
            }

            return new ExportAdapterResult
            {
                Success = true,
                ItemResults = itemResults,
                Artifacts = artifacts,
                ManifestCounts = counts,
                ProcessingTime = stopwatch.Elapsed,
                CompletedAt = context.TimeProvider.GetUtcNow()
            };
        }
        catch (OperationCanceledException)
        {
            return ExportAdapterResult.Failed("Export cancelled");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "JSON raw export failed");
            return ExportAdapterResult.Failed($"Export failed: {ex.Message}");
        }
    }

    public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
        ExportAdapterContext context,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        Directory.CreateDirectory(context.Config.OutputDirectory);

        foreach (var item in context.Items)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var result = await ProcessSingleItemAsync(context, item, cancellationToken);
            yield return result;
        }
    }

    public Task<IReadOnlyList<string>> ValidateConfigAsync(
        ExportAdapterConfig config,
        CancellationToken cancellationToken = default)
    {
        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(config.OutputDirectory))
        {
            errors.Add("Output directory is required");
        }

        if (!SupportedFormats.Contains(config.FormatOptions.Format))
        {
            errors.Add($"Format {config.FormatOptions.Format} is not supported by this adapter");
        }

        return Task.FromResult<IReadOnlyList<string>>(errors);
    }

    private async Task<AdapterItemResult> ProcessSingleItemAsync(
        ExportAdapterContext context,
        ResolvedExportItem item,
        CancellationToken cancellationToken)
    {
        try
        {
            // Fetch content
            var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
            if (!content.Success)
            {
                return AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch content");
            }

            if (string.IsNullOrEmpty(content.JsonContent))
            {
                return AdapterItemResult.Failed(item.ItemId, "Item content is empty");
            }

            // Normalize JSON
            var normalized = _normalizer.Normalize(content.JsonContent);
            if (!normalized.Success)
            {
                return AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed");
            }

            // Apply pretty print if requested
            var outputJson = normalized.NormalizedJson!;
            if (context.Config.FormatOptions.PrettyPrint)
            {
                outputJson = PrettyPrint(outputJson);
            }

            // Compress if requested
            var outputBytes = Encoding.UTF8.GetBytes(outputJson);
            var compression = context.Config.FormatOptions.Compression;

            if (compression != CompressionFormat.None)
            {
                var compressed = _compressor.CompressBytes(outputBytes, compression);
                if (!compressed.Success)
                {
                    return AdapterItemResult.Failed(item.ItemId, compressed.ErrorMessage ?? "Compression failed");
                }
                outputBytes = compressed.CompressedData!;
            }

            // Write to file
            var fileName = BuildFileName(item, context.Config);
            var outputPath = Path.Combine(context.Config.OutputDirectory, fileName);

            await File.WriteAllBytesAsync(outputPath, outputBytes, cancellationToken);

            // Write checksum file if requested
            var hash = ComputeSha256(outputBytes);
            if (context.Config.IncludeChecksums)
            {
                var checksumPath = outputPath + ".sha256";
                await File.WriteAllTextAsync(checksumPath, $"{hash} {fileName}\n", cancellationToken);
            }

            return new AdapterItemResult
            {
                ItemId = item.ItemId,
                Success = true,
                OutputPath = outputPath,
                OutputSizeBytes = outputBytes.Length,
                ContentHash = hash,
                ProcessedAt = context.TimeProvider.GetUtcNow()
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
            return AdapterItemResult.Failed(item.ItemId, ex.Message);
        }
    }

    private async Task<NdjsonExportResult> ProcessAsNdjsonAsync(
        ExportAdapterContext context,
        CancellationToken cancellationToken)
    {
        var itemResults = new List<AdapterItemResult>();
        var lines = new List<string>();

        foreach (var item in context.Items)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
                if (!content.Success)
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
                    continue;
                }

                if (string.IsNullOrEmpty(content.JsonContent))
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content"));
                    continue;
                }

                var normalized = _normalizer.Normalize(content.JsonContent);
                if (!normalized.Success)
                {
                    itemResults.Add(AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed"));
                    continue;
                }

                // Ensure single line for NDJSON
                var singleLine = normalized.NormalizedJson!.Replace("\n", " ").Replace("\r", "");
                lines.Add(singleLine);

                itemResults.Add(new AdapterItemResult
                {
                    ItemId = item.ItemId,
                    Success = true,
                    ContentHash = normalized.Sha256,
                    ProcessedAt = context.TimeProvider.GetUtcNow()
                });
            }
            catch (Exception ex)
            {
                itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
            }
        }

        if (lines.Count == 0)
        {
            return NdjsonExportResult.Failed("No items processed successfully");
        }

        // Write NDJSON file
        var ndjsonContent = string.Join("\n", lines) + "\n";
        var outputBytes = Encoding.UTF8.GetBytes(ndjsonContent);
        var compression = context.Config.FormatOptions.Compression;
        long originalSize = outputBytes.Length;

        if (compression != CompressionFormat.None)
        {
            var compressed = _compressor.CompressBytes(outputBytes, compression);
            if (!compressed.Success)
            {
                return NdjsonExportResult.Failed(compressed.ErrorMessage ?? "Compression failed");
            }
            outputBytes = compressed.CompressedData!;
        }

        var fileName = $"{context.Config.BaseName}.ndjson{ExportCompressor.GetFileExtension(compression)}";
        var outputPath = Path.Combine(context.Config.OutputDirectory, fileName);

        await File.WriteAllBytesAsync(outputPath, outputBytes, cancellationToken);

        var hash = ComputeSha256(outputBytes);
        if (context.Config.IncludeChecksums)
        {
            var checksumPath = outputPath + ".sha256";
            await File.WriteAllTextAsync(checksumPath, $"{hash} {fileName}\n", cancellationToken);
        }

        return new NdjsonExportResult
        {
            Success = true,
            ItemResults = itemResults,
            Artifact = new ExportOutputArtifact
            {
                Path = outputPath,
                SizeBytes = outputBytes.Length,
                Sha256 = hash,
                ContentType = "application/x-ndjson",
                ItemCount = lines.Count,
                IsCompressed = compression != CompressionFormat.None,
                Compression = compression,
                OriginalSizeBytes = originalSize
            }
        };
    }

    private static string BuildFileName(ResolvedExportItem item, ExportAdapterConfig config)
    {
        var baseName = !string.IsNullOrEmpty(item.Name)
            ? SanitizeFileName(item.Name)
            : item.ItemId.ToString("N")[..8];

        var extension = ".json" + ExportCompressor.GetFileExtension(config.FormatOptions.Compression);

        return $"{item.Kind}-{baseName}{extension}";
    }

    private static string SanitizeFileName(string name)
    {
        var invalid = Path.GetInvalidFileNameChars();
        var sanitized = new StringBuilder(name.Length);

        foreach (var c in name)
        {
            sanitized.Append(invalid.Contains(c) ? '_' : c);
        }

        // Limit length
        var result = sanitized.ToString();
        if (result.Length > 64)
        {
            result = result[..64];
        }

        return result.ToLowerInvariant();
    }

    private static string PrettyPrint(string json)
    {
        using var doc = JsonDocument.Parse(json);
        return JsonSerializer.Serialize(doc, new JsonSerializerOptions { WriteIndented = true });
    }

    private static string ComputeSha256(byte[] data)
    {
        var hashBytes = SHA256.HashData(data);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }

    private sealed record NdjsonExportResult
    {
        public required bool Success { get; init; }
        public IReadOnlyList<AdapterItemResult> ItemResults { get; init; } = [];
        public ExportOutputArtifact? Artifact { get; init; }
        public string? ErrorMessage { get; init; }

        public static NdjsonExportResult Failed(string errorMessage)
            => new() { Success = false, ErrorMessage = errorMessage };
    }
}

/// <summary>
/// Builder for manifest counts.
/// </summary>
internal sealed class ManifestCountsBuilder
{
    private int _totalItems;
    private int _successfulItems;
    private int _failedItems;
    private readonly Dictionary<string, int> _byKind = new();
    private readonly Dictionary<string, int> _byStatus = new();

    public void AddItem(string kind, bool success)
    {
        _totalItems++;

        if (success)
        {
            _successfulItems++;
            IncrementDict(_byStatus, "success");
        }
        else
        {
            _failedItems++;
            IncrementDict(_byStatus, "failed");
        }

        IncrementDict(_byKind, kind);
    }

    public ExportManifestCounts Build(IReadOnlyList<ExportOutputArtifact> artifacts)
    {
        var totalSize = artifacts.Sum(a => a.SizeBytes);
        var compressedSize = artifacts.Where(a => a.IsCompressed).Sum(a => a.SizeBytes);
        var originalSize = artifacts.Where(a => a.IsCompressed && a.OriginalSizeBytes.HasValue)
            .Sum(a => a.OriginalSizeBytes!.Value);

        return new ExportManifestCounts
        {
            TotalItems = _totalItems,
            ProcessedItems = _totalItems,
            SuccessfulItems = _successfulItems,
            FailedItems = _failedItems,
            SkippedItems = 0,
            ArtifactCount = artifacts.Count,
            TotalSizeBytes = totalSize,
            CompressedSizeBytes = compressedSize > 0 ? compressedSize : null,
            ByKind = _byKind,
            ByStatus = _byStatus
        };
    }

    private static void IncrementDict(Dictionary<string, int> dict, string key)
    {
        dict.TryGetValue(key, out var current);
        dict[key] = current + 1;
    }
}
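The adapter validates its config up front rather than failing mid-run. A usage sketch; the ExportAdapterConfig initializer below is assumed from the properties this file reads (OutputDirectory, BaseName, IncludeChecksums, FormatOptions), since its full definition lives elsewhere in the commit:

// Sketch: construct, validate, then run the adapter.
var adapter = new JsonRawAdapter(loggerFactory.CreateLogger<JsonRawAdapter>());

var config = new ExportAdapterConfig                // shape assumed from usages above
{
    OutputDirectory = "/var/lib/stellaops/exports", // hypothetical path
    BaseName = "nightly",
    IncludeChecksums = true,
    FormatOptions = new ExportFormatOptions
    {
        Format = ExportFormat.Ndjson,
        Compression = CompressionFormat.Gzip
    }
};

var errors = await adapter.ValidateConfigAsync(config);
if (errors.Count > 0)
{
    throw new InvalidOperationException(string.Join("; ", errors));
}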
@@ -0,0 +1,114 @@
using System.ComponentModel.DataAnnotations;

namespace StellaOps.ExportCenter.Core.Configuration;

/// <summary>
/// Root configuration options for the Export Center service.
/// </summary>
public sealed class ExportCenterOptions
{
    public const string SectionName = "ExportCenter";

    [Required]
    public required DatabaseOptions Database { get; init; }

    public ObjectStoreOptions? ObjectStore { get; init; }

    public TimelineOptions? Timeline { get; init; }

    public SigningOptions Signing { get; init; } = new();

    public QuotaOptions Quotas { get; init; } = new();
}

/// <summary>
/// Database connection options for Export Center.
/// </summary>
public sealed class DatabaseOptions
{
    [Required]
    public required string ConnectionString { get; init; }

    /// <summary>
    /// Enables automatic execution of SQL migrations at startup.
    /// </summary>
    public bool ApplyMigrationsAtStartup { get; init; } = true;
}

/// <summary>
/// Object storage options for export artifacts.
/// </summary>
public sealed class ObjectStoreOptions
{
    [Required]
    public required ObjectStoreKind Kind { get; init; }

    /// <summary>
    /// Base path for file system storage.
    /// </summary>
    public string? RootPath { get; init; }

    /// <summary>
    /// S3 bucket name for cloud storage.
    /// </summary>
    public string? BucketName { get; init; }

    /// <summary>
    /// AWS region for S3 storage.
    /// </summary>
    public string? Region { get; init; }
}

/// <summary>
/// Supported object store backends.
/// </summary>
public enum ObjectStoreKind
{
    FileSystem = 1,
    AmazonS3 = 2
}

/// <summary>
/// Timeline integration options.
/// </summary>
public sealed class TimelineOptions
{
    public bool Enabled { get; init; }

    [Url]
    public string? Endpoint { get; init; }

    [Range(1, 300)]
    public int RequestTimeoutSeconds { get; init; } = 15;

    public string Source { get; init; } = "stellaops.export-center";
}

/// <summary>
/// Signing options for export manifests.
/// </summary>
public sealed class SigningOptions
{
    public bool Enabled { get; init; } = true;

    public string Algorithm { get; init; } = "ES256";

    public string KeyId { get; init; } = string.Empty;

    public string? Provider { get; init; }
}

/// <summary>
/// Quota limits for export operations.
/// </summary>
public sealed class QuotaOptions
{
    [Range(1, 1000)]
    public int MaxConcurrentExports { get; init; } = 10;

    [Range(1, long.MaxValue)]
    public long MaxExportSizeBytes { get; init; } = 1L * 1024 * 1024 * 1024;

    [Range(1, 3650)]
    public int DefaultRetentionDays { get; init; } = 90;
}
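These options classes lean on data annotations, so the natural wiring is the standard Microsoft.Extensions.Options pipeline. A sketch of host registration (not part of this commit), which makes the [Required]/[Range]/[Url] attributes fail fast at startup instead of at first use:

// Sketch: bind the "ExportCenter" section and validate eagerly.
builder.Services
    .AddOptions<ExportCenterOptions>()
    .BindConfiguration(ExportCenterOptions.SectionName)
    .ValidateDataAnnotations()
    .ValidateOnStart();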
@@ -0,0 +1,130 @@
namespace StellaOps.ExportCenter.Core.Domain;

/// <summary>
/// Represents a distribution target for export artifacts.
/// </summary>
public sealed class ExportDistribution
{
    public required Guid DistributionId { get; init; }

    public required Guid RunId { get; init; }

    public required Guid TenantId { get; init; }

    public required ExportDistributionKind Kind { get; init; }

    public required ExportDistributionStatus Status { get; init; }

    /// <summary>
    /// Target location (path, URL, bucket).
    /// </summary>
    public required string Target { get; init; }

    /// <summary>
    /// Artifact path relative to distribution root.
    /// </summary>
    public required string ArtifactPath { get; init; }

    /// <summary>
    /// SHA256 hash of the distributed artifact.
    /// </summary>
    public string? ArtifactHash { get; init; }

    /// <summary>
    /// Size of the artifact in bytes.
    /// </summary>
    public long SizeBytes { get; init; }

    /// <summary>
    /// Content type of the artifact.
    /// </summary>
    public string? ContentType { get; init; }

    /// <summary>
    /// JSON-encoded distribution metadata (e.g., S3 ETag, version).
    /// </summary>
    public string? MetadataJson { get; init; }

    /// <summary>
    /// JSON-encoded error details if distribution failed.
    /// </summary>
    public string? ErrorJson { get; init; }

    /// <summary>
    /// Number of distribution attempts.
    /// </summary>
    public int AttemptCount { get; init; }

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset? DistributedAt { get; init; }

    public DateTimeOffset? VerifiedAt { get; init; }
}

/// <summary>
/// Kind of distribution target.
/// </summary>
public enum ExportDistributionKind
{
    /// <summary>
    /// Local file system distribution.
    /// </summary>
    FileSystem = 1,

    /// <summary>
    /// Amazon S3 distribution.
    /// </summary>
    AmazonS3 = 2,

    /// <summary>
    /// Mirror server distribution.
    /// </summary>
    Mirror = 3,

    /// <summary>
    /// Air-gap offline kit distribution.
    /// </summary>
    OfflineKit = 4,

    /// <summary>
    /// Webhook notification (metadata only).
    /// </summary>
    Webhook = 5
}

/// <summary>
/// Status of an export distribution.
/// </summary>
public enum ExportDistributionStatus
{
    /// <summary>
    /// Distribution is pending.
    /// </summary>
    Pending = 1,

    /// <summary>
    /// Distribution is in progress.
    /// </summary>
    Distributing = 2,

    /// <summary>
    /// Distribution completed successfully.
    /// </summary>
    Distributed = 3,

    /// <summary>
    /// Distribution verified at target.
    /// </summary>
    Verified = 4,

    /// <summary>
    /// Distribution failed.
    /// </summary>
    Failed = 5,

    /// <summary>
    /// Distribution cancelled.
    /// </summary>
    Cancelled = 6
}
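The status enum's comments imply a lifecycle (Pending to Distributing to Distributed to Verified, with Failed/Cancelled as terminal exits), but nothing in this file enforces it. A hypothetical guard, purely one reading of the XML docs above rather than anything the commit ships:

static bool CanTransition(ExportDistributionStatus from, ExportDistributionStatus to) => (from, to) switch
{
    (ExportDistributionStatus.Pending, ExportDistributionStatus.Distributing) => true,
    (ExportDistributionStatus.Distributing, ExportDistributionStatus.Distributed) => true,
    (ExportDistributionStatus.Distributed, ExportDistributionStatus.Verified) => true,
    // Failure/cancellation assumed reachable from any non-terminal state.
    (_, ExportDistributionStatus.Failed or ExportDistributionStatus.Cancelled) =>
        from is ExportDistributionStatus.Pending or ExportDistributionStatus.Distributing or ExportDistributionStatus.Distributed,
    _ => false
};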
@@ -0,0 +1,128 @@
namespace StellaOps.ExportCenter.Core.Domain;

/// <summary>
/// Represents an input item to be included in an export run.
/// </summary>
public sealed class ExportInput
{
    public required Guid InputId { get; init; }

    public required Guid RunId { get; init; }

    public required Guid TenantId { get; init; }

    public required ExportInputKind Kind { get; init; }

    public required ExportInputStatus Status { get; init; }

    /// <summary>
    /// Reference identifier for the source item (e.g., SBOM ID, scan ID).
    /// </summary>
    public required string SourceRef { get; init; }

    /// <summary>
    /// Human-readable name for the input.
    /// </summary>
    public string? Name { get; init; }

    /// <summary>
    /// SHA256 hash of the input content.
    /// </summary>
    public string? ContentHash { get; init; }

    /// <summary>
    /// Size of the input in bytes.
    /// </summary>
    public long SizeBytes { get; init; }

    /// <summary>
    /// JSON-encoded metadata about the input.
    /// </summary>
    public string? MetadataJson { get; init; }

    /// <summary>
    /// JSON-encoded error details if processing failed.
    /// </summary>
    public string? ErrorJson { get; init; }

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset? ProcessedAt { get; init; }
}

/// <summary>
/// Kind of export input.
/// </summary>
public enum ExportInputKind
{
    /// <summary>
    /// SBOM document (CycloneDX or SPDX).
    /// </summary>
    Sbom = 1,

    /// <summary>
    /// VEX document.
    /// </summary>
    Vex = 2,

    /// <summary>
    /// Attestation bundle.
    /// </summary>
    Attestation = 3,

    /// <summary>
    /// Scan report.
    /// </summary>
    ScanReport = 4,

    /// <summary>
    /// Policy evaluation result.
    /// </summary>
    PolicyResult = 5,

    /// <summary>
    /// Evidence bundle.
    /// </summary>
    Evidence = 6,

    /// <summary>
    /// Risk assessment bundle.
    /// </summary>
    RiskBundle = 7,

    /// <summary>
    /// Advisory data.
    /// </summary>
    Advisory = 8
}

/// <summary>
/// Status of an export input.
/// </summary>
public enum ExportInputStatus
{
    /// <summary>
    /// Input is pending processing.
    /// </summary>
    Pending = 1,

    /// <summary>
    /// Input is being processed.
    /// </summary>
    Processing = 2,

    /// <summary>
    /// Input was processed successfully.
    /// </summary>
    Processed = 3,

    /// <summary>
    /// Input processing failed.
    /// </summary>
    Failed = 4,

    /// <summary>
    /// Input was skipped (filtered out).
    /// </summary>
    Skipped = 5
}
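ErrorJson is documented only as "JSON-encoded error details", so the payload shape is up to the caller. A sketch of recording a failure; the { code, message } shape and the input/timeProvider variables are assumptions, and because the class is init-only rather than a record, a status change means re-materializing the instance:

var failed = new ExportInput
{
    InputId = input.InputId,
    RunId = input.RunId,
    TenantId = input.TenantId,
    Kind = input.Kind,
    Status = ExportInputStatus.Failed,
    SourceRef = input.SourceRef,
    Name = input.Name,
    ContentHash = input.ContentHash,
    SizeBytes = input.SizeBytes,
    MetadataJson = input.MetadataJson,
    // Hypothetical error payload; the column only requires valid JSON.
    ErrorJson = JsonSerializer.Serialize(new { code = "FETCH_TIMEOUT", message = "Upstream did not respond" }),
    CreatedAt = input.CreatedAt,
    ProcessedAt = timeProvider.GetUtcNow()
};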
@@ -0,0 +1,97 @@
namespace StellaOps.ExportCenter.Core.Domain;

/// <summary>
/// Represents an export profile defining the scope and configuration of exports.
/// </summary>
public sealed record ExportProfile
{
    public required Guid ProfileId { get; init; }

    public required Guid TenantId { get; init; }

    public required string Name { get; init; }

    public string? Description { get; init; }

    public required ExportProfileKind Kind { get; init; }

    public required ExportProfileStatus Status { get; init; }

    /// <summary>
    /// JSON-encoded scope configuration specifying what to export.
    /// </summary>
    public string? ScopeJson { get; init; }

    /// <summary>
    /// JSON-encoded format configuration (output formats, compression, etc.).
    /// </summary>
    public string? FormatJson { get; init; }

    /// <summary>
    /// JSON-encoded signing configuration.
    /// </summary>
    public string? SigningJson { get; init; }

    /// <summary>
    /// Cron expression for scheduled exports.
    /// </summary>
    public string? Schedule { get; init; }

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset UpdatedAt { get; init; }

    public DateTimeOffset? ArchivedAt { get; init; }
}

/// <summary>
/// Kind of export profile.
/// </summary>
public enum ExportProfileKind
{
    /// <summary>
    /// Ad-hoc export triggered manually.
    /// </summary>
    AdHoc = 1,

    /// <summary>
    /// Scheduled export running on a cron schedule.
    /// </summary>
    Scheduled = 2,

    /// <summary>
    /// Event-driven export triggered by webhooks or events.
    /// </summary>
    EventDriven = 3,

    /// <summary>
    /// Continuous export for near-real-time mirror updates.
    /// </summary>
    Continuous = 4
}

/// <summary>
/// Status of an export profile.
/// </summary>
public enum ExportProfileStatus
{
    /// <summary>
    /// Profile is being set up.
    /// </summary>
    Draft = 1,

    /// <summary>
    /// Profile is active and can run exports.
    /// </summary>
    Active = 2,

    /// <summary>
    /// Profile is paused and will not run scheduled exports.
    /// </summary>
    Paused = 3,

    /// <summary>
    /// Profile is archived and read-only.
    /// </summary>
    Archived = 4
}
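Schedule is a bare string here; the scheduler that consumes it is not in this commit. A sketch of deriving the next run time with the third-party Cronos package (an assumption, not a declared dependency; any UTC-aware cron parser would do):

using Cronos;   // assumed package, not referenced by this commit

static DateTimeOffset? NextOccurrence(ExportProfile profile, DateTimeOffset now)
{
    if (profile.Kind != ExportProfileKind.Scheduled || string.IsNullOrWhiteSpace(profile.Schedule))
    {
        return null;
    }

    // Returns null when the expression has no future occurrence.
    return CronExpression.Parse(profile.Schedule).GetNextOccurrence(now, TimeZoneInfo.Utc);
}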
@@ -0,0 +1,128 @@
namespace StellaOps.ExportCenter.Core.Domain;

/// <summary>
/// Represents a single execution of an export profile.
/// </summary>
public sealed class ExportRun
{
    public required Guid RunId { get; init; }

    public required Guid ProfileId { get; init; }

    public required Guid TenantId { get; init; }

    public required ExportRunStatus Status { get; init; }

    /// <summary>
    /// Trigger source (manual, scheduled, event, api).
    /// </summary>
    public required ExportRunTrigger Trigger { get; init; }

    /// <summary>
    /// Optional correlation ID for tracing.
    /// </summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// User or service that initiated the export.
    /// </summary>
    public string? InitiatedBy { get; init; }

    /// <summary>
    /// Total number of items to export.
    /// </summary>
    public int TotalItems { get; init; }

    /// <summary>
    /// Number of items exported so far.
    /// </summary>
    public int ProcessedItems { get; init; }

    /// <summary>
    /// Number of items that failed to export.
    /// </summary>
    public int FailedItems { get; init; }

    /// <summary>
    /// Total size of exported artifacts in bytes.
    /// </summary>
    public long TotalSizeBytes { get; init; }

    /// <summary>
    /// JSON-encoded error details if the run failed.
    /// </summary>
    public string? ErrorJson { get; init; }

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset? StartedAt { get; init; }

    public DateTimeOffset? CompletedAt { get; init; }

    /// <summary>
    /// Timestamp when artifacts expire.
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; init; }
}

/// <summary>
/// Status of an export run.
/// </summary>
public enum ExportRunStatus
{
    /// <summary>
    /// Run is queued waiting to start.
    /// </summary>
    Queued = 1,

    /// <summary>
    /// Run is actively processing.
    /// </summary>
    Running = 2,

    /// <summary>
    /// Run completed successfully.
    /// </summary>
    Completed = 3,

    /// <summary>
    /// Run completed with some failures.
    /// </summary>
    PartiallyCompleted = 4,

    /// <summary>
    /// Run failed.
    /// </summary>
    Failed = 5,

    /// <summary>
    /// Run was cancelled.
    /// </summary>
    Cancelled = 6
}

/// <summary>
/// Trigger source for an export run.
/// </summary>
public enum ExportRunTrigger
{
    /// <summary>
    /// Manually triggered by a user.
    /// </summary>
    Manual = 1,

    /// <summary>
    /// Triggered by a cron schedule.
    /// </summary>
    Scheduled = 2,

    /// <summary>
    /// Triggered by an external event.
    /// </summary>
    Event = 3,

    /// <summary>
    /// Triggered via API.
    /// </summary>
    Api = 4
}
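The counters on ExportRun are enough to drive a progress display. Two illustrative helpers; the names are mine, not the commit's:

static bool IsTerminal(ExportRunStatus status) =>
    status is ExportRunStatus.Completed
           or ExportRunStatus.PartiallyCompleted
           or ExportRunStatus.Failed
           or ExportRunStatus.Cancelled;

// Guard against TotalItems == 0 for runs that resolved no items.
static double ProgressFraction(ExportRun run) =>
    run.TotalItems == 0 ? 0d : (double)run.ProcessedItems / run.TotalItems;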
@@ -0,0 +1,278 @@
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Request to create an export plan.
/// </summary>
public sealed record ExportPlanRequest
{
    public required Guid ProfileId { get; init; }

    public required Guid TenantId { get; init; }

    public ExportScope? ScopeOverride { get; init; }

    public ExportFormatOptions? FormatOverride { get; init; }

    public string? CorrelationId { get; init; }

    public string? InitiatedBy { get; init; }

    public bool DryRun { get; init; }
}

/// <summary>
/// Output format configuration for exports.
/// </summary>
public sealed record ExportFormatOptions
{
    [JsonPropertyName("format")]
    public ExportFormat Format { get; init; } = ExportFormat.JsonRaw;

    [JsonPropertyName("compression")]
    public CompressionFormat Compression { get; init; } = CompressionFormat.None;

    [JsonPropertyName("includeMetadata")]
    public bool IncludeMetadata { get; init; } = true;

    [JsonPropertyName("prettyPrint")]
    public bool PrettyPrint { get; init; }

    [JsonPropertyName("redactFields")]
    public IReadOnlyList<string> RedactFields { get; init; } = [];

    [JsonPropertyName("normalizeTimestamps")]
    public bool NormalizeTimestamps { get; init; } = true;

    [JsonPropertyName("sortKeys")]
    public bool SortKeys { get; init; } = true;
}

/// <summary>
/// Supported export formats.
/// </summary>
public enum ExportFormat
{
    /// <summary>
    /// Raw JSON (one object per file).
    /// </summary>
    JsonRaw = 1,

    /// <summary>
    /// JSON with policy metadata included.
    /// </summary>
    JsonPolicy = 2,

    /// <summary>
    /// Newline-delimited JSON (streaming format).
    /// </summary>
    Ndjson = 3,

    /// <summary>
    /// CSV format.
    /// </summary>
    Csv = 4,

    /// <summary>
    /// Full mirror layout with indexes.
    /// </summary>
    Mirror = 5
}

/// <summary>
/// Compression formats for export artifacts.
/// </summary>
public enum CompressionFormat
{
    None = 0,
    Gzip = 1,
    Zstd = 2,
    Brotli = 3
}

/// <summary>
/// A planned export operation ready for execution.
/// </summary>
public sealed record ExportPlan
{
    public required Guid PlanId { get; init; }

    public required Guid ProfileId { get; init; }

    public required Guid TenantId { get; init; }

    public required ExportPlanStatus Status { get; init; }

    public required ExportScope ResolvedScope { get; init; }

    public required ExportFormatOptions Format { get; init; }

    public IReadOnlyList<ExportPlanPhase> Phases { get; init; } = [];

    public int TotalItems { get; init; }

    public long EstimatedSizeBytes { get; init; }

    public TimeSpan EstimatedDuration { get; init; }

    public string? CorrelationId { get; init; }

    public string? InitiatedBy { get; init; }

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset? ValidUntil { get; init; }

    public IReadOnlyList<string> Warnings { get; init; } = [];

    public IReadOnlyList<ExportValidationError> ValidationErrors { get; init; } = [];
}

/// <summary>
/// Status of an export plan.
/// </summary>
public enum ExportPlanStatus
{
    /// <summary>
    /// Plan is being created.
    /// </summary>
    Creating = 1,

    /// <summary>
    /// Plan is ready for execution.
    /// </summary>
    Ready = 2,

    /// <summary>
    /// Plan has validation errors.
    /// </summary>
    Invalid = 3,

    /// <summary>
    /// Plan has been executed.
    /// </summary>
    Executed = 4,

    /// <summary>
    /// Plan has expired.
    /// </summary>
    Expired = 5,

    /// <summary>
    /// Plan was cancelled.
    /// </summary>
    Cancelled = 6
}

/// <summary>
/// A phase in the export execution plan.
/// </summary>
public sealed record ExportPlanPhase
{
    public required int Order { get; init; }

    public required string Name { get; init; }

    public required ExportPhaseKind Kind { get; init; }

    public int ItemCount { get; init; }

    public long EstimatedSizeBytes { get; init; }

    public TimeSpan EstimatedDuration { get; init; }

    public IReadOnlyList<string> Dependencies { get; init; } = [];

    public IReadOnlyDictionary<string, string> Parameters { get; init; } = new Dictionary<string, string>();
}

/// <summary>
/// Kinds of export phases.
/// </summary>
public enum ExportPhaseKind
{
    /// <summary>
    /// Resolve scope and collect items.
    /// </summary>
    ScopeResolution = 1,

    /// <summary>
    /// Fetch and transform data.
    /// </summary>
    DataFetch = 2,

    /// <summary>
    /// Apply normalization/redaction.
    /// </summary>
    Transform = 3,

    /// <summary>
    /// Write output files.
    /// </summary>
    WriteOutput = 4,

    /// <summary>
    /// Generate checksums and manifest.
    /// </summary>
    GenerateManifest = 5,

    /// <summary>
    /// Sign artifacts.
    /// </summary>
    Sign = 6,

    /// <summary>
    /// Distribute to targets.
    /// </summary>
    Distribute = 7,

    /// <summary>
    /// Verify distribution.
    /// </summary>
    Verify = 8
}

/// <summary>
/// Validation error in an export plan.
/// </summary>
public sealed record ExportValidationError
{
    public required string Code { get; init; }

    public required string Message { get; init; }

    public string? Field { get; init; }

    public ExportValidationSeverity Severity { get; init; } = ExportValidationSeverity.Error;
}

/// <summary>
/// Severity of a validation error.
/// </summary>
public enum ExportValidationSeverity
{
    Warning = 1,
    Error = 2,
    Critical = 3
}

/// <summary>
/// Result of creating an export plan.
/// </summary>
public sealed record ExportPlanResult
{
    public required bool Success { get; init; }

    public ExportPlan? Plan { get; init; }

    public string? ErrorMessage { get; init; }

    public IReadOnlyList<ExportValidationError> ValidationErrors { get; init; } = [];

    public static ExportPlanResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage };

    public static ExportPlanResult Invalid(IReadOnlyList<ExportValidationError> errors)
        => new() { Success = false, ValidationErrors = errors };
}
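The [JsonPropertyName] attributes pin the wire names used in a profile's FormatJson, but with default System.Text.Json settings (which is what ExportPlanner.ParseFormat below uses) the enums round-trip as numbers, not names. A quick check:

var json = JsonSerializer.Serialize(new ExportFormatOptions
{
    Format = ExportFormat.Ndjson,
    Compression = CompressionFormat.Gzip
});
// => {"format":3,"compression":1,"includeMetadata":true,"prettyPrint":false,
//     "redactFields":[],"normalizeTimestamps":true,"sortKeys":true}

var roundTripped = JsonSerializer.Deserialize<ExportFormatOptions>(json)!;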
@@ -0,0 +1,364 @@
using System.Collections.Concurrent;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Domain;

namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Default implementation of export planner.
/// </summary>
public sealed class ExportPlanner : IExportPlanner
{
    private const int DefaultPlanValidityMinutes = 60;
    private const double BytesPerSecondEstimate = 10 * 1024 * 1024; // 10 MB/s

    private readonly IExportScopeResolver _scopeResolver;
    private readonly IExportProfileRepository _profileRepository;
    private readonly ILogger<ExportPlanner> _logger;
    private readonly TimeProvider _timeProvider;

    // In-memory plan store (in production, use database)
    private readonly ConcurrentDictionary<Guid, ExportPlan> _plans = new();

    public ExportPlanner(
        IExportScopeResolver scopeResolver,
        IExportProfileRepository profileRepository,
        ILogger<ExportPlanner> logger,
        TimeProvider? timeProvider = null)
    {
        _scopeResolver = scopeResolver;
        _profileRepository = profileRepository;
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    public async Task<ExportPlanResult> CreatePlanAsync(
        ExportPlanRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            // Load profile
            var profile = await _profileRepository.GetByIdAsync(request.ProfileId, request.TenantId, cancellationToken);
            if (profile is null)
            {
                return ExportPlanResult.Failed($"Profile not found: {request.ProfileId}");
            }

            if (profile.Status != ExportProfileStatus.Active)
            {
                return ExportPlanResult.Failed($"Profile is not active: {profile.Status}");
            }

            // Parse scope from profile or use override
            var scope = request.ScopeOverride ?? ParseScope(profile.ScopeJson);
            var format = request.FormatOverride ?? ParseFormat(profile.FormatJson);

            // Validate scope
            var scopeErrors = await _scopeResolver.ValidateAsync(scope, cancellationToken);
            var validationErrors = scopeErrors.Where(e => e.Severity >= ExportValidationSeverity.Error).ToList();
            if (validationErrors.Count > 0)
            {
                return ExportPlanResult.Invalid(validationErrors);
            }

            // Resolve scope to items
            var scopeResult = await _scopeResolver.ResolveAsync(request.TenantId, scope, cancellationToken);
            if (!scopeResult.Success)
            {
                return ExportPlanResult.Failed(scopeResult.ErrorMessage ?? "Scope resolution failed");
            }

            // Build phases
            var phases = BuildPhases(scopeResult, format);

            // Calculate estimates
            var estimatedDuration = TimeSpan.FromSeconds(scopeResult.EstimatedTotalSizeBytes / BytesPerSecondEstimate);
            var now = _timeProvider.GetUtcNow();

            // Create plan
            var plan = new ExportPlan
            {
                PlanId = Guid.NewGuid(),
                ProfileId = request.ProfileId,
                TenantId = request.TenantId,
                Status = ExportPlanStatus.Ready,
                ResolvedScope = scope,
                Format = format,
                Phases = phases,
                TotalItems = scopeResult.SampledItems,
                EstimatedSizeBytes = scopeResult.EstimatedTotalSizeBytes,
                EstimatedDuration = estimatedDuration,
                CorrelationId = request.CorrelationId,
                InitiatedBy = request.InitiatedBy,
                CreatedAt = now,
                ValidUntil = now.AddMinutes(DefaultPlanValidityMinutes),
                Warnings = scopeResult.Warnings,
                ValidationErrors = scopeErrors.Where(e => e.Severity == ExportValidationSeverity.Warning).ToList()
            };

            // Store plan (unless dry run)
            if (!request.DryRun)
            {
                _plans[plan.PlanId] = plan;
            }

            if (_logger.IsEnabled(LogLevel.Information))
            {
                _logger.LogInformation(
                    "Created export plan {PlanId} for profile {ProfileId}: {ItemCount} items, {EstimatedSize} bytes",
                    plan.PlanId, plan.ProfileId, plan.TotalItems, plan.EstimatedSizeBytes);
            }

            return new ExportPlanResult { Success = true, Plan = plan };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to create export plan for profile {ProfileId}", request.ProfileId);
            return ExportPlanResult.Failed($"Failed to create plan: {ex.Message}");
        }
    }

    public Task<ExportPlan?> GetPlanAsync(Guid planId, CancellationToken cancellationToken = default)
    {
        _plans.TryGetValue(planId, out var plan);
        return Task.FromResult(plan);
    }

    public async Task<ExportPlanResult> ValidatePlanAsync(Guid planId, CancellationToken cancellationToken = default)
    {
        if (!_plans.TryGetValue(planId, out var plan))
        {
            return ExportPlanResult.Failed($"Plan not found: {planId}");
        }

        var now = _timeProvider.GetUtcNow();

        // Check expiration
        if (plan.ValidUntil.HasValue && now > plan.ValidUntil)
        {
            // Update status to expired
            var expiredPlan = plan with { Status = ExportPlanStatus.Expired };
            _plans[planId] = expiredPlan;
            return ExportPlanResult.Failed("Plan has expired");
        }

        // Check status
        if (plan.Status != ExportPlanStatus.Ready)
        {
            return ExportPlanResult.Failed($"Plan is not ready for execution: {plan.Status}");
        }

        // Re-validate scope
        var scopeErrors = await _scopeResolver.ValidateAsync(plan.ResolvedScope, cancellationToken);
        if (scopeErrors.Any(e => e.Severity >= ExportValidationSeverity.Error))
        {
            return ExportPlanResult.Invalid(scopeErrors);
        }

        return new ExportPlanResult { Success = true, Plan = plan };
    }

    public Task<bool> CancelPlanAsync(Guid planId, CancellationToken cancellationToken = default)
    {
        if (!_plans.TryGetValue(planId, out var plan))
        {
            return Task.FromResult(false);
        }

        if (plan.Status is not (ExportPlanStatus.Ready or ExportPlanStatus.Creating))
        {
            return Task.FromResult(false);
        }

        var cancelledPlan = plan with { Status = ExportPlanStatus.Cancelled };
        _plans[planId] = cancelledPlan;

        if (_logger.IsEnabled(LogLevel.Information))
        {
            _logger.LogInformation("Cancelled export plan {PlanId}", planId);
        }

        return Task.FromResult(true);
    }

    private static ExportScope ParseScope(string? scopeJson)
    {
        if (string.IsNullOrWhiteSpace(scopeJson))
        {
            return new ExportScope();
        }

        try
        {
            return JsonSerializer.Deserialize<ExportScope>(scopeJson) ?? new ExportScope();
        }
        catch
        {
            return new ExportScope();
        }
    }

    private static ExportFormatOptions ParseFormat(string? formatJson)
    {
        if (string.IsNullOrWhiteSpace(formatJson))
        {
            return new ExportFormatOptions();
        }

        try
        {
            return JsonSerializer.Deserialize<ExportFormatOptions>(formatJson) ?? new ExportFormatOptions();
        }
        catch
        {
            return new ExportFormatOptions();
        }
    }

    private static List<ExportPlanPhase> BuildPhases(ScopeResolutionResult scopeResult, ExportFormatOptions format)
    {
        var phases = new List<ExportPlanPhase>();
        var order = 1;

        // Phase 1: Data Fetch
        phases.Add(new ExportPlanPhase
        {
            Order = order++,
            Name = "Fetch Data",
            Kind = ExportPhaseKind.DataFetch,
            ItemCount = scopeResult.SampledItems,
            EstimatedSizeBytes = scopeResult.EstimatedTotalSizeBytes,
            EstimatedDuration = TimeSpan.FromMilliseconds(scopeResult.SampledItems * 50),
            Parameters = new Dictionary<string, string>
            {
                ["parallelism"] = "4"
            }
        });

        // Phase 2: Transform (if needed)
        if (format.RedactFields.Count > 0 || format.NormalizeTimestamps || format.SortKeys)
        {
            phases.Add(new ExportPlanPhase
            {
                Order = order++,
                Name = "Transform Data",
                Kind = ExportPhaseKind.Transform,
                ItemCount = scopeResult.SampledItems,
                EstimatedDuration = TimeSpan.FromMilliseconds(scopeResult.SampledItems * 10),
                Dependencies = ["Fetch Data"],
                Parameters = new Dictionary<string, string>
                {
                    ["redactFields"] = string.Join(",", format.RedactFields),
|
||||||
|
["normalizeTimestamps"] = format.NormalizeTimestamps.ToString(),
|
||||||
|
["sortKeys"] = format.SortKeys.ToString()
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 3: Write Output
|
||||||
|
phases.Add(new ExportPlanPhase
|
||||||
|
{
|
||||||
|
Order = order++,
|
||||||
|
Name = "Write Output",
|
||||||
|
Kind = ExportPhaseKind.WriteOutput,
|
||||||
|
ItemCount = scopeResult.SampledItems,
|
||||||
|
EstimatedSizeBytes = scopeResult.EstimatedTotalSizeBytes,
|
||||||
|
EstimatedDuration = TimeSpan.FromSeconds(scopeResult.EstimatedTotalSizeBytes / (10 * 1024 * 1024.0)),
|
||||||
|
Dependencies = phases.Count > 1 ? ["Transform Data"] : ["Fetch Data"],
|
||||||
|
Parameters = new Dictionary<string, string>
|
||||||
|
{
|
||||||
|
["format"] = format.Format.ToString(),
|
||||||
|
["compression"] = format.Compression.ToString()
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Phase 4: Generate Manifest
|
||||||
|
phases.Add(new ExportPlanPhase
|
||||||
|
{
|
||||||
|
Order = order++,
|
||||||
|
Name = "Generate Manifest",
|
||||||
|
Kind = ExportPhaseKind.GenerateManifest,
|
||||||
|
EstimatedDuration = TimeSpan.FromSeconds(1),
|
||||||
|
Dependencies = ["Write Output"]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Phase 5: Sign (if format is mirror or requires attestation)
|
||||||
|
if (format.Format == ExportFormat.Mirror)
|
||||||
|
{
|
||||||
|
phases.Add(new ExportPlanPhase
|
||||||
|
{
|
||||||
|
Order = order++,
|
||||||
|
Name = "Sign Artifacts",
|
||||||
|
Kind = ExportPhaseKind.Sign,
|
||||||
|
EstimatedDuration = TimeSpan.FromSeconds(2),
|
||||||
|
Dependencies = ["Generate Manifest"]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return phases;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Repository interface for export profiles.
|
||||||
|
/// </summary>
|
||||||
|
public interface IExportProfileRepository
|
||||||
|
{
|
||||||
|
Task<ExportProfile?> GetByIdAsync(Guid profileId, Guid tenantId, CancellationToken cancellationToken = default);
|
||||||
|
|
||||||
|
Task<IReadOnlyList<ExportProfile>> GetActiveProfilesAsync(Guid tenantId, CancellationToken cancellationToken = default);
|
||||||
|
|
||||||
|
Task<ExportProfile> CreateAsync(ExportProfile profile, CancellationToken cancellationToken = default);
|
||||||
|
|
||||||
|
Task<ExportProfile> UpdateAsync(ExportProfile profile, CancellationToken cancellationToken = default);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// In-memory implementation of export profile repository for development/testing.
|
||||||
|
/// </summary>
|
||||||
|
public sealed class InMemoryExportProfileRepository : IExportProfileRepository
|
||||||
|
{
|
||||||
|
private readonly ConcurrentDictionary<(Guid TenantId, Guid ProfileId), ExportProfile> _profiles = new();
|
||||||
|
private readonly TimeProvider _timeProvider;
|
||||||
|
|
||||||
|
public InMemoryExportProfileRepository(TimeProvider? timeProvider = null)
|
||||||
|
{
|
||||||
|
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Task<ExportProfile?> GetByIdAsync(Guid profileId, Guid tenantId, CancellationToken cancellationToken = default)
|
||||||
|
{
|
||||||
|
_profiles.TryGetValue((tenantId, profileId), out var profile);
|
||||||
|
return Task.FromResult(profile);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Task<IReadOnlyList<ExportProfile>> GetActiveProfilesAsync(Guid tenantId, CancellationToken cancellationToken = default)
|
||||||
|
{
|
||||||
|
var profiles = _profiles.Values
|
||||||
|
.Where(p => p.TenantId == tenantId && p.Status == ExportProfileStatus.Active)
|
||||||
|
.ToList();
|
||||||
|
return Task.FromResult<IReadOnlyList<ExportProfile>>(profiles);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Task<ExportProfile> CreateAsync(ExportProfile profile, CancellationToken cancellationToken = default)
|
||||||
|
{
|
||||||
|
var now = _timeProvider.GetUtcNow();
|
||||||
|
var newProfile = profile with
|
||||||
|
{
|
||||||
|
ProfileId = profile.ProfileId == Guid.Empty ? Guid.NewGuid() : profile.ProfileId,
|
||||||
|
CreatedAt = now,
|
||||||
|
UpdatedAt = now
|
||||||
|
};
|
||||||
|
_profiles[(newProfile.TenantId, newProfile.ProfileId)] = newProfile;
|
||||||
|
return Task.FromResult(newProfile);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Task<ExportProfile> UpdateAsync(ExportProfile profile, CancellationToken cancellationToken = default)
|
||||||
|
{
|
||||||
|
var updatedProfile = profile with { UpdatedAt = _timeProvider.GetUtcNow() };
|
||||||
|
_profiles[(profile.TenantId, profile.ProfileId)] = updatedProfile;
|
||||||
|
return Task.FromResult(updatedProfile);
|
||||||
|
}
|
||||||
|
}
|
||||||
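A minimal usage sketch of the in-memory repository (not part of the change set). Only the ExportProfile members the repository itself touches are shown; any other required members of the record are assumed to have defaults.

// Sketch only: ExportProfile construction details beyond ProfileId, TenantId,
// and Status are assumptions.
var repository = new InMemoryExportProfileRepository(TimeProvider.System);

var tenantId = Guid.NewGuid();
var created = await repository.CreateAsync(new ExportProfile
{
    ProfileId = Guid.Empty,            // repository assigns a fresh id
    TenantId = tenantId,
    Status = ExportProfileStatus.Active
});

// Lookups are keyed by (tenantId, profileId), so a wrong tenant returns null.
var found = await repository.GetByIdAsync(created.ProfileId, tenantId);
var active = await repository.GetActiveProfilesAsync(tenantId);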
@@ -0,0 +1,223 @@
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Defines the scope of items to include in an export.
/// </summary>
public sealed record ExportScope
{
    /// <summary>
    /// Target kind filter (e.g., "sbom", "vex", "attestation").
    /// </summary>
    [JsonPropertyName("targetKinds")]
    public IReadOnlyList<string> TargetKinds { get; init; } = [];

    /// <summary>
    /// Specific source references to include.
    /// </summary>
    [JsonPropertyName("sourceRefs")]
    public IReadOnlyList<string> SourceRefs { get; init; } = [];

    /// <summary>
    /// Tag-based filter (items must have all specified tags).
    /// </summary>
    [JsonPropertyName("tags")]
    public IReadOnlyList<string> Tags { get; init; } = [];

    /// <summary>
    /// Namespace/project filter.
    /// </summary>
    [JsonPropertyName("namespaces")]
    public IReadOnlyList<string> Namespaces { get; init; } = [];

    /// <summary>
    /// Date range filter (items created/modified within range).
    /// </summary>
    [JsonPropertyName("dateRange")]
    public DateRangeFilter? DateRange { get; init; }

    /// <summary>
    /// Maximum number of items to include.
    /// </summary>
    [JsonPropertyName("maxItems")]
    public int? MaxItems { get; init; }

    /// <summary>
    /// Sampling configuration for large datasets.
    /// </summary>
    [JsonPropertyName("sampling")]
    public SamplingConfig? Sampling { get; init; }

    /// <summary>
    /// Include items from these specific runs.
    /// </summary>
    [JsonPropertyName("runIds")]
    public IReadOnlyList<Guid> RunIds { get; init; } = [];

    /// <summary>
    /// Exclude items matching these patterns.
    /// </summary>
    [JsonPropertyName("excludePatterns")]
    public IReadOnlyList<string> ExcludePatterns { get; init; } = [];
}

/// <summary>
/// Date range filter for export scope.
/// </summary>
public sealed record DateRangeFilter
{
    [JsonPropertyName("from")]
    public DateTimeOffset? From { get; init; }

    [JsonPropertyName("to")]
    public DateTimeOffset? To { get; init; }

    [JsonPropertyName("field")]
    public DateRangeField Field { get; init; } = DateRangeField.CreatedAt;
}

/// <summary>
/// Which date field to filter on.
/// </summary>
public enum DateRangeField
{
    CreatedAt = 1,
    ModifiedAt = 2,
    ProcessedAt = 3
}

/// <summary>
/// Sampling configuration for deterministic subset selection.
/// </summary>
public sealed record SamplingConfig
{
    /// <summary>
    /// Sampling strategy.
    /// </summary>
    [JsonPropertyName("strategy")]
    public SamplingStrategy Strategy { get; init; } = SamplingStrategy.None;

    /// <summary>
    /// Sample size (absolute count or percentage based on strategy).
    /// </summary>
    [JsonPropertyName("size")]
    public int Size { get; init; }

    /// <summary>
    /// Seed for deterministic random sampling.
    /// </summary>
    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    /// <summary>
    /// Field to use for stratified sampling.
    /// </summary>
    [JsonPropertyName("stratifyBy")]
    public string? StratifyBy { get; init; }
}

/// <summary>
/// Sampling strategies for large datasets.
/// </summary>
public enum SamplingStrategy
{
    /// <summary>
    /// No sampling - include all matching items.
    /// </summary>
    None = 0,

    /// <summary>
    /// Deterministic random sampling using seed.
    /// </summary>
    Random = 1,

    /// <summary>
    /// Take first N items (ordered by creation date).
    /// </summary>
    First = 2,

    /// <summary>
    /// Take last N items (ordered by creation date).
    /// </summary>
    Last = 3,

    /// <summary>
    /// Stratified sampling by a field (e.g., severity, ecosystem).
    /// </summary>
    Stratified = 4,

    /// <summary>
    /// Systematic sampling (every Nth item).
    /// </summary>
    Systematic = 5
}

/// <summary>
/// A resolved export item ready for processing.
/// </summary>
public sealed record ResolvedExportItem
{
    public required Guid ItemId { get; init; }

    public required string Kind { get; init; }

    public required string SourceRef { get; init; }

    public string? Name { get; init; }

    public string? Namespace { get; init; }

    public IReadOnlyList<string> Tags { get; init; } = [];

    public DateTimeOffset CreatedAt { get; init; }

    public DateTimeOffset? ModifiedAt { get; init; }

    public long EstimatedSizeBytes { get; init; }

    public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>();
}

/// <summary>
/// Result of scope resolution.
/// </summary>
public sealed record ScopeResolutionResult
{
    public required bool Success { get; init; }

    public IReadOnlyList<ResolvedExportItem> Items { get; init; } = [];

    public int TotalMatchingItems { get; init; }

    public int SampledItems { get; init; }

    public long EstimatedTotalSizeBytes { get; init; }

    public IReadOnlyList<string> Warnings { get; init; } = [];

    public string? ErrorMessage { get; init; }

    public SamplingMetadata? SamplingMetadata { get; init; }

    public static ScopeResolutionResult Failed(string errorMessage)
        => new() { Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// Metadata about sampling applied during scope resolution.
/// </summary>
public sealed record SamplingMetadata
{
    public SamplingStrategy Strategy { get; init; }

    public int Seed { get; init; }

    public int OriginalCount { get; init; }

    public int SampledCount { get; init; }

    public string? StratifyField { get; init; }

    public IReadOnlyDictionary<string, int>? StrataDistribution { get; init; }
}
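For reference, an illustrative scope document in the shape ParseScope deserializes. Property names come from the JsonPropertyName attributes above; with default serializer options the enum-valued fields ("field", "strategy") are numeric.

// Illustration only, not part of the change set.
var scopeJson = """
    {
      "targetKinds": ["sbom", "vex"],
      "dateRange": { "from": "2025-01-01T00:00:00Z", "field": 1 },
      "maxItems": 500,
      "sampling": { "strategy": 1, "size": 100, "seed": 42 }
    }
    """;

var scope = System.Text.Json.JsonSerializer.Deserialize<ExportScope>(scopeJson);
// scope.Sampling.Strategy == SamplingStrategy.Random, seeded for reproducibility.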
@@ -0,0 +1,385 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;

namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Default implementation of export scope resolver.
/// </summary>
public sealed class ExportScopeResolver : IExportScopeResolver
{
    private static readonly string[] ValidTargetKinds = ["sbom", "vex", "attestation", "scan-report", "policy-result", "evidence", "risk-bundle", "advisory"];
    private const int DefaultMaxItems = 10000;
    private const long EstimatedBytesPerItem = 50 * 1024; // 50KB average

    private readonly ILogger<ExportScopeResolver> _logger;
    private readonly TimeProvider _timeProvider;

    public ExportScopeResolver(ILogger<ExportScopeResolver> logger, TimeProvider? timeProvider = null)
    {
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    public Task<ScopeResolutionResult> ResolveAsync(
        Guid tenantId,
        ExportScope scope,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();

        // Validate scope first
        var validationErrors = ValidateScopeInternal(scope);
        if (validationErrors.Count > 0 && validationErrors.Any(e => e.Severity >= ExportValidationSeverity.Error))
        {
            return Task.FromResult(ScopeResolutionResult.Failed(
                $"Scope validation failed: {validationErrors.First(e => e.Severity >= ExportValidationSeverity.Error).Message}"));
        }

        // Generate mock items based on scope (in real impl, this would query the database)
        var items = GenerateResolvedItems(tenantId, scope);

        // Apply sampling if configured
        var (sampledItems, samplingMetadata) = ApplySampling(items, scope.Sampling);

        // Apply max items limit
        var maxItems = scope.MaxItems ?? DefaultMaxItems;
        var finalItems = sampledItems.Take(maxItems).ToList();

        var result = new ScopeResolutionResult
        {
            Success = true,
            Items = finalItems,
            TotalMatchingItems = items.Count,
            SampledItems = finalItems.Count,
            EstimatedTotalSizeBytes = finalItems.Sum(i => i.EstimatedSizeBytes),
            Warnings = validationErrors.Where(e => e.Severity == ExportValidationSeverity.Warning).Select(e => e.Message).ToList(),
            SamplingMetadata = samplingMetadata
        };

        if (_logger.IsEnabled(LogLevel.Debug))
        {
            _logger.LogDebug(
                "Resolved scope for tenant {TenantId}: {TotalItems} total, {SampledItems} after sampling",
                tenantId, result.TotalMatchingItems, result.SampledItems);
        }

        return Task.FromResult(result);
    }

    public Task<IReadOnlyList<ExportValidationError>> ValidateAsync(
        ExportScope scope,
        CancellationToken cancellationToken = default)
    {
        return Task.FromResult<IReadOnlyList<ExportValidationError>>(ValidateScopeInternal(scope));
    }

    public Task<ScopeEstimate> EstimateAsync(
        Guid tenantId,
        ExportScope scope,
        CancellationToken cancellationToken = default)
    {
        // Calculate estimates based on scope filters
        var estimatedCount = 100; // Base estimate

        if (scope.SourceRefs.Count > 0)
        {
            estimatedCount = scope.SourceRefs.Count;
        }
        else if (scope.TargetKinds.Count > 0)
        {
            estimatedCount = scope.TargetKinds.Count * 50;
        }

        if (scope.MaxItems.HasValue)
        {
            estimatedCount = Math.Min(estimatedCount, scope.MaxItems.Value);
        }

        if (scope.Sampling?.Strategy != SamplingStrategy.None && scope.Sampling?.Size > 0)
        {
            estimatedCount = Math.Min(estimatedCount, scope.Sampling.Size);
        }

        var countByKind = new Dictionary<string, int>();
        foreach (var kind in scope.TargetKinds.DefaultIfEmpty("sbom"))
        {
            countByKind[kind] = estimatedCount / Math.Max(1, scope.TargetKinds.Count);
        }

        return Task.FromResult(new ScopeEstimate
        {
            EstimatedItemCount = estimatedCount,
            EstimatedSizeBytes = estimatedCount * EstimatedBytesPerItem,
            EstimatedProcessingTime = TimeSpan.FromMilliseconds(estimatedCount * 10),
            CountByKind = countByKind
        });
    }

    private static List<ExportValidationError> ValidateScopeInternal(ExportScope scope)
    {
        var errors = new List<ExportValidationError>();

        // Validate target kinds
        foreach (var kind in scope.TargetKinds)
        {
            if (!ValidTargetKinds.Contains(kind, StringComparer.OrdinalIgnoreCase))
            {
                errors.Add(new ExportValidationError
                {
                    Code = "INVALID_TARGET_KIND",
                    Message = $"Invalid target kind: {kind}. Valid kinds are: {string.Join(", ", ValidTargetKinds)}",
                    Field = "targetKinds",
                    Severity = ExportValidationSeverity.Error
                });
            }
        }

        // Validate date range
        if (scope.DateRange is not null)
        {
            if (scope.DateRange.From.HasValue && scope.DateRange.To.HasValue &&
                scope.DateRange.From > scope.DateRange.To)
            {
                errors.Add(new ExportValidationError
                {
                    Code = "INVALID_DATE_RANGE",
                    Message = "Date range 'from' must be before 'to'",
                    Field = "dateRange",
                    Severity = ExportValidationSeverity.Error
                });
            }
        }

        // Validate sampling
        if (scope.Sampling is not null)
        {
            if (scope.Sampling.Strategy != SamplingStrategy.None && scope.Sampling.Size <= 0)
            {
                errors.Add(new ExportValidationError
                {
                    Code = "INVALID_SAMPLE_SIZE",
                    Message = "Sample size must be greater than 0 when sampling is enabled",
                    Field = "sampling.size",
                    Severity = ExportValidationSeverity.Error
                });
            }

            if (scope.Sampling.Strategy == SamplingStrategy.Stratified &&
                string.IsNullOrWhiteSpace(scope.Sampling.StratifyBy))
            {
                errors.Add(new ExportValidationError
                {
                    Code = "MISSING_STRATIFY_FIELD",
                    Message = "StratifyBy field is required for stratified sampling",
                    Field = "sampling.stratifyBy",
                    Severity = ExportValidationSeverity.Error
                });
            }
        }

        // Validate exclude patterns
        foreach (var pattern in scope.ExcludePatterns)
        {
            try
            {
                _ = new Regex(pattern);
            }
            catch (ArgumentException)
            {
                errors.Add(new ExportValidationError
                {
                    Code = "INVALID_EXCLUDE_PATTERN",
                    Message = $"Invalid regex pattern: {pattern}",
                    Field = "excludePatterns",
                    Severity = ExportValidationSeverity.Error
                });
            }
        }

        // Warn about large exports
        if (!scope.MaxItems.HasValue &&
            (scope.Sampling is null || scope.Sampling.Strategy == SamplingStrategy.None))
        {
            errors.Add(new ExportValidationError
            {
                Code = "POTENTIALLY_LARGE_EXPORT",
                Message = "No maxItems or sampling configured; export may be large",
                Field = null,
                Severity = ExportValidationSeverity.Warning
            });
        }

        return errors;
    }

    private List<ResolvedExportItem> GenerateResolvedItems(Guid tenantId, ExportScope scope)
    {
        var items = new List<ResolvedExportItem>();
        var now = _timeProvider.GetUtcNow();

        // Generate items based on source refs if specified
        if (scope.SourceRefs.Count > 0)
        {
            foreach (var sourceRef in scope.SourceRefs)
            {
                var kind = scope.TargetKinds.FirstOrDefault() ?? "sbom";
                items.Add(CreateResolvedItem(sourceRef, kind, now));
            }
        }
        else
        {
            // Generate sample items for each target kind
            var kindsToGenerate = scope.TargetKinds.Count > 0 ? scope.TargetKinds : ["sbom"];
            var itemsPerKind = 50;

            foreach (var kind in kindsToGenerate)
            {
                for (var i = 0; i < itemsPerKind; i++)
                {
                    var sourceRef = $"{kind}-{tenantId:N}-{i:D4}";
                    items.Add(CreateResolvedItem(sourceRef, kind, now.AddHours(-i)));
                }
            }
        }

        // Apply date range filter
        if (scope.DateRange is not null)
        {
            items = items.Where(item =>
            {
                var dateToCheck = scope.DateRange.Field switch
                {
                    DateRangeField.ModifiedAt => item.ModifiedAt ?? item.CreatedAt,
                    DateRangeField.ProcessedAt => item.CreatedAt, // Use CreatedAt as proxy
                    _ => item.CreatedAt
                };

                return (!scope.DateRange.From.HasValue || dateToCheck >= scope.DateRange.From.Value) &&
                       (!scope.DateRange.To.HasValue || dateToCheck <= scope.DateRange.To.Value);
            }).ToList();
        }

        // Apply namespace filter
        if (scope.Namespaces.Count > 0)
        {
            items = items.Where(item =>
                item.Namespace is not null &&
                scope.Namespaces.Contains(item.Namespace, StringComparer.OrdinalIgnoreCase)).ToList();
        }

        // Apply tag filter
        if (scope.Tags.Count > 0)
        {
            items = items.Where(item =>
                scope.Tags.All(tag => item.Tags.Contains(tag, StringComparer.OrdinalIgnoreCase))).ToList();
        }

        // Apply exclude patterns
        if (scope.ExcludePatterns.Count > 0)
        {
            var excludeRegexes = scope.ExcludePatterns.Select(p => new Regex(p, RegexOptions.IgnoreCase)).ToList();
            items = items.Where(item =>
                !excludeRegexes.Any(r => r.IsMatch(item.SourceRef) || (item.Name is not null && r.IsMatch(item.Name)))).ToList();
        }

        return items;
    }

    private ResolvedExportItem CreateResolvedItem(string sourceRef, string kind, DateTimeOffset createdAt)
    {
        return new ResolvedExportItem
        {
            ItemId = Guid.NewGuid(),
            Kind = kind,
            SourceRef = sourceRef,
            Name = $"{kind}-{sourceRef}",
            Namespace = "default",
            Tags = kind == "sbom" ? ["container", "linux"] : [kind],
            CreatedAt = createdAt,
            ModifiedAt = createdAt.AddMinutes(5),
            EstimatedSizeBytes = EstimatedBytesPerItem,
            Metadata = new Dictionary<string, string>
            {
                ["generator"] = "stellaops",
                ["version"] = "1.0.0"
            }
        };
    }

    private static (List<ResolvedExportItem> Items, SamplingMetadata? Metadata) ApplySampling(
        List<ResolvedExportItem> items,
        SamplingConfig? sampling)
    {
        if (sampling is null || sampling.Strategy == SamplingStrategy.None)
        {
            return (items, null);
        }

        var seed = sampling.Seed ?? Environment.TickCount;
        var size = Math.Min(sampling.Size, items.Count);

        List<ResolvedExportItem> sampled;
        Dictionary<string, int>? strataDistribution = null;

        switch (sampling.Strategy)
        {
            case SamplingStrategy.Random:
                var random = new Random(seed);
                sampled = items.OrderBy(_ => random.Next()).Take(size).ToList();
                break;

            case SamplingStrategy.First:
                sampled = items.OrderBy(i => i.CreatedAt).Take(size).ToList();
                break;

            case SamplingStrategy.Last:
                sampled = items.OrderByDescending(i => i.CreatedAt).Take(size).ToList();
                break;

            case SamplingStrategy.Stratified:
                var field = sampling.StratifyBy ?? "kind";
                var grouped = items.GroupBy(i => GetFieldValue(i, field)).ToList();
                var perStratum = size / grouped.Count;
                sampled = [];
                strataDistribution = new Dictionary<string, int>();

                foreach (var group in grouped)
                {
                    var stratumItems = group.Take(perStratum).ToList();
                    sampled.AddRange(stratumItems);
                    strataDistribution[group.Key] = stratumItems.Count;
                }
                break;

            case SamplingStrategy.Systematic:
                var interval = Math.Max(1, items.Count / size);
                sampled = items.Where((_, index) => index % interval == 0).Take(size).ToList();
                break;

            default:
                return (items, null);
        }

        var metadata = new SamplingMetadata
        {
            Strategy = sampling.Strategy,
            Seed = seed,
            OriginalCount = items.Count,
            SampledCount = sampled.Count,
            StratifyField = sampling.StratifyBy,
            StrataDistribution = strataDistribution
        };

        return (sampled, metadata);
    }

    private static string GetFieldValue(ResolvedExportItem item, string field)
    {
        return field.ToLowerInvariant() switch
        {
            "kind" => item.Kind,
            "namespace" => item.Namespace ?? "unknown",
            _ => item.Metadata.TryGetValue(field, out var value) ? value : "unknown"
        };
    }
}
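A standalone illustration of why the Random strategy above is reproducible: a single Random seeded once supplies the sort keys, so the same seed yields the same permutation and therefore the same sampled subset.

// Illustration only; mirrors the seeded shuffle in ApplySampling.
var items = Enumerable.Range(1, 10).ToList();

List<int> Sample(int seed)
{
    var random = new Random(seed);                          // same seed -> same key sequence
    return items.OrderBy(_ => random.Next()).Take(3).ToList();
}

Console.WriteLine(string.Join(",", Sample(42)));
Console.WriteLine(string.Join(",", Sample(42)));            // identical to the line above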
@@ -0,0 +1,35 @@
namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Plans export operations based on profile configuration.
/// </summary>
public interface IExportPlanner
{
    /// <summary>
    /// Creates an export plan from a profile.
    /// </summary>
    Task<ExportPlanResult> CreatePlanAsync(
        ExportPlanRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets an existing plan by ID.
    /// </summary>
    Task<ExportPlan?> GetPlanAsync(
        Guid planId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates a plan is still valid for execution.
    /// </summary>
    Task<ExportPlanResult> ValidatePlanAsync(
        Guid planId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Cancels a pending plan.
    /// </summary>
    Task<bool> CancelPlanAsync(
        Guid planId,
        CancellationToken cancellationToken = default);
}
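A hedged end-to-end sketch of the planner contract. ExportPlanRequest member names are taken from the planner implementation earlier in this change (ProfileId, TenantId, DryRun); anything else would need checking against the request model.

// Sketch under assumptions; `planner` is any IExportPlanner implementation.
async Task RunAsync(IExportPlanner planner, Guid profileId, Guid tenantId, CancellationToken ct)
{
    var created = await planner.CreatePlanAsync(new ExportPlanRequest
    {
        ProfileId = profileId,
        TenantId = tenantId,
        DryRun = false
    }, ct);

    if (!created.Success || created.Plan is null)
    {
        return;
    }

    // Plans expire after DefaultPlanValidityMinutes, so re-validate before executing.
    var validated = await planner.ValidatePlanAsync(created.Plan.PlanId, ct);
    if (!validated.Success)
    {
        await planner.CancelPlanAsync(created.Plan.PlanId, ct);
    }
}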
@@ -0,0 +1,44 @@
namespace StellaOps.ExportCenter.Core.Planner;

/// <summary>
/// Resolves export scope to concrete items.
/// </summary>
public interface IExportScopeResolver
{
    /// <summary>
    /// Resolves a scope definition to concrete items.
    /// </summary>
    Task<ScopeResolutionResult> ResolveAsync(
        Guid tenantId,
        ExportScope scope,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates a scope definition without resolving.
    /// </summary>
    Task<IReadOnlyList<ExportValidationError>> ValidateAsync(
        ExportScope scope,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Estimates the size and count of items matching a scope.
    /// </summary>
    Task<ScopeEstimate> EstimateAsync(
        Guid tenantId,
        ExportScope scope,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Estimate of items matching a scope.
/// </summary>
public sealed record ScopeEstimate
{
    public int EstimatedItemCount { get; init; }

    public long EstimatedSizeBytes { get; init; }

    public TimeSpan EstimatedProcessingTime { get; init; }

    public IReadOnlyDictionary<string, int> CountByKind { get; init; } = new Dictionary<string, int>();
}
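A small sketch of using EstimateAsync as a cheap pre-flight check before committing to a full resolution. NullLogger comes from Microsoft.Extensions.Logging.Abstractions; the size threshold is illustrative.

using Microsoft.Extensions.Logging.Abstractions;

var resolver = new ExportScopeResolver(NullLogger<ExportScopeResolver>.Instance);
var estimate = await resolver.EstimateAsync(
    Guid.NewGuid(),
    new ExportScope { TargetKinds = ["sbom", "vex"], MaxItems = 200 },
    CancellationToken.None);

// CountByKind splits the estimate across the requested kinds.
Console.WriteLine($"{estimate.EstimatedItemCount} items, ~{estimate.EstimatedSizeBytes} bytes");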
@@ -0,0 +1,73 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.ExportCenter.Core.Configuration;

namespace StellaOps.ExportCenter.Infrastructure.Db;

/// <summary>
/// Manages Npgsql data source for Export Center with tenant isolation.
/// </summary>
public sealed class ExportCenterDataSource : IAsyncDisposable
{
    private readonly NpgsqlDataSource _dataSource;
    private readonly ILogger<ExportCenterDataSource> _logger;

    public ExportCenterDataSource(
        DatabaseOptions databaseOptions,
        ILogger<ExportCenterDataSource> logger)
    {
        ArgumentNullException.ThrowIfNull(databaseOptions);
        ArgumentException.ThrowIfNullOrWhiteSpace(databaseOptions.ConnectionString);

        _logger = logger;
        _dataSource = CreateDataSource(databaseOptions.ConnectionString);
    }

    public async ValueTask DisposeAsync()
    {
        await _dataSource.DisposeAsync();
    }

    public Task<NpgsqlConnection> OpenConnectionAsync(CancellationToken cancellationToken)
        => OpenConnectionAsync(null, cancellationToken);

    public async Task<NpgsqlConnection> OpenConnectionAsync(Guid? tenantId, CancellationToken cancellationToken)
    {
        var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
        await ConfigureSessionAsync(connection, tenantId, cancellationToken);
        return connection;
    }

    private static NpgsqlDataSource CreateDataSource(string connectionString)
    {
        var builder = new NpgsqlDataSourceBuilder(connectionString);
        builder.EnableDynamicJson();
        return builder.Build();
    }

    private async Task ConfigureSessionAsync(NpgsqlConnection connection, Guid? tenantId, CancellationToken cancellationToken)
    {
        try
        {
            await using var command = new NpgsqlCommand("SET TIME ZONE 'UTC';", connection);
            await command.ExecuteNonQueryAsync(cancellationToken);

            if (tenantId.HasValue)
            {
                await using var tenantCommand = new NpgsqlCommand("SELECT set_config('app.current_tenant', @tenant, false);", connection);
                tenantCommand.Parameters.AddWithValue("tenant", tenantId.Value.ToString("D"));
                await tenantCommand.ExecuteNonQueryAsync(cancellationToken);
            }
        }
        catch (Exception ex)
        {
            if (_logger.IsEnabled(LogLevel.Error))
            {
                _logger.LogError(ex, "Failed to configure Export Center session state.");
            }

            await connection.DisposeAsync();
            throw;
        }
    }
}
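A sketch of opening a tenant-scoped connection. The session then carries app.current_tenant, so statements on this connection pass the row-level security policies defined in the migration later in this change. DatabaseOptions is assumed to expose a settable ConnectionString; the connection string value is a placeholder.

using Microsoft.Extensions.Logging.Abstractions;

await using var dataSource = new ExportCenterDataSource(
    new DatabaseOptions { ConnectionString = "Host=localhost;Database=stellaops" }, // placeholder
    NullLogger<ExportCenterDataSource>.Instance);

await using var connection = await dataSource.OpenConnectionAsync(
    tenantId: Guid.NewGuid(), CancellationToken.None);
// Queries against export_center.* tables now see only this tenant's rows.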
@@ -0,0 +1,90 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.Core.Configuration;

namespace StellaOps.ExportCenter.Infrastructure.Db;

/// <summary>
/// Extension methods for registering Export Center database services.
/// </summary>
public static class ExportCenterDbServiceExtensions
{
    /// <summary>
    /// Adds Export Center database services to the service collection.
    /// </summary>
    public static IServiceCollection AddExportCenterDatabase(
        this IServiceCollection services,
        Action<DatabaseOptions>? configureOptions = null)
    {
        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }

        services.AddSingleton(sp =>
        {
            var options = sp.GetRequiredService<IOptions<ExportCenterOptions>>().Value.Database;
            var logger = sp.GetRequiredService<ILogger<ExportCenterDataSource>>();
            return new ExportCenterDataSource(options, logger);
        });

        services.AddSingleton<IExportCenterMigrationRunner, ExportCenterMigrationRunner>();

        return services;
    }

    /// <summary>
    /// Adds the startup migration hosted service.
    /// </summary>
    public static IServiceCollection AddExportCenterMigrations(this IServiceCollection services)
    {
        services.AddHostedService<ExportCenterMigrationHostedService>();
        return services;
    }
}

/// <summary>
/// Hosted service that runs database migrations at startup.
/// </summary>
internal sealed class ExportCenterMigrationHostedService(
    IExportCenterMigrationRunner migrationRunner,
    IOptions<ExportCenterOptions> options,
    ILogger<ExportCenterMigrationHostedService> logger) : IHostedService
{
    public async Task StartAsync(CancellationToken cancellationToken)
    {
        if (!options.Value.Database.ApplyMigrationsAtStartup)
        {
            if (logger.IsEnabled(LogLevel.Information))
            {
                logger.LogInformation("Export Center database migrations disabled by configuration.");
            }

            return;
        }

        try
        {
            if (logger.IsEnabled(LogLevel.Information))
            {
                logger.LogInformation("Applying Export Center database migrations...");
            }

            await migrationRunner.ApplyAsync(cancellationToken);

            if (logger.IsEnabled(LogLevel.Information))
            {
                logger.LogInformation("Export Center database migrations completed successfully.");
            }
        }
        catch (Exception ex)
        {
            logger.LogCritical(ex, "Failed to apply Export Center database migrations.");
            throw;
        }
    }

    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}
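A host wiring sketch. Note that the data source factory above resolves IOptions&lt;ExportCenterOptions&gt;.Value.Database, so configuration should land on ExportCenterOptions; the "ExportCenter" section name is an assumption.

var builder = Host.CreateApplicationBuilder(args);

builder.Services.Configure<ExportCenterOptions>(
    builder.Configuration.GetSection("ExportCenter")); // section name assumed

builder.Services.AddExportCenterDatabase();
builder.Services.AddExportCenterMigrations(); // applies SQL at startup when enabled

await builder.Build().RunAsync();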
@@ -0,0 +1,139 @@
using Microsoft.Extensions.Logging;
using Npgsql;

namespace StellaOps.ExportCenter.Infrastructure.Db;

/// <summary>
/// Interface for running Export Center database migrations.
/// </summary>
public interface IExportCenterMigrationRunner
{
    Task ApplyAsync(CancellationToken cancellationToken);
}

/// <summary>
/// Applies SQL migrations for Export Center with checksum validation.
/// </summary>
internal sealed class ExportCenterMigrationRunner(
    ExportCenterDataSource dataSource,
    ILogger<ExportCenterMigrationRunner> logger) : IExportCenterMigrationRunner
{
    private const string VersionTableSql = """
        CREATE TABLE IF NOT EXISTS export_center.export_schema_version
        (
            version integer PRIMARY KEY,
            script_name text NOT NULL,
            script_checksum text NOT NULL,
            applied_at_utc timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC')
        );
        """;

    public async Task ApplyAsync(CancellationToken cancellationToken)
    {
        var scripts = MigrationLoader.LoadAll();

        if (scripts.Count == 0)
        {
            if (logger.IsEnabled(LogLevel.Debug))
            {
                logger.LogDebug("No migrations discovered for Export Center.");
            }

            return;
        }

        await using var connection = await dataSource.OpenConnectionAsync(cancellationToken);
        await using var transaction = await connection.BeginTransactionAsync(cancellationToken);

        // Ensure schema exists first
        await EnsureSchemaAsync(connection, transaction, cancellationToken);
        await EnsureVersionTableAsync(connection, transaction, cancellationToken);
        var appliedScripts = await LoadAppliedScriptsAsync(connection, transaction, cancellationToken);

        foreach (var script in scripts)
        {
            if (appliedScripts.TryGetValue(script.Version, out var existingChecksum))
            {
                if (!string.Equals(existingChecksum, script.Sha256, StringComparison.Ordinal))
                {
                    throw new InvalidOperationException(
                        $"Checksum mismatch for migration {script.Name}. Expected {existingChecksum}, computed {script.Sha256}.");
                }

                continue;
            }

            if (logger.IsEnabled(LogLevel.Information))
            {
                logger.LogInformation("Applying Export Center migration {Version}: {Name}", script.Version, script.Name);
            }

            await ExecuteScriptAsync(connection, transaction, script.Sql, cancellationToken);
            await RecordAppliedScriptAsync(connection, transaction, script, cancellationToken);
        }

        await transaction.CommitAsync(cancellationToken);
    }

    private static async Task EnsureSchemaAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, CancellationToken cancellationToken)
    {
        const string schemaSql = """
            CREATE SCHEMA IF NOT EXISTS export_center;
            CREATE SCHEMA IF NOT EXISTS export_center_app;
            """;

        await using var command = new NpgsqlCommand(schemaSql, connection, transaction);
        await command.ExecuteNonQueryAsync(cancellationToken);
    }

    private static async Task EnsureVersionTableAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, CancellationToken cancellationToken)
    {
        await using var command = new NpgsqlCommand(VersionTableSql, connection, transaction);
        await command.ExecuteNonQueryAsync(cancellationToken);
    }

    private static async Task<Dictionary<int, string>> LoadAppliedScriptsAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, CancellationToken cancellationToken)
    {
        const string sql = """
            SELECT version, script_checksum
            FROM export_center.export_schema_version
            ORDER BY version;
            """;

        await using var command = new NpgsqlCommand(sql, connection, transaction);
        await using var reader = await command.ExecuteReaderAsync(cancellationToken);
        var dictionary = new Dictionary<int, string>();

        while (await reader.ReadAsync(cancellationToken))
        {
            var version = reader.GetInt32(0);
            var checksum = reader.GetString(1);
            dictionary[version] = checksum;
        }

        return dictionary;
    }

    private static async Task ExecuteScriptAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, string sql, CancellationToken cancellationToken)
    {
        await using var command = new NpgsqlCommand(sql, connection, transaction)
        {
            CommandTimeout = 0
        };
        await command.ExecuteNonQueryAsync(cancellationToken);
    }

    private static async Task RecordAppliedScriptAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, MigrationScript script, CancellationToken cancellationToken)
    {
        const string insertSql = """
            INSERT INTO export_center.export_schema_version(version, script_name, script_checksum)
            VALUES (@version, @name, @checksum);
            """;

        await using var command = new NpgsqlCommand(insertSql, connection, transaction);
        command.Parameters.AddWithValue("version", script.Version);
        command.Parameters.AddWithValue("name", script.Name);
        command.Parameters.AddWithValue("checksum", script.Sha256);
        await command.ExecuteNonQueryAsync(cancellationToken);
    }
}
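An operator aid, sketch only: recompute a script's checksum the same way MigrationScript.ComputeSha256 does (CRLF normalized, lowercase hex), for example to diagnose the "Checksum mismatch" failure thrown by the runner above. The file path is a placeholder.

using System.Security.Cryptography;
using System.Text;

static string ChecksumOf(string sql)
{
    var normalized = sql.Replace("\r\n", "\n", StringComparison.Ordinal);
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(normalized));
    return Convert.ToHexString(hash).ToLowerInvariant();
}

Console.WriteLine(ChecksumOf(File.ReadAllText("001_initial_schema.sql")));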
@@ -0,0 +1,42 @@
using System.Reflection;

namespace StellaOps.ExportCenter.Infrastructure.Db;

/// <summary>
/// Loads SQL migration scripts from embedded resources.
/// </summary>
internal static class MigrationLoader
{
    private static readonly Assembly Assembly = typeof(MigrationLoader).Assembly;

    public static IReadOnlyList<MigrationScript> LoadAll()
    {
        var scripts = new List<MigrationScript>();

        foreach (var resourceName in Assembly.GetManifestResourceNames())
        {
            if (!resourceName.Contains(".Db.Migrations.", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            using var stream = Assembly.GetManifestResourceStream(resourceName);
            if (stream is null)
            {
                continue;
            }

            using var reader = new StreamReader(stream);
            var sql = reader.ReadToEnd();

            if (MigrationScript.TryCreate(resourceName, sql, out var script))
            {
                scripts.Add(script);
            }
        }

        return scripts
            .OrderBy(script => script.Version)
            .ToArray();
    }
}
@@ -0,0 +1,59 @@
using System.Diagnostics.CodeAnalysis;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;

namespace StellaOps.ExportCenter.Infrastructure.Db;

/// <summary>
/// Represents a SQL migration script with version tracking.
/// </summary>
internal sealed partial class MigrationScript
{
    private static readonly Regex VersionRegex = GetVersionRegex();

    private MigrationScript(int version, string name, string sql)
    {
        Version = version;
        Name = name;
        Sql = sql;
        Sha256 = ComputeSha256(sql);
    }

    public int Version { get; }

    public string Name { get; }

    public string Sql { get; }

    public string Sha256 { get; }

    public static bool TryCreate(string resourceName, string sql, [NotNullWhen(true)] out MigrationScript? script)
    {
        // Resource names are dot-separated, so Split('.').Last() would yield only
        // the "sql" extension; keep the last two segments to recover the file name.
        var parts = resourceName.Split('.');
        var fileName = parts.Length >= 2 ? $"{parts[^2]}.{parts[^1]}" : resourceName;
        var match = VersionRegex.Match(fileName);

        if (!match.Success || !int.TryParse(match.Groups["version"].Value, out var version))
        {
            script = null;
            return false;
        }

        script = new MigrationScript(version, fileName, sql);
        return true;
    }

    private static string ComputeSha256(string sql)
    {
        var normalized = NormalizeLineEndings(sql);
        var bytes = Encoding.UTF8.GetBytes(normalized);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static string NormalizeLineEndings(string value)
        => value.Replace("\r\n", "\n", StringComparison.Ordinal);

    [GeneratedRegex(@"^(?<version>\d{3,})[_-]", RegexOptions.Compiled)]
    private static partial Regex GetVersionRegex();
}
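Naming-convention sketch: with the csproj's EmbeddedResource glob over Db\Migrations\*.sql, a file Db/Migrations/001_initial_schema.sql becomes a manifest resource named roughly "StellaOps.ExportCenter.Infrastructure.Db.Migrations.001_initial_schema.sql" (default MSBuild naming; the exact root namespace is an assumption). TryCreate then recovers the file name and parses the version from the numeric prefix.

// Illustration only.
var ok = MigrationScript.TryCreate(
    "StellaOps.ExportCenter.Infrastructure.Db.Migrations.001_initial_schema.sql",
    "CREATE SCHEMA IF NOT EXISTS export_center;",
    out var script);
// ok == true, script.Version == 1, script.Name == "001_initial_schema.sql"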
@@ -0,0 +1,180 @@
|
|||||||
|
-- 001_initial_schema.sql
|
||||||
|
-- Establishes core schema, RLS policies, and tables for Export Center.
|
||||||
|
|
||||||
|
CREATE SCHEMA IF NOT EXISTS export_center;
|
||||||
|
CREATE SCHEMA IF NOT EXISTS export_center_app;
|
||||||
|
|
||||||
|
-- Tenant isolation function
|
||||||
|
CREATE OR REPLACE FUNCTION export_center_app.require_current_tenant()
|
||||||
|
RETURNS uuid
|
||||||
|
LANGUAGE plpgsql
|
||||||
|
AS $$
|
||||||
|
DECLARE
|
||||||
|
tenant_text text;
|
||||||
|
BEGIN
|
||||||
|
tenant_text := current_setting('app.current_tenant', true);
|
||||||
|
IF tenant_text IS NULL OR length(tenant_text) = 0 THEN
|
||||||
|
RAISE EXCEPTION 'app.current_tenant is not set for the current session';
|
||||||
|
END IF;
|
||||||
|
RETURN tenant_text::uuid;
|
||||||
|
END;
|
||||||
|
$$;
|
||||||
|
|
||||||
|
-- Export Profiles: defines scope and configuration for exports
|
||||||
|
CREATE TABLE IF NOT EXISTS export_center.export_profiles
|
||||||
|
(
|
||||||
|
profile_id uuid PRIMARY KEY,
|
||||||
|
tenant_id uuid NOT NULL,
|
||||||
|
name text NOT NULL CHECK (length(name) BETWEEN 1 AND 256),
|
||||||
|
description text,
|
||||||
|
kind smallint NOT NULL CHECK (kind BETWEEN 1 AND 4),
|
||||||
|
status smallint NOT NULL CHECK (status BETWEEN 1 AND 4),
|
||||||
|
scope_json jsonb,
|
||||||
|
format_json jsonb,
|
||||||
|
signing_json jsonb,
|
||||||
|
schedule text,
|
||||||
|
created_at timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC'),
|
||||||
|
updated_at timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC'),
|
||||||
|
archived_at timestamptz
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_export_profiles_tenant_status
|
||||||
|
ON export_center.export_profiles (tenant_id, status);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS uq_export_profiles_tenant_name
|
||||||
|
ON export_center.export_profiles (tenant_id, name) WHERE archived_at IS NULL;
|
||||||
|
|
||||||
|
ALTER TABLE export_center.export_profiles
|
||||||
|
ENABLE ROW LEVEL SECURITY;
|
||||||
|
|
||||||
|
CREATE POLICY IF NOT EXISTS export_profiles_isolation
|
||||||
|
ON export_center.export_profiles
|
||||||
|
USING (tenant_id = export_center_app.require_current_tenant())
|
||||||
|
WITH CHECK (tenant_id = export_center_app.require_current_tenant());
|
||||||
|
|
||||||
|
-- Export Runs: tracks individual export executions
|
||||||
|
CREATE TABLE IF NOT EXISTS export_center.export_runs
|
||||||
|
(
|
||||||
|
run_id uuid PRIMARY KEY,
|
||||||
|
profile_id uuid NOT NULL,
|
||||||
|
tenant_id uuid NOT NULL,
|
||||||
|
status smallint NOT NULL CHECK (status BETWEEN 1 AND 6),
|
||||||
|
trigger smallint NOT NULL CHECK (trigger BETWEEN 1 AND 4),
|
||||||
|
correlation_id text,
|
||||||
|
initiated_by text,
|
||||||
|
total_items integer NOT NULL DEFAULT 0 CHECK (total_items >= 0),
|
||||||
|
processed_items integer NOT NULL DEFAULT 0 CHECK (processed_items >= 0),
|
||||||
|
failed_items integer NOT NULL DEFAULT 0 CHECK (failed_items >= 0),
|
||||||
|
total_size_bytes bigint NOT NULL DEFAULT 0 CHECK (total_size_bytes >= 0),
|
||||||
|
error_json jsonb,
|
||||||
|
created_at timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC'),
|
||||||
|
started_at timestamptz,
|
||||||
|
completed_at timestamptz,
|
||||||
|
expires_at timestamptz,
|
||||||
|
CONSTRAINT fk_runs_profile FOREIGN KEY (profile_id) REFERENCES export_center.export_profiles (profile_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_export_runs_tenant_status
|
||||||
|
ON export_center.export_runs (tenant_id, status);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_export_runs_profile_created
|
||||||
|
ON export_center.export_runs (profile_id, created_at DESC);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_export_runs_correlation
|
||||||
|
ON export_center.export_runs (correlation_id) WHERE correlation_id IS NOT NULL;

ALTER TABLE export_center.export_runs
    ENABLE ROW LEVEL SECURITY;

-- PostgreSQL has no CREATE POLICY IF NOT EXISTS; drop-and-recreate keeps the migration idempotent.
DROP POLICY IF EXISTS export_runs_isolation ON export_center.export_runs;
CREATE POLICY export_runs_isolation
    ON export_center.export_runs
    USING (tenant_id = export_center_app.require_current_tenant())
    WITH CHECK (tenant_id = export_center_app.require_current_tenant());

-- Export Inputs: tracks items included in each export run
CREATE TABLE IF NOT EXISTS export_center.export_inputs
(
    input_id uuid PRIMARY KEY,
    run_id uuid NOT NULL,
    tenant_id uuid NOT NULL,
    kind smallint NOT NULL CHECK (kind BETWEEN 1 AND 8),
    status smallint NOT NULL CHECK (status BETWEEN 1 AND 5),
    source_ref text NOT NULL CHECK (length(source_ref) BETWEEN 1 AND 512),
    name text,
    content_hash text CHECK (content_hash IS NULL OR content_hash ~ '^[0-9a-f]{64}$'),
    size_bytes bigint NOT NULL DEFAULT 0 CHECK (size_bytes >= 0),
    metadata_json jsonb,
    error_json jsonb,
    created_at timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC'),
    processed_at timestamptz,
    CONSTRAINT fk_inputs_run FOREIGN KEY (run_id) REFERENCES export_center.export_runs (run_id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS ix_export_inputs_run_status
    ON export_center.export_inputs (run_id, status);

CREATE INDEX IF NOT EXISTS ix_export_inputs_tenant_kind
    ON export_center.export_inputs (tenant_id, kind);

CREATE INDEX IF NOT EXISTS ix_export_inputs_source_ref
    ON export_center.export_inputs (tenant_id, source_ref);

ALTER TABLE export_center.export_inputs
    ENABLE ROW LEVEL SECURITY;

DROP POLICY IF EXISTS export_inputs_isolation ON export_center.export_inputs;
CREATE POLICY export_inputs_isolation
    ON export_center.export_inputs
    USING (tenant_id = export_center_app.require_current_tenant())
    WITH CHECK (tenant_id = export_center_app.require_current_tenant());

-- Export Distributions: tracks artifact distribution to targets
CREATE TABLE IF NOT EXISTS export_center.export_distributions
(
    distribution_id uuid PRIMARY KEY,
    run_id uuid NOT NULL,
    tenant_id uuid NOT NULL,
    kind smallint NOT NULL CHECK (kind BETWEEN 1 AND 5),
    status smallint NOT NULL CHECK (status BETWEEN 1 AND 6),
    target text NOT NULL CHECK (length(target) BETWEEN 1 AND 1024),
    artifact_path text NOT NULL CHECK (length(artifact_path) BETWEEN 1 AND 1024),
    artifact_hash text CHECK (artifact_hash IS NULL OR artifact_hash ~ '^[0-9a-f]{64}$'),
    size_bytes bigint NOT NULL DEFAULT 0 CHECK (size_bytes >= 0),
    content_type text,
    metadata_json jsonb,
    error_json jsonb,
    attempt_count integer NOT NULL DEFAULT 0 CHECK (attempt_count >= 0),
    created_at timestamptz NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC'),
    distributed_at timestamptz,
    verified_at timestamptz,
    CONSTRAINT fk_distributions_run FOREIGN KEY (run_id) REFERENCES export_center.export_runs (run_id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS ix_export_distributions_run_status
    ON export_center.export_distributions (run_id, status);

CREATE INDEX IF NOT EXISTS ix_export_distributions_tenant_kind
    ON export_center.export_distributions (tenant_id, kind);

ALTER TABLE export_center.export_distributions
    ENABLE ROW LEVEL SECURITY;

DROP POLICY IF EXISTS export_distributions_isolation ON export_center.export_distributions;
CREATE POLICY export_distributions_isolation
    ON export_center.export_distributions
    USING (tenant_id = export_center_app.require_current_tenant())
    WITH CHECK (tenant_id = export_center_app.require_current_tenant());

-- Trigger function to update timestamps
CREATE OR REPLACE FUNCTION export_center_app.update_updated_at()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
BEGIN
    NEW.updated_at := NOW() AT TIME ZONE 'UTC';
    RETURN NEW;
END;
$$;

CREATE TRIGGER trg_export_profiles_updated_at
    BEFORE UPDATE ON export_center.export_profiles
    FOR EACH ROW
    EXECUTE FUNCTION export_center_app.update_updated_at();
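Editor's note: the isolation policies above hinge on export_center_app.require_current_tenant(), whose body is not part of this diff. A minimal C# sketch of how a caller might establish the tenant context through Npgsql, assuming the function reads a transaction-local setting named app.current_tenant — a hypothetical key; the real one lives in the function definition:

using System;
using System.Threading.Tasks;
using Npgsql;

static class TenantContextSketch
{
    // Establish the tenant before any query so the USING/WITH CHECK clauses pass.
    // 'app.current_tenant' is an assumed GUC name, not confirmed by this diff.
    public static async Task UseTenantAsync(NpgsqlConnection conn, Guid tenantId)
    {
        await using var cmd = new NpgsqlCommand(
            "SELECT set_config('app.current_tenant', @tenant, true)", conn);
        cmd.Parameters.AddWithValue("tenant", tenantId.ToString());
        await cmd.ExecuteNonQueryAsync();
    }
}

Because set_config(..., true) is transaction-local, the call must run inside the same transaction as the tenant's subsequent queries.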
@@ -15,7 +15,14 @@
   </ItemGroup>

   <ItemGroup>
+    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
+    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
     <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
+    <PackageReference Include="Npgsql" Version="8.0.3" />
   </ItemGroup>
+
+  <ItemGroup>
+    <EmbeddedResource Include="Db\Migrations\*.sql" />
+  </ItemGroup>
 </Project>
@@ -0,0 +1,249 @@
using StellaOps.ExportCenter.Core.Adapters;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Adapters;

public sealed class JsonNormalizerTests
{
    [Fact]
    public void Normalize_SortsKeys()
    {
        var json = """{"zebra":"z","alpha":"a","beta":"b"}""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.StartsWith("""{"alpha":""", result.NormalizedJson);
        Assert.Contains("\"beta\":", result.NormalizedJson);
        Assert.EndsWith("\"zebra\":\"z\"}", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_SortsNestedKeys()
    {
        var json = """{"outer":{"zebra":"z","alpha":"a"}}""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("""{"alpha":"a","zebra":"z"}""", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_PreservesArrayOrder()
    {
        var json = """{"items":["z","a","b"]}""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("""["z","a","b"]""", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_RedactsFieldByName()
    {
        var json = """{"name":"public","password":"secret123"}""";
        var normalizer = new JsonNormalizer(
            redactionOptions: new JsonRedactionOptions { RedactFields = ["password"] });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("[REDACTED]", result.NormalizedJson);
        Assert.DoesNotContain("secret123", result.NormalizedJson);
        Assert.Equal(1, result.RedactedFieldCount);
    }

    [Fact]
    public void Normalize_RedactsNestedField()
    {
        var json = """{"user":{"name":"john","credentials":{"apiKey":"abc123"}}}""";
        var normalizer = new JsonNormalizer(
            redactionOptions: new JsonRedactionOptions { RedactFields = ["apiKey"] });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.DoesNotContain("abc123", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_RedactsDefaultSensitiveFields()
    {
        var json = """{"name":"test","secretKey":"hidden","normalField":"visible"}""";
        var normalizer = new JsonNormalizer(
            redactionOptions: new JsonRedactionOptions());

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.DoesNotContain("hidden", result.NormalizedJson);
        Assert.Contains("visible", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_RedactsFieldByPath()
    {
        var json = """{"data":{"sensitive":"value"},"other":"keep"}""";
        var normalizer = new JsonNormalizer(
            redactionOptions: new JsonRedactionOptions { RedactFields = ["data.sensitive"] });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("[REDACTED]", result.NormalizedJson);
        Assert.Contains("keep", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_NormalizesTimestamps()
    {
        var json = """{"created":"2024-01-15T10:30:00+02:00"}""";
        var normalizer = new JsonNormalizer(
            new JsonNormalizationOptions { NormalizeTimestamps = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        // Should be normalized to UTC
        Assert.Contains("2024-01-15T08:30:00.000Z", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_PreservesNonTimestampStrings()
    {
        var json = """{"name":"2024-not-a-date","value":"hello"}""";
        var normalizer = new JsonNormalizer(
            new JsonNormalizationOptions { NormalizeTimestamps = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("2024-not-a-date", result.NormalizedJson);
        Assert.Contains("hello", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_ComputesDeterministicHash()
    {
        var json = """{"b":"2","a":"1"}""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result1 = normalizer.Normalize(json);
        var result2 = normalizer.Normalize(json);

        Assert.True(result1.Success);
        Assert.True(result2.Success);
        Assert.NotNull(result1.Sha256);
        Assert.Equal(result1.Sha256, result2.Sha256);
    }

    [Fact]
    public void Normalize_DifferentInputsSameSorted_SameHash()
    {
        var json1 = """{"b":"2","a":"1"}""";
        var json2 = """{"a":"1","b":"2"}""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result1 = normalizer.Normalize(json1);
        var result2 = normalizer.Normalize(json2);

        Assert.True(result1.Success);
        Assert.True(result2.Success);
        Assert.Equal(result1.Sha256, result2.Sha256);
        Assert.Equal(result1.NormalizedJson, result2.NormalizedJson);
    }

    [Fact]
    public void Normalize_InvalidJson_ReturnsFailed()
    {
        var json = """{"invalid":}""";
        var normalizer = new JsonNormalizer();

        var result = normalizer.Normalize(json);

        Assert.False(result.Success);
        Assert.NotNull(result.ErrorMessage);
    }

    [Fact]
    public void Normalize_EmptyInput_ReturnsFailed()
    {
        var normalizer = new JsonNormalizer();

        var result = normalizer.Normalize("");

        Assert.False(result.Success);
    }

    [Fact]
    public void Normalize_NullInput_ReturnsFailed()
    {
        var normalizer = new JsonNormalizer();

        var result = normalizer.Normalize(null!);

        Assert.False(result.Success);
    }

    [Fact]
    public void Normalize_TracksOriginalAndNormalizedSize()
    {
        var json = """{ "a" : "1" , "b" : "2" }""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.True(result.OriginalSizeBytes > 0);
        Assert.True(result.NormalizedSizeBytes > 0);
        // Normalized should be smaller (no extra whitespace)
        Assert.True(result.NormalizedSizeBytes <= result.OriginalSizeBytes);
    }

    [Fact]
    public void Normalize_WithCustomRedactedValue()
    {
        var json = """{"password":"secret"}""";
        var normalizer = new JsonNormalizer(
            redactionOptions: new JsonRedactionOptions
            {
                RedactFields = ["password"],
                RedactedValue = "***"
            });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Contains("***", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_ArrayOfObjects_SortsEachObject()
    {
        var json = """[{"z":"1","a":"2"},{"z":"3","a":"4"}]""";
        var normalizer = new JsonNormalizer(new JsonNormalizationOptions { SortKeys = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.Equal("""[{"a":"2","z":"1"},{"a":"4","z":"3"}]""", result.NormalizedJson);
    }

    [Fact]
    public void Normalize_NormalizesLineEndings()
    {
        var json = "{\r\n\"a\":\"1\"\r\n}";
        var normalizer = new JsonNormalizer(
            new JsonNormalizationOptions { NormalizeLineEndings = true });

        var result = normalizer.Normalize(json);

        Assert.True(result.Success);
        Assert.DoesNotContain("\r", result.NormalizedJson);
    }
}
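Editor's note: taken together, these tests pin down a canonical-JSON contract — keys sorted ordinally, array order preserved, and a SHA-256 over the normalized bytes. A minimal sketch of that canonicalization with System.Text.Json.Nodes, illustrative only and not the JsonNormalizer implementation under test:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;

static class CanonicalJsonSketch
{
    // Recursively rebuild objects with keys in ordinal order; arrays keep their order.
    static JsonNode? Canonicalize(JsonNode? node) => node switch
    {
        JsonObject obj => new JsonObject(
            obj.OrderBy(p => p.Key, StringComparer.Ordinal)
               .Select(p => KeyValuePair.Create(p.Key, Canonicalize(p.Value?.DeepClone())))),
        JsonArray arr => new JsonArray(arr.Select(e => Canonicalize(e?.DeepClone())).ToArray()),
        _ => node,
    };

    public static (string Json, string Sha256) Normalize(string json)
    {
        // Compact output (no indentation), then hash the UTF-8 bytes.
        var canonical = Canonicalize(JsonNode.Parse(json))!.ToJsonString(
            new JsonSerializerOptions { WriteIndented = false });
        var hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)))
            .ToLowerInvariant();
        return (canonical, hash);
    }
}

Because the rewrite is purely structural, two inputs that differ only in key order or whitespace normalize to the same string and therefore the same hash, which is exactly what Normalize_DifferentInputsSameSorted_SameHash asserts.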
@@ -0,0 +1,94 @@
using System.Reflection;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Db;

public sealed class MigrationLoaderTests
{
    [Fact]
    public void LoadAll_ReturnsNonEmptyList()
    {
        var scripts = LoadAllMigrations();

        Assert.NotNull(scripts);
        Assert.NotEmpty(scripts);
    }

    [Fact]
    public void LoadAll_ScriptsAreOrderedByVersion()
    {
        var scripts = LoadAllMigrations();

        var versions = scripts.Select(s => (int)((dynamic)s).Version).ToList();
        var sortedVersions = versions.OrderBy(v => v).ToList();

        Assert.Equal(sortedVersions, versions);
    }

    [Fact]
    public void LoadAll_AllScriptsHaveNonEmptySql()
    {
        var scripts = LoadAllMigrations();

        foreach (var script in scripts)
        {
            var sql = (string)((dynamic)script).Sql;
            Assert.False(string.IsNullOrWhiteSpace(sql));
        }
    }

    [Fact]
    public void LoadAll_AllScriptsHaveValidSha256()
    {
        var scripts = LoadAllMigrations();

        foreach (var script in scripts)
        {
            var sha256 = (string)((dynamic)script).Sha256;
            Assert.Matches("^[0-9a-f]{64}$", sha256);
        }
    }

    [Fact]
    public void LoadAll_FirstMigrationIsInitialSchema()
    {
        var scripts = LoadAllMigrations();

        Assert.NotEmpty(scripts);
        var first = scripts.First();
        var name = (string)((dynamic)first).Name;

        Assert.Contains("initial_schema", name, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void LoadAll_VersionsAreUnique()
    {
        var scripts = LoadAllMigrations();

        var versions = scripts.Select(s => (int)((dynamic)s).Version).ToList();
        var distinctVersions = versions.Distinct().ToList();

        Assert.Equal(distinctVersions.Count, versions.Count);
    }

    // Helper to access internal MigrationLoader via reflection
    private static IReadOnlyList<object> LoadAllMigrations()
    {
        var assembly = typeof(Infrastructure.Db.ExportCenterDataSource).Assembly;
        var loaderType = assembly.GetType("StellaOps.ExportCenter.Infrastructure.Db.MigrationLoader");

        if (loaderType is null)
        {
            return Array.Empty<object>();
        }

        var method = loaderType.GetMethod("LoadAll", BindingFlags.Public | BindingFlags.Static);
        if (method is null)
        {
            return Array.Empty<object>();
        }

        return (IReadOnlyList<object>)method.Invoke(null, null)!;
    }
}
@@ -0,0 +1,154 @@
using System.Reflection;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Db;

public sealed class MigrationScriptTests
{
    [Fact]
    public void TryCreate_ValidResourceName_ReturnsTrueWithScript()
    {
        var resourceName = "StellaOps.ExportCenter.Infrastructure.Db.Migrations.001_initial_schema.sql";
        var sql = "CREATE TABLE test (id int);";

        var result = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.True(result);
        Assert.NotNull(script);
        Assert.Equal(1, script.Version);
        Assert.Equal("001_initial_schema.sql", script.Name);
        Assert.Equal(sql, script.Sql);
        Assert.NotEmpty(script.Sha256);
    }

    [Fact]
    public void TryCreate_ThreeDigitVersion_ParsesCorrectly()
    {
        var resourceName = "Test.Db.Migrations.123_migration.sql";
        var sql = "SELECT 1;";

        var result = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.True(result);
        Assert.NotNull(script);
        Assert.Equal(123, script.Version);
    }

    [Fact]
    public void TryCreate_FourDigitVersion_ParsesCorrectly()
    {
        var resourceName = "Test.Db.Migrations.1000_big_migration.sql";
        var sql = "SELECT 1;";

        var result = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.True(result);
        Assert.NotNull(script);
        Assert.Equal(1000, script.Version);
    }

    [Fact]
    public void TryCreate_InvalidResourceName_ReturnsFalse()
    {
        var resourceName = "Test.Db.Migrations.invalid.sql";
        var sql = "SELECT 1;";

        var result = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.False(result);
        Assert.Null(script);
    }

    [Fact]
    public void TryCreate_NoVersionPrefix_ReturnsFalse()
    {
        var resourceName = "Test.Db.Migrations.no_version.sql";
        var sql = "SELECT 1;";

        var result = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.False(result);
        Assert.Null(script);
    }

    [Fact]
    public void Sha256_IsDeterministic()
    {
        var resourceName = "Test.Db.Migrations.001_test.sql";
        var sql = "CREATE TABLE test (id int);";

        _ = TryCreateMigrationScript(resourceName, sql, out var script1);
        _ = TryCreateMigrationScript(resourceName, sql, out var script2);

        Assert.NotNull(script1);
        Assert.NotNull(script2);
        Assert.Equal(script1.Sha256, script2.Sha256);
    }

    [Fact]
    public void Sha256_NormalizesLineEndings()
    {
        var resourceName = "Test.Db.Migrations.001_test.sql";
        var sqlUnix = "CREATE TABLE test\n(id int);";
        var sqlWindows = "CREATE TABLE test\r\n(id int);";

        _ = TryCreateMigrationScript(resourceName, sqlUnix, out var scriptUnix);
        _ = TryCreateMigrationScript(resourceName, sqlWindows, out var scriptWindows);

        Assert.NotNull(scriptUnix);
        Assert.NotNull(scriptWindows);
        Assert.Equal(scriptUnix.Sha256, scriptWindows.Sha256);
    }

    [Fact]
    public void Sha256_DifferentContentProducesDifferentHash()
    {
        var resourceName = "Test.Db.Migrations.001_test.sql";
        var sql1 = "CREATE TABLE test1 (id int);";
        var sql2 = "CREATE TABLE test2 (id int);";

        _ = TryCreateMigrationScript(resourceName, sql1, out var script1);
        _ = TryCreateMigrationScript(resourceName, sql2, out var script2);

        Assert.NotNull(script1);
        Assert.NotNull(script2);
        Assert.NotEqual(script1.Sha256, script2.Sha256);
    }

    [Fact]
    public void Sha256_IsValidHexFormat()
    {
        var resourceName = "Test.Db.Migrations.001_test.sql";
        var sql = "SELECT 1;";

        _ = TryCreateMigrationScript(resourceName, sql, out var script);

        Assert.NotNull(script);
        Assert.Matches("^[0-9a-f]{64}$", script.Sha256);
    }

    // Helper to access internal MigrationScript via reflection
    private static bool TryCreateMigrationScript(string resourceName, string sql, out dynamic? script)
    {
        var assembly = typeof(Infrastructure.Db.ExportCenterDataSource).Assembly;
        var scriptType = assembly.GetType("StellaOps.ExportCenter.Infrastructure.Db.MigrationScript");

        if (scriptType is null)
        {
            script = null;
            return false;
        }

        var method = scriptType.GetMethod("TryCreate", BindingFlags.Public | BindingFlags.Static);
        if (method is null)
        {
            script = null;
            return false;
        }

        var parameters = new object?[] { resourceName, sql, null };
        var result = (bool)method.Invoke(null, parameters)!;
        script = parameters[2];
        return result;
    }
}
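Editor's note: Sha256_NormalizesLineEndings implies the digest is computed over line-ending-normalized bytes, so Windows and Unix checkouts of the same migration agree, and Sha256_IsValidHexFormat pins the output to lowercase 64-char hex. A plausible sketch of that computation — an assumption, since the actual MigrationScript code is not shown in this diff:

using System;
using System.Security.Cryptography;
using System.Text;

static class MigrationHashSketch
{
    // Collapse CRLF to LF before hashing so the digest is checkout-independent,
    // then emit lowercase hex to satisfy ^[0-9a-f]{64}$.
    public static string ComputeSha256(string sql)
    {
        var normalized = sql.Replace("\r\n", "\n");
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(normalized));
        return Convert.ToHexString(digest).ToLowerInvariant();
    }
}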
@@ -0,0 +1,175 @@
using StellaOps.ExportCenter.Core.Domain;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Domain;

public sealed class ExportProfileTests
{
    [Fact]
    public void ExportProfile_CanBeCreated()
    {
        var profile = new ExportProfile
        {
            ProfileId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            Name = "Test Profile",
            Kind = ExportProfileKind.AdHoc,
            Status = ExportProfileStatus.Active,
            CreatedAt = DateTimeOffset.UtcNow,
            UpdatedAt = DateTimeOffset.UtcNow
        };

        Assert.NotEqual(Guid.Empty, profile.ProfileId);
        Assert.Equal("Test Profile", profile.Name);
        Assert.Equal(ExportProfileKind.AdHoc, profile.Kind);
        Assert.Equal(ExportProfileStatus.Active, profile.Status);
    }

    [Fact]
    public void ExportProfileKind_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportProfileKind.AdHoc);
        Assert.Equal(2, (int)ExportProfileKind.Scheduled);
        Assert.Equal(3, (int)ExportProfileKind.EventDriven);
        Assert.Equal(4, (int)ExportProfileKind.Continuous);
    }

    [Fact]
    public void ExportProfileStatus_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportProfileStatus.Draft);
        Assert.Equal(2, (int)ExportProfileStatus.Active);
        Assert.Equal(3, (int)ExportProfileStatus.Paused);
        Assert.Equal(4, (int)ExportProfileStatus.Archived);
    }
}

public sealed class ExportRunTests
{
    [Fact]
    public void ExportRun_CanBeCreated()
    {
        var run = new ExportRun
        {
            RunId = Guid.NewGuid(),
            ProfileId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            Status = ExportRunStatus.Running,
            Trigger = ExportRunTrigger.Manual,
            CreatedAt = DateTimeOffset.UtcNow
        };

        Assert.NotEqual(Guid.Empty, run.RunId);
        Assert.Equal(ExportRunStatus.Running, run.Status);
        Assert.Equal(ExportRunTrigger.Manual, run.Trigger);
    }

    [Fact]
    public void ExportRunStatus_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportRunStatus.Queued);
        Assert.Equal(2, (int)ExportRunStatus.Running);
        Assert.Equal(3, (int)ExportRunStatus.Completed);
        Assert.Equal(4, (int)ExportRunStatus.PartiallyCompleted);
        Assert.Equal(5, (int)ExportRunStatus.Failed);
        Assert.Equal(6, (int)ExportRunStatus.Cancelled);
    }

    [Fact]
    public void ExportRunTrigger_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportRunTrigger.Manual);
        Assert.Equal(2, (int)ExportRunTrigger.Scheduled);
        Assert.Equal(3, (int)ExportRunTrigger.Event);
        Assert.Equal(4, (int)ExportRunTrigger.Api);
    }
}

public sealed class ExportInputTests
{
    [Fact]
    public void ExportInput_CanBeCreated()
    {
        var input = new ExportInput
        {
            InputId = Guid.NewGuid(),
            RunId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            Kind = ExportInputKind.Sbom,
            Status = ExportInputStatus.Pending,
            SourceRef = "sbom-123",
            CreatedAt = DateTimeOffset.UtcNow
        };

        Assert.NotEqual(Guid.Empty, input.InputId);
        Assert.Equal(ExportInputKind.Sbom, input.Kind);
        Assert.Equal("sbom-123", input.SourceRef);
    }

    [Fact]
    public void ExportInputKind_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportInputKind.Sbom);
        Assert.Equal(2, (int)ExportInputKind.Vex);
        Assert.Equal(3, (int)ExportInputKind.Attestation);
        Assert.Equal(4, (int)ExportInputKind.ScanReport);
        Assert.Equal(5, (int)ExportInputKind.PolicyResult);
        Assert.Equal(6, (int)ExportInputKind.Evidence);
        Assert.Equal(7, (int)ExportInputKind.RiskBundle);
        Assert.Equal(8, (int)ExportInputKind.Advisory);
    }

    [Fact]
    public void ExportInputStatus_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportInputStatus.Pending);
        Assert.Equal(2, (int)ExportInputStatus.Processing);
        Assert.Equal(3, (int)ExportInputStatus.Processed);
        Assert.Equal(4, (int)ExportInputStatus.Failed);
        Assert.Equal(5, (int)ExportInputStatus.Skipped);
    }
}

public sealed class ExportDistributionTests
{
    [Fact]
    public void ExportDistribution_CanBeCreated()
    {
        var distribution = new ExportDistribution
        {
            DistributionId = Guid.NewGuid(),
            RunId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            Kind = ExportDistributionKind.FileSystem,
            Status = ExportDistributionStatus.Pending,
            Target = "/exports/output",
            ArtifactPath = "manifest.json",
            CreatedAt = DateTimeOffset.UtcNow
        };

        Assert.NotEqual(Guid.Empty, distribution.DistributionId);
        Assert.Equal(ExportDistributionKind.FileSystem, distribution.Kind);
        Assert.Equal("/exports/output", distribution.Target);
    }

    [Fact]
    public void ExportDistributionKind_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportDistributionKind.FileSystem);
        Assert.Equal(2, (int)ExportDistributionKind.AmazonS3);
        Assert.Equal(3, (int)ExportDistributionKind.Mirror);
        Assert.Equal(4, (int)ExportDistributionKind.OfflineKit);
        Assert.Equal(5, (int)ExportDistributionKind.Webhook);
    }

    [Fact]
    public void ExportDistributionStatus_HasExpectedValues()
    {
        Assert.Equal(1, (int)ExportDistributionStatus.Pending);
        Assert.Equal(2, (int)ExportDistributionStatus.Distributing);
        Assert.Equal(3, (int)ExportDistributionStatus.Distributed);
        Assert.Equal(4, (int)ExportDistributionStatus.Verified);
        Assert.Equal(5, (int)ExportDistributionStatus.Failed);
        Assert.Equal(6, (int)ExportDistributionStatus.Cancelled);
    }
}
@@ -0,0 +1,269 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Domain;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Planner;

public sealed class ExportPlannerTests
{
    private readonly ExportPlanner _planner;
    private readonly InMemoryExportProfileRepository _profileRepository;

    public ExportPlannerTests()
    {
        var scopeResolver = new ExportScopeResolver(NullLogger<ExportScopeResolver>.Instance);
        _profileRepository = new InMemoryExportProfileRepository();
        _planner = new ExportPlanner(
            scopeResolver,
            _profileRepository,
            NullLogger<ExportPlanner>.Instance);
    }

    [Fact]
    public async Task CreatePlanAsync_ValidProfile_ReturnsPlan()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var request = new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        };

        var result = await _planner.CreatePlanAsync(request);

        Assert.True(result.Success);
        Assert.NotNull(result.Plan);
        Assert.Equal(profile.ProfileId, result.Plan.ProfileId);
        Assert.Equal(ExportPlanStatus.Ready, result.Plan.Status);
    }

    [Fact]
    public async Task CreatePlanAsync_NonExistentProfile_ReturnsError()
    {
        var request = new ExportPlanRequest
        {
            ProfileId = Guid.NewGuid(),
            TenantId = Guid.NewGuid()
        };

        var result = await _planner.CreatePlanAsync(request);

        Assert.False(result.Success);
        Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task CreatePlanAsync_InactiveProfile_ReturnsError()
    {
        var tenantId = Guid.NewGuid();
        var profile = await _profileRepository.CreateAsync(new ExportProfile
        {
            ProfileId = Guid.NewGuid(),
            TenantId = tenantId,
            Name = "Paused Profile",
            Kind = ExportProfileKind.AdHoc,
            Status = ExportProfileStatus.Paused
        });

        var request = new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        };

        var result = await _planner.CreatePlanAsync(request);

        Assert.False(result.Success);
        Assert.Contains("not active", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task CreatePlanAsync_WithScopeOverride_UsesScopeOverride()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var request = new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId,
            ScopeOverride = new ExportScope
            {
                MaxItems = 5,
                TargetKinds = ["vex"]
            }
        };

        var result = await _planner.CreatePlanAsync(request);

        Assert.True(result.Success);
        Assert.NotNull(result.Plan);
        Assert.Equal(5, result.Plan.ResolvedScope.MaxItems);
    }

    [Fact]
    public async Task CreatePlanAsync_DryRun_DoesNotStorePlan()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var request = new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId,
            DryRun = true
        };

        var result = await _planner.CreatePlanAsync(request);

        Assert.True(result.Success);
        Assert.NotNull(result.Plan);

        // Verify plan was not stored
        var storedPlan = await _planner.GetPlanAsync(result.Plan.PlanId);
        Assert.Null(storedPlan);
    }

    [Fact]
    public async Task GetPlanAsync_ExistingPlan_ReturnsPlan()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var createResult = await _planner.CreatePlanAsync(new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        });

        var plan = await _planner.GetPlanAsync(createResult.Plan!.PlanId);

        Assert.NotNull(plan);
        Assert.Equal(createResult.Plan.PlanId, plan.PlanId);
    }

    [Fact]
    public async Task GetPlanAsync_NonExistentPlan_ReturnsNull()
    {
        var plan = await _planner.GetPlanAsync(Guid.NewGuid());

        Assert.Null(plan);
    }

    [Fact]
    public async Task ValidatePlanAsync_ValidPlan_ReturnsSuccess()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var createResult = await _planner.CreatePlanAsync(new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        });

        var validateResult = await _planner.ValidatePlanAsync(createResult.Plan!.PlanId);

        Assert.True(validateResult.Success);
    }

    [Fact]
    public async Task ValidatePlanAsync_NonExistentPlan_ReturnsError()
    {
        var result = await _planner.ValidatePlanAsync(Guid.NewGuid());

        Assert.False(result.Success);
        Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task CancelPlanAsync_ReadyPlan_CancelsPlan()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var createResult = await _planner.CreatePlanAsync(new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        });

        var cancelled = await _planner.CancelPlanAsync(createResult.Plan!.PlanId);

        Assert.True(cancelled);

        var plan = await _planner.GetPlanAsync(createResult.Plan.PlanId);
        Assert.NotNull(plan);
        Assert.Equal(ExportPlanStatus.Cancelled, plan.Status);
    }

    [Fact]
    public async Task CancelPlanAsync_NonExistentPlan_ReturnsFalse()
    {
        var cancelled = await _planner.CancelPlanAsync(Guid.NewGuid());

        Assert.False(cancelled);
    }

    [Fact]
    public async Task CreatePlanAsync_GeneratesPhases()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);

        var result = await _planner.CreatePlanAsync(new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId
        });

        Assert.True(result.Success);
        Assert.NotNull(result.Plan);
        Assert.NotEmpty(result.Plan.Phases);

        // Verify phase ordering
        var orders = result.Plan.Phases.Select(p => p.Order).ToList();
        Assert.Equal(orders.OrderBy(o => o).ToList(), orders);

        // Verify required phases exist
        Assert.Contains(result.Plan.Phases, p => p.Kind == ExportPhaseKind.DataFetch);
        Assert.Contains(result.Plan.Phases, p => p.Kind == ExportPhaseKind.WriteOutput);
        Assert.Contains(result.Plan.Phases, p => p.Kind == ExportPhaseKind.GenerateManifest);
    }

    [Fact]
    public async Task CreatePlanAsync_WithCorrelationId_IncludesInPlan()
    {
        var tenantId = Guid.NewGuid();
        var profile = await CreateTestProfile(tenantId);
        var correlationId = "test-correlation-123";

        var result = await _planner.CreatePlanAsync(new ExportPlanRequest
        {
            ProfileId = profile.ProfileId,
            TenantId = tenantId,
            CorrelationId = correlationId,
            InitiatedBy = "test-user"
        });

        Assert.True(result.Success);
        Assert.Equal(correlationId, result.Plan!.CorrelationId);
        Assert.Equal("test-user", result.Plan.InitiatedBy);
    }

    private async Task<ExportProfile> CreateTestProfile(Guid tenantId)
    {
        return await _profileRepository.CreateAsync(new ExportProfile
        {
            ProfileId = Guid.NewGuid(),
            TenantId = tenantId,
            Name = "Test Profile",
            Kind = ExportProfileKind.AdHoc,
            Status = ExportProfileStatus.Active,
            ScopeJson = """{"targetKinds":["sbom"],"maxItems":100}"""
        });
    }
}
@@ -0,0 +1,221 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Planner;

public sealed class ExportScopeResolverTests
{
    private readonly ExportScopeResolver _resolver;

    public ExportScopeResolverTests()
    {
        _resolver = new ExportScopeResolver(NullLogger<ExportScopeResolver>.Instance);
    }

    [Fact]
    public async Task ResolveAsync_EmptyScope_ReturnsDefaultItems()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope();

        var result = await _resolver.ResolveAsync(tenantId, scope);

        Assert.True(result.Success);
        Assert.NotEmpty(result.Items);
    }

    [Fact]
    public async Task ResolveAsync_WithSourceRefs_ReturnsMatchingItems()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            SourceRefs = ["ref-001", "ref-002", "ref-003"]
        };

        var result = await _resolver.ResolveAsync(tenantId, scope);

        Assert.True(result.Success);
        Assert.Equal(3, result.Items.Count);
    }

    [Fact]
    public async Task ResolveAsync_WithTargetKinds_FiltersCorrectly()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            TargetKinds = ["sbom", "vex"]
        };

        var result = await _resolver.ResolveAsync(tenantId, scope);

        Assert.True(result.Success);
        Assert.All(result.Items, item =>
            Assert.Contains(item.Kind, new[] { "sbom", "vex" }));
    }

    [Fact]
    public async Task ResolveAsync_WithMaxItems_LimitsResults()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            MaxItems = 5
        };

        var result = await _resolver.ResolveAsync(tenantId, scope);

        Assert.True(result.Success);
        Assert.True(result.Items.Count <= 5);
    }

    [Fact]
    public async Task ResolveAsync_WithRandomSampling_AppliesSampling()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            Sampling = new SamplingConfig
            {
                Strategy = SamplingStrategy.Random,
                Size = 10,
                Seed = 42
            }
        };

        var result = await _resolver.ResolveAsync(tenantId, scope);

        Assert.True(result.Success);
        Assert.NotNull(result.SamplingMetadata);
        Assert.Equal(SamplingStrategy.Random, result.SamplingMetadata.Strategy);
        Assert.Equal(42, result.SamplingMetadata.Seed);
    }

    [Fact]
    public async Task ResolveAsync_DeterministicSampling_ProducesSameResults()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            Sampling = new SamplingConfig
            {
                Strategy = SamplingStrategy.Random,
                Size = 5,
                Seed = 123
            }
        };

        var result1 = await _resolver.ResolveAsync(tenantId, scope);
        var result2 = await _resolver.ResolveAsync(tenantId, scope);

        Assert.Equal(result1.Items.Count, result2.Items.Count);
        // Items should be in same order due to deterministic seeding
        for (var i = 0; i < result1.Items.Count; i++)
        {
            Assert.Equal(result1.Items[i].SourceRef, result2.Items[i].SourceRef);
        }
    }

    [Fact]
    public async Task ValidateAsync_InvalidTargetKind_ReturnsError()
    {
        var scope = new ExportScope
        {
            TargetKinds = ["invalid-kind"]
        };

        var errors = await _resolver.ValidateAsync(scope);

        Assert.NotEmpty(errors);
        Assert.Contains(errors, e => e.Code == "INVALID_TARGET_KIND");
    }

    [Fact]
    public async Task ValidateAsync_InvalidDateRange_ReturnsError()
    {
        var scope = new ExportScope
        {
            DateRange = new DateRangeFilter
            {
                From = DateTimeOffset.UtcNow.AddDays(1),
                To = DateTimeOffset.UtcNow.AddDays(-1)
            }
        };

        var errors = await _resolver.ValidateAsync(scope);

        Assert.NotEmpty(errors);
        Assert.Contains(errors, e => e.Code == "INVALID_DATE_RANGE");
    }

    [Fact]
    public async Task ValidateAsync_SamplingWithoutSize_ReturnsError()
    {
        var scope = new ExportScope
        {
            Sampling = new SamplingConfig
            {
                Strategy = SamplingStrategy.Random,
                Size = 0
            }
        };

        var errors = await _resolver.ValidateAsync(scope);

        Assert.NotEmpty(errors);
        Assert.Contains(errors, e => e.Code == "INVALID_SAMPLE_SIZE");
    }

    [Fact]
    public async Task ValidateAsync_StratifiedWithoutField_ReturnsError()
    {
        var scope = new ExportScope
        {
            Sampling = new SamplingConfig
            {
                Strategy = SamplingStrategy.Stratified,
                Size = 10,
                StratifyBy = null
            }
        };

        var errors = await _resolver.ValidateAsync(scope);

        Assert.NotEmpty(errors);
        Assert.Contains(errors, e => e.Code == "MISSING_STRATIFY_FIELD");
    }

    [Fact]
    public async Task ValidateAsync_NoLimits_ReturnsWarning()
    {
        var scope = new ExportScope
        {
            TargetKinds = ["sbom"]
        };

        var errors = await _resolver.ValidateAsync(scope);

        Assert.Contains(errors, e =>
            e.Code == "POTENTIALLY_LARGE_EXPORT" &&
            e.Severity == ExportValidationSeverity.Warning);
    }

    [Fact]
    public async Task EstimateAsync_ReturnsReasonableEstimates()
    {
        var tenantId = Guid.NewGuid();
        var scope = new ExportScope
        {
            SourceRefs = ["ref-001", "ref-002", "ref-003"],
            TargetKinds = ["sbom"]
        };

        var estimate = await _resolver.EstimateAsync(tenantId, scope);

        Assert.True(estimate.EstimatedItemCount > 0);
        Assert.True(estimate.EstimatedSizeBytes > 0);
        Assert.True(estimate.EstimatedProcessingTime > TimeSpan.Zero);
    }
}
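Editor's note: ResolveAsync_DeterministicSampling_ProducesSameResults requires that the same seed yields the same items in the same order. One way to satisfy that — sort candidates by a stable key, then Fisher-Yates shuffle with a seeded PRNG — is sketched below; whether ExportScopeResolver does exactly this is an assumption:

using System;
using System.Collections.Generic;
using System.Linq;

static class SamplingSketch
{
    // Deterministic random sample: sort candidates by a stable key first so the
    // caller's input order never matters, then Fisher-Yates shuffle with a fixed seed.
    public static List<T> Sample<T>(IEnumerable<T> candidates, Func<T, string> keyOf, int size, int seed)
    {
        var items = candidates.OrderBy(keyOf, StringComparer.Ordinal).ToList();
        var rng = new Random(seed);
        for (var i = items.Count - 1; i > 0; i--)
        {
            var j = rng.Next(i + 1);
            (items[i], items[j]) = (items[j], items[i]);
        }
        return items.Take(size).ToList();
    }
}

Note that System.Random's sequence for a given seed is stable within one runtime but has changed across .NET versions, so a production implementation might prefer a hash-based ordering for cross-version determinism.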
@@ -179,7 +179,8 @@ internal static class NodePnpDataLoader
         // If location points to a zip, try to read package/package.json inside the archive
         if (packageLocation.Contains(".zip", StringComparison.OrdinalIgnoreCase))
         {
-            var zipPath = Path.Combine(rootPath, packageLocation[..packageLocation.IndexOf(".zip", StringComparison.OrdinalIgnoreCase) + 4]);
+            var zipIndex = packageLocation.IndexOf(".zip", StringComparison.OrdinalIgnoreCase);
+            var zipPath = Path.Combine(rootPath, packageLocation[..(zipIndex + 4)]);
             if (File.Exists(zipPath))
             {
                 try
@@ -70,10 +70,8 @@ public sealed class RpmDatabaseReaderTests

         // Magic
         BinaryPrimitives.WriteUInt32BigEndian(header.AsSpan(0), 0x8eade8ab);
-        // Version/reserved bytes (version=1)
-        header[4] = 1;
-        header[5] = 0;
-        BinaryPrimitives.WriteUInt16BigEndian(header.AsSpan(6), 0);
+        // Reserved/version bytes set to zero for parser compatibility
+        BinaryPrimitives.WriteUInt32BigEndian(header.AsSpan(4), 0);

         // Index count (3) and store size
         BinaryPrimitives.WriteInt32BigEndian(header.AsSpan(8), 3);
@@ -117,4 +115,3 @@ public sealed class RpmDatabaseReaderTests
         return result;
     }
 }
-
@@ -29,6 +29,7 @@ import { AuthSessionStore } from './core/auth/auth-session.store';
 import { OperatorMetadataInterceptor } from './core/orchestrator/operator-metadata.interceptor';
 import { MockNotifyApiService } from './testing/mock-notify-api.service';
 import { seedAuthSession, type StubAuthSession } from './testing';
+import { CVSS_API_BASE_URL } from './core/api/cvss.client';

 export const appConfig: ApplicationConfig = {
   providers: [
@@ -105,6 +106,19 @@ export const appConfig: ApplicationConfig = {
         }
       },
     },
+    {
+      provide: CVSS_API_BASE_URL,
+      deps: [AppConfigService],
+      useFactory: (config: AppConfigService) => {
+        const policyBase = config.config.apiBaseUrls.policy;
+        try {
+          return new URL('/api/cvss', policyBase).toString();
+        } catch {
+          const normalized = policyBase.endsWith('/') ? policyBase.slice(0, -1) : policyBase;
+          return `${normalized}/api/cvss`;
+        }
+      },
+    },
     RiskHttpClient,
     MockRiskApi,
     {
src/Web/StellaOps.Web/src/app/core/api/cvss.client.spec.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { TestBed } from '@angular/core/testing';

import { AuthSessionStore } from '../auth/auth-session.store';
import { CvssClient, CVSS_API_BASE_URL } from './cvss.client';
import { CvssReceipt, CvssReceiptDto } from './cvss.models';

class FakeAuthSessionStore {
  getActiveTenantId(): string | null {
    return 'tenant-123';
  }
}

describe('CvssClient', () => {
  let httpMock: HttpTestingController;
  let client: CvssClient;

  beforeEach(() => {
    TestBed.configureTestingModule({
      imports: [HttpClientTestingModule],
      providers: [
        CvssClient,
        { provide: CVSS_API_BASE_URL, useValue: '/api/cvss' },
        { provide: AuthSessionStore, useClass: FakeAuthSessionStore },
      ],
    });

    httpMock = TestBed.inject(HttpTestingController);
    client = TestBed.inject(CvssClient);
  });

  afterEach(() => {
    httpMock.verify();
  });

  it('adds tenant headers and maps receipt response', () => {
    const dto: CvssReceiptDto = {
      receiptId: 'rcpt-1',
      vulnerabilityId: 'CVE-2025-0001',
      createdAt: '2025-12-07T12:00:00Z',
      createdBy: 'tester@example.com',
      vectorString: 'CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H',
      severity: 'Critical',
      scores: {
        baseScore: 9.0,
        threatScore: 9.0,
        environmentalScore: 9.1,
        fullScore: 9.1,
        effectiveScore: 9.1,
        effectiveScoreType: 'Environmental',
      },
      policyRef: { policyId: 'default', version: '1.0.0', hash: 'sha256:abc' },
      evidence: [
        {
          uri: 'cas://evidence/1',
          description: 'Vendor advisory evidence',
          source: 'vendor',
          collectedAt: '2025-12-07T10:00:00Z',
        },
      ],
      history: [
        {
          historyId: 'hist-1',
          reason: 'Initial scoring',
          actor: 'tester@example.com',
          createdAt: '2025-12-07T12:00:00Z',
        },
      ],
    };

    let receipt: CvssReceipt | undefined;

    client.getReceipt(dto.receiptId).subscribe((result) => (receipt = result));

    const req = httpMock.expectOne('/api/cvss/receipts/rcpt-1');
    expect(req.request.method).toBe('GET');
    expect(req.request.headers.get('X-Stella-Tenant')).toBe('tenant-123');
    expect(req.request.headers.has('X-Stella-Trace-Id')).toBeTrue();
    req.flush(dto);

    expect(receipt?.score.overall).toBe(9.1);
    expect(receipt?.score.effectiveType).toBe('Environmental');
    expect(receipt?.policy.policyId).toBe('default');
    expect(receipt?.evidence[0].uri).toBe('cas://evidence/1');
    expect(receipt?.history[0].reason).toBe('Initial scoring');
  });
});
@@ -1,58 +1,117 @@
-import { Injectable } from '@angular/core';
-import { Observable, of } from 'rxjs';
+import { HttpClient, HttpHeaders } from '@angular/common/http';
+import { Inject, Injectable, InjectionToken } from '@angular/core';
+import { Observable, map } from 'rxjs';

-import { CvssReceipt } from './cvss.models';
+import { AuthSessionStore } from '../auth/auth-session.store';
+import {
+  CvssEvidenceDto,
+  CvssHistoryDto,
+  CvssHistoryEntry,
+  CvssReceipt,
+  CvssReceiptDto,
+  CvssScoresDto,
+  CvssEvidenceItem,
+} from './cvss.models';
+import { generateTraceId } from './trace.util';
+
+export const CVSS_API_BASE_URL = new InjectionToken<string>('CVSS_API_BASE_URL');

-/**
- * Placeholder CVSS client until Policy Gateway endpoint is wired.
- * Emits deterministic sample data for UI development and tests.
- */
 @Injectable({
   providedIn: 'root',
 })
 export class CvssClient {
+  constructor(
+    private readonly http: HttpClient,
+    private readonly authSession: AuthSessionStore,
+    @Inject(CVSS_API_BASE_URL) private readonly baseUrl: string
+  ) {}
+
   getReceipt(receiptId: string): Observable<CvssReceipt> {
-    const sample: CvssReceipt = {
-      receiptId,
-      vulnerabilityId: 'CVE-2025-1234',
-      createdAt: '2025-12-05T12:00:00Z',
-      createdBy: 'analyst@example.org',
+    const tenant = this.resolveTenant();
+    const headers = this.buildHeaders(tenant);
+    const url = `${this.baseUrl}/receipts/${encodeURIComponent(receiptId)}`;
+
+    return this.http
+      .get<CvssReceiptDto>(url, { headers })
+      .pipe(map((dto) => this.toView(dto)));
+  }
+
+  private toView(dto: CvssReceiptDto): CvssReceipt {
+    const scores: CvssScoresDto = dto.scores ?? ({} as CvssScoresDto);
+    const policyRef = dto.policyRef;
+
+    const overall =
+      scores.effectiveScore ??
+      scores.fullScore ??
+      scores.environmentalScore ??
+      scores.threatScore ??
+      scores.baseScore;
+
+    return {
+      receiptId: dto.receiptId,
+      vulnerabilityId: dto.vulnerabilityId,
+      createdAt: dto.createdAt,
+      createdBy: dto.createdBy,
       score: {
-        base: 7.6,
-        threat: 7.6,
-        environmental: 8.1,
-        overall: 8.1,
-        vector:
-          'CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:P/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H',
-        severity: 'High',
+        base: scores.baseScore,
+        threat: scores.threatScore ?? scores.baseScore,
+        environmental: scores.environmentalScore ?? scores.threatScore ?? scores.baseScore,
+        full: scores.fullScore ?? scores.environmentalScore ?? scores.threatScore ?? scores.baseScore,
+        overall: overall ?? 0,
+        effectiveType: scores.effectiveScoreType,
+        vector: dto.vectorString,
+        severity: dto.severity,
       },
       policy: {
-        policyId: 'policy-bundle-main',
-        policyHash: 'sha256:deadbeefcafec0ffee1234',
-        version: '1.0.0',
+        policyId: policyRef?.policyId ?? 'unknown',
+        policyHash: policyRef?.hash,
+        version: policyRef?.version,
+        activatedAt: policyRef?.activatedAt,
       },
-      evidence: [
-        {
-          id: 'ev-001',
-          description: 'Upstream advisory references vulnerable TLS parser',
-          source: 'NVD',
-        },
-        {
-          id: 'ev-002',
-          description: 'Vendor bulletin confirms threat active in region',
-          source: 'Vendor',
-        },
-      ],
-      history: [
-        {
-          version: 1,
-          changedAt: '2025-12-05T12:00:00Z',
-          changedBy: 'analyst@example.org',
-          reason: 'Initial scoring',
-        },
-      ],
+      evidence: (dto.evidence ?? []).map((item, idx) => this.mapEvidence(item, idx)),
+      history: (dto.history ?? []).map((entry, idx) => this.mapHistory(entry, idx, dto)),
     };
+  }

-    return of(sample);
+  private mapEvidence(item: CvssEvidenceDto, index: number): CvssEvidenceItem {
+    const id = item.uri ?? item.dsseRef ?? `evidence-${index + 1}`;
+    return {
+      id,
+      description: item.description ?? item.type ?? item.uri ?? 'Evidence item',
+      source: item.source,
+      uri: item.uri,
+      dsseRef: item.dsseRef,
+      collectedAt: item.collectedAt,
+      retentionClass: item.retentionClass,
+      isAuthoritative: item.isAuthoritative,
+      verifiedAt: item.verifiedAt,
+      isRedacted: item.isRedacted,
+    };
+  }
+
+  private mapHistory(entry: CvssHistoryDto, index: number, dto: CvssReceiptDto): CvssHistoryEntry {
+    return {
+      id: entry.historyId ?? `history-${index + 1}`,
+      changedAt: entry.createdAt ?? dto.modifiedAt ?? dto.createdAt,
+      changedBy: entry.actor ?? dto.modifiedBy ?? dto.createdBy,
+      reason: entry.reason,
+      field: entry.field,
+      previousValue: entry.previousValue,
+      newValue: entry.newValue,
+      referenceUri: entry.referenceUri,
+    };
+  }
+
+  private buildHeaders(tenantId: string): HttpHeaders {
+    const headers = new HttpHeaders({ 'X-Stella-Tenant': tenantId, 'X-Stella-Trace-Id': generateTraceId() });
+    return headers;
+  }
+
+  private resolveTenant(): string {
+    const tenant = this.authSession.getActiveTenantId();
+    if (!tenant) {
+      throw new Error('CvssClient requires an active tenant identifier.');
+    }
+    return tenant;
   }
 }
@@ -1,29 +1,112 @@
+export interface CvssScoresDto {
+  readonly baseScore: number;
+  readonly threatScore?: number;
+  readonly environmentalScore?: number;
+  readonly fullScore?: number;
+  readonly effectiveScore: number;
+  readonly effectiveScoreType: string;
+}
+
+export interface CvssPolicyRefDto {
+  readonly policyId: string;
+  readonly version: string;
+  readonly hash: string;
+  readonly activatedAt?: string;
+}
+
+export interface CvssEvidenceDto {
+  readonly type?: string;
+  readonly uri?: string;
+  readonly description?: string;
+  readonly source?: string;
+  readonly collectedAt?: string;
+  readonly dsseRef?: string;
+  readonly isAuthoritative?: boolean;
+  readonly isRedacted?: boolean;
+  readonly verifiedAt?: string;
+  readonly retentionClass?: string;
+}
+
+export interface CvssHistoryDto {
+  readonly historyId: string;
+  readonly field?: string;
+  readonly previousValue?: string;
+  readonly newValue?: string;
+  readonly reason?: string;
+  readonly referenceUri?: string;
+  readonly actor?: string;
+  readonly createdAt?: string;
+}
+
+export interface CvssReceiptDto {
+  readonly receiptId: string;
+  readonly schemaVersion?: string;
+  readonly format?: string;
+  readonly vulnerabilityId: string;
+  readonly tenantId?: string;
+  readonly createdAt: string;
+  readonly createdBy: string;
+  readonly modifiedAt?: string;
+  readonly modifiedBy?: string;
+  readonly cvssVersion?: string;
+  readonly baseMetrics?: unknown;
+  readonly threatMetrics?: unknown;
+  readonly environmentalMetrics?: unknown;
+  readonly supplementalMetrics?: unknown;
+  readonly scores: CvssScoresDto;
+  readonly vectorString: string;
+  readonly severity: string;
+  readonly policyRef: CvssPolicyRefDto;
+  readonly evidence?: readonly CvssEvidenceDto[];
+  readonly exportHash?: string;
+  readonly attestationRefs?: readonly string[];
+  readonly inputHash?: string;
+  readonly history?: readonly CvssHistoryDto[];
+  readonly amendsReceiptId?: string;
+  readonly supersedesReceiptId?: string;
+  readonly isActive?: boolean;
+}
+
 export interface CvssScoreBreakdown {
-  readonly base: number;
-  readonly threat: number;
-  readonly environmental: number;
+  readonly base?: number;
+  readonly threat?: number;
+  readonly environmental?: number;
+  readonly full?: number;
   readonly overall: number;
+  readonly effectiveType?: string;
   readonly vector: string;
   readonly severity: string;
 }

 export interface CvssPolicySummary {
   readonly policyId: string;
-  readonly policyHash: string;
+  readonly policyHash?: string;
   readonly version?: string;
+  readonly activatedAt?: string;
 }

 export interface CvssEvidenceItem {
   readonly id: string;
-  readonly description: string;
-  readonly source: string;
+  readonly description?: string;
+  readonly source?: string;
+  readonly uri?: string;
+  readonly dsseRef?: string;
+  readonly collectedAt?: string;
+  readonly retentionClass?: string;
+  readonly isAuthoritative?: boolean;
+  readonly verifiedAt?: string;
+  readonly isRedacted?: boolean;
 }

 export interface CvssHistoryEntry {
-  readonly version: number;
+  readonly id: string;
   readonly changedAt: string;
   readonly changedBy: string;
   readonly reason?: string;
+  readonly field?: string;
+  readonly previousValue?: string;
+  readonly newValue?: string;
+  readonly referenceUri?: string;
 }

 export interface CvssReceipt {
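With `base`, `threat`, and `environmental` now optional on `CvssScoreBreakdown`, every consumer needs a consistent fallback when a score tier is absent. A minimal helper along these lines would keep the 'n/a' formatting used by the template below in one place (the helper is illustrative only, not part of this commit):

    // Illustrative only: format an optional CVSS score tier for display.
    function formatScore(value: number | undefined): string {
      return value === undefined ? 'n/a' : value.toFixed(1);
    }

    formatScore(breakdown.environmental); // '8.1' or 'n/a'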
@@ -7,14 +7,17 @@
     <span class="cvss-receipt__id">#{{ receipt.receiptId }}</span>
   </h1>
   <p class="cvss-receipt__meta">
-    Created {{ receipt.createdAt }} by {{ receipt.createdBy }} · Policy
+    Created {{ receipt.createdAt }} by {{ receipt.createdBy }} - Policy
     {{ receipt.policy.policyId }} ({{ receipt.policy.version ?? 'v1' }})
   </p>
 </div>
 <div class="cvss-receipt__score">
   <div class="cvss-score-badge" [class.cvss-score-badge--critical]="receipt.score.overall >= 9">
     {{ receipt.score.overall | number : '1.1-1' }}
-    <span class="cvss-score-badge__label">{{ receipt.score.severity }}</span>
+    <span class="cvss-score-badge__label">
+      {{ receipt.score.severity }}
+      <span class="cvss-score-badge__type">({{ receipt.score.effectiveType ?? 'Effective' }})</span>
+    </span>
   </div>
   <p class="cvss-receipt__vector">{{ receipt.score.vector }}</p>
 </div>
@@ -47,19 +50,19 @@
 <section class="cvss-panel" *ngIf="activeTab === 'base'">
   <h2>Base Metrics</h2>
-  <p>Base score: {{ receipt.score.base | number : '1.1-1' }}</p>
+  <p>Base score: {{ receipt.score.base ?? 'n/a' }}</p>
   <p>Vector: {{ receipt.score.vector }}</p>
 </section>

 <section class="cvss-panel" *ngIf="activeTab === 'threat'">
   <h2>Threat Metrics</h2>
-  <p>Threat-adjusted score: {{ receipt.score.threat | number : '1.1-1' }}</p>
+  <p>Threat-adjusted score: {{ receipt.score.threat ?? 'n/a' }}</p>
   <p>Vector: {{ receipt.score.vector }}</p>
 </section>

 <section class="cvss-panel" *ngIf="activeTab === 'environmental'">
   <h2>Environmental Metrics</h2>
-  <p>Environmental score: {{ receipt.score.environmental | number : '1.1-1' }}</p>
+  <p>Environmental score: {{ receipt.score.environmental ?? 'n/a' }}</p>
   <p>Vector: {{ receipt.score.vector }}</p>
 </section>
@@ -69,7 +72,8 @@
   <li *ngFor="let item of receipt.evidence; trackBy: trackById">
     <p class="evidence__id">{{ item.id }}</p>
     <p>{{ item.description }}</p>
-    <p class="evidence__source">Source: {{ item.source }}</p>
+    <p class="evidence__source">Source: {{ item.source ?? 'unknown' }}</p>
+    <p *ngIf="item.uri" class="evidence__uri">{{ item.uri }}</p>
   </li>
 </ul>
 </section>
@@ -78,16 +82,17 @@
   <h2>Policy</h2>
   <p>Policy ID: {{ receipt.policy.policyId }}</p>
   <p>Version: {{ receipt.policy.version ?? 'v1' }}</p>
-  <p>Hash: {{ receipt.policy.policyHash }}</p>
+  <p>Hash: {{ receipt.policy.policyHash ?? 'n/a' }}</p>
 </section>

 <section class="cvss-panel" *ngIf="activeTab === 'history'">
   <h2>History</h2>
   <ul>
-    <li *ngFor="let entry of receipt.history">
+    <li *ngFor="let entry of receipt.history; trackBy: trackById">
       <p>
-        v{{ entry.version }} · {{ entry.changedAt }} by {{ entry.changedBy }}
-        <span *ngIf="entry.reason">— {{ entry.reason }}</span>
+        {{ entry.changedAt }} by {{ entry.changedBy }}
+        <span *ngIf="entry.reason">- {{ entry.reason }}</span>
+        <span *ngIf="entry.field"> ({{ entry.field }} -> {{ entry.newValue ?? 'updated' }})</span>
       </p>
     </li>
   </ul>
@@ -18,7 +18,9 @@ describe(CvssReceiptComponent.name, () => {
       base: 7.6,
       threat: 7.6,
       environmental: 8.1,
+      full: 8.1,
       overall: 8.1,
+      effectiveType: 'Environmental',
       vector: 'CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:P/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H',
       severity: 'High',
     },
@@ -28,7 +30,14 @@ describe(CvssReceiptComponent.name, () => {
       version: '1.0.0',
     },
     evidence: [],
-    history: [],
+    history: [
+      {
+        id: 'history-1',
+        changedAt: '2025-12-05T12:00:00Z',
+        changedBy: 'analyst@example.org',
+        reason: 'Initial scoring',
+      },
+    ],
   };

   beforeEach(async () => {
@@ -29,7 +29,7 @@ export class CvssReceiptComponent implements OnInit {
     );
   }

-  trackById(_: number, item: { id?: string }): string | undefined {
-    return item.id;
+  trackById(index: number, item: { id?: string }): string {
+    return item.id ?? `${index}`;
   }
 }
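The widened `trackById` now serves both the evidence and history lists and satisfies Angular's `TrackByFunction` contract: it always returns a defined key, falling back to the row index when an item carries no `id`. A standalone sketch of the same pattern (assuming a current Angular version; this is not the committed code itself):

    import { TrackByFunction } from '@angular/core';

    // Same identity rule as the component method: prefer a stable id,
    // fall back to the stringified index so change detection stays stable.
    const trackById: TrackByFunction<{ id?: string }> = (index, item) =>
      item.id ?? `${index}`;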