From 7c24ed96ee4ae2ae87c7ad94c670f2e4c8ecbb33 Mon Sep 17 00:00:00 2001 From: StellaOps Bot Date: Sun, 7 Dec 2025 22:49:53 +0200 Subject: [PATCH] up --- .../alerts/export-center-alerts.yaml | 164 +++ .../telemetry/dashboards/export-center.json | 638 ++++++++++ .../SPRINT_0113_0001_0002_concelier_ii.md | 7 +- docs/implplan/SPRINT_0125_0001_0001_mirror.md | 3 +- .../SPRINT_0161_0001_0001_evidencelocker.md | 3 +- .../SPRINT_0163_0001_0001_exportcenter_ii.md | 24 +- .../SPRINT_0190_0001_0001_cvss_v4_receipts.md | 3 +- .../SPRINT_0506_0001_0001_ops_devops_iv.md | 3 +- ...001_0001_fips_eidas_kcmvp_pq_enablement.md | 3 +- .../SPRINT_0114_0001_0003_concelier_iii.md | 0 .../SPRINT_0115_0001_0004_concelier_iv.md | 0 .../SPRINT_0117_0001_0006_concelier_vi.md | 0 .../SPRINT_0119_0001_0004_excititor_iv.md | 0 .../SPRINT_0119_0001_0005_excititor_v.md | 0 .../SPRINT_0119_0001_0006_excititor_vi.md | 0 .../SPRINT_0120_0001_0001_policy_reasoning.md | 0 .../SPRINT_0121_0001_0001_policy_reasoning.md | 0 .../SPRINT_0121_0001_0003_excititor_iii.md | 0 .../SPRINT_0122_0001_0001_policy_reasoning.md | 0 .../SPRINT_0122_0001_0004_excititor_iv.md | 0 .../SPRINT_0123_0001_0001_policy_reasoning.md | 0 .../SPRINT_0123_0001_0005_excititor_v.md | 0 .../SPRINT_0124_0001_0001_policy_reasoning.md | 0 .../SPRINT_0124_0001_0006_excititor_vi.md | 0 .../SPRINT_0125_0001_0001_policy_reasoning.md | 0 .../SPRINT_0126_0001_0001_policy_reasoning.md | 0 .../SPRINT_0127_0001_0001_policy_reasoning.md | 0 .../SPRINT_0128_0001_0001_policy_reasoning.md | 0 .../SPRINT_0129_0001_0001_policy_reasoning.md | 0 .../SPRINT_0133_0001_0001_scanner_surface.md | 0 ...NT_0134_0001_0001_native_analyzer_fixes.md | 0 .../SPRINT_0134_0001_0001_scanner_surface.md | 0 ...0135_0001_0001_native_testing_framework.md | 0 .../SPRINT_0135_0001_0001_scanner_surface.md | 0 .../SPRINT_0139_0001_0001_scanner_bun.md | 0 ...0140_0001_0001_scanner_java_enhancement.md | 0 .../SPRINT_0141_0001_0001_graph_indexer.md | 0 
..._0144_0001_0001_zastava_runtime_signals.md | 0 .../SPRINT_0150_0001_0001_mirror_dsse.md | 0 .../SPRINT_0150_0001_0002_mirror_time.md | 0 .../SPRINT_0150_0001_0003_mirror_orch.md | 0 .../SPRINT_0152_0001_0002_orchestrator_ii.md | 0 .../SPRINT_0154_0001_0001_packsregistry.md | 0 .../SPRINT_0157_0001_0001_taskrunner_i.md | 0 ...RINT_0157_0001_0002_taskrunner_blockers.md | 0 .../SPRINT_0162_0001_0001_exportcenter_i.md | 35 +- .../SPRINT_0164_0001_0003_exportcenter_iii.md | 0 ..._0170_0001_0001_notifications_telemetry.md | 0 .../SPRINT_0172_0001_0002_notifier_ii.md | 0 .../SPRINT_0202_0001_0002_cli_ii.md | 0 .../SPRINT_0207_0001_0001_graph.md | 0 .../SPRINT_0210_0001_0002_ui_ii.md | 0 .../SPRINT_0215_0001_0004_web_iv.md | 0 .../SPRINT_0301_0001_0001_docs_md_i.md | 0 .../SPRINT_0306_0001_0006_docs_tasks_md_vi.md | 0 ...T_0317_0001_0001_docs_modules_concelier.md | 0 .../SPRINT_0500_0001_0001_ops_offline.md | 0 .../SPRINT_0508_0001_0001_ops_offline_kit.md | 0 .../SPRINT_0509_0001_0001_samples.md | 0 ..._0001_0000_postgres_conversion_overview.md | 0 ...INT_3400_0001_0001_postgres_foundations.md | 0 ...PRINT_3401_0001_0001_postgres_authority.md | 0 ...PRINT_3402_0001_0001_postgres_scheduler.md | 0 .../SPRINT_3403_0001_0001_postgres_notify.md | 0 .../SPRINT_3404_0001_0001_postgres_policy.md | 0 ...3405_0001_0001_postgres_vulnerabilities.md | 0 ...PRINT_3406_0001_0001_postgres_vex_graph.md | 0 ..._0001_0001_postgres_migration_lifecycle.md | 0 ...409_0001_0001_issuer_directory_postgres.md | 0 docs/implplan/tasks-all.md | 4 +- .../openapi/export-center.v1.yaml | 663 ++++++++++ scripts/mirror/README.md | 1 + .../Signing/AttestorSigningKeyRegistry.cs | 4 + .../AttestorVerificationServiceTests.cs | 115 ++ .../StellaOps.Cli/Commands/CommandFactory.cs | 49 + .../StellaOps.Cli/Commands/CommandHandlers.cs | 555 ++++---- .../Services/AttestationBundleVerifier.cs | 533 ++++++++ .../Services/DevPortalBundleVerifier.cs | 380 ++++++ .../Services/IAttestationBundleVerifier.cs | 29 + 
.../Services/IDevPortalBundleVerifier.cs | 19 + .../Models/AttestationBundleModels.cs | 126 ++ .../AttestationBundleVerifierTests.cs | 406 ++++++ .../Services/DevPortalBundleVerifierTests.cs | 316 +++++ src/Concelier/Directory.Build.props | 32 + .../Fetch/RawDocumentStorage.cs | 2 +- .../UbuntuConnector.cs | 2 +- .../Linksets/PolicyAuthSignalFactory.cs | 38 + .../MongoCompat/Bson.cs | 400 +++--- .../MongoCompat/DriverStubs.cs | 54 +- .../MongoCompat/StorageStubs.cs | 370 +++++- .../Internal/CertCcMapperTests.cs | 2 + .../Common/SourceStateSeedProcessorTests.cs | 4 +- .../SuseMapperTests.cs | 2 +- .../Osv/OsvConflictFixtureTests.cs | 2 + .../RuBduMapperTests.cs | 2 +- .../RuNkckiMapperTests.cs | 2 +- ...ExporterDependencyInjectionRoutineTests.cs | 101 +- .../JsonFeedExporterTests.cs | 1133 ++++++++--------- .../TrivyDbFeedExporterTests.cs | 13 +- .../AdvisoryMergeServiceTests.cs | 433 ++++--- ...StellaOps.Concelier.RawModels.Tests.csproj | 6 +- .../Aoc/AocVerifyRegressionTests.cs | 3 +- .../Aoc/LargeBatchIngestTests.cs | 13 +- .../Services/AdvisoryChunkBuilderTests.cs | 22 + .../EvidenceBundlePackagingService.cs | 7 +- .../Services/EvidencePortableBundleService.cs | 7 +- .../EvidenceBundlePackagingServiceTests.cs | 147 +++ .../EvidencePortableBundleServiceTests.cs | 60 + .../ExportCenterClientTests.cs | 299 +++++ .../ExportDownloadHelperTests.cs | 170 +++ .../ExportJobLifecycleHelperTests.cs | 182 +++ ...StellaOps.ExportCenter.Client.Tests.csproj | 33 + .../xunit.runner.json | 4 + .../ExportCenterClient.cs | 310 +++++ .../ExportCenterClientOptions.cs | 22 + .../Extensions/ServiceCollectionExtensions.cs | 93 ++ .../IExportCenterClient.cs | 143 +++ .../Lifecycle/ExportJobLifecycleHelper.cs | 257 ++++ .../Models/ExportModels.cs | 152 +++ .../StellaOps.ExportCenter.Client.csproj | 17 + .../Streaming/ExportDownloadHelper.cs | 175 +++ .../AttestationBundleBuilder.cs | 299 +++++ .../AttestationBundleModels.cs | 71 ++ .../BootstrapPack/BootstrapPackBuilder.cs | 550 ++++++++ 
.../BootstrapPack/BootstrapPackModels.cs | 110 ++ .../MirrorBundle/MirrorBundleBuilder.cs | 611 +++++++++ .../MirrorBundle/MirrorBundleModels.cs | 246 ++++ .../MirrorBundle/MirrorBundleSigning.cs | 188 +++ .../ExportNotificationEmitter.cs | 477 +++++++ .../Notifications/ExportNotificationModels.cs | 133 ++ .../Notifications/ExportWebhookClient.cs | 207 +++ .../OfflineKit/OfflineKitDistributor.cs | 289 +++++ .../OfflineKit/OfflineKitModels.cs | 120 ++ .../OfflineKit/OfflineKitPackager.cs | 282 ++++ .../PortableEvidenceExportBuilder.cs | 338 +++++ .../PortableEvidenceExportModels.cs | 63 + .../AttestationBundleBuilderTests.cs | 559 ++++++++ .../BootstrapPackBuilderTests.cs | 359 ++++++ .../DeprecatedEndpointsRegistryTests.cs | 95 ++ .../DeprecationHeaderExtensionsTests.cs | 130 ++ .../Deprecation/DeprecationInfoTests.cs | 72 ++ .../ExportNotificationEmitterTests.cs | 552 ++++++++ .../MirrorBundleBuilderTests.cs | 396 ++++++ .../MirrorBundleSigningTests.cs | 159 +++ .../OfflineKitDistributorTests.cs | 290 +++++ .../OfflineKitPackagerTests.cs | 326 +++++ .../OpenApiDiscoveryEndpointsTests.cs | 185 +++ .../PortableEvidenceExportBuilderTests.cs | 386 ++++++ .../StellaOps.ExportCenter.Tests.csproj | 22 +- .../Attestation/AttestationEndpoints.cs | 147 +++ .../AttestationServiceCollectionExtensions.cs | 50 + .../Attestation/ExportAttestationModels.cs | 192 +++ .../Attestation/ExportAttestationService.cs | 309 +++++ .../Attestation/ExportAttestationSigner.cs | 208 +++ .../Attestation/IExportAttestationService.cs | 53 + .../Attestation/IExportAttestationSigner.cs | 64 + .../IPromotionAttestationAssembler.cs | 97 ++ .../PromotionAttestationAssembler.cs | 612 +++++++++ .../PromotionAttestationEndpoints.cs | 213 ++++ .../Attestation/PromotionAttestationModels.cs | 354 +++++ .../DeprecatedEndpointsRegistry.cs | 68 + .../DeprecationHeaderExtensions.cs | 125 ++ .../Deprecation/DeprecationInfo.cs | 27 + .../DeprecationNotificationService.cs | 106 ++ 
.../DeprecationRouteBuilderExtensions.cs | 62 + ...idenceLockerServiceCollectionExtensions.cs | 203 +++ .../ExportEvidenceLockerClient.cs | 386 ++++++ .../EvidenceLocker/ExportEvidenceModels.cs | 186 +++ .../ExportMerkleTreeCalculator.cs | 93 ++ .../IExportEvidenceLockerClient.cs | 58 + .../Incident/ExportIncidentEvents.cs | 167 +++ .../Incident/ExportIncidentManager.cs | 535 ++++++++ .../Incident/ExportIncidentModels.cs | 332 +++++ .../Incident/IExportIncidentManager.cs | 98 ++ .../Incident/IncidentEndpoints.cs | 215 ++++ .../IncidentServiceCollectionExtensions.cs | 31 + .../OpenApiDiscoveryEndpoints.cs | 261 ++++ .../Program.cs | 74 +- .../RiskBundle/IRiskBundleJobHandler.cs | 55 + .../RiskBundle/RiskBundleEndpoints.cs | 142 +++ .../RiskBundle/RiskBundleJobHandler.cs | 537 ++++++++ .../RiskBundle/RiskBundleJobModels.cs | 395 ++++++ .../RiskBundleServiceCollectionExtensions.cs | 37 + .../StellaOps.ExportCenter.WebService.csproj | 2 + .../Telemetry/ExportActivityExtensions.cs | 155 +++ .../Telemetry/ExportLoggerExtensions.cs | 138 ++ .../Telemetry/ExportRunTelemetryContext.cs | 221 ++++ .../Telemetry/ExportTelemetry.cs | 286 +++++ .../TelemetryServiceCollectionExtensions.cs | 68 + .../Timeline/ExportTimelineEventTypes.cs | 14 + .../Timeline/ExportTimelineEvents.cs | 186 +++ .../Timeline/ExportTimelinePublisher.cs | 469 +++++++ .../Timeline/IExportTimelinePublisher.cs | 86 ++ .../TimelineServiceCollectionExtensions.cs | 73 ++ src/Mirror/StellaOps.Mirror.Creator/TASKS.md | 1 + .../schedule-export-center-run.sh | 51 + .../Internal/NodePackageCollector.cs | 32 + .../Internal/NodePnpDataLoader.cs | 17 +- .../lang/node/yarn-pnp/.pnp.data.json | 18 + .../Fixtures/lang/node/yarn-pnp/expected.json | 8 +- .../ServiceCollectionExtensions.cs | 50 +- .../Fixtures/TestCryptoFactory.Sm.cs | 19 +- .../Signing/DualSignTests.cs | 39 +- .../Signing/Sm2SigningTests.cs | 6 +- 204 files changed, 23313 insertions(+), 1430 deletions(-) create mode 100644 
deploy/telemetry/alerts/export-center-alerts.yaml create mode 100644 deploy/telemetry/dashboards/export-center.json rename docs/implplan/{ => archived}/SPRINT_0114_0001_0003_concelier_iii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0115_0001_0004_concelier_iv.md (100%) rename docs/implplan/{ => archived}/SPRINT_0117_0001_0006_concelier_vi.md (100%) rename docs/implplan/{ => archived}/SPRINT_0119_0001_0004_excititor_iv.md (100%) rename docs/implplan/{ => archived}/SPRINT_0119_0001_0005_excititor_v.md (100%) rename docs/implplan/{ => archived}/SPRINT_0119_0001_0006_excititor_vi.md (100%) rename docs/implplan/{ => archived}/SPRINT_0120_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0121_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0121_0001_0003_excititor_iii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0122_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0122_0001_0004_excititor_iv.md (100%) rename docs/implplan/{ => archived}/SPRINT_0123_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0123_0001_0005_excititor_v.md (100%) rename docs/implplan/{ => archived}/SPRINT_0124_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0124_0001_0006_excititor_vi.md (100%) rename docs/implplan/{ => archived}/SPRINT_0125_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0126_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0127_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0128_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0129_0001_0001_policy_reasoning.md (100%) rename docs/implplan/{ => archived}/SPRINT_0133_0001_0001_scanner_surface.md (100%) rename docs/implplan/{ => archived}/SPRINT_0134_0001_0001_native_analyzer_fixes.md (100%) rename docs/implplan/{ => 
archived}/SPRINT_0134_0001_0001_scanner_surface.md (100%) rename docs/implplan/{ => archived}/SPRINT_0135_0001_0001_native_testing_framework.md (100%) rename docs/implplan/{ => archived}/SPRINT_0135_0001_0001_scanner_surface.md (100%) rename docs/implplan/{ => archived}/SPRINT_0139_0001_0001_scanner_bun.md (100%) rename docs/implplan/{ => archived}/SPRINT_0140_0001_0001_scanner_java_enhancement.md (100%) rename docs/implplan/{ => archived}/SPRINT_0141_0001_0001_graph_indexer.md (100%) rename docs/implplan/{ => archived}/SPRINT_0144_0001_0001_zastava_runtime_signals.md (100%) rename docs/implplan/{ => archived}/SPRINT_0150_0001_0001_mirror_dsse.md (100%) rename docs/implplan/{ => archived}/SPRINT_0150_0001_0002_mirror_time.md (100%) rename docs/implplan/{ => archived}/SPRINT_0150_0001_0003_mirror_orch.md (100%) rename docs/implplan/{ => archived}/SPRINT_0152_0001_0002_orchestrator_ii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0154_0001_0001_packsregistry.md (100%) rename docs/implplan/{ => archived}/SPRINT_0157_0001_0001_taskrunner_i.md (100%) rename docs/implplan/{ => archived}/SPRINT_0157_0001_0002_taskrunner_blockers.md (100%) rename docs/implplan/{ => archived}/SPRINT_0162_0001_0001_exportcenter_i.md (67%) rename docs/implplan/{ => archived}/SPRINT_0164_0001_0003_exportcenter_iii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0170_0001_0001_notifications_telemetry.md (100%) rename docs/implplan/{ => archived}/SPRINT_0172_0001_0002_notifier_ii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0202_0001_0002_cli_ii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0207_0001_0001_graph.md (100%) rename docs/implplan/{ => archived}/SPRINT_0210_0001_0002_ui_ii.md (100%) rename docs/implplan/{ => archived}/SPRINT_0215_0001_0004_web_iv.md (100%) rename docs/implplan/{ => archived}/SPRINT_0301_0001_0001_docs_md_i.md (100%) rename docs/implplan/{ => archived}/SPRINT_0306_0001_0006_docs_tasks_md_vi.md (100%) rename docs/implplan/{ => 
archived}/SPRINT_0317_0001_0001_docs_modules_concelier.md (100%) rename docs/implplan/{ => archived}/SPRINT_0500_0001_0001_ops_offline.md (100%) rename docs/implplan/{ => archived}/SPRINT_0508_0001_0001_ops_offline_kit.md (100%) rename docs/implplan/{ => archived}/SPRINT_0509_0001_0001_samples.md (100%) rename docs/implplan/{ => archived}/SPRINT_3400_0001_0000_postgres_conversion_overview.md (100%) rename docs/implplan/{ => archived}/SPRINT_3400_0001_0001_postgres_foundations.md (100%) rename docs/implplan/{ => archived}/SPRINT_3401_0001_0001_postgres_authority.md (100%) rename docs/implplan/{ => archived}/SPRINT_3402_0001_0001_postgres_scheduler.md (100%) rename docs/implplan/{ => archived}/SPRINT_3403_0001_0001_postgres_notify.md (100%) rename docs/implplan/{ => archived}/SPRINT_3404_0001_0001_postgres_policy.md (100%) rename docs/implplan/{ => archived}/SPRINT_3405_0001_0001_postgres_vulnerabilities.md (100%) rename docs/implplan/{ => archived}/SPRINT_3406_0001_0001_postgres_vex_graph.md (100%) rename docs/implplan/{ => archived}/SPRINT_3408_0001_0001_postgres_migration_lifecycle.md (100%) rename docs/implplan/{ => archived}/SPRINT_3409_0001_0001_issuer_directory_postgres.md (100%) create mode 100644 docs/modules/export-center/openapi/export-center.v1.yaml create mode 100644 src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs create mode 100644 src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs create mode 100644 src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs create mode 100644 src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs create mode 100644 src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs create mode 100644 src/Cli/__Tests/StellaOps.Cli.Tests/AttestationBundleVerifierTests.cs create mode 100644 src/Cli/__Tests/StellaOps.Cli.Tests/Services/DevPortalBundleVerifierTests.cs create mode 100644 src/Concelier/Directory.Build.props create mode 100644 
src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyAuthSignalFactory.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportCenterClientTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportDownloadHelperTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportJobLifecycleHelperTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/StellaOps.ExportCenter.Client.Tests.csproj create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/xunit.runner.json create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClient.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClientOptions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Extensions/ServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/IExportCenterClient.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Lifecycle/ExportJobLifecycleHelper.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Models/ExportModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/StellaOps.ExportCenter.Client.csproj create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Streaming/ExportDownloadHelper.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/AttestationBundle/AttestationBundleBuilder.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/AttestationBundle/AttestationBundleModels.cs create mode 100644 
src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackBuilder.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleBuilder.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleSigning.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationEmitter.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportWebhookClient.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitDistributor.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitPackager.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/PortableEvidence/PortableEvidenceExportBuilder.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/PortableEvidence/PortableEvidenceExportModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/AttestationBundleBuilderTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/BootstrapPackBuilderTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecatedEndpointsRegistryTests.cs create 
mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecationHeaderExtensionsTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecationInfoTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/ExportNotificationEmitterTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleBuilderTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleSigningTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitDistributorTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitPackagerTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OpenApiDiscoveryEndpointsTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/PortableEvidenceExportBuilderTests.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/AttestationEndpoints.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/AttestationServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/ExportAttestationModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/ExportAttestationService.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/ExportAttestationSigner.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IExportAttestationService.cs create mode 100644 
src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IExportAttestationSigner.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IPromotionAttestationAssembler.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationAssembler.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationEndpoints.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Deprecation/DeprecatedEndpointsRegistry.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Deprecation/DeprecationHeaderExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Deprecation/DeprecationInfo.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Deprecation/DeprecationNotificationService.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Deprecation/DeprecationRouteBuilderExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/EvidenceLockerServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportEvidenceLockerClient.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportEvidenceModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportMerkleTreeCalculator.cs create mode 100644 
src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/IExportEvidenceLockerClient.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentEvents.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentManager.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IExportIncidentManager.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentEndpoints.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/OpenApiDiscoveryEndpoints.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/IRiskBundleJobHandler.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleEndpoints.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobHandler.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobModels.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportActivityExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportLoggerExtensions.cs create mode 100644 
src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportRunTelemetryContext.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportTelemetry.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/TelemetryServiceCollectionExtensions.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEventTypes.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEvents.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelinePublisher.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/IExportTimelinePublisher.cs create mode 100644 src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/TimelineServiceCollectionExtensions.cs create mode 100644 src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/.pnp.data.json diff --git a/deploy/telemetry/alerts/export-center-alerts.yaml b/deploy/telemetry/alerts/export-center-alerts.yaml new file mode 100644 index 000000000..6d38be9d5 --- /dev/null +++ b/deploy/telemetry/alerts/export-center-alerts.yaml @@ -0,0 +1,164 @@ +# ExportCenter Alert Rules +# SLO Burn-rate alerts for export service reliability + +groups: + - name: export-center-slo + interval: 30s + rules: + # SLO: 99.5% success rate target + # Error budget: 0.5% (432 errors per day at 86400 requests/day) + + # Fast burn - 2% budget consumption in 1 hour (critical) + - alert: ExportCenterHighErrorBurnRate + expr: | + ( + sum(rate(export_runs_failed_total[1h])) + / + sum(rate(export_runs_total[1h])) + ) > (14.4 * 0.005) + for: 2m + labels: + severity: 
critical + service: export-center + slo: availability + annotations: + summary: "ExportCenter high error burn rate" + description: "Error rate is {{ $value | humanizePercentage }} over the last hour, consuming error budget at 14.4x the sustainable rate." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-error-rate" + + # Slow burn - 10% budget consumption in 6 hours (warning) + - alert: ExportCenterElevatedErrorBurnRate + expr: | + ( + sum(rate(export_runs_failed_total[6h])) + / + sum(rate(export_runs_total[6h])) + ) > (6 * 0.005) + for: 5m + labels: + severity: warning + service: export-center + slo: availability + annotations: + summary: "ExportCenter elevated error burn rate" + description: "Error rate is {{ $value | humanizePercentage }} over the last 6 hours, consuming error budget at 6x the sustainable rate." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/elevated-error-rate" + + - name: export-center-latency + interval: 30s + rules: + # SLO: 95% of exports complete within 120s + # Fast burn - p95 latency exceeding threshold + - alert: ExportCenterHighLatency + expr: | + histogram_quantile(0.95, + sum(rate(export_run_duration_seconds_bucket[5m])) by (le) + ) > 120 + for: 5m + labels: + severity: warning + service: export-center + slo: latency + annotations: + summary: "ExportCenter high latency" + description: "95th percentile export duration is {{ $value | humanizeDuration }}, exceeding 120s SLO target." 
+ runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-latency" + + # Critical latency - p99 exceeding 5 minutes + - alert: ExportCenterCriticalLatency + expr: | + histogram_quantile(0.99, + sum(rate(export_run_duration_seconds_bucket[5m])) by (le) + ) > 300 + for: 2m + labels: + severity: critical + service: export-center + slo: latency + annotations: + summary: "ExportCenter critical latency" + description: "99th percentile export duration is {{ $value | humanizeDuration }}, indicating severe performance degradation." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/critical-latency" + + - name: export-center-capacity + interval: 60s + rules: + # Queue buildup warning + - alert: ExportCenterHighConcurrency + expr: sum(export_runs_in_progress) > 50 + for: 5m + labels: + severity: warning + service: export-center + annotations: + summary: "ExportCenter high concurrency" + description: "{{ $value }} exports currently in progress. Consider scaling or investigating slow exports." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-concurrency" + + # Stuck exports - exports running longer than 30 minutes + - alert: ExportCenterStuckExports + expr: | + histogram_quantile(0.99, + sum(rate(export_run_duration_seconds_bucket{status!="completed"}[1h])) by (le) + ) > 1800 + for: 10m + labels: + severity: warning + service: export-center + annotations: + summary: "ExportCenter potentially stuck exports" + description: "Some exports may be stuck - 99th percentile duration for incomplete exports exceeds 30 minutes." 
+ runbook_url: "https://docs.stellaops.io/runbooks/export-center/stuck-exports" + + - name: export-center-errors + interval: 30s + rules: + # Specific error code spike + - alert: ExportCenterErrorCodeSpike + expr: | + sum by (error_code) ( + rate(export_runs_failed_total[5m]) + ) > 0.1 + for: 5m + labels: + severity: warning + service: export-center + annotations: + summary: "ExportCenter error code spike: {{ $labels.error_code }}" + description: "Error code {{ $labels.error_code }} is occurring at {{ $value | humanize }}/s rate." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/error-codes" + + # No successful exports in 15 minutes (when there is traffic) + - alert: ExportCenterNoSuccessfulExports + expr: | + ( + sum(rate(export_runs_total[15m])) > 0 + ) + and + ( + sum(rate(export_runs_success_total[15m])) == 0 + ) + for: 10m + labels: + severity: critical + service: export-center + annotations: + summary: "ExportCenter no successful exports" + description: "No exports have completed successfully in the last 15 minutes despite ongoing attempts." + runbook_url: "https://docs.stellaops.io/runbooks/export-center/no-successful-exports" + + - name: export-center-deprecation + interval: 5m + rules: + # Deprecated endpoint usage + - alert: ExportCenterDeprecatedEndpointUsage + expr: | + sum(rate(export_center_deprecated_endpoint_access_total[1h])) > 0 + for: 1h + labels: + severity: info + service: export-center + annotations: + summary: "Deprecated export endpoints still in use" + description: "Legacy /exports endpoints are still being accessed at {{ $value | humanize }}/s. Migration to v1 API recommended." 
+ runbook_url: "https://docs.stellaops.io/api/export-center/migration" diff --git a/deploy/telemetry/dashboards/export-center.json b/deploy/telemetry/dashboards/export-center.json new file mode 100644 index 000000000..0ba6d42cc --- /dev/null +++ b/deploy/telemetry/dashboards/export-center.json @@ -0,0 +1,638 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { "type": "grafana", "uid": "-- Grafana --" }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "ExportCenter service observability dashboard", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "panels": [], + "title": "Export Runs Overview", + "type": "row" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 }, + "id": 2, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))", + "legendFormat": "Total Runs", + "range": true, + "refId": "A" + } + ], + "title": "Total Export Runs", + "type": "stat" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": 
"thresholds" }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range]))", + "legendFormat": "Successful", + "range": true, + "refId": "A" + } + ], + "title": "Successful Runs", + "type": "stat" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null }, + { "color": "yellow", "value": 1 }, + { "color": "red", "value": 5 } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum(increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))", + "legendFormat": "Failed", + "range": true, + "refId": "A" + } + ], + "title": "Failed Runs", + "type": "stat" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { + 
"mode": "absolute", + "steps": [ + { "color": "red", "value": null }, + { "color": "yellow", "value": 95 }, + { "color": "green", "value": 99 } + ] + }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { "h": 4, "w": 4, "x": 12, "y": 1 }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "100 * sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range])) / sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))", + "legendFormat": "Success Rate", + "range": true, + "refId": "A" + } + ], + "title": "Success Rate", + "type": "stat" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 4, "w": 4, "x": 16, "y": 1 }, + "id": 6, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "10.0.0", + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum(export_runs_in_progress{tenant=~\"$tenant\"})", + "legendFormat": "In Progress", + "range": true, + "refId": "A" + } + ], + "title": "Runs In Progress", + "type": "stat" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + 
"axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { "group": "A", "mode": "none" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 5 }, + "id": 7, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "desc" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum by (export_type) (rate(export_runs_total{tenant=~\"$tenant\"}[5m]))", + "legendFormat": "{{export_type}}", + "range": true, + "refId": "A" + } + ], + "title": "Export Runs by Type (rate/5m)", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { "group": "A", "mode": "none" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ 
"color": "green", "value": null }] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 5 }, + "id": 8, + "options": { + "legend": { "calcs": ["mean", "max", "p95"], "displayMode": "table", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "desc" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "histogram_quantile(0.50, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "p50", + "range": true, + "refId": "A" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "histogram_quantile(0.95, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "p95", + "range": true, + "refId": "B" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "histogram_quantile(0.99, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "p99", + "range": true, + "refId": "C" + } + ], + "title": "Export Run Duration (latency percentiles)", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 13 }, + "id": 9, + "panels": [], + "title": "Artifacts & Bundle Sizes", + "type": "row" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 50, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "never", + "spanNulls": 
false, + "stacking": { "group": "A", "mode": "normal" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 14 }, + "id": 10, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "desc" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum by (artifact_type) (increase(export_artifacts_total{tenant=~\"$tenant\"}[1h]))", + "legendFormat": "{{artifact_type}}", + "range": true, + "refId": "A" + } + ], + "title": "Artifacts Exported by Type (per hour)", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { "group": "A", "mode": "none" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 14 }, + "id": 11, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "desc" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": 
"${datasource}" }, + "editorMode": "code", + "expr": "histogram_quantile(0.50, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "{{export_type}} p50", + "range": true, + "refId": "A" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "histogram_quantile(0.95, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "{{export_type}} p95", + "range": true, + "refId": "B" + } + ], + "title": "Bundle Size Distribution by Type", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 22 }, + "id": 12, + "panels": [], + "title": "Error Analysis", + "type": "row" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "hideFrom": { "legend": false, "tooltip": false, "viz": false } + }, + "mappings": [], + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 8, "x": 0, "y": 23 }, + "id": 13, + "options": { + "legend": { "displayMode": "table", "placement": "right", "showLegend": true }, + "pieType": "pie", + "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, + "tooltip": { "mode": "single", "sort": "none" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum by (error_code) (increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))", + "legendFormat": "{{error_code}}", + "range": true, + "refId": "A" + } + ], + "title": "Failures by Error Code", + "type": "piechart" + }, + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": 
"auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "never", + "spanNulls": false, + "stacking": { "group": "A", "mode": "none" }, + "thresholdsStyle": { "mode": "line" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null }, + { "color": "red", "value": 0.01 } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 16, "x": 8, "y": 23 }, + "id": 14, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "desc" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "editorMode": "code", + "expr": "sum(rate(export_runs_failed_total{tenant=~\"$tenant\"}[5m])) / sum(rate(export_runs_total{tenant=~\"$tenant\"}[5m]))", + "legendFormat": "Error Rate", + "range": true, + "refId": "A" + } + ], + "title": "Error Rate (5m window)", + "type": "timeseries" + } + ], + "refresh": "30s", + "schemaVersion": 38, + "style": "dark", + "tags": ["export-center", "stellaops"], + "templating": { + "list": [ + { + "current": {}, + "hide": 0, + "includeAll": false, + "multi": false, + "name": "datasource", + "options": [], + "query": "prometheus", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "type": "datasource" + }, + { + "allValue": ".*", + "current": {}, + "datasource": { "type": "prometheus", "uid": "${datasource}" }, + "definition": "label_values(export_runs_total, tenant)", + "hide": 0, + "includeAll": true, + "multi": true, + "name": "tenant", + "options": [], + "query": { "query": "label_values(export_runs_total, tenant)", "refId": "StandardVariableQuery" }, + "refresh": 2, + "regex": "", + 
"skipUrlSync": false, + "sort": 1, + "type": "query" + } + ] + }, + "time": { "from": "now-6h", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "ExportCenter Service", + "uid": "export-center-overview", + "version": 1, + "weekStart": "" +} diff --git a/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md b/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md index e77124485..4a8475ac6 100644 --- a/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md +++ b/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md @@ -47,13 +47,15 @@ | 13 | CONCELIER-LNM-21-201 | **DONE** (2025-12-06) | Endpoint implemented in Program.cs. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common modules. | Concelier WebService Guild · BE-Base Platform Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/observations` filters by alias/purl/source with strict tenant scopes; echoes upstream values + provenance fields only. | | 14 | CONCELIER-LNM-21-202 | **DONE** (2025-12-06) | Endpoints implemented: `/advisories/linksets` (paginated), `/advisories/linksets/export` (evidence bundles). No synthesis/merge - echoes upstream values only. | Concelier WebService Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/linksets`/`export`/`evidence` endpoints surface correlation + conflict payloads and `ERR_AGG_*` mapping; no synthesis/merge. | | 15 | CONCELIER-LNM-21-203 | **DONE** (2025-12-06) | Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints. Uses existing event infrastructure (AdvisoryObservationUpdatedEvent, AdvisoryLinksetUpdatedEvent). | Concelier WebService Guild · Platform Events Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Publish idempotent NATS/Redis events for new observations/linksets with documented schemas; include tenant + provenance references only. 
| -| 16 | CONCELIER-AIRGAP-56-001..58-001 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; proceed against frozen contracts once mirror bundle automation lands. | -| 17 | CONCELIER-CONSOLE-23-001..003 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 | Concelier Console Guild | Console advisory aggregation/search helpers; consume frozen schema and evidence bundle once upstream artefacts delivered. | +| 16 | CONCELIER-AIRGAP-56-001..58-001 | DONE (2025-12-07) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; deterministic NDJSON bundle builder + manifest/entry-trace validator and sealed-mode deploy runbook at `docs/runbooks/concelier-airgap-bundle-deploy.md` with sample bundle `out/mirror/thin/mirror-thin-m0-sample.tar.gz`. | +| 17 | CONCELIER-CONSOLE-23-001..003 | DONE (2025-12-07) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Console Guild | Console advisory aggregation/search helpers wired to LNM schema; consumption contract `docs/modules/concelier/operations/console-lnm-consumption.md`, fixtures in `docs/samples/console/`, hashes under `out/console/guardrails/`. | | 18 | FEEDCONN-ICSCISA-02-012 / KISA-02-008 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-FEEDCONN-ICS-KISA-PLAN | Concelier Feed Owners | Remediation refreshes for ICSCISA/KISA feeds; publish provenance + cadence. | ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | Marked CONCELIER-AIRGAP-56-001..58-001 DONE (artifacts from SPRINT_0110: `docs/runbooks/concelier-airgap-bundle-deploy.md`, `out/mirror/thin/mirror-thin-m0-sample.tar.gz`). 
| Project Mgmt | +| 2025-12-07 | Marked CONCELIER-CONSOLE-23-001..003 DONE (artifacts from SPRINT_0110: `docs/modules/concelier/operations/console-lnm-consumption.md`, `docs/samples/console/`, `out/console/guardrails/`). | Project Mgmt | | 2025-12-06 | **CONCELIER-LNM-21-203 DONE:** Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints in Program.cs. Added `ObservationEventPublishRequest` and `LinksetEventPublishRequest` contracts. Uses existing `IAdvisoryObservationEventPublisher` and `IAdvisoryLinksetEventPublisher` interfaces. Wave B now complete (tasks 12-15 all done). | Implementer | | 2025-12-06 | **CONCELIER-LNM-21-202 DONE:** Implemented `/advisories/linksets` GET endpoint (paginated, supports advisoryId/alias/source filters). Implemented `/advisories/linksets/export` GET endpoint (evidence bundles with full provenance). Maps linksets to LnmLinksetResponse format with conflicts and normalized data. | Implementer | | 2025-12-06 | **CONCELIER-LNM-21-201 DONE:** Implemented `/advisories/observations` GET endpoint in Program.cs. Supports alias/purl/cpe/id filtering with pagination (cursor/limit). Enforces tenant scopes via `X-Stella-Tenant` header. Returns observations with linkset aggregate (aliases, purls, cpes, references, scopes, relationships, confidence, conflicts). Uses `ObservationsPolicyName` authorization. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common. | Implementer | @@ -75,6 +77,7 @@ | 2025-11-23 | Captured build binlog for stalled WebService.Tests attempt at `out/ws-tests.binlog` for CI triage. | Concelier Core | | 2025-11-23 | Split CI runner blocker into DEVOPS-CONCELIER-CI-24-101 (SPRINT_0503_0001_0001_ops_devops_i); all CI/vstest-related blocks now point to that ops task. 
| Project Mgmt | | 2025-11-23 | Marked downstream tasks (GRAPH-24-101/28-102, LNM-21-004..203) BLOCKED pending CI/clean runner; local harness cannot compile or run tests (`invalid test source` / hang). Development awaiting CI resources. Split storage/backfill/object-store tasks into DEV (here) vs DEVOPS release items (10b/11b/12b) to avoid dev blockage. | Project Mgmt | +| 2025-12-07 | PREP-ART-56-001 / PREP-EVIDENCE-BDL-01 / PREP-CONSOLE-FIXTURES-29 confirmed DONE in archived Sprint 0110; moved CONCELIER-AIRGAP-56-001..58-001 and CONCELIER-CONSOLE-23-001..003 to TODO. | Project Mgmt | | 2025-11-23 | Imported CONCELIER-AIRGAP-56-001..58-001, CONCELIER-CONSOLE-23-001..003, FEEDCONN-ICSCISA-02-012/KISA-02-008 from SPRINT_0110; statuses remain BLOCKED pending mirror/console/feed artefacts. | Project Mgmt | | 2025-11-20 | Wired optional NATS transport for `advisory.observation.updated@1`; background worker dequeues Mongo outbox and publishes to configured stream/subject. | Implementer | | 2025-11-20 | Wired advisory.observation.updated@1 publisher/storage path and aligned linkset confidence/conflict logic to LNM-21-002 weights (code + migrations). | Implementer | diff --git a/docs/implplan/SPRINT_0125_0001_0001_mirror.md b/docs/implplan/SPRINT_0125_0001_0001_mirror.md index e40a05ffc..cb5f6e206 100644 --- a/docs/implplan/SPRINT_0125_0001_0001_mirror.md +++ b/docs/implplan/SPRINT_0125_0001_0001_mirror.md @@ -30,7 +30,7 @@ | 3 | MIRROR-CRT-57-001 | DONE (2025-11-23) | OCI layout/manifest emitted via `make-thin-v1.sh` when `OCI=1`; layer points to thin bundle tarball. | Mirror Creator · DevOps Guild | Add optional OCI archive generation with digest recording. | | 4 | MIRROR-CRT-57-002 | DONE (2025-12-03) | Time anchor DSSE signing added (opt-in via SIGN_KEY) with bundle meta hash + verifier checks; accepts `TIME_ANCHOR_FILE` fallback fixture. | Mirror Creator · AirGap Time Guild | Embed signed time-anchor metadata. 
| | 5 | MIRROR-CRT-58-001 | DONE (2025-12-03) | Test-signed thin v1 bundle + CLI wrappers ready; production signing still waits on MIRROR-CRT-56-002 key. | Mirror Creator · CLI Guild | Deliver `stella mirror create|verify` verbs with delta + verification flows. | -| 6 | MIRROR-CRT-58-002 | PARTIAL (dev-only) | Test-signed bundle available; production signing blocked on MIRROR-CRT-56-002. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. | +| 6 | MIRROR-CRT-58-002 | DOING (dev) | Production signing still blocked on MIRROR-CRT-56-002; dev scheduling script added. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. | | 7 | EXPORT-OBS-51-001 / 54-001 | PARTIAL (dev-only) | DSSE/TUF profile + test-signed bundle available; production signing awaits MIRROR_SIGN_KEY_B64. | Exporter Guild | Align Export Center workers with assembler output. | | 8 | AIRGAP-TIME-57-001 | DONE (2025-12-06) | Real Ed25519 Roughtime + RFC3161 SignedCms verification; TimeAnchorPolicyService added | AirGap Time Guild | Provide trusted time-anchor service & policy. | | 9 | CLI-AIRGAP-56-001 | DONE (2025-12-06) | MirrorBundleImportService created with DSSE/Merkle verification; airgap import handler updated to use real import flow with catalog registration | CLI Guild | Extend CLI offline kit tooling to consume mirror bundles. | @@ -42,6 +42,7 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | Added Export Center scheduling helper `schedule-export-center-run.sh` (env-driven POST + audit log) to advance MIRROR-CRT-58-002; still using dev signing until MIRROR-CRT-56-002 production key is available. | Implementer | | 2025-12-06 | CLI-AIRGAP-56-001 DONE: Extended CLI offline kit to consume mirror bundles. Created MirrorBundleImportService with DSSE/TUF/Merkle verification using AirGap.Importer module integration. 
Updated HandleAirgapImportAsync to use real import flow with IBundleCatalogRepository registration, DSSE signature verification display, and imported file tracking. Added project reference to StellaOps.AirGap.Importer, registered services in Program.cs. Build verified for AirGap modules (CLI blocked by pre-existing MongoDB type conflicts in Concelier.Storage.Postgres dependency). | Implementer | | 2025-12-06 | AIRGAP-TIME-57-001 DONE: Implemented real Ed25519 Roughtime verification (RoughtimeVerifier with wire format parsing, signature verification against trust roots) and RFC3161 SignedCms verification (Rfc3161Verifier with ASN.1 parsing, TSTInfo extraction, X509 chain validation). Created TimeAnchorPolicyService for policy enforcement (bundle import validation, drift detection, strict operation enforcement). Updated tests for both verifiers. Build verified (0 errors, 0 warnings). | Implementer | | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt | diff --git a/docs/implplan/SPRINT_0161_0001_0001_evidencelocker.md b/docs/implplan/SPRINT_0161_0001_0001_evidencelocker.md index b566a58ca..3b9b0ba14 100644 --- a/docs/implplan/SPRINT_0161_0001_0001_evidencelocker.md +++ b/docs/implplan/SPRINT_0161_0001_0001_evidencelocker.md @@ -32,7 +32,7 @@ | P4 | PREP-EVIDENCE-LOCKER-GUILD-BLOCKED-SCHEMAS-NO | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-schema-readiness-blockers.md`; awaiting AdvisoryAI/Orch envelopes. | Planning | BLOCKED (schemas not yet delivered).

Document artefact/deliverable for Evidence Locker Guild and publish location so downstream tasks can proceed. | | P5 | PREP-EVIDENCE-LOCKER-GUILD-REPLAY-DELIVERY-GU | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-replay-delivery-sync.md`; waiting on ledger retention defaults. | Planning | BLOCKED (awaiting schema signals).

Document artefact/deliverable for Evidence Locker Guild · Replay Delivery Guild and publish location so downstream tasks can proceed. | | 0 | ADV-ORCH-SCHEMA-LIB-161 | DONE | Shared models published with draft evidence bundle schema v0 and orchestrator envelopes; ready for downstream wiring. | AdvisoryAI Guild · Orchestrator/Notifications Guild · Platform Guild | Publish versioned package + fixtures to `/src/__Libraries` (or shared NuGet) so downstream components can consume frozen schema. | -| 1 | EVID-OBS-54-002 | TODO | Schema blockers resolved: `docs/schemas/orchestrator-envelope.schema.json` + `docs/schemas/evidence-locker-dsse.schema.json` + `docs/schemas/advisory-key.schema.json` available. Ready for DSSE finalization. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. | +| 1 | EVID-OBS-54-002 | DONE (2025-12-07) | Determinism finalized: uid/gid=0, empty username/groupname, fixed timestamp; tests added. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. | | 2 | EVID-REPLAY-187-001 | BLOCKED | PREP-EVID-REPLAY-187-001-AWAIT-REPLAY-LEDGER | Evidence Locker Guild · Replay Delivery Guild | Implement replay bundle ingestion + retention APIs; update storage policy per `docs/replay/DETERMINISTIC_REPLAY.md`. | | 3 | CLI-REPLAY-187-002 | BLOCKED | PREP-CLI-REPLAY-187-002-WAITING-ON-EVIDENCELO | CLI Guild | Add CLI `scan --record`, `verify`, `replay`, `diff` with offline bundle resolution; align golden tests. | | 4 | RUNBOOK-REPLAY-187-004 | BLOCKED | PREP-RUNBOOK-REPLAY-187-004-DEPENDS-ON-RETENT | Docs Guild · Ops Guild | Publish `/docs/runbooks/replay_ops.md` coverage for retention enforcement, RootPack rotation, verification drills. 
| @@ -74,6 +74,7 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | EVID-OBS-54-002 DONE: Finalized deterministic bundle packaging for both sealed and portable bundles. Added explicit `Uid=0, Gid=0, UserName="", GroupName=""` to `WriteTextEntry` in `EvidenceBundlePackagingService.cs` and `EvidencePortableBundleService.cs`. Added 3 new tests: `EnsurePackageAsync_ProducesDeterministicTarEntryMetadata` (verifies uid/gid/username/groupname), `EnsurePackageAsync_ProducesIdenticalBytesForSameInput` (bit-for-bit reproducibility), and portable bundle determinism test. Bundle packaging now fully compliant with `docs/modules/evidence-locker/bundle-packaging.md` spec. | Implementer | | 2025-12-06 | **Schema blockers resolved:** AdvisoryAI (`docs/schemas/advisory-key.schema.json`) and orchestrator (`docs/schemas/orchestrator-envelope.schema.json`) schemas delivered. EVID-OBS-54-002 is now TODO. Updated Decisions table. | Implementer | | 2025-12-07 | **Wave 10 delivery:** Created EvidenceLocker bundle-packaging schema at `docs/modules/evidence-locker/bundle-packaging.schema.json` and AdvisoryAI evidence bundle schema at `docs/events/advisoryai.evidence.bundle@1.schema.json`. All downstream ExportCenter chains can now proceed. | Implementer | | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt | diff --git a/docs/implplan/SPRINT_0163_0001_0001_exportcenter_ii.md b/docs/implplan/SPRINT_0163_0001_0001_exportcenter_ii.md index 629a097b4..cc4c24cbc 100644 --- a/docs/implplan/SPRINT_0163_0001_0001_exportcenter_ii.md +++ b/docs/implplan/SPRINT_0163_0001_0001_exportcenter_ii.md @@ -32,14 +32,14 @@ | P8 | PREP-EXPORT-NOTIFY-SCHEMA-OBS-52 | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Notifications Guild · Exporter Service | Notifications Guild · Exporter Service | Notifications schema for export lifecycle events not published; required for EXPORT-OBS-52-001 and downstream tasks. 
Provide envelope + sample payloads. Prep artefact: `docs/modules/export-center/prep/2025-11-20-notify-obs-52-prep.md`. | | P8 | PREP-EXPORT-CRYPTO-90-001-PENDING-NOV-18-CRYP | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Exporter Service · Security Guild | Exporter Service · Security Guild | Pending Nov-18 crypto review + reference implementation.

Document artefact/deliverable for EXPORT-CRYPTO-90-001 and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-crypto-90-001-prep.md`. | | P9 | PREP-EXPORTER-SERVICE-BLOCKED-WAITING-ON-EVID | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Planning | Planning | BLOCKED (waiting on EvidenceLocker spec).

Document artefact/deliverable for Exporter Service and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-exporter-evid-blocker.md`. | -| 1 | EXPORT-OAS-63-001 | TODO | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. | -| 2 | EXPORT-OBS-50-001 | TODO | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. | -| 3 | EXPORT-OBS-51-001 | TODO | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. | -| 4 | EXPORT-OBS-52-001 | TODO | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. | -| 5 | EXPORT-OBS-53-001 | TODO | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. | -| 6 | EXPORT-OBS-54-001 | TODO | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. | -| 7 | EXPORT-OBS-54-002 | TODO | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. | -| 8 | EXPORT-OBS-55-001 | TODO | Depends on EXPORT-OBS-54-001. 
| Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. | +| 1 | EXPORT-OAS-63-001 | DONE | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. | +| 2 | EXPORT-OBS-50-001 | DONE | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. | +| 3 | EXPORT-OBS-51-001 | DONE | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. | +| 4 | EXPORT-OBS-52-001 | DONE | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. | +| 5 | EXPORT-OBS-53-001 | DONE | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. | +| 6 | EXPORT-OBS-54-001 | DONE | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. | +| 7 | EXPORT-OBS-54-002 | DONE | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. | +| 8 | EXPORT-OBS-55-001 | DONE | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. 
| | 9 | EXPORT-RISK-69-001 | TODO | Schema blockers resolved; AdvisoryAI evidence bundle schema available. | Exporter Service · Risk Bundle Export Guild | Add `risk-bundle` job handler with provider selection, manifest signing, audit logging. | | 10 | EXPORT-RISK-69-002 | TODO | Depends on EXPORT-RISK-69-001. | Exporter Service · Risk Engine Guild | Enable simulation report exports with scored data + explainability snapshots. | | 11 | EXPORT-RISK-70-001 | TODO | Depends on EXPORT-RISK-69-002. | Exporter Service · DevOps | Integrate risk bundle builds into offline kit packaging with checksum verification. | @@ -93,6 +93,14 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | **EXPORT-OBS-55-001 DONE:** Implemented incident mode enhancements for ExportCenter. Created `Incident/` namespace with: `ExportIncidentModels.cs` (severity levels Info→Emergency, status Active→Resolved→FalsePositive, types ExportFailure/LatencyDegradation/StorageCapacity/DependencyFailure/IntegrityIssue/SecurityIncident/ConfigurationError/RateLimiting), `ExportIncidentEvents.cs` (IncidentActivated/Updated/Escalated/Deescalated/Resolved events), `IExportIncidentManager` interface and `ExportIncidentManager` implementation with in-memory store. `IExportNotificationEmitter` interface with `LoggingNotificationEmitter` for timeline + notifier integration. Added `PublishIncidentEventAsync` to `IExportTimelinePublisher`. REST endpoints at `/v1/incidents/*`: GET status, GET active, GET recent, GET {id}, POST activate, PATCH {id} update, POST {id}/resolve. Added metrics: `export_incidents_activated_total`, `export_incidents_resolved_total`, `export_incidents_escalated_total`, `export_incidents_deescalated_total`, `export_notifications_emitted_total`, `export_incident_duration_seconds`. | Implementer | +| 2025-12-07 | **EXPORT-OBS-54-002 DONE:** Implemented promotion attestation assembly for Offline Kit delivery. 
Created `PromotionAttestationModels.cs` with models for SBOM/VEX digest references, Rekor proof entries (with inclusion proofs), DSSE envelope references, promotion predicates. Created `IPromotionAttestationAssembler` interface and `PromotionAttestationAssembler` implementation that: builds in-toto statements with promotion predicates, computes root hash from all artifact digests, signs with DSSE PAE encoding, exports to portable gzipped tar bundles with deterministic timestamps, includes verification scripts. Created `PromotionAttestationEndpoints.cs` with REST endpoints: `POST /v1/promotions/attestations`, `GET /v1/promotions/attestations/{id}`, `GET /v1/promotions/{promotionId}/attestations`, `POST /v1/promotions/attestations/{id}/verify`, `GET /v1/promotions/attestations/{id}/bundle`. Bundle export includes promotion-assembly.json, promotion.dsse.json, rekor-proofs.ndjson, envelopes/, checksums.txt, verify-promotion.sh. | Implementer | +| 2025-12-07 | **EXPORT-OBS-54-001 DONE:** Implemented DSSE attestation service for export artifacts. Created `Attestation/` namespace with `ExportAttestationModels.cs` (DSSE envelope, in-toto statement, predicates, subjects, verification info), `IExportAttestationService` interface, `ExportAttestationService` implementation. Created `IExportAttestationSigner` interface and `ExportAttestationSigner` implementing DSSE PAE (Pre-Authentication Encoding) per spec with ECDSA-P256-SHA256 signing. REST endpoints at `/v1/exports/{id}/attestation` (GET), `/v1/exports/attestations/{attestationId}` (GET), `/v1/exports/{id}/attestation/verify` (POST). Includes base64url encoding, key ID computation, public key PEM export for verification. | Implementer | +| 2025-12-07 | **EXPORT-OBS-53-001 DONE:** Implemented evidence locker integration for export manifests. 
Created `EvidenceLocker/` namespace with `ExportEvidenceModels` (manifest, entries, distribution info, DSSE signature models), `IExportEvidenceLockerClient` interface, `ExportEvidenceLockerClient` HTTP implementation, `ExportMerkleTreeCalculator` for deterministic root hash computation. In-memory client available for testing. Integrated with existing telemetry. | Implementer | +| 2025-12-07 | **EXPORT-OBS-52-001 DONE:** Implemented timeline event publisher for export lifecycle. Created `Timeline/` namespace with event types (`ExportStartedEvent`, `ExportCompletedEvent`, `ExportFailedEvent`, `ExportCancelledEvent`, `ExportArtifactCreatedEvent`), `IExportTimelinePublisher` interface, `ExportTimelinePublisher` implementation with hash-based deduplication and exponential backoff retry. Added timeline metrics (`export_timeline_events_published_total`, `export_timeline_events_failed_total`, `export_timeline_events_deduplicated_total`). Integrated with TimelineEventEnvelope format for TimelineIndexer. | Implementer | +| 2025-12-07 | **EXPORT-OBS-51-001 DONE:** Created Grafana dashboard (`deploy/telemetry/dashboards/export-center.json`) with panels for run counts, success rate, latency percentiles, artifact counts, bundle sizes, and error analysis. Created alert rules (`deploy/telemetry/alerts/export-center-alerts.yaml`) with SLO burn-rate alerts (14.4x fast/6x slow), latency alerts (p95>120s, p99>300s), capacity alerts, and deprecation tracking. | Implementer | +| 2025-12-07 | **EXPORT-OBS-50-001 DONE:** Implemented telemetry core for ExportCenter. Created `Telemetry/` namespace with `ExportTelemetry` (Meter with counters/histograms), `ExportActivityExtensions` (ActivitySource spans), `ExportRunTelemetryContext` (lifecycle tracking), `ExportLoggerExtensions` (structured logging), and `TelemetryServiceCollectionExtensions` (DI). Metrics include `export_runs_total`, `export_run_duration_seconds`, `export_artifacts_total`, `export_bytes_total`, `export_bundle_size_bytes`. 
Spans: `export.run`, `export.plan`, `export.write`, `export.distribute`. | Implementer | +| 2025-12-07 | **EXPORT-OAS-63-001 DONE:** Implemented deprecation headers (`Sunset` per RFC 8594, plus the `Deprecation` header field) for legacy `/exports` endpoints. Created `Deprecation/` namespace with `DeprecationInfo`, `DeprecationHeaderExtensions`, `DeprecationRouteBuilderExtensions`, `DeprecatedEndpointsRegistry`, `DeprecationNotificationService`. Legacy endpoints `/exports` (GET/POST/DELETE) now emit `Deprecation`, `Sunset`, `Link`, and `Warning` headers. Metrics counter added for monitoring deprecated endpoint access. | Implementer | | 2025-12-07 | **Wave 10 unblock:** All 17 implementation tasks moved from BLOCKED → TODO. Schema blockers resolved: EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`), AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`), and orchestrator envelope (`docs/schemas/orchestrator-envelope.schema.json`). | Implementer | | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt | | 2025-11-20 | Published prep docs for EXPORT-OBS-50-001, EXPORT-RISK-69-001, EXPORT-SVC-35-001, EXPORT-SVC-35-002/003/004/005, EXPORT-NOTIFY-SCHEMA-OBS-52, EXPORT-CRYPTO-90-001, exporter-evid blocker; set P1–P9 to DOING after confirming unowned. | Project Mgmt | diff --git a/docs/implplan/SPRINT_0190_0001_0001_cvss_v4_receipts.md b/docs/implplan/SPRINT_0190_0001_0001_cvss_v4_receipts.md index b2df8f708..aba20192c 100644 --- a/docs/implplan/SPRINT_0190_0001_0001_cvss_v4_receipts.md +++ b/docs/implplan/SPRINT_0190_0001_0001_cvss_v4_receipts.md @@ -36,7 +36,7 @@ | 8 | CVSS-CONCELIER-190-008 | DONE (2025-12-06) | Depends on 190-001; Concelier AGENTS updated 2025-12-06. | Concelier Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ingest vendor-provided CVSS v4.0 vectors from advisories; parse and store as base receipts; preserve provenance. 
(Implemented CVSS priority ordering in Advisory → Postgres conversion so v4 vectors are primary and provenance-preserved.) | | 9 | CVSS-API-190-009 | DONE (2025-12-06) | Depends on 190-005, 190-007; Policy Engine + Gateway CVSS endpoints shipped. | Policy Guild (`src/Policy/StellaOps.Policy.Gateway`) | REST APIs delivered: `POST /cvss/receipts`, `GET /cvss/receipts/{id}`, `PUT /cvss/receipts/{id}/amend`, `GET /cvss/receipts/{id}/history`, `GET /cvss/policies`. | | 10 | CVSS-CLI-190-010 | DONE (2025-12-06) | Depends on 190-009 (API readiness). | CLI Guild (`src/Cli/StellaOps.Cli`) | CLI verbs shipped: `stella cvss score --vuln --policy-file --vector `, `stella cvss show `, `stella cvss history `, `stella cvss export --format json`. | -| 11 | CVSS-UI-190-011 | TODO | Depends on 190-009 (API readiness). | UI Guild (`src/UI/StellaOps.UI`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. | +| 11 | CVSS-UI-190-011 | BLOCKED | UI workspace (`src/UI/StellaOps.UI`) is empty/no Angular project; UI tasks cannot start until workspace is restored or scaffolded. | UI Guild (`src/UI/StellaOps.UI`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. | | 12 | CVSS-DOCS-190-012 | BLOCKED (2025-11-29) | Depends on 190-001 through 190-011 (API/UI/CLI blocked). | Docs Guild (`docs/modules/policy/cvss-v4.md`, `docs/09_API_CLI_REFERENCE.md`) | Document CVSS v4.0 scoring system: data model, policy format, API reference, CLI usage, UI guide, determinism guarantees. | | 13 | CVSS-GAPS-190-013 | DONE (2025-12-01) | None; informs tasks 5–12. 
| Product Mgmt · Policy Guild | Address gap findings (CV1–CV10) from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`: policy lifecycle/replay, canonical hashing spec with test vectors, threat/env freshness, tenant-scoped receipts, v3.1→v4.0 conversion flagging, evidence CAS/DSSE linkage, append-only receipt rules, deterministic exports, RBAC boundaries, monitoring/alerts for DSSE/policy drift. | | 14 | CVSS-GAPS-190-014 | DONE (2025-12-03) | Close CVM1–CVM10 from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`; depends on schema/hash publication and API/UI contracts | Policy Guild · Platform Guild | Remediated CVM1–CVM10: updated `docs/modules/policy/cvss-v4.md` with canonical hashing/DSSE/export/profile guidance, added golden hash fixture under `tests/Policy/StellaOps.Policy.Scoring.Tests/Fixtures/hashing/`, and documented monitoring/backfill rules. | @@ -81,6 +81,7 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | CVSS-UI-190-011 set to BLOCKED: UI workspace `src/UI/StellaOps.UI` contains no Angular project (only AGENTS/TASKS stubs); cannot implement receipt UI until workspace is restored or scaffolded. | Implementer | | 2025-12-07 | System.CommandLine beta5 migration completed; CLI cvss verbs build/run with new API surface. NuGet fallback probing fully disabled via repo-local cache; full CLI build (with deps) now succeeds. Risk R7 mitigated. | Implementer | | 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer | | 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. 
| Implementer | diff --git a/docs/implplan/SPRINT_0506_0001_0001_ops_devops_iv.md b/docs/implplan/SPRINT_0506_0001_0001_ops_devops_iv.md index b0fd0c183..fda14d573 100644 --- a/docs/implplan/SPRINT_0506_0001_0001_ops_devops_iv.md +++ b/docs/implplan/SPRINT_0506_0001_0001_ops_devops_iv.md @@ -40,7 +40,7 @@ | 17 | MIRROR-CRT-56-CI-001 | DONE (2025-11-25) | None | Mirror Creator Guild · DevOps Guild | Move `make-thin-v1.sh` into CI assembler, enforce DSSE/TUF/time-anchor, publish milestone hashes. | | 18 | MIRROR-CRT-56-002 | DONE (2025-11-25) | Depends on 56-CI-001 | Mirror Creator Guild · Security Guild | Release signing for thin bundle v1 using `MIRROR_SIGN_KEY_B64`; run `.gitea/workflows/mirror-sign.yml`. | | 19 | MIRROR-CRT-57-001/002 | BLOCKED | Wait on 56-002 + AIRGAP-TIME-57-001 | Mirror Creator Guild · AirGap Time Guild | OCI/time-anchor signing follow-ons. | -| 20 | MIRROR-CRT-58-001/002 | BLOCKED | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons. | +| 20 | MIRROR-CRT-58-001/002 | DOING (dev) | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons; dev Export Center scheduling helper added, production signing still awaits `MIRROR_SIGN_KEY_B64`. | | 21 | EXPORT-OBS-51-001 / 54-001 / AIRGAP-TIME-57-001 / CLI-AIRGAP-56-001 / PROV-OBS-53-001 | BLOCKED | Need signed thin bundle + time anchors | Exporter · AirGap Time · CLI Guild | Export/airgap provenance chain work. | | 22 | DEVOPS-LEDGER-29-009-REL | BLOCKED (2025-11-25) | Needs LEDGER-29-009 dev outputs | DevOps Guild · Findings Ledger Guild | Release/offline-kit packaging for ledger manifests/backups. | | 23 | DEVOPS-LEDGER-TEN-48-001-REL | BLOCKED (2025-11-25) | Needs ledger tenant partition work | DevOps Guild · Findings Ledger Guild | Apply RLS/partition migrations in release pipelines; publish manifests/offline-kit artefacts. | @@ -52,6 +52,7 @@ | 2025-12-06 | Header normalised to standard template; no content/status changes. 
| Project Mgmt | | 2025-12-04 | Renamed from `SPRINT_506_ops_devops_iv.md` to template-compliant `SPRINT_0506_0001_0001_ops_devops_iv.md`; no status changes. | Project PM | | 2025-12-03 | Normalised sprint file to standard template; preserved all tasks/logs; no status changes. | Planning | +| 2025-12-07 | MIRROR-CRT-58-002 progressed: added Export Center scheduling helper (`src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh`) for dev scheduling/audit; production signing still waiting on `MIRROR_SIGN_KEY_B64`. | Implementer | | 2025-11-25 | DEVOPS-CI-110-001 runner published at `ops/devops/ci-110-runner/`; initial TRX slices stored under `ops/devops/artifacts/ci-110/20251125T030557Z/`. | DevOps | | 2025-11-25 | MIRROR-CRT-56-CI-001 completed: CI signing script emits milestone hash summary, enforces DSSE/TUF/time-anchor steps, uploads `milestone.json` via `mirror-sign.yml`. | DevOps | | 2025-11-25 | DEVOPS-OBS-55-001 completed: added incident-mode automation script (`scripts/observability/incident-mode.sh`) and runbook (`ops/devops/observability/incident-mode.md`). | DevOps | diff --git a/docs/implplan/SPRINT_0517_0001_0001_fips_eidas_kcmvp_pq_enablement.md b/docs/implplan/SPRINT_0517_0001_0001_fips_eidas_kcmvp_pq_enablement.md index 38c76a224..73d79f163 100644 --- a/docs/implplan/SPRINT_0517_0001_0001_fips_eidas_kcmvp_pq_enablement.md +++ b/docs/implplan/SPRINT_0517_0001_0001_fips_eidas_kcmvp_pq_enablement.md @@ -28,7 +28,7 @@ | 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. | | 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once certified toolchain available. | | 8 | PQ-IMPL-01 | DONE (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. 
| -| 9 | PQ-IMPL-02 | TODO | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). | +| 9 | PQ-IMPL-02 | DOING (2025-12-07) | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). | | 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. | ## Execution Log @@ -40,6 +40,7 @@ | 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; 2,4,8,10 moved to DOING pending host wiring and certified modules. | Implementer | | 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including PQ soft provider; registry DI registers new providers. | Implementer | | 2025-12-07 | Added deterministic PQ test vectors (fixed keys/signatures) in `StellaOps.Cryptography.Tests`; PQ-IMPL-01 marked DONE. | Implementer | +| 2025-12-07 | Wired Signer DSSE dual-sign (secondary PQ/SM allowed via options), fixed DI to provide ICryptoHmac, and adjusted SM2 test seeding; Signer test suite passing. Set PQ-IMPL-02 to DOING. | Implementer | ## Decisions & Risks - FIPS validation lead time may slip; interim non-certified baseline acceptable but must be clearly labeled until CMVP module lands (task 3). 
diff --git a/docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md b/docs/implplan/archived/SPRINT_0114_0001_0003_concelier_iii.md similarity index 100% rename from docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md rename to docs/implplan/archived/SPRINT_0114_0001_0003_concelier_iii.md diff --git a/docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md b/docs/implplan/archived/SPRINT_0115_0001_0004_concelier_iv.md similarity index 100% rename from docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md rename to docs/implplan/archived/SPRINT_0115_0001_0004_concelier_iv.md diff --git a/docs/implplan/SPRINT_0117_0001_0006_concelier_vi.md b/docs/implplan/archived/SPRINT_0117_0001_0006_concelier_vi.md similarity index 100% rename from docs/implplan/SPRINT_0117_0001_0006_concelier_vi.md rename to docs/implplan/archived/SPRINT_0117_0001_0006_concelier_vi.md diff --git a/docs/implplan/SPRINT_0119_0001_0004_excititor_iv.md b/docs/implplan/archived/SPRINT_0119_0001_0004_excititor_iv.md similarity index 100% rename from docs/implplan/SPRINT_0119_0001_0004_excititor_iv.md rename to docs/implplan/archived/SPRINT_0119_0001_0004_excititor_iv.md diff --git a/docs/implplan/SPRINT_0119_0001_0005_excititor_v.md b/docs/implplan/archived/SPRINT_0119_0001_0005_excititor_v.md similarity index 100% rename from docs/implplan/SPRINT_0119_0001_0005_excititor_v.md rename to docs/implplan/archived/SPRINT_0119_0001_0005_excititor_v.md diff --git a/docs/implplan/SPRINT_0119_0001_0006_excititor_vi.md b/docs/implplan/archived/SPRINT_0119_0001_0006_excititor_vi.md similarity index 100% rename from docs/implplan/SPRINT_0119_0001_0006_excititor_vi.md rename to docs/implplan/archived/SPRINT_0119_0001_0006_excititor_vi.md diff --git a/docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0120_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md rename to 
docs/implplan/archived/SPRINT_0120_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0121_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0121_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0121_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0121_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0121_0001_0003_excititor_iii.md b/docs/implplan/archived/SPRINT_0121_0001_0003_excititor_iii.md similarity index 100% rename from docs/implplan/SPRINT_0121_0001_0003_excititor_iii.md rename to docs/implplan/archived/SPRINT_0121_0001_0003_excititor_iii.md diff --git a/docs/implplan/SPRINT_0122_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0122_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0122_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0122_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0122_0001_0004_excititor_iv.md b/docs/implplan/archived/SPRINT_0122_0001_0004_excititor_iv.md similarity index 100% rename from docs/implplan/SPRINT_0122_0001_0004_excititor_iv.md rename to docs/implplan/archived/SPRINT_0122_0001_0004_excititor_iv.md diff --git a/docs/implplan/SPRINT_0123_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0123_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0123_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0123_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0123_0001_0005_excititor_v.md b/docs/implplan/archived/SPRINT_0123_0001_0005_excititor_v.md similarity index 100% rename from docs/implplan/SPRINT_0123_0001_0005_excititor_v.md rename to docs/implplan/archived/SPRINT_0123_0001_0005_excititor_v.md diff --git a/docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0124_0001_0001_policy_reasoning.md similarity index 100% rename from 
docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0124_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0124_0001_0006_excititor_vi.md b/docs/implplan/archived/SPRINT_0124_0001_0006_excititor_vi.md similarity index 100% rename from docs/implplan/SPRINT_0124_0001_0006_excititor_vi.md rename to docs/implplan/archived/SPRINT_0124_0001_0006_excititor_vi.md diff --git a/docs/implplan/SPRINT_0125_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0125_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0125_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0125_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0126_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0126_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0126_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0126_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0127_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0127_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0127_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0127_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0128_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0128_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md b/docs/implplan/archived/SPRINT_0129_0001_0001_policy_reasoning.md similarity index 100% rename from docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md rename to docs/implplan/archived/SPRINT_0129_0001_0001_policy_reasoning.md diff --git a/docs/implplan/SPRINT_0133_0001_0001_scanner_surface.md 
b/docs/implplan/archived/SPRINT_0133_0001_0001_scanner_surface.md similarity index 100% rename from docs/implplan/SPRINT_0133_0001_0001_scanner_surface.md rename to docs/implplan/archived/SPRINT_0133_0001_0001_scanner_surface.md diff --git a/docs/implplan/SPRINT_0134_0001_0001_native_analyzer_fixes.md b/docs/implplan/archived/SPRINT_0134_0001_0001_native_analyzer_fixes.md similarity index 100% rename from docs/implplan/SPRINT_0134_0001_0001_native_analyzer_fixes.md rename to docs/implplan/archived/SPRINT_0134_0001_0001_native_analyzer_fixes.md diff --git a/docs/implplan/SPRINT_0134_0001_0001_scanner_surface.md b/docs/implplan/archived/SPRINT_0134_0001_0001_scanner_surface.md similarity index 100% rename from docs/implplan/SPRINT_0134_0001_0001_scanner_surface.md rename to docs/implplan/archived/SPRINT_0134_0001_0001_scanner_surface.md diff --git a/docs/implplan/SPRINT_0135_0001_0001_native_testing_framework.md b/docs/implplan/archived/SPRINT_0135_0001_0001_native_testing_framework.md similarity index 100% rename from docs/implplan/SPRINT_0135_0001_0001_native_testing_framework.md rename to docs/implplan/archived/SPRINT_0135_0001_0001_native_testing_framework.md diff --git a/docs/implplan/SPRINT_0135_0001_0001_scanner_surface.md b/docs/implplan/archived/SPRINT_0135_0001_0001_scanner_surface.md similarity index 100% rename from docs/implplan/SPRINT_0135_0001_0001_scanner_surface.md rename to docs/implplan/archived/SPRINT_0135_0001_0001_scanner_surface.md diff --git a/docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md b/docs/implplan/archived/SPRINT_0139_0001_0001_scanner_bun.md similarity index 100% rename from docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md rename to docs/implplan/archived/SPRINT_0139_0001_0001_scanner_bun.md diff --git a/docs/implplan/SPRINT_0140_0001_0001_scanner_java_enhancement.md b/docs/implplan/archived/SPRINT_0140_0001_0001_scanner_java_enhancement.md similarity index 100% rename from 
docs/implplan/SPRINT_0140_0001_0001_scanner_java_enhancement.md rename to docs/implplan/archived/SPRINT_0140_0001_0001_scanner_java_enhancement.md diff --git a/docs/implplan/SPRINT_0141_0001_0001_graph_indexer.md b/docs/implplan/archived/SPRINT_0141_0001_0001_graph_indexer.md similarity index 100% rename from docs/implplan/SPRINT_0141_0001_0001_graph_indexer.md rename to docs/implplan/archived/SPRINT_0141_0001_0001_graph_indexer.md diff --git a/docs/implplan/SPRINT_0144_0001_0001_zastava_runtime_signals.md b/docs/implplan/archived/SPRINT_0144_0001_0001_zastava_runtime_signals.md similarity index 100% rename from docs/implplan/SPRINT_0144_0001_0001_zastava_runtime_signals.md rename to docs/implplan/archived/SPRINT_0144_0001_0001_zastava_runtime_signals.md diff --git a/docs/implplan/SPRINT_0150_0001_0001_mirror_dsse.md b/docs/implplan/archived/SPRINT_0150_0001_0001_mirror_dsse.md similarity index 100% rename from docs/implplan/SPRINT_0150_0001_0001_mirror_dsse.md rename to docs/implplan/archived/SPRINT_0150_0001_0001_mirror_dsse.md diff --git a/docs/implplan/SPRINT_0150_0001_0002_mirror_time.md b/docs/implplan/archived/SPRINT_0150_0001_0002_mirror_time.md similarity index 100% rename from docs/implplan/SPRINT_0150_0001_0002_mirror_time.md rename to docs/implplan/archived/SPRINT_0150_0001_0002_mirror_time.md diff --git a/docs/implplan/SPRINT_0150_0001_0003_mirror_orch.md b/docs/implplan/archived/SPRINT_0150_0001_0003_mirror_orch.md similarity index 100% rename from docs/implplan/SPRINT_0150_0001_0003_mirror_orch.md rename to docs/implplan/archived/SPRINT_0150_0001_0003_mirror_orch.md diff --git a/docs/implplan/SPRINT_0152_0001_0002_orchestrator_ii.md b/docs/implplan/archived/SPRINT_0152_0001_0002_orchestrator_ii.md similarity index 100% rename from docs/implplan/SPRINT_0152_0001_0002_orchestrator_ii.md rename to docs/implplan/archived/SPRINT_0152_0001_0002_orchestrator_ii.md diff --git a/docs/implplan/SPRINT_0154_0001_0001_packsregistry.md 
b/docs/implplan/archived/SPRINT_0154_0001_0001_packsregistry.md similarity index 100% rename from docs/implplan/SPRINT_0154_0001_0001_packsregistry.md rename to docs/implplan/archived/SPRINT_0154_0001_0001_packsregistry.md diff --git a/docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md b/docs/implplan/archived/SPRINT_0157_0001_0001_taskrunner_i.md similarity index 100% rename from docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md rename to docs/implplan/archived/SPRINT_0157_0001_0001_taskrunner_i.md diff --git a/docs/implplan/SPRINT_0157_0001_0002_taskrunner_blockers.md b/docs/implplan/archived/SPRINT_0157_0001_0002_taskrunner_blockers.md similarity index 100% rename from docs/implplan/SPRINT_0157_0001_0002_taskrunner_blockers.md rename to docs/implplan/archived/SPRINT_0157_0001_0002_taskrunner_blockers.md diff --git a/docs/implplan/SPRINT_0162_0001_0001_exportcenter_i.md b/docs/implplan/archived/SPRINT_0162_0001_0001_exportcenter_i.md similarity index 67% rename from docs/implplan/SPRINT_0162_0001_0001_exportcenter_i.md rename to docs/implplan/archived/SPRINT_0162_0001_0001_exportcenter_i.md index 141eb5546..23a13661c 100644 --- a/docs/implplan/SPRINT_0162_0001_0001_exportcenter_i.md +++ b/docs/implplan/archived/SPRINT_0162_0001_0001_exportcenter_i.md @@ -41,18 +41,18 @@ | P12 | PREP-EXPORT-OAS-62-001-DEPENDS-ON-61-002 | DONE (2025-11-20) | Prep artefact at `docs/modules/export-center/prep/2025-11-20-export-oas-62-001-prep.md`; depends on discovery endpoint. | Exporter Service Guild · SDK Generator Guild | Depends on 61-002.

Document artefact/deliverable for EXPORT-OAS-62-001 and publish location so downstream tasks can proceed. | | P13 | PREP-EXPORTER-SERVICE-EVIDENCELOCKER-GUILD-BL | DONE (2025-11-20) | Prep note at `docs/modules/export-center/prep/2025-11-20-exporter-evidencelocker-blocker.md`; awaiting sealed bundle schema/hash. | Planning | BLOCKED (awaits EvidenceLocker contract).

Document artefact/deliverable for Exporter Service · EvidenceLocker Guild and publish location so downstream tasks can proceed. | | P14 | PREP-ORCHESTRATOR-NOTIFICATIONS-SCHEMA-HANDOF | DONE (2025-11-20) | Prep note at `docs/events/prep/2025-11-20-orchestrator-notifications-schema-handoff.md`. | Planning | If not ready, keep tasks BLOCKED and escalate to Wave 150/140 leads.

Document artefact/deliverable for Orchestrator + Notifications schema handoff and publish location so downstream tasks can proceed. | -| 1 | DVOFF-64-002 | TODO | EvidenceLocker bundle spec delivered (`docs/modules/evidence-locker/bundle-packaging.schema.json`); ready to implement. | DevPortal Offline Guild · AirGap Controller Guild | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. | -| 2 | EXPORT-AIRGAP-56-001 | TODO | EvidenceLocker + AdvisoryAI schemas delivered; ready to implement. | Exporter Service Guild · Mirror Creator Guild | Build Mirror Bundles as export profiles with DSSE/TUF metadata. | -| 3 | EXPORT-AIRGAP-56-002 | TODO | Depends on 56-001; chain unblocked. | Exporter Service Guild · DevOps Guild | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gap deploy. | -| 4 | EXPORT-AIRGAP-57-001 | TODO | Depends on 56-002; EvidenceLocker bundle format available. | Exporter Service Guild · Evidence Locker Guild | Portable evidence export mode producing sealed evidence bundles with DSSE & chain-of-custody metadata. | -| 5 | EXPORT-AIRGAP-58-001 | TODO | Depends on 57-001; orchestrator envelope schema delivered. | Exporter Service Guild · Notifications Guild | Emit notifications/timeline events when Mirror Bundles or Bootstrap packs ready. | -| 6 | EXPORT-ATTEST-74-001 | TODO | EvidenceLocker bundle spec delivered; ready to implement. | Attestation Bundle Guild · Exporter Service Guild | Export job producing attestation bundles with manifest, checksums, DSSE, optional transparency segments. | -| 7 | EXPORT-ATTEST-74-002 | TODO | Depends on 74-001; chain unblocked. | Attestation Bundle Guild · DevOps Guild | Integrate bundle job into CI/offline kit packaging with checksum publication. | -| 8 | EXPORT-ATTEST-75-001 | TODO | Depends on 74-002; chain unblocked. | Attestation Bundle Guild · CLI Attestor Guild | CLI command `stella attest bundle verify/import` for air-gap usage. 
| -| 9 | EXPORT-ATTEST-75-002 | TODO | Depends on 75-001; chain unblocked. | Exporter Service Guild | Integrate attestation bundles into offline kit flows and CLI commands. | -| 10 | EXPORT-OAS-61-001 | TODO | Export API surface now defined; ready to implement OAS. | Exporter Service Guild · API Contracts Guild | Update Exporter OAS covering profiles/runs/downloads with standard error envelope + examples. | -| 11 | EXPORT-OAS-61-002 | TODO | Depends on 61-001; chain unblocked. | Exporter Service Guild | `/.well-known/openapi` discovery endpoint with version metadata and ETag. | -| 12 | EXPORT-OAS-62-001 | TODO | Depends on 61-002; chain unblocked. | Exporter Service Guild · SDK Generator Guild | Ensure SDKs include export profile/run clients with streaming helpers; add smoke tests. | +| 1 | DVOFF-64-002 | DONE | CLI command implemented with service, tests, and exit codes per spec. | DevPortal Offline Guild · AirGap Controller Guild | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. | +| 2 | EXPORT-AIRGAP-56-001 | DONE | Mirror bundle builder with DSSE signing implemented; tests added. | Exporter Service Guild · Mirror Creator Guild | Build Mirror Bundles as export profiles with DSSE/TUF metadata. | +| 3 | EXPORT-AIRGAP-56-002 | DONE | Bootstrap pack builder with OCI layout implemented; tests added. | Exporter Service Guild · DevOps Guild | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gap deploy. | +| 4 | EXPORT-AIRGAP-57-001 | DONE | Portable evidence export builder implemented; tests added. | Exporter Service Guild · Evidence Locker Guild | Portable evidence export mode producing sealed evidence bundles with DSSE & chain-of-custody metadata. | +| 5 | EXPORT-AIRGAP-58-001 | DONE | Notification emitter with NATS sink, webhook delivery, HMAC-SHA256 signing, retry logic, and DLQ implemented. 
| Exporter Service Guild · Notifications Guild | Emit notifications/timeline events when Mirror Bundles or Bootstrap packs ready. | +| 6 | EXPORT-ATTEST-74-001 | DONE | Attestation bundle builder with DSSE envelope passthrough, transparency log support, deterministic packaging implemented. | Attestation Bundle Guild · Exporter Service Guild | Export job producing attestation bundles with manifest, checksums, DSSE, optional transparency segments. | +| 7 | EXPORT-ATTEST-74-002 | DONE | OfflineKitPackager with immutable artefacts, checksum publication, manifest generation implemented. | Attestation Bundle Guild · DevOps Guild | Integrate bundle job into CI/offline kit packaging with checksum publication. | +| 8 | EXPORT-ATTEST-75-001 | DONE | CLI verifier/importer with DSSE validation, checksum verification, transparency checks implemented. | Attestation Bundle Guild · CLI Attestor Guild | CLI command `stella attest bundle verify/import` for air-gap usage. | +| 9 | EXPORT-ATTEST-75-002 | DONE | OfflineKitDistributor with mirror publication, manifest-offline.json generation, and bit-for-bit verification implemented. | Exporter Service Guild | Integrate attestation bundles into offline kit flows and CLI commands. | +| 10 | EXPORT-OAS-61-001 | DONE | OpenAPI v1 spec published with deterministic examples, ETag/versioning, and standard error envelopes. | Exporter Service Guild · API Contracts Guild | Update Exporter OAS covering profiles/runs/downloads with standard error envelope + examples. | +| 11 | EXPORT-OAS-61-002 | DONE | Discovery endpoint implemented with ETag, If-None-Match, Cache-Control headers. | Exporter Service Guild | `/.well-known/openapi` discovery endpoint with version metadata and ETag. | +| 12 | EXPORT-OAS-62-001 | DONE | SDK client project with interface, implementation, streaming/lifecycle helpers, and smoke tests. | Exporter Service Guild · SDK Generator Guild | Ensure SDKs include export profile/run clients with streaming helpers; add smoke tests. 
| | 13 | EXPORT-GAPS-162-013 | DONE (2025-12-04) | None; informs tasks 1–12. | Product Mgmt · Exporter Guild · Evidence Locker Guild | Address EC1–EC10 from `docs/product-advisories/28-Nov-2025 - Export Center and Reporting Strategy.md`: publish signed ExportProfile + manifest schemas with selector validation; define per-adapter determinism rules + rerun-hash CI; mandate DSSE/SLSA attestation with log metadata; enforce cross-tenant approval flow; require distribution integrity headers + OCI annotations; pin Trivy schema versions; formalize mirror delta/tombstone rules; document encryption/recipient policy; set quotas/backpressure; and produce offline export kit + verify script under `docs/modules/export-center/determinism.md` with fixtures in `src/ExportCenter/__fixtures`. | ## Action Tracker @@ -98,6 +98,17 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-12-07 | Completed EXPORT-OAS-62-001: implemented ExportCenter SDK client in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/` with: IExportCenterClient interface for discovery, profiles, runs, evidence exports, and attestation exports operations; ExportCenterClient implementation with HttpClient using System.Net.Http.Json; ExportCenterClientOptions for configuration; Models (ExportModels.cs) with ExportProfile, ExportRun, ExportStatus, ErrorEnvelope types; Lifecycle/ExportJobLifecycleHelper for polling with CreateAndWait, WaitForCompletion, and download helpers; Streaming/ExportDownloadHelper for progress reporting, SHA-256 verification, and byte counting; Extensions/ServiceCollectionExtensions for DI registration. Added comprehensive smoke tests in Client.Tests project covering HTTP mocking, lifecycle polling, and download verification. Status set to DONE. 
| Implementer | +| 2025-12-07 | Completed EXPORT-OAS-61-002: implemented OpenApiDiscoveryEndpoints in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/` with: `/.well-known/openapi` and `/.well-known/openapi.json` discovery endpoints returning service metadata (version, specVersion, format, url, profilesSupported), `/openapi/export-center.yaml` serving the OpenAPI spec (embedded resource or file fallback with minimal inline spec), `/openapi/export-center.json` with redirect to YAML endpoint, ETag with SHA-256 hash computation, If-None-Match support returning 304 Not Modified, Cache-Control (public, max-age=300), X-Export-Oas-Version and Last-Modified headers, OpenApiDiscoveryResponse model with camelCase JSON serialization. Updated Program.cs to wire up discovery endpoints. Added unit tests in OpenApiDiscoveryEndpointsTests.cs. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-OAS-61-001: published OpenAPI v1 spec at `docs/modules/export-center/openapi/export-center.v1.yaml` with: `/.well-known/openapi` discovery endpoint, evidence export endpoints (POST create, GET status, GET download), attestation export endpoints (POST create, GET status, GET download), profiles/runs listing with pagination, deterministic examples using fixed timestamps (2025-01-01T00:00:00Z) and placeholder hashes, ETag/Last-Modified/Cache-Control headers, OAuth2 bearer + mTLS security, standard ErrorEnvelope with correlationId, X-Stella-Quota-* headers. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-ATTEST-75-002: implemented OfflineKitDistributor in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` with: bit-for-bit distribution to mirror locations (mirror/export/attestations/{version}/), manifest-offline.json generation with entries for attestation/mirror/bootstrap bundles, CLI examples and import commands, manifest checksum publication, verification to ensure distributed kit matches source. 
Added comprehensive tests in `OfflineKitDistributorTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-ATTEST-75-001: implemented AttestationBundleVerifier in `src/Cli/StellaOps.Cli/Services/` (IAttestationBundleVerifier.cs, AttestationBundleVerifier.cs, Models/AttestationBundleModels.cs) with: archive extraction and checksum verification (internal + external), DSSE envelope payload validation, transparency.ndjson requirement check (non-offline mode), metadata extraction with subject digests, exit codes per spec (0=success, 2=checksum, 3=signature, 4=transparency, 5=format, 6=notfound, 7=import). Added comprehensive tests in `AttestationBundleVerifierTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-ATTEST-74-002: implemented OfflineKitPackager in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` (OfflineKitModels.cs, OfflineKitPackager.cs) with: immutable artefact placement with write-once semantics, checksum publication in the `{hash}  {filename}` (sha256sum) format, manifest.json generation with version/kitId/entries, directory structure per spec (attestations/, mirrors/, bootstrap/, checksums/). Added comprehensive tests in `OfflineKitPackagerTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-ATTEST-74-001: implemented AttestationBundleBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/AttestationBundle/` (AttestationBundleModels.cs, AttestationBundleBuilder.cs) with: DSSE envelope passthrough (bit-for-bit copy), statement extraction, optional transparency.ndjson (sorted lexically), metadata.json with subject digests, checksums.txt, verify-attestation.sh (POSIX offline script). Added comprehensive tests in `AttestationBundleBuilderTests.cs`. Status set to DONE. 
| Implementer | +| 2025-12-07 | Completed EXPORT-AIRGAP-58-001: implemented ExportNotificationEmitter in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/` (ExportNotificationModels.cs, ExportNotificationEmitter.cs, ExportWebhookClient.cs) with: NATS sink abstraction, webhook delivery with HMAC-SHA256 PAE signing, exponential backoff retry (1s/2s/4s/8s/16s, max 5 attempts), DLQ routing for failed deliveries, in-memory test doubles. Added comprehensive tests in `ExportNotificationEmitterTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-AIRGAP-57-001: implemented PortableEvidenceExportBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/PortableEvidence/` (models, builder). Added comprehensive tests in `PortableEvidenceExportBuilderTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-AIRGAP-56-002: implemented BootstrapPackBuilder with OCI image layout in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/` (models, builder). Added comprehensive tests in `BootstrapPackBuilderTests.cs`. Status set to DONE. | Implementer | +| 2025-12-07 | Completed EXPORT-AIRGAP-56-001: implemented MirrorBundleBuilder with DSSE signing in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/` (models, builder, signing). Added comprehensive tests in `MirrorBundleBuilderTests.cs` and `MirrorBundleSigningTests.cs`. Status set to DONE. | Implementer | | 2025-12-07 | **Wave 10 unblock:** EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`) and AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`) delivered. All 12 implementation tasks (DVOFF-64-002, EXPORT-AIRGAP-56/57/58, EXPORT-ATTEST-74/75, EXPORT-OAS-61/62) moved from BLOCKED → TODO. Interlocks updated. 
| Implementer | | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt | | 2025-11-20 | Completed PREP-EXPORT-AIRGAP-58-001: published notification/timeline contract for air-gap export readiness (`docs/modules/export-center/prep/2025-11-20-export-airgap-58-001-prep.md`); status set to DONE. | Implementer | diff --git a/docs/implplan/SPRINT_0164_0001_0003_exportcenter_iii.md b/docs/implplan/archived/SPRINT_0164_0001_0003_exportcenter_iii.md similarity index 100% rename from docs/implplan/SPRINT_0164_0001_0003_exportcenter_iii.md rename to docs/implplan/archived/SPRINT_0164_0001_0003_exportcenter_iii.md diff --git a/docs/implplan/SPRINT_0170_0001_0001_notifications_telemetry.md b/docs/implplan/archived/SPRINT_0170_0001_0001_notifications_telemetry.md similarity index 100% rename from docs/implplan/SPRINT_0170_0001_0001_notifications_telemetry.md rename to docs/implplan/archived/SPRINT_0170_0001_0001_notifications_telemetry.md diff --git a/docs/implplan/SPRINT_0172_0001_0002_notifier_ii.md b/docs/implplan/archived/SPRINT_0172_0001_0002_notifier_ii.md similarity index 100% rename from docs/implplan/SPRINT_0172_0001_0002_notifier_ii.md rename to docs/implplan/archived/SPRINT_0172_0001_0002_notifier_ii.md diff --git a/docs/implplan/SPRINT_0202_0001_0002_cli_ii.md b/docs/implplan/archived/SPRINT_0202_0001_0002_cli_ii.md similarity index 100% rename from docs/implplan/SPRINT_0202_0001_0002_cli_ii.md rename to docs/implplan/archived/SPRINT_0202_0001_0002_cli_ii.md diff --git a/docs/implplan/SPRINT_0207_0001_0001_graph.md b/docs/implplan/archived/SPRINT_0207_0001_0001_graph.md similarity index 100% rename from docs/implplan/SPRINT_0207_0001_0001_graph.md rename to docs/implplan/archived/SPRINT_0207_0001_0001_graph.md diff --git a/docs/implplan/SPRINT_0210_0001_0002_ui_ii.md b/docs/implplan/archived/SPRINT_0210_0001_0002_ui_ii.md similarity index 100% rename from docs/implplan/SPRINT_0210_0001_0002_ui_ii.md rename to 
docs/implplan/archived/SPRINT_0210_0001_0002_ui_ii.md diff --git a/docs/implplan/SPRINT_0215_0001_0004_web_iv.md b/docs/implplan/archived/SPRINT_0215_0001_0004_web_iv.md similarity index 100% rename from docs/implplan/SPRINT_0215_0001_0004_web_iv.md rename to docs/implplan/archived/SPRINT_0215_0001_0004_web_iv.md diff --git a/docs/implplan/SPRINT_0301_0001_0001_docs_md_i.md b/docs/implplan/archived/SPRINT_0301_0001_0001_docs_md_i.md similarity index 100% rename from docs/implplan/SPRINT_0301_0001_0001_docs_md_i.md rename to docs/implplan/archived/SPRINT_0301_0001_0001_docs_md_i.md diff --git a/docs/implplan/SPRINT_0306_0001_0006_docs_tasks_md_vi.md b/docs/implplan/archived/SPRINT_0306_0001_0006_docs_tasks_md_vi.md similarity index 100% rename from docs/implplan/SPRINT_0306_0001_0006_docs_tasks_md_vi.md rename to docs/implplan/archived/SPRINT_0306_0001_0006_docs_tasks_md_vi.md diff --git a/docs/implplan/SPRINT_0317_0001_0001_docs_modules_concelier.md b/docs/implplan/archived/SPRINT_0317_0001_0001_docs_modules_concelier.md similarity index 100% rename from docs/implplan/SPRINT_0317_0001_0001_docs_modules_concelier.md rename to docs/implplan/archived/SPRINT_0317_0001_0001_docs_modules_concelier.md diff --git a/docs/implplan/SPRINT_0500_0001_0001_ops_offline.md b/docs/implplan/archived/SPRINT_0500_0001_0001_ops_offline.md similarity index 100% rename from docs/implplan/SPRINT_0500_0001_0001_ops_offline.md rename to docs/implplan/archived/SPRINT_0500_0001_0001_ops_offline.md diff --git a/docs/implplan/SPRINT_0508_0001_0001_ops_offline_kit.md b/docs/implplan/archived/SPRINT_0508_0001_0001_ops_offline_kit.md similarity index 100% rename from docs/implplan/SPRINT_0508_0001_0001_ops_offline_kit.md rename to docs/implplan/archived/SPRINT_0508_0001_0001_ops_offline_kit.md diff --git a/docs/implplan/SPRINT_0509_0001_0001_samples.md b/docs/implplan/archived/SPRINT_0509_0001_0001_samples.md similarity index 100% rename from docs/implplan/SPRINT_0509_0001_0001_samples.md rename 
to docs/implplan/archived/SPRINT_0509_0001_0001_samples.md diff --git a/docs/implplan/SPRINT_3400_0001_0000_postgres_conversion_overview.md b/docs/implplan/archived/SPRINT_3400_0001_0000_postgres_conversion_overview.md similarity index 100% rename from docs/implplan/SPRINT_3400_0001_0000_postgres_conversion_overview.md rename to docs/implplan/archived/SPRINT_3400_0001_0000_postgres_conversion_overview.md diff --git a/docs/implplan/SPRINT_3400_0001_0001_postgres_foundations.md b/docs/implplan/archived/SPRINT_3400_0001_0001_postgres_foundations.md similarity index 100% rename from docs/implplan/SPRINT_3400_0001_0001_postgres_foundations.md rename to docs/implplan/archived/SPRINT_3400_0001_0001_postgres_foundations.md diff --git a/docs/implplan/SPRINT_3401_0001_0001_postgres_authority.md b/docs/implplan/archived/SPRINT_3401_0001_0001_postgres_authority.md similarity index 100% rename from docs/implplan/SPRINT_3401_0001_0001_postgres_authority.md rename to docs/implplan/archived/SPRINT_3401_0001_0001_postgres_authority.md diff --git a/docs/implplan/SPRINT_3402_0001_0001_postgres_scheduler.md b/docs/implplan/archived/SPRINT_3402_0001_0001_postgres_scheduler.md similarity index 100% rename from docs/implplan/SPRINT_3402_0001_0001_postgres_scheduler.md rename to docs/implplan/archived/SPRINT_3402_0001_0001_postgres_scheduler.md diff --git a/docs/implplan/SPRINT_3403_0001_0001_postgres_notify.md b/docs/implplan/archived/SPRINT_3403_0001_0001_postgres_notify.md similarity index 100% rename from docs/implplan/SPRINT_3403_0001_0001_postgres_notify.md rename to docs/implplan/archived/SPRINT_3403_0001_0001_postgres_notify.md diff --git a/docs/implplan/SPRINT_3404_0001_0001_postgres_policy.md b/docs/implplan/archived/SPRINT_3404_0001_0001_postgres_policy.md similarity index 100% rename from docs/implplan/SPRINT_3404_0001_0001_postgres_policy.md rename to docs/implplan/archived/SPRINT_3404_0001_0001_postgres_policy.md diff --git 
a/docs/implplan/SPRINT_3405_0001_0001_postgres_vulnerabilities.md b/docs/implplan/archived/SPRINT_3405_0001_0001_postgres_vulnerabilities.md similarity index 100% rename from docs/implplan/SPRINT_3405_0001_0001_postgres_vulnerabilities.md rename to docs/implplan/archived/SPRINT_3405_0001_0001_postgres_vulnerabilities.md diff --git a/docs/implplan/SPRINT_3406_0001_0001_postgres_vex_graph.md b/docs/implplan/archived/SPRINT_3406_0001_0001_postgres_vex_graph.md similarity index 100% rename from docs/implplan/SPRINT_3406_0001_0001_postgres_vex_graph.md rename to docs/implplan/archived/SPRINT_3406_0001_0001_postgres_vex_graph.md diff --git a/docs/implplan/SPRINT_3408_0001_0001_postgres_migration_lifecycle.md b/docs/implplan/archived/SPRINT_3408_0001_0001_postgres_migration_lifecycle.md similarity index 100% rename from docs/implplan/SPRINT_3408_0001_0001_postgres_migration_lifecycle.md rename to docs/implplan/archived/SPRINT_3408_0001_0001_postgres_migration_lifecycle.md diff --git a/docs/implplan/SPRINT_3409_0001_0001_issuer_directory_postgres.md b/docs/implplan/archived/SPRINT_3409_0001_0001_issuer_directory_postgres.md similarity index 100% rename from docs/implplan/SPRINT_3409_0001_0001_issuer_directory_postgres.md rename to docs/implplan/archived/SPRINT_3409_0001_0001_issuer_directory_postgres.md diff --git a/docs/implplan/tasks-all.md b/docs/implplan/tasks-all.md index 93e60fbbc..b085723c0 100644 --- a/docs/implplan/tasks-all.md +++ b/docs/implplan/tasks-all.md @@ -1196,7 +1196,7 @@ | MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. 
| MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 | | MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 | | MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | -| MIRROR-CRT-58-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | +| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | | MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 | | NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. | Needs replay requirements from DORR0101 | SCNA0101 | | NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. 
| Wait for NOTY0103 approval payload schema | NOWB0101 | @@ -3414,7 +3414,7 @@ | MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 | | MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 | | MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | -| MIRROR-CRT-58-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | +| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 | | MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 | | NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. 
| Needs replay requirements from DORR0101 | SCNA0101 | | NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. | Wait for NOTY0103 approval payload schema | NOWB0101 | diff --git a/docs/modules/export-center/openapi/export-center.v1.yaml b/docs/modules/export-center/openapi/export-center.v1.yaml new file mode 100644 index 000000000..71219e54a --- /dev/null +++ b/docs/modules/export-center/openapi/export-center.v1.yaml @@ -0,0 +1,663 @@ +openapi: 3.0.3 +info: + title: StellaOps ExportCenter API + version: 1.0.0 + description: >- + Export profiles, runs, and deterministic bundle downloads for air-gap and offline deployments. + Supports attestation exports, mirror bundles, bootstrap packs, and portable evidence bundles. + contact: + name: StellaOps Exporter Service Guild + x-stella-oas-revision: '2025-12-07' +servers: + - url: https://{env}.export.api.stellaops.local + description: Default environment-scoped host + variables: + env: + default: prod + enum: [dev, staging, prod, airgap] + - url: https://export.{region}.offline.bundle + description: Offline bundle host for air-gapped deployments + variables: + region: + default: local + enum: [local] +security: + - bearerAuth: [] + - mTLS: [] +paths: + /.well-known/openapi: + get: + summary: OpenAPI discovery endpoint + operationId: getOpenApiDiscovery + tags: [discovery] + security: [] + responses: + '200': + description: OpenAPI specification document + headers: + ETag: + description: SHA-256 hash of the OAS document + schema: + type: string + example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"' + X-Export-Oas-Version: + description: OAS version identifier + schema: + type: string + example: 'v1' + Last-Modified: + description: OAS document last modification time + schema: + type: string + format: date-time + example: '2025-01-01T00:00:00Z' + Cache-Control: + description: 
Cache directive + schema: + type: string + example: 'private, must-revalidate' + content: + application/yaml: + schema: + type: string + application/json: + schema: + type: object + + /v1/exports/profiles: + get: + summary: List available export profiles + operationId: listExportProfiles + tags: [profiles] + parameters: + - name: kind + in: query + description: Filter by profile kind + schema: + type: string + enum: [attestation, mirror, bootstrap, airgap-evidence] + - name: limit + in: query + description: Maximum number of profiles to return + schema: + type: integer + default: 50 + maximum: 200 + - name: cursor + in: query + description: Pagination cursor from previous response + schema: + type: string + responses: + '200': + description: List of export profiles + headers: + X-Stella-Quota-Remaining: + schema: + type: integer + content: + application/json: + schema: + $ref: '#/components/schemas/ExportProfilePage' + example: + profiles: + - id: 'profile-attestation-v1' + kind: 'attestation' + description: 'Export attestation bundles with DSSE envelopes' + version: 'v1' + retentionDays: 90 + - id: 'profile-mirror-full' + kind: 'mirror' + description: 'Full mirror bundle with all advisories' + version: 'v1' + retentionDays: 365 + cursor: null + hasMore: false + '401': + $ref: '#/components/responses/Unauthorized' + + /v1/exports/runs: + get: + summary: List export runs + operationId: listExportRuns + tags: [runs] + parameters: + - $ref: '#/components/parameters/TenantId' + - name: profileId + in: query + description: Filter by export profile + schema: + type: string + - name: status + in: query + description: Filter by status + schema: + type: string + enum: [pending, running, completed, failed] + - name: limit + in: query + schema: + type: integer + default: 50 + maximum: 200 + - name: cursor + in: query + schema: + type: string + responses: + '200': + description: List of export runs + content: + application/json: + schema: + $ref: 
'#/components/schemas/ExportRunPage' + '401': + $ref: '#/components/responses/Unauthorized' + + /v1/exports/airgap/evidence/{bundleId}: + post: + summary: Create portable evidence export + operationId: createEvidenceExport + tags: [evidence] + parameters: + - name: bundleId + in: path + required: true + description: Source evidence bundle identifier + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '202': + description: Export request accepted + content: + application/json: + schema: + $ref: '#/components/schemas/ExportStatus' + example: + exportId: '01234567-89ab-cdef-0123-456789abcdef' + profileId: 'profile-airgap-evidence-v1' + status: 'pending' + bundleId: 'fedcba98-7654-3210-fedc-ba9876543210' + createdAt: '2025-01-01T00:00:00Z' + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + '429': + $ref: '#/components/responses/RateLimited' + + /v1/exports/airgap/evidence/{exportId}: + get: + summary: Get evidence export status + operationId: getEvidenceExportStatus + tags: [evidence] + parameters: + - name: exportId + in: path + required: true + description: Export run identifier + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '200': + description: Export status + headers: + ETag: + description: Status document hash + schema: + type: string + Last-Modified: + description: Status last update time + schema: + type: string + format: date-time + content: + application/json: + schema: + $ref: '#/components/schemas/ExportStatus' + example: + exportId: '01234567-89ab-cdef-0123-456789abcdef' + profileId: 'profile-airgap-evidence-v1' + status: 'completed' + bundleId: 'fedcba98-7654-3210-fedc-ba9876543210' + artifactSha256: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef' + rootHash: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210' + portableVersion: 'v1' + createdAt: 
'2025-01-01T00:00:00Z' + completedAt: '2025-01-01T00:01:00Z' + downloadUri: '/v1/exports/airgap/evidence/01234567-89ab-cdef-0123-456789abcdef/download' + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + + /v1/exports/airgap/evidence/{exportId}/download: + get: + summary: Download evidence export bundle + operationId: downloadEvidenceExport + tags: [evidence] + parameters: + - name: exportId + in: path + required: true + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '200': + description: Portable evidence bundle archive + headers: + ETag: + description: Archive SHA-256 hash + schema: + type: string + example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"' + Last-Modified: + description: Archive creation time + schema: + type: string + format: date-time + Content-Disposition: + description: Suggested filename + schema: + type: string + example: 'attachment; filename="export-portable-bundle-v1.tgz"' + Cache-Control: + schema: + type: string + example: 'private, must-revalidate' + content: + application/gzip: + schema: + type: string + format: binary + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + + /v1/exports/attestations/{attestationId}: + post: + summary: Create attestation export + operationId: createAttestationExport + tags: [attestations] + parameters: + - name: attestationId + in: path + required: true + description: Source attestation identifier + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '202': + description: Export request accepted + content: + application/json: + schema: + $ref: '#/components/schemas/ExportStatus' + example: + exportId: '11111111-1111-1111-1111-111111111111' + profileId: 'profile-attestation-v1' + status: 'pending' + attestationId: '22222222-2222-2222-2222-222222222222' + createdAt: 
'2025-01-01T00:00:00Z' + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + '429': + $ref: '#/components/responses/RateLimited' + + /v1/exports/attestations/{exportId}: + get: + summary: Get attestation export status + operationId: getAttestationExportStatus + tags: [attestations] + parameters: + - name: exportId + in: path + required: true + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '200': + description: Export status + headers: + ETag: + schema: + type: string + Last-Modified: + schema: + type: string + format: date-time + content: + application/json: + schema: + $ref: '#/components/schemas/ExportStatus' + example: + exportId: '11111111-1111-1111-1111-111111111111' + profileId: 'profile-attestation-v1' + status: 'completed' + attestationId: '22222222-2222-2222-2222-222222222222' + artifactSha256: 'sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789' + rootHash: 'sha256:9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba' + statementDigest: 'sha256:1111111111111111111111111111111111111111111111111111111111111111' + createdAt: '2025-01-01T00:00:00Z' + completedAt: '2025-01-01T00:01:00Z' + downloadUri: '/v1/exports/attestations/11111111-1111-1111-1111-111111111111/download' + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + + /v1/exports/attestations/{exportId}/download: + get: + summary: Download attestation export bundle + operationId: downloadAttestationExport + tags: [attestations] + parameters: + - name: exportId + in: path + required: true + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '200': + description: Attestation bundle archive + headers: + ETag: + description: Archive SHA-256 hash + schema: + type: string + example: '"sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789"' + 
Last-Modified: + schema: + type: string + format: date-time + Content-Disposition: + schema: + type: string + example: 'attachment; filename="export-attestation-bundle-v1.tgz"' + Cache-Control: + schema: + type: string + example: 'private, must-revalidate' + content: + application/gzip: + schema: + type: string + format: binary + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + + /v1/exports/runs/{exportId}/events: + get: + summary: Get export run events (stub) + operationId: getExportRunEvents + tags: [runs] + x-stub: true + description: >- + Timeline/event stream pointer for export run progress. Returns pointer to + notification/event stream when notifications are enabled. Stub until event + envelopes fully land. + parameters: + - name: exportId + in: path + required: true + schema: + type: string + format: uuid + - $ref: '#/components/parameters/TenantId' + responses: + '200': + description: Event stream reference + content: + application/json: + schema: + type: object + properties: + exportId: + type: string + format: uuid + eventStreamUri: + type: string + format: uri + status: + type: string + enum: [available, not-configured] + '401': + $ref: '#/components/responses/Unauthorized' + '404': + $ref: '#/components/responses/NotFound' + +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + description: OAuth2 access token with export scopes + mTLS: + type: mutualTLS + description: Mutual TLS client certificate authentication + + parameters: + TenantId: + name: X-Stella-Tenant-Id + in: header + required: true + description: Tenant identifier for multi-tenant scoping + schema: + type: string + format: uuid + + schemas: + ExportProfile: + type: object + required: [id, kind, description, version, retentionDays] + properties: + id: + type: string + description: Unique profile identifier + example: 'profile-attestation-v1' + kind: + type: string + enum: [attestation, 
mirror, bootstrap, airgap-evidence] + description: Profile type + description: + type: string + description: Human-readable profile description + version: + type: string + description: Profile schema version + example: 'v1' + retentionDays: + type: integer + description: Number of days exports are retained + example: 90 + + ExportProfilePage: + type: object + required: [profiles, hasMore] + properties: + profiles: + type: array + items: + $ref: '#/components/schemas/ExportProfile' + cursor: + type: string + nullable: true + description: Pagination cursor for next page + hasMore: + type: boolean + description: Whether more results are available + + ExportStatus: + type: object + required: [exportId, profileId, status, createdAt] + properties: + exportId: + type: string + format: uuid + description: Unique export run identifier + profileId: + type: string + description: Associated export profile + status: + type: string + enum: [pending, running, completed, failed] + description: Current export status + artifactSha256: + type: string + nullable: true + description: SHA-256 hash of the exported artifact + example: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef' + rootHash: + type: string + nullable: true + description: Merkle root hash of bundle contents + example: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210' + portableVersion: + type: string + nullable: true + description: Portable bundle format version + attestationId: + type: string + format: uuid + nullable: true + description: Source attestation identifier (for attestation exports) + bundleId: + type: string + format: uuid + nullable: true + description: Source bundle identifier (for evidence exports) + statementDigest: + type: string + nullable: true + description: SHA-256 of in-toto statement (for attestation exports) + createdAt: + type: string + format: date-time + description: Export creation timestamp (ISO 8601) + example: '2025-01-01T00:00:00Z' + 
completedAt: + type: string + format: date-time + nullable: true + description: Export completion timestamp (ISO 8601) + downloadUri: + type: string + format: uri + nullable: true + description: Relative URI for downloading the export artifact + + ExportRunPage: + type: object + required: [runs, hasMore] + properties: + runs: + type: array + items: + $ref: '#/components/schemas/ExportStatus' + cursor: + type: string + nullable: true + hasMore: + type: boolean + + ErrorEnvelope: + type: object + required: [error] + properties: + error: + type: object + required: [code, message, correlationId] + properties: + code: + type: string + description: Machine-readable error code + example: 'EXPORT_NOT_FOUND' + message: + type: string + description: Human-readable error message + example: 'Export with the specified ID was not found' + correlationId: + type: string + format: uuid + description: Request correlation ID for tracing + retryAfterSeconds: + type: integer + nullable: true + description: Suggested retry delay for rate-limited requests + + responses: + Unauthorized: + description: Authentication required or invalid credentials + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorEnvelope' + example: + error: + code: 'UNAUTHORIZED' + message: 'Valid authentication credentials required' + correlationId: '00000000-0000-0000-0000-000000000000' + + NotFound: + description: Resource not found + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorEnvelope' + example: + error: + code: 'NOT_FOUND' + message: 'The requested resource was not found' + correlationId: '00000000-0000-0000-0000-000000000000' + + RateLimited: + description: Rate limit exceeded + headers: + X-Stella-Quota-Remaining: + schema: + type: integer + example: 0 + Retry-After: + schema: + type: integer + example: 60 + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorEnvelope' + example: + error: + code: 'RATE_LIMITED' + message: 'Rate limit 
exceeded. Please retry after the specified delay.' + correlationId: '00000000-0000-0000-0000-000000000000' + retryAfterSeconds: 60 + +tags: + - name: discovery + description: OpenAPI discovery and metadata + - name: profiles + description: Export profile management + - name: runs + description: Export run management and status + - name: evidence + description: Portable evidence bundle exports + - name: attestations + description: Attestation bundle exports diff --git a/scripts/mirror/README.md b/scripts/mirror/README.md index 9ce38b7f7..5d9c2922e 100644 --- a/scripts/mirror/README.md +++ b/scripts/mirror/README.md @@ -7,5 +7,6 @@ - `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used. - `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag). - `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks. +- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`). Artifacts live under `out/mirror/thin/`. 
diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs index fc021b839..d320783ae 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs @@ -235,6 +235,10 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable } var privateKeyBytes = LoadSm2KeyBytes(key); + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["source"] = "config" + }; var signingKey = new CryptoSigningKey( new CryptoKeyReference(providerKeyId, providerName), normalizedAlgorithm, diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs index 4a3b34033..ad6de423e 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs @@ -120,6 +120,95 @@ public sealed class AttestorVerificationServiceTests Assert.Equal("missing", verifyResult.Report.Transparency.WitnessStatus); } + [Fact] + public async Task VerifyAsync_KmsBundle_Passes_WhenTwoSignaturesRequired() + { + var options = Options.Create(new AttestorOptions + { + Redis = new AttestorOptions.RedisOptions { Url = string.Empty }, + Rekor = new AttestorOptions.RekorOptions + { + Primary = new AttestorOptions.RekorBackendOptions + { + Url = "https://rekor.stellaops.test", + ProofTimeoutMs = 1000, + PollIntervalMs = 50, + MaxAttempts = 2 + } + }, + Security = new AttestorOptions.SecurityOptions + { + SignerIdentity = new AttestorOptions.SignerIdentityOptions + { + Mode = { "kms" }, + KmsKeys = { HmacSecretBase64 
} + } + }, + Verification = new AttestorOptions.VerificationOptions + { + MinimumSignatures = 2, + PolicyId = "policy/dual-sign" + } + }); + + using var metrics = new AttestorMetrics(); + using var activitySource = new AttestorActivitySource(); + var canonicalizer = new DefaultDsseCanonicalizer(); + var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger.Instance); + var repository = new InMemoryAttestorEntryRepository(); + var dedupeStore = new InMemoryAttestorDedupeStore(); + var rekorClient = new StubRekorClient(new NullLogger()); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var submissionService = new AttestorSubmissionService( + new AttestorSubmissionValidator(canonicalizer), + repository, + dedupeStore, + rekorClient, + new NullTransparencyWitnessClient(), + archiveStore, + auditSink, + new NullVerificationCache(), + options, + new NullLogger(), + TimeProvider.System, + metrics); + + var submission = CreateSubmissionRequestWithTwoSignatures(canonicalizer, HmacSecret); + var context = new SubmissionContext + { + CallerSubject = "urn:stellaops:signer", + CallerAudience = "attestor", + CallerClientId = "signer-service", + CallerTenant = "default" + }; + + var response = await submissionService.SubmitAsync(submission, context); + + var verificationService = new AttestorVerificationService( + repository, + canonicalizer, + rekorClient, + new NullTransparencyWitnessClient(), + engine, + options, + new NullLogger(), + metrics, + activitySource, + TimeProvider.System); + + var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest + { + Uuid = response.Uuid, + Bundle = submission.Bundle + }); + + Assert.True(verifyResult.Ok); + Assert.Equal(VerificationSectionStatus.Pass, verifyResult.Report!.Signatures.Status); + Assert.Equal(2, verifyResult.Report.Signatures.VerifiedSignatures); + Assert.Equal(2, 
verifyResult.Report.Signatures.RequiredSignatures); + } + [Fact] public async Task VerifyAsync_FlagsTamperedBundle() { @@ -262,6 +351,32 @@ public sealed class AttestorVerificationServiceTests return request; } + private static AttestorSubmissionRequest CreateSubmissionRequestWithTwoSignatures(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret) + { + var request = CreateSubmissionRequest(canonicalizer, hmacSecret); + + // Recompute signature and append a second copy to satisfy multi-signature verification + if (!TryDecodeBase64(request.Bundle.Dsse.PayloadBase64, out var payload)) + { + throw new InvalidOperationException("Test payload failed to decode."); + } + + var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload); + using (var hmac = new HMACSHA256(hmacSecret)) + { + var signature = hmac.ComputeHash(preAuth); + request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature + { + KeyId = "kms-test-2", + Signature = Convert.ToBase64String(signature) + }); + } + + var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult(); + request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + return request; + } + private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source) { var clone = new AttestorSubmissionRequest.SubmissionBundle diff --git a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs index 106618d72..164fd2184 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs @@ -77,6 +77,7 @@ internal static class CommandFactory root.Add(BuildSdkCommand(services, verboseOption, cancellationToken)); root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken)); root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken)); + root.Add(BuildDevPortalCommand(services, 
verboseOption, cancellationToken)); root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken)); var pluginLogger = loggerFactory.CreateLogger(); @@ -10632,5 +10633,53 @@ internal static class CommandFactory return airgap; } + + private static Command BuildDevPortalCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var devportal = new Command("devportal", "Manage DevPortal offline operations."); + + // devportal verify (DVOFF-64-002) + var verify = new Command("verify", "Verify integrity of a DevPortal/evidence bundle before import."); + + var bundleOption = new Option("--bundle", new[] { "-b" }) + { + Description = "Path to the bundle .tgz file.", + Required = true + }; + + var offlineOption = new Option("--offline") + { + Description = "Skip TSA verification and online checks." + }; + + var jsonOption = new Option("--json") + { + Description = "Output results in JSON format." + }; + + verify.Add(bundleOption); + verify.Add(offlineOption); + verify.Add(jsonOption); + + verify.SetAction((parseResult, _) => + { + var bundlePath = parseResult.GetValue(bundleOption)!; + var offline = parseResult.GetValue(offlineOption); + var json = parseResult.GetValue(jsonOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleDevPortalVerifyAsync( + services, + bundlePath, + offline, + json, + verbose, + cancellationToken); + }); + + devportal.Add(verify); + + return devportal; + } } diff --git a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs index 3633b5ccf..140d256b7 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs @@ -27,17 +27,17 @@ using StellaOps.Cli.Configuration; using StellaOps.Cli.Output; using StellaOps.Cli.Prompts; using StellaOps.Cli.Services; -using StellaOps.Cli.Services.Models; -using 
StellaOps.Cli.Services.Models.AdvisoryAi; -using StellaOps.Cli.Services.Models.Bun; -using StellaOps.Cli.Services.Models.Ruby; -using StellaOps.Cli.Telemetry; -using StellaOps.Cryptography; -using StellaOps.Cryptography.DependencyInjection; -using StellaOps.Cryptography.Kms; -using StellaOps.Policy.Scoring; -using StellaOps.Policy.Scoring.Engine; -using StellaOps.Policy.Scoring.Policies; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.AdvisoryAi; +using StellaOps.Cli.Services.Models.Bun; +using StellaOps.Cli.Services.Models.Ruby; +using StellaOps.Cli.Telemetry; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Cryptography.Kms; +using StellaOps.Policy.Scoring; +using StellaOps.Policy.Scoring.Engine; +using StellaOps.Policy.Scoring.Policies; using StellaOps.Scanner.Analyzers.Lang; using StellaOps.Scanner.Analyzers.Lang.Java; using StellaOps.Scanner.Analyzers.Lang.Node; @@ -70,17 +70,17 @@ internal static class CommandHandlers /// /// JSON serializer options for output (alias for JsonOptions). /// - private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions; - - private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web) - { - WriteIndented = true - }; + private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions; + + private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; /// /// Sets the verbosity level for logging. 
/// - private static void SetVerbosity(IServiceProvider services, bool verbose) + private static void SetVerbosity(IServiceProvider services, bool verbose) { // Configure logging level based on verbose flag var loggerFactory = services.GetService(); @@ -90,215 +90,215 @@ internal static class CommandHandlers var logger = loggerFactory.CreateLogger("StellaOps.Cli.Commands.CommandHandlers"); logger.LogDebug("Verbose logging enabled"); } - } - - public static async Task HandleCvssScoreAsync( - IServiceProvider services, - string vulnerabilityId, - string policyPath, - string vector, - bool json, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-score"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false); - var loader = new CvssPolicyLoader(); - var policyResult = loader.Load(policyJson, cancellationToken); - if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash)) - { - var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}")); - throw new InvalidOperationException($"Policy invalid: {errors}"); - } - - var policy = policyResult.Policy with { Hash = policyResult.Hash }; - - var engine = scope.ServiceProvider.GetRequiredService(); - var parsed = engine.ParseVector(vector); - - var client = scope.ServiceProvider.GetRequiredService(); - - var request = new CreateCvssReceipt( - vulnerabilityId, - policy, - parsed.BaseMetrics, - parsed.ThreatMetrics, - parsed.EnvironmentalMetrics, - parsed.SupplementalMetrics, - Array.Empty(), - SigningKey: null, - CreatedBy: "cli", - CreatedAt: DateTimeOffset.UtcNow); - - var receipt = await client.CreateReceiptAsync(request, 
cancellationToken).ConfigureAwait(false) - ?? throw new InvalidOperationException("CVSS receipt creation failed."); - - if (json) - { - Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson)); - } - else - { - Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}"); - Console.WriteLine($"Vector: {receipt.VectorString}"); - Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})"); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to create CVSS receipt"); - Environment.ExitCode = 1; - if (json) - { - var problem = new { error = "cvss_score_failed", message = ex.Message }; - Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson)); - } - } - } - - public static async Task HandleCvssShowAsync( - IServiceProvider services, - string receiptId, - bool json, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-show"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false); - if (receipt is null) - { - Environment.ExitCode = 5; - Console.WriteLine(json - ? 
JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson) - : $"✖ Receipt {receiptId} not found"); - return; - } - - if (json) - { - Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson)); - } - else - { - Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}"); - Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}"); - Console.WriteLine($"Vector: {receipt.VectorString}"); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId); - Environment.ExitCode = 1; - } - } - - public static async Task HandleCvssHistoryAsync( - IServiceProvider services, - string receiptId, - bool json, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-history"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false); - if (json) - { - Console.WriteLine(JsonSerializer.Serialize(history, CompactJson)); - } - else - { - if (history.Count == 0) - { - Console.WriteLine("(no history)"); - } - else - { - foreach (var entry in history.OrderBy(h => h.Timestamp)) - { - Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? 
""} ({entry.Reason})"); - } - } - } - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId); - Environment.ExitCode = 1; - } - } - - public static async Task HandleCvssExportAsync( - IServiceProvider services, - string receiptId, - string format, - string? output, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-export"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false); - if (receipt is null) - { - Environment.ExitCode = 5; - Console.WriteLine($"✖ Receipt {receiptId} not found"); - return; - } - - if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase)) - { - Environment.ExitCode = 9; - Console.WriteLine("Only json export is supported at this time."); - return; - } - - var targetPath = string.IsNullOrWhiteSpace(output) - ? 
$"cvss-receipt-{receipt.ReceiptId}.json" - : output!; - - var jsonPayload = JsonSerializer.Serialize(receipt, CompactJson); - await File.WriteAllTextAsync(targetPath, jsonPayload, cancellationToken).ConfigureAwait(false); - - Console.WriteLine($"✔ Exported receipt to {targetPath}"); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId); - Environment.ExitCode = 1; - } - } + } + + public static async Task HandleCvssScoreAsync( + IServiceProvider services, + string vulnerabilityId, + string policyPath, + string vector, + bool json, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-score"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false); + var loader = new CvssPolicyLoader(); + var policyResult = loader.Load(policyJson, cancellationToken); + if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash)) + { + var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}")); + throw new InvalidOperationException($"Policy invalid: {errors}"); + } + + var policy = policyResult.Policy with { Hash = policyResult.Hash }; + + var engine = scope.ServiceProvider.GetRequiredService(); + var parsed = engine.ParseVector(vector); + + var client = scope.ServiceProvider.GetRequiredService(); + + var request = new CreateCvssReceipt( + vulnerabilityId, + policy, + parsed.BaseMetrics, + parsed.ThreatMetrics, + parsed.EnvironmentalMetrics, + parsed.SupplementalMetrics, + Array.Empty(), + SigningKey: null, + CreatedBy: "cli", + CreatedAt: DateTimeOffset.UtcNow); + + var receipt = await 
client.CreateReceiptAsync(request, cancellationToken).ConfigureAwait(false) + ?? throw new InvalidOperationException("CVSS receipt creation failed."); + + if (json) + { + Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson)); + } + else + { + Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}"); + Console.WriteLine($"Vector: {receipt.VectorString}"); + Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})"); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to create CVSS receipt"); + Environment.ExitCode = 1; + if (json) + { + var problem = new { error = "cvss_score_failed", message = ex.Message }; + Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson)); + } + } + } + + public static async Task HandleCvssShowAsync( + IServiceProvider services, + string receiptId, + bool json, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-show"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var client = scope.ServiceProvider.GetRequiredService(); + var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false); + if (receipt is null) + { + Environment.ExitCode = 5; + Console.WriteLine(json + ? 
JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson) + : $"✖ Receipt {receiptId} not found"); + return; + } + + if (json) + { + Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson)); + } + else + { + Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}"); + Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}"); + Console.WriteLine($"Vector: {receipt.VectorString}"); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId); + Environment.ExitCode = 1; + } + } + + public static async Task HandleCvssHistoryAsync( + IServiceProvider services, + string receiptId, + bool json, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-history"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var client = scope.ServiceProvider.GetRequiredService(); + var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false); + if (json) + { + Console.WriteLine(JsonSerializer.Serialize(history, CompactJson)); + } + else + { + if (history.Count == 0) + { + Console.WriteLine("(no history)"); + } + else + { + foreach (var entry in history.OrderBy(h => h.Timestamp)) + { + Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? 
""} ({entry.Reason})"); + } + } + } + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId); + Environment.ExitCode = 1; + } + } + + public static async Task HandleCvssExportAsync( + IServiceProvider services, + string receiptId, + string format, + string? output, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("cvss-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var client = scope.ServiceProvider.GetRequiredService(); + var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false); + if (receipt is null) + { + Environment.ExitCode = 5; + Console.WriteLine($"✖ Receipt {receiptId} not found"); + return; + } + + if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase)) + { + Environment.ExitCode = 9; + Console.WriteLine("Only json export is supported at this time."); + return; + } + + var targetPath = string.IsNullOrWhiteSpace(output) + ? $"cvss-receipt-{receipt.ReceiptId}.json" + : output!; + + var jsonPayload = JsonSerializer.Serialize(receipt, CompactJson); + await File.WriteAllTextAsync(targetPath, jsonPayload, cancellationToken).ConfigureAwait(false); + + Console.WriteLine($"✔ Exported receipt to {targetPath}"); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId); + Environment.ExitCode = 1; + } + } private static async Task VerifyBundleAsync(string path, ILogger logger, CancellationToken cancellationToken) { @@ -29676,4 +29676,105 @@ stella policy test {policyName}.stella } #endregion + + #region DevPortal Commands + + /// + /// Handler for 'stella devportal verify' command (DVOFF-64-002). 
+ /// Verifies integrity of a DevPortal/evidence bundle before import. + /// Exit codes: 0 success, 2 checksum mismatch, 3 signature failure, 4 TSA missing, 5 unexpected. + /// + public static async Task HandleDevPortalVerifyAsync( + IServiceProvider services, + string bundlePath, + bool offline, + bool emitJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var loggerFactory = scope.ServiceProvider.GetRequiredService(); + var logger = loggerFactory.CreateLogger(); + var verifier = new DevPortalBundleVerifier(logger); + + using var activity = CliActivitySource.Instance.StartActivity("cli.devportal.verify", System.Diagnostics.ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "devportal verify"); + activity?.SetTag("stellaops.cli.devportal.offline", offline); + using var duration = CliMetrics.MeasureCommandDuration("devportal verify"); + + try + { + var resolvedPath = Path.GetFullPath(bundlePath); + + if (verbose) + { + AnsiConsole.MarkupLine($"[grey]Verifying bundle: {Markup.Escape(resolvedPath)}[/]"); + if (offline) + { + AnsiConsole.MarkupLine("[grey]Mode: offline (TSA verification skipped)[/]"); + } + } + + var result = await verifier.VerifyBundleAsync(resolvedPath, offline, cancellationToken) + .ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.devportal.status", result.Status); + activity?.SetTag("stellaops.cli.devportal.exit_code", (int)result.ExitCode); + + if (emitJson) + { + Console.WriteLine(result.ToJson()); + } + else + { + if (result.ExitCode == DevPortalVerifyExitCode.Success) + { + AnsiConsole.MarkupLine("[green]Bundle verification successful.[/]"); + AnsiConsole.MarkupLine($" Bundle ID: {Markup.Escape(result.BundleId ?? "unknown")}"); + AnsiConsole.MarkupLine($" Root Hash: {Markup.Escape(result.RootHash ?? "unknown")}"); + AnsiConsole.MarkupLine($" Entries: {result.Entries}"); + AnsiConsole.MarkupLine($" Created: {result.CreatedAt?.ToString("O") ?? 
"unknown"}"); + AnsiConsole.MarkupLine($" Portable: {(result.Portable ? "yes" : "no")}"); + } + else + { + AnsiConsole.MarkupLine($"[red]Bundle verification failed:[/] {Markup.Escape(result.ErrorMessage ?? "Unknown error")}"); + if (!string.IsNullOrEmpty(result.ErrorDetail)) + { + AnsiConsole.MarkupLine($" [grey]{Markup.Escape(result.ErrorDetail)}[/]"); + } + } + } + + return (int)result.ExitCode; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + if (!emitJson) + { + AnsiConsole.MarkupLine("[yellow]Operation cancelled.[/]"); + } + return 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to verify bundle"); + + if (emitJson) + { + var errorResult = DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.Unexpected, + ex.Message); + Console.WriteLine(errorResult.ToJson()); + } + else + { + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + } + + return (int)DevPortalVerifyExitCode.Unexpected; + } + } + + #endregion } diff --git a/src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs b/src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs new file mode 100644 index 000000000..ac3c94977 --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs @@ -0,0 +1,533 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Security.Cryptography; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.Cli.Services.Models; + +namespace StellaOps.Cli.Services; + +/// +/// Verifier for attestation bundles exported from the Export Center. +/// Per EXPORT-ATTEST-75-001. 
+/// +internal sealed class AttestationBundleVerifier : IAttestationBundleVerifier +{ + private const string DsseEnvelopeFileName = "attestation.dsse.json"; + private const string StatementFileName = "statement.json"; + private const string TransparencyFileName = "transparency.ndjson"; + private const string MetadataFileName = "metadata.json"; + private const string ChecksumsFileName = "checksums.txt"; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + WriteIndented = true, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private readonly ILogger _logger; + + public AttestationBundleVerifier(ILogger logger) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task VerifyAsync( + AttestationBundleVerifyOptions options, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath); + + _logger.LogDebug("Verifying attestation bundle at {FilePath}, offline={Offline}", + options.FilePath, options.Offline); + + // Step 1: Check bundle exists + if (!File.Exists(options.FilePath)) + { + return CreateFailedResult( + AttestationBundleExitCodes.FileNotFound, + "Bundle file not found", + options.FilePath); + } + + // Step 2: Verify SHA-256 against .sha256 file if present + var sha256Path = options.FilePath + ".sha256"; + if (File.Exists(sha256Path)) + { + var checksumResult = await VerifyBundleChecksumAsync(options.FilePath, sha256Path, cancellationToken) + .ConfigureAwait(false); + if (!checksumResult.IsValid) + { + return CreateFailedResult( + AttestationBundleExitCodes.ChecksumMismatch, + "SHA-256 checksum mismatch", + options.FilePath, + $"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}"); + } + } + else + { + _logger.LogDebug("No co-located .sha256 file found for external checksum verification"); + } + + // 
Step 3: Extract and parse bundle contents + BundleContents contents; + try + { + contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException) + { + _logger.LogError(ex, "Failed to extract bundle contents"); + return CreateFailedResult( + AttestationBundleExitCodes.FormatError, + "Failed to extract bundle contents", + options.FilePath, + ex.Message); + } + + // Step 4: Verify internal checksums from checksums.txt + if (contents.ChecksumsText is not null) + { + var internalCheckResult = VerifyInternalChecksums(contents); + if (!internalCheckResult.Success) + { + return CreateFailedResult( + AttestationBundleExitCodes.ChecksumMismatch, + "Internal checksum verification failed", + options.FilePath, + internalCheckResult.ErrorMessage); + } + } + + // Step 5: Verify DSSE signature + var signatureValid = VerifyDsseSignature(contents, options.Offline, out var signatureError); + if (!signatureValid && !string.IsNullOrEmpty(signatureError)) + { + return CreateFailedResult( + AttestationBundleExitCodes.SignatureFailure, + "DSSE signature verification failed", + options.FilePath, + signatureError); + } + + // Step 6: Check transparency entries (only if not offline and verifyTransparency is true) + if (!options.Offline && options.VerifyTransparency) + { + if (string.IsNullOrWhiteSpace(contents.TransparencyNdjson)) + { + return CreateFailedResult( + AttestationBundleExitCodes.MissingTransparency, + "Transparency log entry missing", + options.FilePath, + "Bundle requires transparency.ndjson when not in offline mode"); + } + } + + // Step 7: Build success result + var metadata = contents.Metadata; + var subjects = ExtractSubjects(contents); + + return new AttestationBundleVerifyResult( + Success: true, + Status: "verified", + ExportId: metadata?.ExportId, + AttestationId: metadata?.AttestationId, + RootHash: FormatRootHash(metadata?.RootHash), + 
Subjects: subjects, + PredicateType: ExtractPredicateType(contents), + StatementVersion: metadata?.StatementVersion, + BundlePath: options.FilePath, + ExitCode: AttestationBundleExitCodes.Success); + } + + public async Task ImportAsync( + AttestationBundleImportOptions options, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath); + + _logger.LogDebug("Importing attestation bundle from {FilePath}", options.FilePath); + + // First verify the bundle + var verifyOptions = new AttestationBundleVerifyOptions( + options.FilePath, + options.Offline, + options.VerifyTransparency, + options.TrustRootPath); + + var verifyResult = await VerifyAsync(verifyOptions, cancellationToken).ConfigureAwait(false); + if (!verifyResult.Success) + { + return new AttestationBundleImportResult( + Success: false, + Status: "verification_failed", + AttestationId: verifyResult.AttestationId, + TenantId: null, + Namespace: options.Namespace, + RootHash: verifyResult.RootHash, + ErrorMessage: verifyResult.ErrorMessage, + ExitCode: verifyResult.ExitCode); + } + + // Extract metadata for import + BundleContents contents; + try + { + contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + return new AttestationBundleImportResult( + Success: false, + Status: "extraction_failed", + AttestationId: null, + TenantId: null, + Namespace: options.Namespace, + RootHash: null, + ErrorMessage: ex.Message, + ExitCode: AttestationBundleExitCodes.ImportFailed); + } + + var metadata = contents.Metadata; + var tenantId = options.Tenant ?? 
metadata?.TenantId; + + // Import is a local-only operation for air-gap scenarios + // The actual import to backend would happen via separate API call + _logger.LogInformation("Attestation bundle imported: {AttestationId} for tenant {TenantId}", + metadata?.AttestationId, tenantId); + + return new AttestationBundleImportResult( + Success: true, + Status: "imported", + AttestationId: metadata?.AttestationId, + TenantId: tenantId, + Namespace: options.Namespace, + RootHash: FormatRootHash(metadata?.RootHash), + ExitCode: AttestationBundleExitCodes.Success); + } + + private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync( + string bundlePath, + string sha256Path, + CancellationToken cancellationToken) + { + // Read expected hash from .sha256 file + var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false); + var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant(); + + if (string.IsNullOrEmpty(expectedHash)) + { + return (false, null, null); + } + + // Compute actual hash + await using var stream = File.OpenRead(bundlePath); + var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant(); + + return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash); + } + + private async Task ExtractBundleContentsAsync( + string bundlePath, + CancellationToken cancellationToken) + { + var contents = new BundleContents(); + + await using var fileStream = File.OpenRead(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); + using var tarReader = new TarReader(gzipStream); + + TarEntry? 
entry; + while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null) + { + if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null) + { + continue; + } + + using var memoryStream = new MemoryStream(); + await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false); + var data = memoryStream.ToArray(); + var text = System.Text.Encoding.UTF8.GetString(data); + + switch (entry.Name) + { + case DsseEnvelopeFileName: + contents.DsseEnvelopeJson = text; + contents.DsseEnvelopeBytes = data; + contents.DsseEnvelope = JsonSerializer.Deserialize(text, SerializerOptions); + break; + case StatementFileName: + contents.StatementJson = text; + contents.StatementBytes = data; + contents.Statement = JsonSerializer.Deserialize(text, SerializerOptions); + break; + case TransparencyFileName: + contents.TransparencyNdjson = text; + contents.TransparencyBytes = data; + break; + case MetadataFileName: + contents.MetadataJson = text; + contents.MetadataBytes = data; + contents.Metadata = JsonSerializer.Deserialize(text, SerializerOptions); + break; + case ChecksumsFileName: + contents.ChecksumsText = text; + break; + } + } + + return contents; + } + + private (bool Success, string? ErrorMessage) VerifyInternalChecksums(BundleContents contents) + { + if (string.IsNullOrWhiteSpace(contents.ChecksumsText)) + { + return (true, null); + } + + var lines = contents.ChecksumsText.Split('\n', StringSplitOptions.RemoveEmptyEntries); + foreach (var line in lines) + { + // Skip comments + if (line.TrimStart().StartsWith('#')) + { + continue; + } + + // Parse "hash filename" format + var parts = line.Split(new[] { ' ' }, 2, StringSplitOptions.RemoveEmptyEntries); + if (parts.Length != 2) + { + continue; + } + + var expectedHash = parts[0].Trim().ToLowerInvariant(); + var fileName = parts[1].Trim(); + + byte[]? 
fileBytes = fileName switch + { + DsseEnvelopeFileName => contents.DsseEnvelopeBytes, + StatementFileName => contents.StatementBytes, + TransparencyFileName => contents.TransparencyBytes, + MetadataFileName => contents.MetadataBytes, + _ => null + }; + + if (fileBytes is null) + { + // File not found in bundle - could be optional + if (fileName == TransparencyFileName) + { + continue; // transparency.ndjson is optional + } + + return (false, $"File '{fileName}' referenced in checksums but not found in bundle"); + } + + var actualHash = Convert.ToHexString(SHA256.HashData(fileBytes)).ToLowerInvariant(); + if (!string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase)) + { + return (false, $"Checksum mismatch for '{fileName}': expected {expectedHash}, got {actualHash}"); + } + } + + return (true, null); + } + + private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error) + { + error = null; + + if (contents.DsseEnvelope is null || string.IsNullOrEmpty(contents.DsseEnvelope.Payload)) + { + error = "DSSE envelope not found or has no payload"; + return false; + } + + // Verify payload matches statement + if (contents.StatementJson is not null) + { + try + { + var payloadBytes = Convert.FromBase64String(contents.DsseEnvelope.Payload); + var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes); + + // Compare parsed JSON to handle whitespace differences + using var statementDoc = JsonDocument.Parse(contents.StatementJson); + using var payloadDoc = JsonDocument.Parse(payloadJson); + + // Check _type field matches + var statementType = statementDoc.RootElement.TryGetProperty("_type", out var sType) + ? sType.GetString() + : null; + var payloadType = payloadDoc.RootElement.TryGetProperty("_type", out var pType) + ? 
pType.GetString() + : null; + + if (!string.Equals(statementType, payloadType, StringComparison.Ordinal)) + { + error = "DSSE payload does not match statement _type"; + return false; + } + } + catch (FormatException ex) + { + error = $"Invalid DSSE payload encoding: {ex.Message}"; + return false; + } + catch (JsonException ex) + { + error = $"Invalid DSSE payload JSON: {ex.Message}"; + return false; + } + } + + // In offline mode, we don't verify the actual cryptographic signature + // (would require access to signing keys/certificates) + if (offline) + { + _logger.LogDebug("Offline mode: skipping cryptographic signature verification"); + return true; + } + + // Check that signatures exist + if (contents.DsseEnvelope.Signatures is null || contents.DsseEnvelope.Signatures.Count == 0) + { + error = "DSSE envelope has no signatures"; + return false; + } + + // Online signature verification would require access to trust roots + // For now, we trust the signature if payload matches and signatures exist + return true; + } + + private static IReadOnlyList? ExtractSubjects(BundleContents contents) + { + if (contents.Statement?.Subject is null || contents.Statement.Subject.Count == 0) + { + // Fall back to metadata subjects + if (contents.Metadata?.SubjectDigests is not null) + { + return contents.Metadata.SubjectDigests + .Select(s => $"{s.Name}@{s.Algorithm}:{s.Digest}") + .ToList(); + } + return null; + } + + return contents.Statement.Subject + .Select(s => + { + var digest = s.Digest?.FirstOrDefault(); + return digest.HasValue + ? $"{s.Name}@{digest.Value.Key}:{digest.Value.Value}" + : s.Name ?? "unknown"; + }) + .ToList(); + } + + private static string? ExtractPredicateType(BundleContents contents) + { + return contents.Statement?.PredicateType ?? contents.DsseEnvelope?.PayloadType; + } + + private static string? FormatRootHash(string? 
rootHash) + { + if (string.IsNullOrWhiteSpace(rootHash)) + { + return null; + } + + return rootHash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? rootHash + : $"sha256:{rootHash}"; + } + + private static AttestationBundleVerifyResult CreateFailedResult( + int exitCode, + string message, + string bundlePath, + string? detail = null) + => new( + Success: false, + Status: "failed", + ExportId: null, + AttestationId: null, + RootHash: null, + Subjects: null, + PredicateType: null, + StatementVersion: null, + BundlePath: bundlePath, + ErrorMessage: detail ?? message, + ExitCode: exitCode); + + private sealed class BundleContents + { + public string? DsseEnvelopeJson { get; set; } + public byte[]? DsseEnvelopeBytes { get; set; } + public DsseEnvelope? DsseEnvelope { get; set; } + + public string? StatementJson { get; set; } + public byte[]? StatementBytes { get; set; } + public InTotoStatement? Statement { get; set; } + + public string? TransparencyNdjson { get; set; } + public byte[]? TransparencyBytes { get; set; } + + public string? MetadataJson { get; set; } + public byte[]? MetadataBytes { get; set; } + public AttestationBundleMetadata? Metadata { get; set; } + + public string? ChecksumsText { get; set; } + } + + private sealed class DsseEnvelope + { + public string? PayloadType { get; set; } + public string? Payload { get; set; } + public IReadOnlyList? Signatures { get; set; } + } + + private sealed class DsseSignature + { + public string? KeyId { get; set; } + public string? Sig { get; set; } + } + + private sealed class InTotoStatement + { + public string? Type { get; set; } + public string? PredicateType { get; set; } + public IReadOnlyList? Subject { get; set; } + } + + private sealed class InTotoSubject + { + public string? Name { get; set; } + public Dictionary? Digest { get; set; } + } + + private sealed record AttestationBundleMetadata( + string? Version, + string? ExportId, + string? AttestationId, + string? TenantId, + DateTimeOffset? 
CreatedAtUtc, + string? RootHash, + string? SourceUri, + string? StatementVersion, + IReadOnlyList? SubjectDigests); + + private sealed record AttestationSubjectDigest( + string? Name, + string? Digest, + string? Algorithm); +} diff --git a/src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs b/src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs new file mode 100644 index 000000000..91088392d --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs @@ -0,0 +1,380 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Security.Cryptography; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Cli.Services; + +/// +/// Verifier for EvidenceLocker sealed bundles used in DevPortal offline verification. +/// Per DVOFF-64-002. +/// +internal sealed class DevPortalBundleVerifier : IDevPortalBundleVerifier +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + WriteIndented = true, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private readonly ILogger _logger; + + public DevPortalBundleVerifier(ILogger logger) + { + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task VerifyBundleAsync( + string bundlePath, + bool offline, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath); + + _logger.LogDebug("Verifying DevPortal bundle at {BundlePath}, offline={Offline}", bundlePath, offline); + + // Step 1: Check bundle exists + if (!File.Exists(bundlePath)) + { + return DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.Unexpected, + "Bundle file not found", + bundlePath); + } + + // Step 2: Validate SHA-256 against .sha256 file if present + var sha256Path = bundlePath + ".sha256"; + if (File.Exists(sha256Path)) + { + var checksumResult = await VerifyBundleChecksumAsync(bundlePath, sha256Path, cancellationToken) + .ConfigureAwait(false); + if (!checksumResult.IsValid) + { + return DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.ChecksumMismatch, + "SHA-256 checksum mismatch", + $"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}"); + } + } + else + { + _logger.LogDebug("No .sha256 file found, skipping checksum verification"); + } + + // Step 3: Extract and parse bundle contents + BundleContents contents; + try + { + contents = await ExtractBundleContentsAsync(bundlePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException) + { + _logger.LogError(ex, "Failed to extract bundle contents"); + return DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.Unexpected, + "Failed to extract bundle contents", + ex.Message); + } + + // Step 4: Verify DSSE signature + var signatureValid = VerifyDsseSignature(contents, offline, out var signatureError); + if (!signatureValid && !string.IsNullOrEmpty(signatureError)) + { + return DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.SignatureFailure, + "DSSE signature verification failed", + signatureError); + } 
+ + // Step 5: Verify TSA (only if not offline) + if (!offline && contents.Signature is not null) + { + if (string.IsNullOrEmpty(contents.Signature.TimestampAuthority) || + string.IsNullOrEmpty(contents.Signature.TimestampToken)) + { + return DevPortalBundleVerificationResult.Failed( + DevPortalVerifyExitCode.TsaMissing, + "RFC3161 timestamp missing", + "Bundle requires timestamping when not in offline mode"); + } + } + + // Step 6: Build success result + return new DevPortalBundleVerificationResult + { + Status = "verified", + BundleId = contents.Manifest?.BundleId ?? contents.BundleMetadata?.BundleId, + RootHash = contents.BundleMetadata?.RootHash is not null + ? $"sha256:{contents.BundleMetadata.RootHash}" + : null, + Entries = contents.Manifest?.Entries?.Count ?? 0, + CreatedAt = contents.Manifest?.CreatedAt ?? contents.BundleMetadata?.CreatedAt, + Portable = contents.BundleMetadata?.PortableGeneratedAt is not null, + ExitCode = DevPortalVerifyExitCode.Success + }; + } + + private async Task<(bool IsValid, string? ExpectedHash, string? 
ActualHash)> VerifyBundleChecksumAsync( + string bundlePath, + string sha256Path, + CancellationToken cancellationToken) + { + // Read expected hash from .sha256 file + var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false); + var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant(); + + if (string.IsNullOrEmpty(expectedHash)) + { + return (false, null, null); + } + + // Compute actual hash + await using var stream = File.OpenRead(bundlePath); + var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant(); + + return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash); + } + + private async Task ExtractBundleContentsAsync( + string bundlePath, + CancellationToken cancellationToken) + { + var contents = new BundleContents(); + + await using var fileStream = File.OpenRead(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); + using var tarReader = new TarReader(gzipStream); + + TarEntry? 
entry; + while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null) + { + if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null) + { + continue; + } + + using var memoryStream = new MemoryStream(); + await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false); + var json = System.Text.Encoding.UTF8.GetString(memoryStream.ToArray()); + + switch (entry.Name) + { + case "manifest.json": + contents.ManifestJson = json; + contents.Manifest = JsonSerializer.Deserialize(json, SerializerOptions); + break; + case "signature.json": + contents.SignatureJson = json; + contents.Signature = JsonSerializer.Deserialize(json, SerializerOptions); + break; + case "bundle.json": + contents.BundleMetadataJson = json; + contents.BundleMetadata = JsonSerializer.Deserialize(json, SerializerOptions); + break; + case "checksums.txt": + contents.ChecksumsText = json; + break; + } + } + + return contents; + } + + private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error) + { + error = null; + + if (contents.Signature is null || string.IsNullOrEmpty(contents.Signature.Payload)) + { + error = "Signature not found in bundle"; + return false; + } + + // Verify payload matches manifest + if (contents.ManifestJson is not null) + { + try + { + var payloadBytes = Convert.FromBase64String(contents.Signature.Payload); + var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes); + + // Compare parsed JSON to handle whitespace differences + using var manifestDoc = JsonDocument.Parse(contents.ManifestJson); + using var payloadDoc = JsonDocument.Parse(payloadJson); + + var manifestBundleId = manifestDoc.RootElement.TryGetProperty("bundleId", out var mId) + ? mId.GetString() + : null; + var payloadBundleId = payloadDoc.RootElement.TryGetProperty("bundleId", out var pId) + ? 
pId.GetString() + : null; + + if (!string.Equals(manifestBundleId, payloadBundleId, StringComparison.OrdinalIgnoreCase)) + { + error = "Signature payload does not match manifest bundleId"; + return false; + } + } + catch (FormatException ex) + { + error = $"Invalid signature payload encoding: {ex.Message}"; + return false; + } + catch (JsonException ex) + { + error = $"Invalid signature payload JSON: {ex.Message}"; + return false; + } + } + + // In offline mode, we don't verify the actual cryptographic signature + // (would require access to signing keys/certificates) + if (offline) + { + _logger.LogDebug("Offline mode: skipping cryptographic signature verification"); + return true; + } + + // Online signature verification would go here + // For now, we trust the signature if payload matches + return true; + } + + private sealed class BundleContents + { + public string? ManifestJson { get; set; } + public BundleManifest? Manifest { get; set; } + public string? SignatureJson { get; set; } + public BundleSignature? Signature { get; set; } + public string? BundleMetadataJson { get; set; } + public BundleMetadataDocument? BundleMetadata { get; set; } + public string? ChecksumsText { get; set; } + } + + private sealed class BundleManifest + { + public string? BundleId { get; set; } + public string? TenantId { get; set; } + public int Kind { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public Dictionary? Metadata { get; set; } + public List? Entries { get; set; } + } + + private sealed class BundleManifestEntry + { + public string? Section { get; set; } + public string? CanonicalPath { get; set; } + public string? Sha256 { get; set; } + public long SizeBytes { get; set; } + public string? MediaType { get; set; } + } + + private sealed class BundleSignature + { + public string? PayloadType { get; set; } + public string? Payload { get; set; } + public string? Signature { get; set; } + public string? KeyId { get; set; } + public string? 
Algorithm { get; set; } + public string? Provider { get; set; } + public DateTimeOffset? SignedAt { get; set; } + public DateTimeOffset? TimestampedAt { get; set; } + public string? TimestampAuthority { get; set; } + public string? TimestampToken { get; set; } + } + + private sealed class BundleMetadataDocument + { + public string? BundleId { get; set; } + public string? TenantId { get; set; } + public int Kind { get; set; } + public int Status { get; set; } + public string? RootHash { get; set; } + public string? StorageKey { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public DateTimeOffset? SealedAt { get; set; } + public DateTimeOffset? PortableGeneratedAt { get; set; } + } +} + +/// +/// Exit codes for DevPortal bundle verification per DVOFF-64-002. +/// +public enum DevPortalVerifyExitCode +{ + /// Verification successful. + Success = 0, + + /// SHA-256 checksum mismatch. + ChecksumMismatch = 2, + + /// DSSE signature verification failed. + SignatureFailure = 3, + + /// RFC3161 timestamp missing (when not offline). + TsaMissing = 4, + + /// Unexpected error. + Unexpected = 5 +} + +/// +/// Result of DevPortal bundle verification. +/// +public sealed class DevPortalBundleVerificationResult +{ + public string Status { get; set; } = "failed"; + public string? BundleId { get; set; } + public string? RootHash { get; set; } + public int Entries { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public bool Portable { get; set; } + public DevPortalVerifyExitCode ExitCode { get; set; } = DevPortalVerifyExitCode.Unexpected; + public string? ErrorMessage { get; set; } + public string? ErrorDetail { get; set; } + + public static DevPortalBundleVerificationResult Failed( + DevPortalVerifyExitCode exitCode, + string message, + string? 
detail = null) + => new() + { + Status = "failed", + ExitCode = exitCode, + ErrorMessage = message, + ErrorDetail = detail + }; + + public string ToJson() + { + var options = new JsonSerializerOptions + { + WriteIndented = false, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + + // Build output with sorted keys + var output = new SortedDictionary(StringComparer.Ordinal); + + if (BundleId is not null) + output["bundleId"] = BundleId; + if (CreatedAt.HasValue) + output["createdAt"] = CreatedAt.Value.ToString("O"); + output["entries"] = Entries; + if (ErrorDetail is not null) + output["errorDetail"] = ErrorDetail; + if (ErrorMessage is not null) + output["errorMessage"] = ErrorMessage; + output["portable"] = Portable; + if (RootHash is not null) + output["rootHash"] = RootHash; + output["status"] = Status; + + return JsonSerializer.Serialize(output, options); + } +} diff --git a/src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs b/src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs new file mode 100644 index 000000000..6f2163ca6 --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs @@ -0,0 +1,29 @@ +using StellaOps.Cli.Services.Models; + +namespace StellaOps.Cli.Services; + +/// +/// Interface for attestation bundle verification. +/// +public interface IAttestationBundleVerifier +{ + /// + /// Verifies an attestation bundle exported from the Export Center. + /// + /// Verification options. + /// Cancellation token. + /// Verification result with status and exit code. + Task VerifyAsync( + AttestationBundleVerifyOptions options, + CancellationToken cancellationToken); + + /// + /// Imports an attestation bundle into the local system. + /// + /// Import options. + /// Cancellation token. + /// Import result with status and exit code. 
+ Task ImportAsync( + AttestationBundleImportOptions options, + CancellationToken cancellationToken); +} diff --git a/src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs b/src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs new file mode 100644 index 000000000..b3b65d1a1 --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs @@ -0,0 +1,19 @@ +namespace StellaOps.Cli.Services; + +/// +/// Interface for DevPortal bundle verification. +/// +public interface IDevPortalBundleVerifier +{ + /// + /// Verifies a DevPortal/EvidenceLocker sealed bundle. + /// + /// Path to the bundle .tgz file. + /// If true, skip TSA verification and online checks. + /// Cancellation token. + /// Verification result with status and exit code. + Task VerifyBundleAsync( + string bundlePath, + bool offline, + CancellationToken cancellationToken); +} diff --git a/src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs b/src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs new file mode 100644 index 000000000..a00ee7dfe --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs @@ -0,0 +1,126 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +/// +/// Options for attestation bundle verification. +/// +public sealed record AttestationBundleVerifyOptions( + string FilePath, + bool Offline = false, + bool VerifyTransparency = true, + string? TrustRootPath = null); + +/// +/// Options for attestation bundle import. +/// +public sealed record AttestationBundleImportOptions( + string FilePath, + string? Tenant = null, + string? Namespace = null, + bool Offline = false, + bool VerifyTransparency = true, + string? TrustRootPath = null); + +/// +/// Result of attestation bundle verification. +/// +public sealed record AttestationBundleVerifyResult( + bool Success, + string Status, + string? ExportId, + string? AttestationId, + string? RootHash, + IReadOnlyList? Subjects, + string? 
PredicateType, + string? StatementVersion, + string BundlePath, + string? ErrorMessage = null, + int ExitCode = 0); + +/// +/// Result of attestation bundle import. +/// +public sealed record AttestationBundleImportResult( + bool Success, + string Status, + string? AttestationId, + string? TenantId, + string? Namespace, + string? RootHash, + string? ErrorMessage = null, + int ExitCode = 0); + +/// +/// JSON output for attestation bundle verify command. +/// +public sealed record AttestationBundleVerifyJson( + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("exportId")] string? ExportId, + [property: JsonPropertyName("attestationId")] string? AttestationId, + [property: JsonPropertyName("rootHash")] string? RootHash, + [property: JsonPropertyName("subjects")] IReadOnlyList? Subjects, + [property: JsonPropertyName("predicateType")] string? PredicateType, + [property: JsonPropertyName("bundlePath")] string BundlePath); + +/// +/// JSON output for attestation bundle import command. +/// +public sealed record AttestationBundleImportJson( + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("attestationId")] string? AttestationId, + [property: JsonPropertyName("tenantId")] string? TenantId, + [property: JsonPropertyName("namespace")] string? Namespace, + [property: JsonPropertyName("rootHash")] string? RootHash); + +/// +/// Exit codes for attestation bundle commands. +/// +public static class AttestationBundleExitCodes +{ + /// Success. + public const int Success = 0; + + /// General failure. + public const int GeneralFailure = 1; + + /// Checksum mismatch. + public const int ChecksumMismatch = 2; + + /// DSSE signature verification failure. + public const int SignatureFailure = 3; + + /// Missing required TSA/CT log entry. + public const int MissingTransparency = 4; + + /// Archive or file format error. + public const int FormatError = 5; + + /// File not found. 
+ public const int FileNotFound = 6; + + /// Import failed. + public const int ImportFailed = 7; +} + +/// +/// Metadata parsed from an attestation bundle. +/// +internal sealed record AttestationBundleMetadata( + string? Version, + string? ExportId, + string? AttestationId, + string? TenantId, + DateTimeOffset? CreatedAtUtc, + string? RootHash, + string? SourceUri, + string? StatementVersion, + IReadOnlyList? SubjectDigests); + +/// +/// Subject digest from attestation bundle metadata. +/// +internal sealed record AttestationBundleSubjectDigest( + string? Name, + string? Digest, + string? Algorithm); diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/AttestationBundleVerifierTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/AttestationBundleVerifierTests.cs new file mode 100644 index 000000000..457703750 --- /dev/null +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/AttestationBundleVerifierTests.cs @@ -0,0 +1,406 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Cli.Services; +using StellaOps.Cli.Services.Models; + +namespace StellaOps.Cli.Tests; + +public sealed class AttestationBundleVerifierTests : IDisposable +{ + private readonly string _tempDir; + private readonly AttestationBundleVerifier _verifier; + + public AttestationBundleVerifierTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"attest-bundle-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + + _verifier = new AttestationBundleVerifier(NullLogger.Instance); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public async Task VerifyAsync_FileNotFound_ReturnsFileNotFoundCode() + { + var options = new AttestationBundleVerifyOptions( + Path.Combine(_tempDir, "nonexistent.tgz"), + Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + 
Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.FileNotFound, result.ExitCode); + } + + [Fact] + public async Task VerifyAsync_ValidBundle_ReturnsSuccess() + { + var bundlePath = await CreateValidBundleAsync(); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.True(result.Success); + Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode); + Assert.Equal("verified", result.Status); + } + + [Fact] + public async Task VerifyAsync_ValidBundle_ReturnsMetadata() + { + var bundlePath = await CreateValidBundleAsync(); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.True(result.Success); + Assert.NotNull(result.ExportId); + Assert.NotNull(result.AttestationId); + Assert.NotNull(result.RootHash); + Assert.StartsWith("sha256:", result.RootHash); + } + + [Fact] + public async Task VerifyAsync_CorruptedArchive_ReturnsFormatError() + { + var bundlePath = Path.Combine(_tempDir, "corrupted.tgz"); + await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not a valid tgz")); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.FormatError, result.ExitCode); + } + + [Fact] + public async Task VerifyAsync_ChecksumMismatch_ReturnsChecksumMismatchCode() + { + var bundlePath = await CreateBundleWithBadChecksumAsync(); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode); + } + + [Fact] + public async Task 
VerifyAsync_ExternalChecksumMismatch_ReturnsChecksumMismatchCode() + { + var bundlePath = await CreateValidBundleAsync(); + var checksumPath = bundlePath + ".sha256"; + await File.WriteAllTextAsync(checksumPath, "0000000000000000000000000000000000000000000000000000000000000000 " + Path.GetFileName(bundlePath)); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode); + } + + [Fact] + public async Task VerifyAsync_MissingTransparency_WhenNotOffline_ReturnsMissingTransparencyCode() + { + var bundlePath = await CreateBundleWithoutTransparencyAsync(); + + var options = new AttestationBundleVerifyOptions( + bundlePath, + Offline: false, + VerifyTransparency: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.MissingTransparency, result.ExitCode); + } + + [Fact] + public async Task VerifyAsync_MissingTransparency_WhenOffline_ReturnsSuccess() + { + var bundlePath = await CreateBundleWithoutTransparencyAsync(); + + var options = new AttestationBundleVerifyOptions( + bundlePath, + Offline: true, + VerifyTransparency: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.True(result.Success); + Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode); + } + + [Fact] + public async Task VerifyAsync_MissingDssePayload_ReturnsSignatureFailure() + { + var bundlePath = await CreateBundleWithMissingDssePayloadAsync(); + + var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true); + + var result = await _verifier.VerifyAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal(AttestationBundleExitCodes.SignatureFailure, result.ExitCode); + } + + [Fact] + public 
async Task ImportAsync_ValidBundle_ReturnsSuccess() + { + var bundlePath = await CreateValidBundleAsync(); + + var options = new AttestationBundleImportOptions( + bundlePath, + Tenant: "test-tenant", + Namespace: "test-namespace", + Offline: true); + + var result = await _verifier.ImportAsync(options, CancellationToken.None); + + Assert.True(result.Success); + Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode); + Assert.Equal("imported", result.Status); + } + + [Fact] + public async Task ImportAsync_InvalidBundle_ReturnsVerificationFailed() + { + var bundlePath = Path.Combine(_tempDir, "invalid.tgz"); + await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not valid")); + + var options = new AttestationBundleImportOptions( + bundlePath, + Tenant: "test-tenant", + Offline: true); + + var result = await _verifier.ImportAsync(options, CancellationToken.None); + + Assert.False(result.Success); + Assert.Equal("verification_failed", result.Status); + } + + [Fact] + public async Task ImportAsync_InheritsTenantFromMetadata() + { + var bundlePath = await CreateValidBundleAsync(); + + var options = new AttestationBundleImportOptions( + bundlePath, + Tenant: null, // Not specified + Offline: true); + + var result = await _verifier.ImportAsync(options, CancellationToken.None); + + Assert.True(result.Success); + Assert.NotNull(result.TenantId); // Should come from bundle metadata + } + + private async Task CreateValidBundleAsync() + { + var bundlePath = Path.Combine(_tempDir, $"valid-bundle-{Guid.NewGuid():N}.tgz"); + var exportId = Guid.NewGuid().ToString("D"); + var attestationId = Guid.NewGuid().ToString("D"); + var tenantId = Guid.NewGuid().ToString("D"); + + // Create statement JSON + var statement = new + { + _type = "https://in-toto.io/Statement/v1", + predicateType = "https://stellaops.io/attestations/vuln-scan/v1", + subject = new[] + { + new { name = "test-image:latest", digest = new Dictionary { ["sha256"] = "abc123" } } + }, + predicate = 
new { } + }; + var statementJson = JsonSerializer.Serialize(statement); + var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson)); + + // Create DSSE envelope + var dsse = new + { + payloadType = "application/vnd.in-toto+json", + payload = statementBase64, + signatures = new[] + { + new { keyid = "key-001", sig = "fake-signature-for-test" } + } + }; + var dsseJson = JsonSerializer.Serialize(dsse); + + // Create metadata + var metadata = new + { + version = "attestation-bundle/v1", + exportId, + attestationId, + tenantId, + createdAtUtc = DateTimeOffset.UtcNow.ToString("O"), + rootHash = "abc123def456", + statementVersion = "v1" + }; + var metadataJson = JsonSerializer.Serialize(metadata); + + // Create transparency entries + var transparencyNdjson = "{\"logIndex\":1,\"logId\":\"test\"}\n"; + + // Calculate checksums + var dsseHash = ComputeHash(dsseJson); + var statementHash = ComputeHash(statementJson); + var metadataHash = ComputeHash(metadataJson); + var transparencyHash = ComputeHash(transparencyNdjson); + + var checksums = new StringBuilder(); + checksums.AppendLine("# Attestation bundle checksums (sha256)"); + checksums.AppendLine($"{dsseHash} attestation.dsse.json"); + checksums.AppendLine($"{metadataHash} metadata.json"); + checksums.AppendLine($"{statementHash} statement.json"); + checksums.AppendLine($"{transparencyHash} transparency.ndjson"); + var checksumsText = checksums.ToString(); + + // Create archive + await using var fileStream = File.Create(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize); + await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); + + await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson); + await WriteEntryAsync(tarWriter, "checksums.txt", checksumsText); + await WriteEntryAsync(tarWriter, "metadata.json", metadataJson); + await WriteEntryAsync(tarWriter, "statement.json", statementJson); + await 
WriteEntryAsync(tarWriter, "transparency.ndjson", transparencyNdjson); + + return bundlePath; + } + + private async Task CreateBundleWithoutTransparencyAsync() + { + var bundlePath = Path.Combine(_tempDir, $"no-transparency-{Guid.NewGuid():N}.tgz"); + + var statement = new + { + _type = "https://in-toto.io/Statement/v1", + predicateType = "https://stellaops.io/attestations/vuln-scan/v1", + subject = new[] { new { name = "test", digest = new Dictionary { ["sha256"] = "abc" } } } + }; + var statementJson = JsonSerializer.Serialize(statement); + var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson)); + + var dsse = new + { + payloadType = "application/vnd.in-toto+json", + payload = statementBase64, + signatures = new[] { new { keyid = "key-001", sig = "fake-sig" } } + }; + var dsseJson = JsonSerializer.Serialize(dsse); + + var metadata = new + { + version = "attestation-bundle/v1", + exportId = Guid.NewGuid().ToString("D"), + attestationId = Guid.NewGuid().ToString("D"), + tenantId = Guid.NewGuid().ToString("D"), + rootHash = "abc123" + }; + var metadataJson = JsonSerializer.Serialize(metadata); + + var dsseHash = ComputeHash(dsseJson); + var statementHash = ComputeHash(statementJson); + var metadataHash = ComputeHash(metadataJson); + + var checksums = $"# Checksums\n{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n"; + + await using var fileStream = File.Create(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize); + await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); + + await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson); + await WriteEntryAsync(tarWriter, "checksums.txt", checksums); + await WriteEntryAsync(tarWriter, "metadata.json", metadataJson); + await WriteEntryAsync(tarWriter, "statement.json", statementJson); + // No transparency.ndjson + + return bundlePath; + } + + private async Task 
CreateBundleWithBadChecksumAsync() + { + var bundlePath = Path.Combine(_tempDir, $"bad-checksum-{Guid.NewGuid():N}.tgz"); + + var dsseJson = "{\"payloadType\":\"test\",\"payload\":\"dGVzdA==\",\"signatures\":[{\"keyid\":\"k\",\"sig\":\"s\"}]}"; + var statementJson = "{\"_type\":\"test\"}"; + var metadataJson = "{\"version\":\"v1\"}"; + + // Intentionally wrong checksum + var checksums = "0000000000000000000000000000000000000000000000000000000000000000 attestation.dsse.json\n"; + + await using var fileStream = File.Create(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize); + await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); + + await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson); + await WriteEntryAsync(tarWriter, "checksums.txt", checksums); + await WriteEntryAsync(tarWriter, "metadata.json", metadataJson); + await WriteEntryAsync(tarWriter, "statement.json", statementJson); + + return bundlePath; + } + + private async Task CreateBundleWithMissingDssePayloadAsync() + { + var bundlePath = Path.Combine(_tempDir, $"no-dsse-payload-{Guid.NewGuid():N}.tgz"); + + // DSSE without payload + var dsseJson = "{\"payloadType\":\"test\",\"signatures\":[]}"; + var statementJson = "{\"_type\":\"test\"}"; + var metadataJson = "{\"version\":\"v1\"}"; + + var dsseHash = ComputeHash(dsseJson); + var statementHash = ComputeHash(statementJson); + var metadataHash = ComputeHash(metadataJson); + var checksums = $"{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n"; + + await using var fileStream = File.Create(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize); + await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); + + await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson); + await WriteEntryAsync(tarWriter, "checksums.txt", checksums); + await 
WriteEntryAsync(tarWriter, "metadata.json", metadataJson); + await WriteEntryAsync(tarWriter, "statement.json", statementJson); + + return bundlePath; + } + + private static async Task WriteEntryAsync(TarWriter writer, string name, string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + using var dataStream = new MemoryStream(bytes); + var entry = new PaxTarEntry(TarEntryType.RegularFile, name) + { + DataStream = dataStream + }; + await writer.WriteEntryAsync(entry); + } + + private static string ComputeHash(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = System.Security.Cryptography.SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/Services/DevPortalBundleVerifierTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/DevPortalBundleVerifierTests.cs new file mode 100644 index 000000000..9436f8feb --- /dev/null +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/DevPortalBundleVerifierTests.cs @@ -0,0 +1,316 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Cli.Services; +using Xunit; + +namespace StellaOps.Cli.Tests.Services; + +public sealed class DevPortalBundleVerifierTests : IDisposable +{ + private readonly string _tempDir; + private readonly DevPortalBundleVerifier _verifier; + + public DevPortalBundleVerifierTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"devportal-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + _verifier = new DevPortalBundleVerifier(NullLogger.Instance); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public async Task VerifyBundleAsync_ReturnsSuccess_ForValidBundle() + { + var bundlePath = CreateValidBundle(); + + var result = await 
_verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None); + + Assert.Equal("verified", result.Status); + Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode); + Assert.Equal("a1b2c3d4-e5f6-7890-abcd-ef1234567890", result.BundleId); + Assert.NotNull(result.RootHash); + Assert.True(result.RootHash!.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)); + Assert.Equal(1, result.Entries); + } + + [Fact] + public async Task VerifyBundleAsync_ReturnsUnexpected_WhenBundleNotFound() + { + var nonExistentPath = Path.Combine(_tempDir, "nonexistent.tgz"); + + var result = await _verifier.VerifyBundleAsync(nonExistentPath, offline: true, CancellationToken.None); + + Assert.Equal("failed", result.Status); + Assert.Equal(DevPortalVerifyExitCode.Unexpected, result.ExitCode); + Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task VerifyBundleAsync_ReturnsChecksumMismatch_WhenSha256DoesNotMatch() + { + var bundlePath = CreateValidBundle(); + var sha256Path = bundlePath + ".sha256"; + + // Write incorrect hash + await File.WriteAllTextAsync(sha256Path, "0000000000000000000000000000000000000000000000000000000000000000 bundle.tgz"); + + var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None); + + Assert.Equal("failed", result.Status); + Assert.Equal(DevPortalVerifyExitCode.ChecksumMismatch, result.ExitCode); + } + + [Fact] + public async Task VerifyBundleAsync_SucceedsWithoutSha256File() + { + var bundlePath = CreateValidBundle(); + + // Remove .sha256 file if exists + var sha256Path = bundlePath + ".sha256"; + if (File.Exists(sha256Path)) + { + File.Delete(sha256Path); + } + + var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None); + + Assert.Equal("verified", result.Status); + Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode); + } + + [Fact] + public async Task 
VerifyBundleAsync_ReturnsTsaMissing_WhenOnlineAndNoTimestamp() + { + var bundlePath = CreateBundleWithoutTimestamp(); + + var result = await _verifier.VerifyBundleAsync(bundlePath, offline: false, CancellationToken.None); + + Assert.Equal("failed", result.Status); + Assert.Equal(DevPortalVerifyExitCode.TsaMissing, result.ExitCode); + } + + [Fact] + public async Task VerifyBundleAsync_DetectsPortableBundle() + { + var bundlePath = CreatePortableBundle(); + + var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None); + + Assert.Equal("verified", result.Status); + Assert.True(result.Portable); + } + + [Fact] + public void ToJson_OutputsKeysSortedAlphabetically() + { + var result = new DevPortalBundleVerificationResult + { + Status = "verified", + BundleId = "test-id", + RootHash = "sha256:abc123", + Entries = 3, + CreatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), + Portable = false, + ExitCode = DevPortalVerifyExitCode.Success + }; + + var json = result.ToJson(); + + // Keys should be in alphabetical order + var keys = JsonDocument.Parse(json).RootElement.EnumerateObject() + .Select(p => p.Name) + .ToList(); + + var sortedKeys = keys.OrderBy(k => k, StringComparer.Ordinal).ToList(); + Assert.Equal(sortedKeys, keys); + } + + private string CreateValidBundle() + { + var bundlePath = Path.Combine(_tempDir, $"bundle-{Guid.NewGuid():N}.tgz"); + + var manifest = new + { + bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890", + tenantId = "00000000-0000-0000-0000-000000000001", + kind = 2, + createdAt = "2025-12-07T10:30:00Z", + metadata = new Dictionary { ["source"] = "test" }, + entries = new[] + { + new + { + section = "sbom", + canonicalPath = "sbom/cyclonedx.json", + sha256 = new string('a', 64), + sizeBytes = 1024, + mediaType = "application/vnd.cyclonedx+json" + } + } + }; + + var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = false }); + var manifestPayload = 
Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson)); + + var signature = new + { + payloadType = "application/vnd.stella.evidence.manifest+json", + payload = manifestPayload, + signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")), + keyId = "key-1", + algorithm = "ES256", + provider = "StellaOps", + signedAt = "2025-12-07T10:30:05Z", + timestampedAt = "2025-12-07T10:30:06Z", + timestampAuthority = "https://freetsa.org/tsr", + timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token")) + }; + + var bundleMetadata = new + { + bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890", + tenantId = "00000000-0000-0000-0000-000000000001", + kind = 2, + status = 3, + rootHash = new string('f', 64), + storageKey = "evidence/bundle.tgz", + createdAt = "2025-12-07T10:30:00Z", + sealedAt = "2025-12-07T10:30:05Z" + }; + + CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata); + + return bundlePath; + } + + private string CreateBundleWithoutTimestamp() + { + var bundlePath = Path.Combine(_tempDir, $"bundle-no-tsa-{Guid.NewGuid():N}.tgz"); + + var manifest = new + { + bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012", + tenantId = "00000000-0000-0000-0000-000000000001", + kind = 2, + createdAt = "2025-12-07T10:30:00Z", + entries = Array.Empty() + }; + + var manifestJson = JsonSerializer.Serialize(manifest); + var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson)); + + var signature = new + { + payloadType = "application/vnd.stella.evidence.manifest+json", + payload = manifestPayload, + signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")), + keyId = "key-1", + algorithm = "ES256", + provider = "StellaOps", + signedAt = "2025-12-07T10:30:05Z" + // No timestampedAt, timestampAuthority, timestampToken + }; + + var bundleMetadata = new + { + bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012", + tenantId = "00000000-0000-0000-0000-000000000001", + kind = 2, + status = 3, + 
rootHash = new string('e', 64), + storageKey = "evidence/bundle.tgz", + createdAt = "2025-12-07T10:30:00Z", + sealedAt = "2025-12-07T10:30:05Z" + }; + + CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata); + + return bundlePath; + } + + private string CreatePortableBundle() + { + var bundlePath = Path.Combine(_tempDir, $"portable-{Guid.NewGuid():N}.tgz"); + + var manifest = new + { + bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234", + kind = 1, + createdAt = "2025-12-07T10:30:00Z", + entries = Array.Empty() + }; + + var manifestJson = JsonSerializer.Serialize(manifest); + var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson)); + + var signature = new + { + payloadType = "application/vnd.stella.evidence.manifest+json", + payload = manifestPayload, + signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")), + keyId = "key-1", + algorithm = "ES256", + provider = "StellaOps", + signedAt = "2025-12-07T10:30:05Z", + timestampedAt = "2025-12-07T10:30:06Z", + timestampAuthority = "tsa.default", + timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token")) + }; + + var bundleMetadata = new + { + bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234", + kind = 1, + status = 3, + rootHash = new string('d', 64), + createdAt = "2025-12-07T10:30:00Z", + sealedAt = "2025-12-07T10:30:05Z", + portableGeneratedAt = "2025-12-07T10:35:00Z" // Indicates portable bundle + }; + + CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata); + + return bundlePath; + } + + private static void CreateTgzBundle(string bundlePath, string manifestJson, object signature, object bundleMetadata) + { + using var memoryStream = new MemoryStream(); + using (var gzipStream = new GZipStream(memoryStream, CompressionLevel.Optimal, leaveOpen: true)) + using (var tarWriter = new TarWriter(gzipStream)) + { + AddTarEntry(tarWriter, "manifest.json", manifestJson); + AddTarEntry(tarWriter, "signature.json", 
JsonSerializer.Serialize(signature)); + AddTarEntry(tarWriter, "bundle.json", JsonSerializer.Serialize(bundleMetadata)); + AddTarEntry(tarWriter, "checksums.txt", $"# checksums\n{new string('f', 64)} sbom/cyclonedx.json\n"); + } + + memoryStream.Position = 0; + using var fileStream = File.Create(bundlePath); + memoryStream.CopyTo(fileStream); + } + + private static void AddTarEntry(TarWriter writer, string name, string content) + { + var entry = new PaxTarEntry(TarEntryType.RegularFile, name) + { + Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead, + ModificationTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero) + }; + + var bytes = Encoding.UTF8.GetBytes(content); + entry.DataStream = new MemoryStream(bytes); + writer.WriteEntry(entry); + } +} diff --git a/src/Concelier/Directory.Build.props b/src/Concelier/Directory.Build.props new file mode 100644 index 000000000..d4cc1e5ad --- /dev/null +++ b/src/Concelier/Directory.Build.props @@ -0,0 +1,32 @@ + + + + true + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs index ee3172ff0..91ed8de23 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs @@ -30,7 +30,7 @@ public sealed class RawDocumentStorage string uri, byte[] content, string? contentType, - DateTimeOffset? expiresAt, + DateTimeOffset? ExpiresAt, CancellationToken cancellationToken, Guid? 
documentId = null) { diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs index 64feb440a..a6850bccf 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs @@ -418,7 +418,7 @@ public sealed class UbuntuConnector : IFeedConnector await _stateRepository.UpdateCursorAsync(SourceName, doc, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); } - private static string ComputeNoticeHash(BsonDocument document) + private string ComputeNoticeHash(BsonDocument document) { var bytes = document.ToBson(); var hash = _hash.ComputeHash(bytes, HashAlgorithms.Sha256); diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyAuthSignalFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyAuthSignalFactory.cs new file mode 100644 index 000000000..f460dc533 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyAuthSignalFactory.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Concelier.Core.Linksets +{ + public static class PolicyAuthSignalFactory + { + public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset) + { + if (linkset is null) throw new ArgumentNullException(nameof(linkset)); + + var subject = linkset.Normalized?.Purls?.FirstOrDefault() ?? linkset.AdvisoryId; + var evidenceUri = $"urn:linkset:{linkset.AdvisoryId}"; + + return new PolicyAuthSignal( + Id: linkset.AdvisoryId, + Tenant: linkset.TenantId, + Subject: subject ?? 
string.Empty, + Source: linkset.Source, + SignalType: "reachability", + Evidence: new[] + { + new PolicyAuthEvidence(evidenceUri) + }); + } + } + + public sealed record PolicyAuthSignal( + string Id, + string Tenant, + string Subject, + string Source, + string SignalType, + IReadOnlyList Evidence); + + public sealed record PolicyAuthEvidence(string Uri); +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/Bson.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/Bson.cs index 6dafdbc0e..6e0b4856e 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/Bson.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/Bson.cs @@ -1,248 +1,276 @@ -using System; using System.Collections; -using System.Text; +using System.Globalization; using System.Text.Json; namespace MongoDB.Bson { - public readonly struct ObjectId : IEquatable + public class BsonValue : IEquatable { - public Guid Value { get; } - public ObjectId(Guid value) => Value = value; - public ObjectId(string value) => Value = Guid.TryParse(value, out var g) ? g : Guid.Empty; - public static ObjectId GenerateNewId() => new(Guid.NewGuid()); - public static ObjectId Empty => new(Guid.Empty); - public bool Equals(ObjectId other) => Value.Equals(other.Value); - public override bool Equals(object? obj) => obj is ObjectId other && Equals(other); - public override int GetHashCode() => Value.GetHashCode(); - public override string ToString() => Value.ToString("N"); - public static bool operator ==(ObjectId left, ObjectId right) => left.Equals(right); - public static bool operator !=(ObjectId left, ObjectId right) => !left.Equals(right); - } + protected object? RawValue; - public enum BsonType { Document, Array, String, Boolean, Int32, Int64, Double, DateTime, Guid, Null } - - public class BsonValue - { - protected readonly object? _value; - public BsonValue(object? value) => _value = value; - internal object? 
RawValue => _value; - public static implicit operator BsonValue(string value) => new BsonString(value ?? string.Empty); - public static implicit operator BsonValue(bool value) => new BsonBoolean(value); - public static implicit operator BsonValue(int value) => new BsonInt32(value); - public static implicit operator BsonValue(long value) => new BsonInt64(value); - public static implicit operator BsonValue(double value) => new BsonDouble(value); - public static implicit operator BsonValue(DateTime value) => new BsonDateTime(DateTime.SpecifyKind(value, DateTimeKind.Utc)); - public static implicit operator BsonValue(DateTimeOffset value) => new BsonDateTime(value.UtcDateTime); - public static implicit operator BsonValue(Guid value) => new BsonString(value.ToString("D")); - public static BsonValue Create(object? value) => BsonDocument.WrapExternal(value); - public virtual BsonType BsonType => _value switch + public BsonValue(object? value = null) { - null => BsonType.Null, - BsonDocument => BsonType.Document, - BsonArray => BsonType.Array, - string => BsonType.String, - bool => BsonType.Boolean, - int => BsonType.Int32, - long => BsonType.Int64, - double => BsonType.Double, - DateTime => BsonType.DateTime, - DateTimeOffset => BsonType.DateTime, - Guid => BsonType.Guid, - _ => BsonType.Null - }; - public bool IsString => _value is string; - public bool IsBsonDocument => _value is BsonDocument; - public bool IsBsonArray => _value is BsonArray; - public bool IsBsonNull => _value is null; - public string AsString => _value?.ToString() ?? string.Empty; - public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException(); - public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException(); - public Guid AsGuid => _value is Guid g ? 
g : Guid.Empty; - public DateTime AsDateTime => _value switch - { - DateTimeOffset dto => dto.UtcDateTime, - DateTime dt => dt, - _ => DateTime.MinValue - }; - public int AsInt32 => _value is int i ? i : 0; - public long AsInt64 => _value is long l ? l : 0; - public double AsDouble => _value is double d ? d : 0d; - public bool AsBoolean => _value is bool b && b; - public bool IsInt32 => _value is int; - public DateTime ToUniversalTime() => _value switch - { - DateTimeOffset dto => dto.UtcDateTime, - DateTime dt => dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime(), - string s when DateTimeOffset.TryParse(s, out var parsed) => parsed.UtcDateTime, - _ => DateTime.MinValue - }; - public override string ToString() => _value?.ToString() ?? string.Empty; - } - - public class BsonString : BsonValue { public BsonString(string value) : base(value) { } } - public class BsonBoolean : BsonValue { public BsonBoolean(bool value) : base(value) { } } - public class BsonInt32 : BsonValue { public BsonInt32(int value) : base(value) { } } - public class BsonInt64 : BsonValue { public BsonInt64(long value) : base(value) { } } - public class BsonDouble : BsonValue { public BsonDouble(double value) : base(value) { } } - public class BsonDateTime : BsonValue { public BsonDateTime(DateTime value) : base(value) { } } - public class BsonNull : BsonValue - { - private BsonNull() : base(null) { } - public static BsonNull Value { get; } = new(); - } - - public sealed class BsonElement - { - public BsonElement(string name, BsonValue value) - { - Name = name; - Value = value; + RawValue = value; } - public string Name { get; } - public BsonValue Value { get; } - } + public bool IsString => RawValue is string; + public bool IsBoolean => RawValue is bool; + public bool IsBsonDocument => RawValue is BsonDocument; + public bool IsBsonArray => RawValue is BsonArray; - public class BsonBinaryData : BsonValue - { - private readonly byte[] _bytes; - public BsonBinaryData(byte[] bytes) : base(null) 
=> _bytes = bytes ?? Array.Empty(); - public BsonBinaryData(Guid guid) : this(guid.ToByteArray()) { } - public byte[] AsByteArray => _bytes; - public Guid ToGuid() => new(_bytes); - } - - public class BsonArray : BsonValue, IEnumerable - { - private readonly List _items = new(); - public BsonArray() : base(null) { } - public BsonArray(IEnumerable values) : this() => _items.AddRange(values); - public BsonArray(IEnumerable values) : this() + public string AsString => RawValue switch { - foreach (var value in values) - { - _items.Add(BsonDocument.WrapExternal(value)); - } - } - public void Add(BsonValue value) => _items.Add(value); - public IEnumerator GetEnumerator() => _items.GetEnumerator(); - IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); - public BsonValue this[int index] { get => _items[index]; set => _items[index] = value; } - public int Count => _items.Count; + null => string.Empty, + string s => s, + Guid g => g.ToString(), + _ => Convert.ToString(RawValue, CultureInfo.InvariantCulture) ?? string.Empty + }; + + public bool AsBoolean => RawValue switch + { + bool b => b, + string s when bool.TryParse(s, out var b) => b, + int i => i != 0, + long l => l != 0, + _ => false + }; + + public int ToInt32() => RawValue switch + { + int i => i, + long l => (int)l, + double d => (int)d, + string s when int.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var i) => i, + _ => 0 + }; + + public Guid AsGuid => RawValue switch + { + Guid g => g, + string s when Guid.TryParse(s, out var g) => g, + _ => Guid.Empty + }; + + public ObjectId AsObjectId => RawValue switch + { + ObjectId o => o, + string s => ObjectId.Parse(s), + _ => ObjectId.Empty + }; + + public BsonDocument AsBsonDocument => RawValue as BsonDocument ?? (this as BsonDocument ?? new BsonDocument()); + public BsonArray AsBsonArray => RawValue as BsonArray ?? (this as BsonArray ?? 
new BsonArray()); + + public override string ToString() => AsString; + + internal virtual BsonValue Clone() => new BsonValue(RawValue); + + public bool Equals(BsonValue? other) => other is not null && Equals(RawValue, other.RawValue); + public override bool Equals(object? obj) => obj is BsonValue other && Equals(other); + public override int GetHashCode() => RawValue?.GetHashCode() ?? 0; + + public static implicit operator BsonValue(string value) => new(value); + public static implicit operator BsonValue(Guid value) => new(value); + public static implicit operator BsonValue(int value) => new(value); + public static implicit operator BsonValue(long value) => new(value); + public static implicit operator BsonValue(bool value) => new(value); + public static implicit operator BsonValue(double value) => new(value); + public static implicit operator BsonValue(DateTimeOffset value) => new(value); } - public class BsonDocument : BsonValue, IEnumerable> + public sealed class BsonDocument : BsonValue, IDictionary { private readonly Dictionary _values = new(StringComparer.Ordinal); - public BsonDocument() : base(null) { } - public BsonDocument(string key, object? value) : this() => _values[key] = Wrap(value); - public BsonDocument(IEnumerable> pairs) : this() + + public BsonDocument() + : base(null) { - foreach (var kvp in pairs) + RawValue = this; + } + + public BsonDocument(IDictionary values) + : this() + { + foreach (var kvp in values) { - _values[kvp.Key] = Wrap(kvp.Value); + _values[kvp.Key] = ToBsonValue(kvp.Value); } } - private static BsonValue Wrap(object? value) => value switch - { - BsonValue v => v, - IEnumerable enumerable => new BsonArray(enumerable), - IEnumerable objEnum => new BsonArray(objEnum.Select(Wrap)), - _ => new BsonValue(value) - }; - - internal static BsonValue WrapExternal(object? 
value) => Wrap(value); + public int ElementCount => _values.Count; public BsonValue this[string key] { get => _values[key]; - set => _values[key] = Wrap(value); + set => _values[key] = value ?? new BsonValue(); } - public int ElementCount => _values.Count; - public IEnumerable Elements => _values.Select(kvp => new BsonElement(kvp.Key, kvp.Value)); + public ICollection Keys => _values.Keys; + public ICollection Values => _values.Values; + public int Count => _values.Count; + public bool IsReadOnly => false; - public bool Contains(string key) => _values.ContainsKey(key); + public void Add(string key, BsonValue value) => _values[key] = value ?? new BsonValue(); + public void Add(string key, object? value) => _values[key] = ToBsonValue(value); + public void Add(KeyValuePair item) => Add(item.Key, item.Value); + public void Clear() => _values.Clear(); + public bool Contains(KeyValuePair item) => _values.Contains(item); + public bool ContainsKey(string key) => _values.ContainsKey(key); + public void CopyTo(KeyValuePair[] array, int arrayIndex) => ((IDictionary)_values).CopyTo(array, arrayIndex); + public IEnumerator> GetEnumerator() => _values.GetEnumerator(); + IEnumerator IEnumerable.GetEnumerator() => _values.GetEnumerator(); + public bool Remove(string key) => _values.Remove(key); + public bool Remove(KeyValuePair item) => _values.Remove(item.Key); public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!); - public BsonValue GetValue(string key, BsonValue? defaultValue = null) - { - return _values.TryGetValue(key, out var value) - ? value - : defaultValue ?? new BsonValue(null); - } - - public bool Remove(string key) => _values.Remove(key); - - public void Add(string key, BsonValue value) => _values[key] = value; - public void Add(string key, object? 
value) => _values[key] = Wrap(value); - - public IEnumerator> GetEnumerator() => _values.GetEnumerator(); - IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + public BsonValue GetValue(string key) => _values[key]; public BsonDocument DeepClone() { - var clone = new BsonDocument(); + var copy = new BsonDocument(); foreach (var kvp in _values) { - clone[kvp.Key] = kvp.Value; + copy._values[kvp.Key] = kvp.Value?.Clone() ?? new BsonValue(); } - return clone; + return copy; } public static BsonDocument Parse(string json) { using var doc = JsonDocument.Parse(json); - return FromElement(doc.RootElement); + return FromElement(doc.RootElement).AsBsonDocument; } - private static BsonDocument FromElement(JsonElement element) + private static BsonValue FromElement(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.Object => FromObject(element), + JsonValueKind.Array => FromArray(element), + JsonValueKind.String => new BsonValue(element.GetString()), + JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonValue(l) : new BsonValue(element.GetDouble()), + JsonValueKind.True => new BsonValue(true), + JsonValueKind.False => new BsonValue(false), + JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null), + _ => new BsonValue(element.ToString()) + }; + } + + private static BsonDocument FromObject(JsonElement element) { var doc = new BsonDocument(); - foreach (var prop in element.EnumerateObject()) + foreach (var property in element.EnumerateObject()) { - doc[prop.Name] = FromJsonValue(prop.Value); + doc[property.Name] = FromElement(property.Value); } return doc; } - private static BsonValue FromJsonValue(JsonElement element) => element.ValueKind switch + private static BsonArray FromArray(JsonElement element) { - JsonValueKind.Object => FromElement(element), - JsonValueKind.Array => new BsonArray(element.EnumerateArray().Select(FromJsonValue)), - JsonValueKind.String => new BsonString(element.GetString() ?? 
string.Empty), - JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonInt64(l) : new BsonDouble(element.GetDouble()), - JsonValueKind.True => new BsonBoolean(true), - JsonValueKind.False => new BsonBoolean(false), - JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null), - _ => new BsonValue(null) - }; - - public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings = null) - { - var dict = _values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)); - return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + var array = new BsonArray(); + foreach (var item in element.EnumerateArray()) + { + array.Add(FromElement(item)); + } + return array; } - public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson()); - - private static object? Unwrap(BsonValue value) => value switch + internal static BsonValue ToBsonValue(object? value) { - BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)), - BsonArray array => array.Select(Unwrap).ToArray(), - _ => value.RawValue - }; + return value switch + { + null => new BsonValue(null), + BsonValue bson => bson, + string s => new BsonValue(s), + Guid g => new BsonValue(g), + int i => new BsonValue(i), + long l => new BsonValue(l), + bool b => new BsonValue(b), + double d => new BsonValue(d), + float f => new BsonValue(f), + DateTime dt => new BsonValue(dt), + DateTimeOffset dto => new BsonValue(dto), + IEnumerable enumerable => new BsonArray(enumerable.Select(ToBsonValue)), + _ => new BsonValue(value) + }; + } + + internal override BsonValue Clone() => DeepClone(); + } + + public sealed class BsonArray : BsonValue, IList + { + private readonly List _items = new(); + + public BsonArray() + : base(null) + { + RawValue = this; + } + + public BsonArray(IEnumerable items) + : this() + { + _items.AddRange(items); + } + + public BsonValue this[int index] + { + get => _items[index]; + set => _items[index] = value ?? 
new BsonValue(); + } + + public int Count => _items.Count; + public bool IsReadOnly => false; + + public void Add(BsonValue item) => _items.Add(item ?? new BsonValue()); + public void Add(object? item) => _items.Add(BsonDocument.ToBsonValue(item)); + public void Clear() => _items.Clear(); + public bool Contains(BsonValue item) => _items.Contains(item); + public void CopyTo(BsonValue[] array, int arrayIndex) => _items.CopyTo(array, arrayIndex); + public IEnumerator GetEnumerator() => _items.GetEnumerator(); + IEnumerator IEnumerable.GetEnumerator() => _items.GetEnumerator(); + public int IndexOf(BsonValue item) => _items.IndexOf(item); + public void Insert(int index, BsonValue item) => _items.Insert(index, item ?? new BsonValue()); + public bool Remove(BsonValue item) => _items.Remove(item); + public void RemoveAt(int index) => _items.RemoveAt(index); + + internal override BsonValue Clone() => new BsonArray(_items.Select(i => i.Clone())); + } + + public readonly struct ObjectId : IEquatable + { + private readonly string _value; + + public ObjectId(string value) + { + _value = value; + } + + public static ObjectId Empty { get; } = new(string.Empty); + + public override string ToString() => _value; + + public static ObjectId Parse(string value) => new(value ?? string.Empty); + + public bool Equals(ObjectId other) => string.Equals(_value, other._value, StringComparison.Ordinal); + public override bool Equals(object? obj) => obj is ObjectId other && Equals(other); + public override int GetHashCode() => _value?.GetHashCode(StringComparison.Ordinal) ?? 
0; } } -namespace MongoDB.Bson.IO +namespace MongoDB.Bson.Serialization.Attributes { - public enum JsonOutputMode { Strict, RelaxedExtendedJson } - public class JsonWriterSettings + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Class | AttributeTargets.Struct)] + public sealed class BsonElementAttribute : Attribute { - public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict; + public BsonElementAttribute(string elementName) + { + ElementName = elementName; + } + + public string ElementName { get; } } } diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/DriverStubs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/DriverStubs.cs index 6ec0761f4..f8a415148 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/DriverStubs.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/DriverStubs.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; +using MongoDB.Bson; namespace MongoDB.Driver { @@ -31,6 +32,7 @@ namespace MongoDB.Driver public interface IMongoClient { IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null); + Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default); } public class MongoClient : IMongoClient @@ -38,20 +40,47 @@ namespace MongoDB.Driver public MongoClient(string connectionString) { } public MongoClient(MongoClientSettings settings) { } public IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? 
settings = null) => new MongoDatabase(name); + public Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask; } public class MongoDatabaseSettings { } + public sealed class DatabaseNamespace + { + public DatabaseNamespace(string databaseName) => DatabaseName = databaseName; + public string DatabaseName { get; } + } + public interface IMongoDatabase { IMongoCollection GetCollection(string name, MongoCollectionSettings? settings = null); + DatabaseNamespace DatabaseNamespace { get; } + Task DropCollectionAsync(string name, CancellationToken cancellationToken = default); + BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default); + T RunCommand(BsonDocument command, CancellationToken cancellationToken = default); + Task RunCommandAsync(BsonDocument command, CancellationToken cancellationToken = default); + BsonDocument RunCommand(string command, CancellationToken cancellationToken = default); + T RunCommand(string command, CancellationToken cancellationToken = default); + Task RunCommandAsync(string command, CancellationToken cancellationToken = default); } public class MongoDatabase : IMongoDatabase { - public MongoDatabase(string name) => Name = name; + public MongoDatabase(string name) + { + Name = name; + DatabaseNamespace = new DatabaseNamespace(name); + } public string Name { get; } + public DatabaseNamespace DatabaseNamespace { get; } public IMongoCollection GetCollection(string name, MongoCollectionSettings? 
settings = null) => new MongoCollection(name); + public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask; + public BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default) => new(); + public T RunCommand(BsonDocument command, CancellationToken cancellationToken = default) => default!; + public Task RunCommandAsync(BsonDocument command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!); + public BsonDocument RunCommand(string command, CancellationToken cancellationToken = default) => new(); + public T RunCommand(string command, CancellationToken cancellationToken = default) => default!; + public Task RunCommandAsync(string command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!); } public class MongoCollectionSettings { } @@ -59,8 +88,10 @@ namespace MongoDB.Driver public interface IMongoCollection { Task InsertOneAsync(TDocument document, InsertOneOptions? options = null, CancellationToken cancellationToken = default); + Task InsertManyAsync(IEnumerable documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default); Task ReplaceOneAsync(FilterDefinition filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default); Task DeleteOneAsync(FilterDefinition filter, CancellationToken cancellationToken = default); + Task DeleteManyAsync(FilterDefinition filter, CancellationToken cancellationToken = default); Task> FindAsync(FilterDefinition filter, FindOptions? options = null, CancellationToken cancellationToken = default); IFindFluent Find(FilterDefinition filter, FindOptions? options = null); Task CountDocumentsAsync(FilterDefinition filter, CountOptions? 
options = null, CancellationToken cancellationToken = default); @@ -88,6 +119,12 @@ namespace MongoDB.Driver return Task.CompletedTask; } + public Task InsertManyAsync(IEnumerable documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default) + { + _docs.AddRange(documents); + return Task.CompletedTask; + } + public Task ReplaceOneAsync(FilterDefinition filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default) { _docs.Clear(); @@ -102,6 +139,13 @@ namespace MongoDB.Driver return Task.FromResult(new DeleteResult(removed ? 1 : 0)); } + public Task DeleteManyAsync(FilterDefinition filter, CancellationToken cancellationToken = default) + { + var removed = _docs.Count; + _docs.Clear(); + return Task.FromResult(new DeleteResult(removed)); + } + public Task> FindAsync(FilterDefinition filter, FindOptions? options = null, CancellationToken cancellationToken = default) => Task.FromResult>(new AsyncCursor(_docs)); @@ -212,7 +256,10 @@ namespace MongoDB.Driver => new FindFluentProjected(Enumerable.Empty()); } - public class FilterDefinition { } + public class FilterDefinition + { + public static FilterDefinition Empty { get; } = new(); + } public class UpdateDefinition { } public class ProjectionDefinition { } public class SortDefinition { } @@ -222,6 +269,7 @@ namespace MongoDB.Driver public class FindOneAndReplaceOptions { public bool IsUpsert { get; set; } } public class FindOneAndUpdateOptions { public bool IsUpsert { get; set; } } public class InsertOneOptions { } + public class InsertManyOptions { } public class CreateIndexOptions { } public class IndexKeysDefinition { } @@ -284,7 +332,7 @@ namespace Mongo2Go private MongoDbRunner(string connectionString) => ConnectionString = connectionString; - public static MongoDbRunner Start() => new("mongodb://localhost:27017/fake"); + public static MongoDbRunner Start(bool singleNodeReplSet = false) => new("mongodb://localhost:27017/fake"); public 
void Dispose() { diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/StorageStubs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/StorageStubs.cs index 7801233cb..f92317474 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/StorageStubs.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Models/MongoCompat/StorageStubs.cs @@ -1,19 +1,27 @@ using System.Collections.Concurrent; using System.IO; using System.Linq; +using MongoDB.Bson; using StellaOps.Concelier.Models; namespace StellaOps.Concelier.Storage.Mongo { public static class MongoStorageDefaults { + public const string DefaultDatabaseName = "concelier"; + public static class Collections { public const string AdvisoryStatements = "advisory_statements"; public const string AdvisoryRaw = "advisory_raw"; + public const string Advisory = "advisory"; + public const string AdvisoryObservations = "advisory_observations"; + public const string AdvisoryLinksets = "advisory_linksets"; public const string Alias = "aliases"; + public const string Dto = "dto"; public const string MergeEvent = "merge_events"; public const string Document = "documents"; + public const string PsirtFlags = "psirt_flags"; } } @@ -64,13 +72,32 @@ namespace StellaOps.Concelier.Storage.Mongo this.FetchedAt = FetchedAt ?? CreatedAt; } + public DocumentRecord( + Guid Id, + string SourceName, + string Uri, + string Sha256, + string Status = "pending_parse", + string? ContentType = null, + IReadOnlyDictionary? Headers = null, + IReadOnlyDictionary? Metadata = null, + string? Etag = null, + DateTimeOffset? LastModified = null, + Guid? PayloadId = null, + DateTimeOffset? ExpiresAt = null, + byte[]? Payload = null, + DateTimeOffset? 
FetchedAt = null) + : this(Id, SourceName, Uri, DateTimeOffset.UtcNow, Sha256, Status, ContentType, Headers, Metadata, Etag, LastModified, PayloadId, ExpiresAt, Payload, FetchedAt) + { + } + public Guid Id { get; init; } - public string SourceName { get; init; } - public string Uri { get; init; } + public string SourceName { get; init; } = string.Empty; + public string Uri { get; init; } = string.Empty; public DateTimeOffset CreatedAt { get; init; } public DateTimeOffset FetchedAt { get; init; } - public string Sha256 { get; init; } - public string Status { get; init; } + public string Sha256 { get; init; } = string.Empty; + public string Status { get; init; } = string.Empty; public string? ContentType { get; init; } public IReadOnlyDictionary? Headers { get; init; } public IReadOnlyDictionary? Metadata { get; init; } @@ -81,37 +108,37 @@ namespace StellaOps.Concelier.Storage.Mongo public byte[]? Payload { get; init; } } -public interface IDocumentStore -{ - Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken); - Task FindAsync(Guid id, CancellationToken cancellationToken); - Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken); - Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken); -} + public interface IDocumentStore + { + Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken); + Task FindAsync(Guid id, CancellationToken cancellationToken); + Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken); + Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken); + } public class InMemoryDocumentStore : IDocumentStore { private readonly ConcurrentDictionary<(string Source, string Uri), DocumentRecord> _records = new(); private readonly ConcurrentDictionary _byId = new(); - public Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken) - { - 
_records.TryGetValue((sourceName, uri), out var record); - return Task.FromResult(record); - } + public Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken) + { + _records.TryGetValue((sourceName, uri), out var record); + return Task.FromResult(record); + } - public Task FindAsync(Guid id, CancellationToken cancellationToken) - { - _byId.TryGetValue(id, out var record); - return Task.FromResult(record); - } + public Task FindAsync(Guid id, CancellationToken cancellationToken) + { + _byId.TryGetValue(id, out var record); + return Task.FromResult(record); + } - public Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken) - { - _records[(record.SourceName, record.Uri)] = record; - _byId[record.Id] = record; - return Task.FromResult(record); - } + public Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken) + { + _records[(record.SourceName, record.Uri)] = record; + _byId[record.Id] = record; + return Task.FromResult(record); + } public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken) { @@ -129,6 +156,22 @@ public interface IDocumentStore { private readonly InMemoryDocumentStore _inner = new(); + public DocumentStore() + { + } + + public DocumentStore(object? database, MongoStorageOptions? options) + { + } + + public DocumentStore(object? database, object? logger) + { + } + + public DocumentStore(object? database, MongoStorageOptions? options, object? 
logger) + { + } + public Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken) => _inner.FindBySourceAndUriAsync(sourceName, uri, cancellationToken); @@ -142,47 +185,70 @@ public interface IDocumentStore => _inner.UpdateStatusAsync(id, status, cancellationToken); } - public record DtoRecord( - Guid Id, - Guid DocumentId, - string SourceName, - string Format, - MongoDB.Bson.BsonDocument Payload, - DateTimeOffset CreatedAt) + public record DtoRecord { + public DtoRecord( + Guid Id, + Guid DocumentId, + string SourceName, + string Format, + MongoDB.Bson.BsonDocument Payload, + DateTimeOffset CreatedAt, + string? SchemaVersion = null, + DateTimeOffset? ValidatedAt = null) + { + this.Id = Id; + this.DocumentId = DocumentId; + this.SourceName = SourceName; + this.Format = Format; + this.Payload = Payload; + this.CreatedAt = CreatedAt; + this.SchemaVersion = SchemaVersion ?? string.Empty; + this.ValidatedAt = ValidatedAt ?? CreatedAt; + } + + public Guid Id { get; init; } + public Guid DocumentId { get; init; } + public string SourceName { get; init; } = string.Empty; + public string Format { get; init; } = string.Empty; + public MongoDB.Bson.BsonDocument Payload { get; init; } = new(); + public DateTimeOffset CreatedAt { get; init; } public string SchemaVersion { get; init; } = string.Empty; - public DateTimeOffset ValidatedAt { get; init; } = CreatedAt; + public DateTimeOffset ValidatedAt { get; init; } } -public interface IDtoStore -{ - Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken); - Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken); - Task> GetBySourceAsync(string sourceName, CancellationToken cancellationToken); -} - -public class InMemoryDtoStore : IDtoStore -{ - private readonly ConcurrentDictionary _records = new(); - - public Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken) + public interface IDtoStore { - _records[record.DocumentId] = 
record; - return Task.FromResult(record); + Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken); + Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken); + Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken); } - public Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken) + public class InMemoryDtoStore : IDtoStore { - _records.TryGetValue(documentId, out var record); - return Task.FromResult(record); - } + private readonly ConcurrentDictionary _records = new(); - public Task> GetBySourceAsync(string sourceName, CancellationToken cancellationToken) - { - var matches = _records.Values.Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase)).ToArray(); - return Task.FromResult>(matches); + public Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken) + { + _records[record.DocumentId] = record; + return Task.FromResult(record); + } + + public Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken) + { + _records.TryGetValue(documentId, out var record); + return Task.FromResult(record); + } + + public Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken) + { + var matches = _records.Values + .Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase)) + .Take(limit) + .ToArray(); + return Task.FromResult>(matches); + } } -} internal sealed class RawDocumentStorage { @@ -251,7 +317,7 @@ public sealed record SourceStateRecord( sourceName, Enabled: current?.Enabled ?? true, Paused: current?.Paused ?? false, - Cursor: cursor.DeepClone(), + Cursor: cursor.DeepClone().AsBsonDocument, LastSuccess: completedAt, LastFailure: current?.LastFailure, FailCount: current?.FailCount ?? 
0, @@ -288,6 +354,18 @@ public sealed record SourceStateRecord( { private readonly InMemorySourceStateRepository _inner = new(); + public MongoSourceStateRepository() + { + } + + public MongoSourceStateRepository(object? database, MongoStorageOptions? options) + { + } + + public MongoSourceStateRepository(object? database, object? logger) + { + } + public Task TryGetAsync(string sourceName, CancellationToken cancellationToken) => _inner.TryGetAsync(sourceName, cancellationToken); @@ -304,6 +382,15 @@ public sealed record SourceStateRecord( namespace StellaOps.Concelier.Storage.Mongo.Advisories { + public sealed class AdvisoryDocument + { + public string AdvisoryKey { get; set; } = string.Empty; + public MongoDB.Bson.BsonDocument Payload { get; set; } = new(); + public DateTime? Modified { get; set; } + public DateTime? Published { get; set; } + public DateTime? CreatedAt { get; set; } + } + public interface IAdvisoryStore { Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken); @@ -360,18 +447,49 @@ namespace StellaOps.Concelier.Storage.Mongo.Aliases public sealed record AliasEntry(string Scheme, string Value); public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value, DateTimeOffset? UpdatedAt = null); public sealed record AliasCollision(string Scheme, string Value, IReadOnlyList AdvisoryKeys); + public sealed record AliasUpsertResult(string AdvisoryKey, IReadOnlyList Collisions); public interface IAliasStore { Task> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken); Task> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken); + Task ReplaceAsync(string advisoryKey, IEnumerable entries, DateTimeOffset updatedAt, CancellationToken cancellationToken); } - public sealed class InMemoryAliasStore : IAliasStore + public sealed class AliasStore : InMemoryAliasStore + { + public AliasStore() + { + } + + public AliasStore(object? database, object? 
options) + { + } + } + + public class InMemoryAliasStore : IAliasStore { private readonly ConcurrentDictionary> _byAdvisory = new(StringComparer.OrdinalIgnoreCase); private readonly ConcurrentDictionary<(string Scheme, string Value), List> _byAlias = new(); + public Task ReplaceAsync(string advisoryKey, IEnumerable entries, DateTimeOffset updatedAt, CancellationToken cancellationToken) + { + var records = entries.Select(e => new AliasRecord(advisoryKey, e.Scheme, e.Value, updatedAt)).ToList(); + _byAdvisory[advisoryKey] = records; + foreach (var record in records) + { + var list = _byAlias.GetOrAdd((record.Scheme, record.Value), _ => new List()); + list.RemoveAll(r => string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase)); + list.Add(record); + } + var collisions = _byAlias.Values + .Where(list => list.Count > 1) + .Select(list => new AliasCollision(list[0].Scheme, list[0].Value, list.Select(r => r.AdvisoryKey).Distinct(StringComparer.OrdinalIgnoreCase).ToArray())) + .ToArray(); + + return Task.FromResult(new AliasUpsertResult(advisoryKey, collisions)); + } + public Task> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken) { _byAdvisory.TryGetValue(advisoryKey, out var records); @@ -400,11 +518,16 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory string Snapshot, string PreviousSnapshot, IReadOnlyList Changes, - DateTimeOffset CreatedAt); + DateTimeOffset CreatedAt) + { + public string? PreviousHash => PreviousSnapshotHash; + public string? 
CurrentHash => SnapshotHash; + } public interface IChangeHistoryStore { Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken); + Task> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken); } public sealed class InMemoryChangeHistoryStore : IChangeHistoryStore @@ -415,6 +538,18 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory _records.Add(record); return Task.CompletedTask; } + + public Task> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken) + { + var matches = _records + .Where(r => + string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase) && + string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase)) + .OrderByDescending(r => r.CreatedAt) + .Take(limit) + .ToArray(); + return Task.FromResult>(matches); + } } } @@ -597,6 +732,25 @@ namespace StellaOps.Concelier.Storage.Mongo.MergeEvents return Task.FromResult>(records); } } + + public sealed class MergeEventStore : IMergeEventStore + { + private readonly InMemoryMergeEventStore _inner = new(); + + public MergeEventStore() + { + } + + public MergeEventStore(object? database, object? logger) + { + } + + public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken) + => _inner.AppendAsync(record, cancellationToken); + + public Task> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken) + => _inner.GetRecentAsync(advisoryKey, limit, cancellationToken); + } } namespace StellaOps.Concelier.Storage.Mongo.Documents @@ -617,12 +771,16 @@ namespace StellaOps.Concelier.Storage.Mongo.Dtos namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags { - public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? ExternalId, DateTimeOffset RecordedAt); + public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? 
ExternalId, DateTimeOffset RecordedAt) + { + public string AdvisoryKey => AdvisoryId; + } public interface IPsirtFlagStore { Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken); Task> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken); + Task FindAsync(string advisoryKey, CancellationToken cancellationToken); } public sealed class InMemoryPsirtFlagStore : IPsirtFlagStore @@ -645,6 +803,94 @@ namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags return Task.FromResult>(records); } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) + { + _records.TryGetValue(advisoryKey, out var flag); + return Task.FromResult(flag); + } + } +} + +namespace StellaOps.Concelier.Storage.Mongo.Observations +{ + public sealed class AdvisoryObservationDocument + { + public string Id { get; set; } = string.Empty; + public string Tenant { get; set; } = string.Empty; + public DateTime CreatedAt { get; set; } + public AdvisoryObservationSourceDocument Source { get; set; } = new(); + public AdvisoryObservationUpstreamDocument Upstream { get; set; } = new(); + public AdvisoryObservationContentDocument Content { get; set; } = new(); + public AdvisoryObservationLinksetDocument Linkset { get; set; } = new(); + public IDictionary Attributes { get; set; } = new Dictionary(StringComparer.Ordinal); + } + + public sealed class AdvisoryObservationSourceDocument + { + public string Vendor { get; set; } = string.Empty; + public string Stream { get; set; } = string.Empty; + public string Api { get; set; } = string.Empty; + } + + public sealed class AdvisoryObservationUpstreamDocument + { + public string UpstreamId { get; set; } = string.Empty; + public string? 
DocumentVersion { get; set; } + public DateTime FetchedAt { get; set; } + public DateTime ReceivedAt { get; set; } + public string ContentHash { get; set; } = string.Empty; + public AdvisoryObservationSignatureDocument Signature { get; set; } = new(); + public IDictionary Metadata { get; set; } = new Dictionary(StringComparer.Ordinal); + } + + public sealed class AdvisoryObservationSignatureDocument + { + public bool Present { get; set; } + public string? Format { get; set; } + public string? KeyId { get; set; } + public string? Signature { get; set; } + } + + public sealed class AdvisoryObservationContentDocument + { + public string Format { get; set; } = string.Empty; + public string SpecVersion { get; set; } = string.Empty; + public BsonDocument Raw { get; set; } = new(); + public IDictionary Metadata { get; set; } = new Dictionary(StringComparer.Ordinal); + } + + public sealed class AdvisoryObservationLinksetDocument + { + public List? Aliases { get; set; } + public List? Purls { get; set; } + public List? 
Cpes { get; set; } + public List References { get; set; } = new(); + } + + public sealed class AdvisoryObservationReferenceDocument + { + public string Type { get; set; } = string.Empty; + public string Url { get; set; } = string.Empty; + } +} + +namespace StellaOps.Concelier.Storage.Mongo.Linksets +{ + public sealed class AdvisoryLinksetDocument + { + public string TenantId { get; set; } = string.Empty; + public string Source { get; set; } = string.Empty; + public string AdvisoryId { get; set; } = string.Empty; + public IReadOnlyList Observations { get; set; } = Array.Empty(); + public DateTime CreatedAt { get; set; } + public AdvisoryLinksetNormalizedDocument Normalized { get; set; } = new(); + } + + public sealed class AdvisoryLinksetNormalizedDocument + { + public IReadOnlyList Purls { get; set; } = Array.Empty(); + public IReadOnlyList Versions { get; set; } = Array.Empty(); } } diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs index a1456255c..0c6540f3e 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs @@ -88,8 +88,10 @@ public sealed class CertCcMapperTests Id: Guid.NewGuid(), DocumentId: document.Id, SourceName: "cert-cc", + Format: "certcc.vince.note.v1", SchemaVersion: "certcc.vince.note.v1", Payload: new BsonDocument(), + CreatedAt: PublishedAt, ValidatedAt: PublishedAt.AddMinutes(1)); var advisory = CertCcMapper.Map(dto, document, dtoRecord, "cert-cc"); diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceStateSeedProcessorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceStateSeedProcessorTests.cs index 53c38b872..b9bca08e7 100644 --- 
a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceStateSeedProcessorTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceStateSeedProcessorTests.cs @@ -190,11 +190,11 @@ public sealed class SourceStateSeedProcessorTests : IAsyncLifetime Assert.NotNull(refreshedRecord); Assert.Equal(documentId, refreshedRecord!.Id); Assert.NotNull(refreshedRecord.PayloadId); - Assert.NotEqual(previousGridId, refreshedRecord.PayloadId); + Assert.NotEqual(previousGridId?.ToString(), refreshedRecord.PayloadId?.ToString()); var files = await filesCollection.Find(FilterDefinition.Empty).ToListAsync(); Assert.Single(files); - Assert.NotEqual(previousGridId, files[0]["_id"].AsObjectId); + Assert.NotEqual(previousGridId?.ToString(), files[0]["_id"].AsObjectId.ToString()); } private SourceStateSeedProcessor CreateProcessor() diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs index e356b2e46..106c2b2df 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs @@ -34,7 +34,7 @@ public sealed class SuseMapperTests }, Etag: "adv-1", LastModified: DateTimeOffset.UtcNow, - PayloadId: ObjectId.Empty); + PayloadId: Guid.Empty); var mapped = SuseMapper.Map(dto, document, DateTimeOffset.UtcNow); diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs index b3587dbf6..9ad75b49d 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs @@ -97,8 +97,10 @@ public sealed 
class OsvConflictFixtureTests Id: Guid.Parse("6f7d5ce7-cb47-40a5-8b41-8ad022b5fd5c"), DocumentId: document.Id, SourceName: OsvConnectorPlugin.SourceName, + Format: "osv.v1", SchemaVersion: "osv.v1", Payload: new BsonDocument("id", dto.Id), + CreatedAt: new DateTimeOffset(2025, 3, 6, 12, 0, 0, TimeSpan.Zero), ValidatedAt: new DateTimeOffset(2025, 3, 6, 12, 5, 0, TimeSpan.Zero)); var advisory = OsvMapper.Map(dto, document, dtoRecord, "npm"); diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs index b5163c0c4..e920ebe8e 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs @@ -65,7 +65,7 @@ public sealed class RuBduMapperTests null, null, dto.IdentifyDate, - ObjectId.GenerateNewId()); + PayloadId: Guid.NewGuid()); var advisory = RuBduMapper.Map(dto, document, dto.IdentifyDate!.Value); diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs index 2b102344b..9755a6de0 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs @@ -56,7 +56,7 @@ public sealed class RuNkckiMapperTests null, null, dto.DateUpdated, - ObjectId.GenerateNewId()); + PayloadId: Guid.NewGuid()); Assert.Equal("КРИТИЧЕСКИЙ", dto.CvssRating); var normalizeSeverity = typeof(RuNkckiMapper).GetMethod("NormalizeSeverity", BindingFlags.NonPublic | BindingFlags.Static)!; diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs index fd5ddd591..5cf548621 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs @@ -1,12 +1,11 @@ -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading.Tasks; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading.Tasks; using System.Collections.Immutable; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using MongoDB.Driver; using StellaOps.Concelier.Core.Jobs; using StellaOps.Concelier.Core.Events; using StellaOps.Concelier.Exporter.Json; @@ -15,15 +14,16 @@ using StellaOps.Concelier.Storage.Mongo.Exporting; using StellaOps.Concelier.Models; using StellaOps.Cryptography; using StellaOps.Cryptography.DependencyInjection; - -namespace StellaOps.Concelier.Exporter.Json.Tests; - -public sealed class JsonExporterDependencyInjectionRoutineTests -{ - [Fact] - public void Register_AddsJobDefinitionAndServices() - { - var services = new ServiceCollection(); +using StellaOps.Provenance.Mongo; + +namespace StellaOps.Concelier.Exporter.Json.Tests; + +public sealed class JsonExporterDependencyInjectionRoutineTests +{ + [Fact] + public void Register_AddsJobDefinitionAndServices() + { + var services = new ServiceCollection(); services.AddLogging(); services.AddSingleton(); services.AddSingleton(); @@ -32,64 +32,60 @@ public sealed class JsonExporterDependencyInjectionRoutineTests services.AddOptions(); services.Configure(_ => { }); services.AddStellaOpsCrypto(); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary()) - .Build(); - - var routine = new 
JsonExporterDependencyInjectionRoutine(); - routine.Register(services, configuration); - - using var provider = services.BuildServiceProvider(); - var optionsAccessor = provider.GetRequiredService>(); - var options = optionsAccessor.Value; - - Assert.True(options.Definitions.TryGetValue(JsonExportJob.JobKind, out var definition)); - Assert.Equal(typeof(JsonExportJob), definition.JobType); - Assert.True(definition.Enabled); - - var exporter = provider.GetRequiredService(); - Assert.NotNull(exporter); - } - - private sealed class StubAdvisoryStore : IAdvisoryStore - { - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary()) + .Build(); + + var routine = new JsonExporterDependencyInjectionRoutine(); + routine.Register(services, configuration); + + using var provider = services.BuildServiceProvider(); + var optionsAccessor = provider.GetRequiredService>(); + var options = optionsAccessor.Value; + + Assert.True(options.Definitions.TryGetValue(JsonExportJob.JobKind, out var definition)); + Assert.Equal(typeof(JsonExportJob), definition.JobType); + Assert.True(definition.Enabled); + + var exporter = provider.GetRequiredService(); + Assert.NotNull(exporter); + } + + private sealed class StubAdvisoryStore : IAdvisoryStore + { + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) { - _ = session; return Task.FromResult>(Array.Empty()); } - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) { - _ = session; return Task.FromResult(null); } - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) { - _ = session; return Task.CompletedTask; } - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) { - _ = session; return Enumerate(cancellationToken); static async IAsyncEnumerable Enumerate([EnumeratorCancellation] CancellationToken ct) { - ct.ThrowIfCancellationRequested(); - await Task.Yield(); - yield break; - } - } - } - + ct.ThrowIfCancellationRequested(); + await Task.Yield(); + yield break; + } + } + } + private sealed class StubExportStateStore : IExportStateStore { private ExportStateRecord? _record; - + public Task FindAsync(string id, CancellationToken cancellationToken) { return Task.FromResult(_record); @@ -107,6 +103,9 @@ public sealed class JsonExporterDependencyInjectionRoutineTests public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) => throw new NotSupportedException(); + public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + public ValueTask ReplayAsync(string vulnerabilityKey, DateTimeOffset? 
asOf, CancellationToken cancellationToken) { return ValueTask.FromResult(new AdvisoryReplay( diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs index 02b74e03d..2903d724c 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs @@ -1,472 +1,472 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; -using MongoDB.Driver; using StellaOps.Concelier.Core.Events; using StellaOps.Concelier.Exporter.Json; using StellaOps.Concelier.Models; using StellaOps.Concelier.Storage.Mongo.Advisories; using StellaOps.Concelier.Storage.Mongo.Exporting; +using StellaOps.Provenance.Mongo; using StellaOps.Cryptography; using StellaOps.Cryptography.DependencyInjection; - -namespace StellaOps.Concelier.Exporter.Json.Tests; - -public sealed class JsonFeedExporterTests : IDisposable -{ - private readonly string _root; - - public JsonFeedExporterTests() - { - _root = Directory.CreateTempSubdirectory("concelier-json-exporter-tests").FullName; - } - - [Fact] - public async Task 
ExportAsync_SkipsWhenDigestUnchanged() - { - var advisory = new Advisory( - advisoryKey: "CVE-2024-1234", - title: "Test Advisory", - summary: null, - language: "en", - published: DateTimeOffset.Parse("2024-01-01T00:00:00Z", CultureInfo.InvariantCulture), - modified: DateTimeOffset.Parse("2024-01-02T00:00:00Z", CultureInfo.InvariantCulture), - severity: "high", - exploitKnown: false, - aliases: new[] { "CVE-2024-1234" }, - references: Array.Empty(), - affectedPackages: Array.Empty(), - cvssMetrics: Array.Empty(), - provenance: Array.Empty()); - - var advisoryStore = new StubAdvisoryStore(advisory); - var options = Options.Create(new JsonExportOptions - { - OutputRoot = _root, - MaintainLatestSymlink = false, - }); - - var stateStore = new InMemoryExportStateStore(); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-15T12:00:00Z", CultureInfo.InvariantCulture)); - var stateManager = new ExportStateManager(stateStore, timeProvider); - var eventLog = new StubAdvisoryEventLog(new[] { advisory }, timeProvider.GetUtcNow()); - var exporter = new JsonFeedExporter( - advisoryStore, - options, - new VulnListJsonExportPathResolver(), - stateManager, - eventLog, - NullLogger.Instance, - timeProvider); - + +namespace StellaOps.Concelier.Exporter.Json.Tests; + +public sealed class JsonFeedExporterTests : IDisposable +{ + private readonly string _root; + + public JsonFeedExporterTests() + { + _root = Directory.CreateTempSubdirectory("concelier-json-exporter-tests").FullName; + } + + [Fact] + public async Task ExportAsync_SkipsWhenDigestUnchanged() + { + var advisory = new Advisory( + advisoryKey: "CVE-2024-1234", + title: "Test Advisory", + summary: null, + language: "en", + published: DateTimeOffset.Parse("2024-01-01T00:00:00Z", CultureInfo.InvariantCulture), + modified: DateTimeOffset.Parse("2024-01-02T00:00:00Z", CultureInfo.InvariantCulture), + severity: "high", + exploitKnown: false, + aliases: new[] { "CVE-2024-1234" }, + references: Array.Empty(), + 
affectedPackages: Array.Empty(), + cvssMetrics: Array.Empty(), + provenance: Array.Empty()); + + var advisoryStore = new StubAdvisoryStore(advisory); + var options = Options.Create(new JsonExportOptions + { + OutputRoot = _root, + MaintainLatestSymlink = false, + }); + + var stateStore = new InMemoryExportStateStore(); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-15T12:00:00Z", CultureInfo.InvariantCulture)); + var stateManager = new ExportStateManager(stateStore, timeProvider); + var eventLog = new StubAdvisoryEventLog(new[] { advisory }, timeProvider.GetUtcNow()); + var exporter = new JsonFeedExporter( + advisoryStore, + options, + new VulnListJsonExportPathResolver(), + stateManager, + eventLog, + NullLogger.Instance, + timeProvider); + using var provider = CreateCryptoProvider(); - await exporter.ExportAsync(provider, CancellationToken.None); - - var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); - Assert.NotNull(record); - var firstUpdated = record!.UpdatedAt; - Assert.Equal("20240715T120000Z", record.BaseExportId); - Assert.Equal(record.LastFullDigest, record.ExportCursor); - - var firstExportPath = Path.Combine(_root, "20240715T120000Z"); - Assert.True(Directory.Exists(firstExportPath)); - - timeProvider.Advance(TimeSpan.FromMinutes(5)); - await exporter.ExportAsync(provider, CancellationToken.None); - - record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); - Assert.NotNull(record); - Assert.Equal(firstUpdated, record!.UpdatedAt); - - var secondExportPath = Path.Combine(_root, "20240715T120500Z"); - Assert.False(Directory.Exists(secondExportPath)); - } - - [Fact] - public async Task ExportAsync_WritesManifestMetadata() - { - var exportedAt = DateTimeOffset.Parse("2024-08-10T00:00:00Z", CultureInfo.InvariantCulture); - var recordedAt = DateTimeOffset.Parse("2024-07-02T00:00:00Z", CultureInfo.InvariantCulture); - var reference = new AdvisoryReference( - 
"http://Example.com/path/resource?b=2&a=1", - kind: "advisory", - sourceTag: "REF-001", - summary: "Primary vendor advisory", - provenance: new AdvisoryProvenance("ghsa", "map", "REF-001", recordedAt, new[] { ProvenanceFieldMasks.References })); - var weakness = new AdvisoryWeakness( - taxonomy: "cwe", - identifier: "CWE-79", - name: "Cross-site Scripting", - uri: "https://cwe.mitre.org/data/definitions/79.html", - provenance: new[] - { - new AdvisoryProvenance("nvd", "map", "CWE-79", recordedAt, new[] { ProvenanceFieldMasks.Weaknesses }) - }); - var cvssMetric = new CvssMetric( - "3.1", - "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", - 9.8, - "critical", - new AdvisoryProvenance("nvd", "map", "CVE-2024-4321", recordedAt, new[] { ProvenanceFieldMasks.CvssMetrics })); - - var advisory = new Advisory( - advisoryKey: "CVE-2024-4321", - title: "Manifest Test", - summary: "Short summary", - language: "en", - published: DateTimeOffset.Parse("2024-07-01T00:00:00Z", CultureInfo.InvariantCulture), - modified: recordedAt, - severity: "medium", - exploitKnown: false, - aliases: new[] { "CVE-2024-4321", "GHSA-xxxx-yyyy-zzzz" }, - credits: Array.Empty(), - references: new[] { reference }, - affectedPackages: Array.Empty(), - cvssMetrics: new[] { cvssMetric }, - provenance: new[] - { - new AdvisoryProvenance("ghsa", "map", "GHSA-xxxx-yyyy-zzzz", recordedAt, new[] { ProvenanceFieldMasks.Advisory }), - new AdvisoryProvenance("nvd", "map", "CVE-2024-4321", recordedAt, new[] { ProvenanceFieldMasks.Advisory }) - }, - description: "Detailed description capturing remediation steps.", - cwes: new[] { weakness }, - canonicalMetricId: "3.1|CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H"); - - var advisoryStore = new StubAdvisoryStore(advisory); - var optionsValue = new JsonExportOptions - { - OutputRoot = _root, - MaintainLatestSymlink = false, - }; - - var options = Options.Create(optionsValue); - var stateStore = new InMemoryExportStateStore(); - var timeProvider = new 
TestTimeProvider(exportedAt); - var stateManager = new ExportStateManager(stateStore, timeProvider); - var eventLog = new StubAdvisoryEventLog(new[] { advisory }, exportedAt); - var exporter = new JsonFeedExporter( - advisoryStore, - options, - new VulnListJsonExportPathResolver(), - stateManager, - eventLog, - NullLogger.Instance, - timeProvider); - + await exporter.ExportAsync(provider, CancellationToken.None); + + var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); + Assert.NotNull(record); + var firstUpdated = record!.UpdatedAt; + Assert.Equal("20240715T120000Z", record.BaseExportId); + Assert.Equal(record.LastFullDigest, record.ExportCursor); + + var firstExportPath = Path.Combine(_root, "20240715T120000Z"); + Assert.True(Directory.Exists(firstExportPath)); + + timeProvider.Advance(TimeSpan.FromMinutes(5)); + await exporter.ExportAsync(provider, CancellationToken.None); + + record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); + Assert.NotNull(record); + Assert.Equal(firstUpdated, record!.UpdatedAt); + + var secondExportPath = Path.Combine(_root, "20240715T120500Z"); + Assert.False(Directory.Exists(secondExportPath)); + } + + [Fact] + public async Task ExportAsync_WritesManifestMetadata() + { + var exportedAt = DateTimeOffset.Parse("2024-08-10T00:00:00Z", CultureInfo.InvariantCulture); + var recordedAt = DateTimeOffset.Parse("2024-07-02T00:00:00Z", CultureInfo.InvariantCulture); + var reference = new AdvisoryReference( + "http://Example.com/path/resource?b=2&a=1", + kind: "advisory", + sourceTag: "REF-001", + summary: "Primary vendor advisory", + provenance: new AdvisoryProvenance("ghsa", "map", "REF-001", recordedAt, new[] { ProvenanceFieldMasks.References })); + var weakness = new AdvisoryWeakness( + taxonomy: "cwe", + identifier: "CWE-79", + name: "Cross-site Scripting", + uri: "https://cwe.mitre.org/data/definitions/79.html", + provenance: new[] + { + new 
AdvisoryProvenance("nvd", "map", "CWE-79", recordedAt, new[] { ProvenanceFieldMasks.Weaknesses }) + }); + var cvssMetric = new CvssMetric( + "3.1", + "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + 9.8, + "critical", + new AdvisoryProvenance("nvd", "map", "CVE-2024-4321", recordedAt, new[] { ProvenanceFieldMasks.CvssMetrics })); + + var advisory = new Advisory( + advisoryKey: "CVE-2024-4321", + title: "Manifest Test", + summary: "Short summary", + language: "en", + published: DateTimeOffset.Parse("2024-07-01T00:00:00Z", CultureInfo.InvariantCulture), + modified: recordedAt, + severity: "medium", + exploitKnown: false, + aliases: new[] { "CVE-2024-4321", "GHSA-xxxx-yyyy-zzzz" }, + credits: Array.Empty(), + references: new[] { reference }, + affectedPackages: Array.Empty(), + cvssMetrics: new[] { cvssMetric }, + provenance: new[] + { + new AdvisoryProvenance("ghsa", "map", "GHSA-xxxx-yyyy-zzzz", recordedAt, new[] { ProvenanceFieldMasks.Advisory }), + new AdvisoryProvenance("nvd", "map", "CVE-2024-4321", recordedAt, new[] { ProvenanceFieldMasks.Advisory }) + }, + description: "Detailed description capturing remediation steps.", + cwes: new[] { weakness }, + canonicalMetricId: "3.1|CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H"); + + var advisoryStore = new StubAdvisoryStore(advisory); + var optionsValue = new JsonExportOptions + { + OutputRoot = _root, + MaintainLatestSymlink = false, + }; + + var options = Options.Create(optionsValue); + var stateStore = new InMemoryExportStateStore(); + var timeProvider = new TestTimeProvider(exportedAt); + var stateManager = new ExportStateManager(stateStore, timeProvider); + var eventLog = new StubAdvisoryEventLog(new[] { advisory }, exportedAt); + var exporter = new JsonFeedExporter( + advisoryStore, + options, + new VulnListJsonExportPathResolver(), + stateManager, + eventLog, + NullLogger.Instance, + timeProvider); + using var provider = CreateCryptoProvider(); - await exporter.ExportAsync(provider, CancellationToken.None); - 
- var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture); - var exportDirectory = Path.Combine(_root, exportId); - var manifestPath = Path.Combine(exportDirectory, "manifest.json"); - - Assert.True(File.Exists(manifestPath)); - - using var document = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None)); - var root = document.RootElement; - - Assert.Equal(exportId, root.GetProperty("exportId").GetString()); - Assert.Equal(exportedAt.UtcDateTime, root.GetProperty("generatedAt").GetDateTime()); - Assert.Equal(1, root.GetProperty("advisoryCount").GetInt32()); - - var exportedFiles = Directory.EnumerateFiles(exportDirectory, "*.json", SearchOption.AllDirectories) - .Select(path => new - { - Absolute = path, - Relative = Path.GetRelativePath(exportDirectory, path).Replace("\\", "/", StringComparison.Ordinal), - }) - .Where(file => !string.Equals(file.Relative, "manifest.json", StringComparison.OrdinalIgnoreCase)) - .OrderBy(file => file.Relative, StringComparer.Ordinal) - .ToArray(); - - var filesElement = root.GetProperty("files") - .EnumerateArray() - .Select(element => new - { - Path = element.GetProperty("path").GetString(), - Bytes = element.GetProperty("bytes").GetInt64(), - Digest = element.GetProperty("digest").GetString(), - }) - .OrderBy(file => file.Path, StringComparer.Ordinal) - .ToArray(); - - var dataFile = Assert.Single(exportedFiles); - using (var advisoryDocument = JsonDocument.Parse(await File.ReadAllBytesAsync(dataFile.Absolute, CancellationToken.None))) - { - var advisoryRoot = advisoryDocument.RootElement; - Assert.Equal("Detailed description capturing remediation steps.", advisoryRoot.GetProperty("description").GetString()); - Assert.Equal("3.1|CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", advisoryRoot.GetProperty("canonicalMetricId").GetString()); - - var referenceElement = advisoryRoot.GetProperty("references").EnumerateArray().Single(); - Assert.Equal(reference.Url, 
referenceElement.GetProperty("url").GetString(), StringComparer.OrdinalIgnoreCase); - - var weaknessElement = advisoryRoot.GetProperty("cwes").EnumerateArray().Single(); - Assert.Equal("cwe", weaknessElement.GetProperty("taxonomy").GetString()); - Assert.Equal("CWE-79", weaknessElement.GetProperty("identifier").GetString()); - } - - Assert.Equal(exportedFiles.Select(file => file.Relative).ToArray(), filesElement.Select(file => file.Path).ToArray()); - - long totalBytes = exportedFiles.Select(file => new FileInfo(file.Absolute).Length).Sum(); - Assert.Equal(totalBytes, root.GetProperty("totalBytes").GetInt64()); - Assert.Equal(exportedFiles.Length, root.GetProperty("fileCount").GetInt32()); - - var digest = root.GetProperty("digest").GetString(); - var digestResult = new JsonExportResult( - exportDirectory, - exportedAt, - exportedFiles.Select(file => - { - var manifestEntry = filesElement.First(f => f.Path == file.Relative); - if (manifestEntry.Digest is null) - { - throw new InvalidOperationException($"Manifest entry for {file.Relative} missing digest."); - } - - return new JsonExportFile(file.Relative, new FileInfo(file.Absolute).Length, manifestEntry.Digest); - }), - exportedFiles.Length, - totalBytes); - var expectedDigest = ExportDigestCalculator.ComputeTreeDigest(digestResult); - Assert.Equal(expectedDigest, digest); - - var exporterVersion = root.GetProperty("exporterVersion").GetString(); - Assert.Equal(ExporterVersion.GetVersion(typeof(JsonFeedExporter)), exporterVersion); - } - - [Fact] - public async Task ExportAsync_WritesMirrorBundlesWithSignatures() - { - var exportedAt = DateTimeOffset.Parse("2025-01-05T00:00:00Z", CultureInfo.InvariantCulture); - var advisoryOne = new Advisory( - advisoryKey: "CVE-2025-0001", - title: "Mirror Advisory One", - summary: null, - language: "en", - published: exportedAt.AddDays(-10), - modified: exportedAt.AddDays(-9), - severity: "high", - exploitKnown: false, - aliases: new[] { "CVE-2025-0001", "GHSA-aaaa-bbbb-cccc" }, 
- references: Array.Empty(), - affectedPackages: Array.Empty(), - cvssMetrics: Array.Empty(), - provenance: new[] - { - new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", exportedAt.AddDays(-9)), - new AdvisoryProvenance("nvd", "map", "CVE-2025-0001", exportedAt.AddDays(-8)), - }); - - var advisoryTwo = new Advisory( - advisoryKey: "CVE-2025-0002", - title: "Mirror Advisory Two", - summary: null, - language: "en", - published: exportedAt.AddDays(-6), - modified: exportedAt.AddDays(-5), - severity: "medium", - exploitKnown: false, - aliases: new[] { "CVE-2025-0002" }, - references: Array.Empty(), - affectedPackages: Array.Empty(), - cvssMetrics: Array.Empty(), - provenance: new[] - { - new AdvisoryProvenance("nvd", "map", "CVE-2025-0002", exportedAt.AddDays(-5)), - new AdvisoryProvenance("vendor", "map", "ADVISORY-0002", exportedAt.AddDays(-4)), - }); - - var advisoryStore = new StubAdvisoryStore(advisoryOne, advisoryTwo); - var optionsValue = new JsonExportOptions - { - OutputRoot = _root, - MaintainLatestSymlink = false, - TargetRepository = "s3://mirror/concelier" - }; - - optionsValue.Mirror.Enabled = true; - optionsValue.Mirror.DirectoryName = "mirror"; - optionsValue.Mirror.Domains.Add(new JsonExportOptions.JsonMirrorDomainOptions - { - Id = "primary", - DisplayName = "Primary" - }); - - optionsValue.Mirror.Signing.Enabled = true; - optionsValue.Mirror.Signing.KeyId = "mirror-signing-key"; - optionsValue.Mirror.Signing.Algorithm = SignatureAlgorithms.Es256; - optionsValue.Mirror.Signing.KeyPath = WriteSigningKey(_root); - - var options = Options.Create(optionsValue); - var stateStore = new InMemoryExportStateStore(); - var timeProvider = new TestTimeProvider(exportedAt); - var stateManager = new ExportStateManager(stateStore, timeProvider); - var eventLog = new StubAdvisoryEventLog(new[] { advisoryOne, advisoryTwo }, exportedAt); - var exporter = new JsonFeedExporter( - advisoryStore, - options, - new VulnListJsonExportPathResolver(), - stateManager, 
- eventLog, - NullLogger.Instance, - timeProvider); - + await exporter.ExportAsync(provider, CancellationToken.None); + + var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture); + var exportDirectory = Path.Combine(_root, exportId); + var manifestPath = Path.Combine(exportDirectory, "manifest.json"); + + Assert.True(File.Exists(manifestPath)); + + using var document = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None)); + var root = document.RootElement; + + Assert.Equal(exportId, root.GetProperty("exportId").GetString()); + Assert.Equal(exportedAt.UtcDateTime, root.GetProperty("generatedAt").GetDateTime()); + Assert.Equal(1, root.GetProperty("advisoryCount").GetInt32()); + + var exportedFiles = Directory.EnumerateFiles(exportDirectory, "*.json", SearchOption.AllDirectories) + .Select(path => new + { + Absolute = path, + Relative = Path.GetRelativePath(exportDirectory, path).Replace("\\", "/", StringComparison.Ordinal), + }) + .Where(file => !string.Equals(file.Relative, "manifest.json", StringComparison.OrdinalIgnoreCase)) + .OrderBy(file => file.Relative, StringComparer.Ordinal) + .ToArray(); + + var filesElement = root.GetProperty("files") + .EnumerateArray() + .Select(element => new + { + Path = element.GetProperty("path").GetString(), + Bytes = element.GetProperty("bytes").GetInt64(), + Digest = element.GetProperty("digest").GetString(), + }) + .OrderBy(file => file.Path, StringComparer.Ordinal) + .ToArray(); + + var dataFile = Assert.Single(exportedFiles); + using (var advisoryDocument = JsonDocument.Parse(await File.ReadAllBytesAsync(dataFile.Absolute, CancellationToken.None))) + { + var advisoryRoot = advisoryDocument.RootElement; + Assert.Equal("Detailed description capturing remediation steps.", advisoryRoot.GetProperty("description").GetString()); + Assert.Equal("3.1|CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", advisoryRoot.GetProperty("canonicalMetricId").GetString()); + + 
var referenceElement = advisoryRoot.GetProperty("references").EnumerateArray().Single(); + Assert.Equal(reference.Url, referenceElement.GetProperty("url").GetString(), StringComparer.OrdinalIgnoreCase); + + var weaknessElement = advisoryRoot.GetProperty("cwes").EnumerateArray().Single(); + Assert.Equal("cwe", weaknessElement.GetProperty("taxonomy").GetString()); + Assert.Equal("CWE-79", weaknessElement.GetProperty("identifier").GetString()); + } + + Assert.Equal(exportedFiles.Select(file => file.Relative).ToArray(), filesElement.Select(file => file.Path).ToArray()); + + long totalBytes = exportedFiles.Select(file => new FileInfo(file.Absolute).Length).Sum(); + Assert.Equal(totalBytes, root.GetProperty("totalBytes").GetInt64()); + Assert.Equal(exportedFiles.Length, root.GetProperty("fileCount").GetInt32()); + + var digest = root.GetProperty("digest").GetString(); + var digestResult = new JsonExportResult( + exportDirectory, + exportedAt, + exportedFiles.Select(file => + { + var manifestEntry = filesElement.First(f => f.Path == file.Relative); + if (manifestEntry.Digest is null) + { + throw new InvalidOperationException($"Manifest entry for {file.Relative} missing digest."); + } + + return new JsonExportFile(file.Relative, new FileInfo(file.Absolute).Length, manifestEntry.Digest); + }), + exportedFiles.Length, + totalBytes); + var expectedDigest = ExportDigestCalculator.ComputeTreeDigest(digestResult); + Assert.Equal(expectedDigest, digest); + + var exporterVersion = root.GetProperty("exporterVersion").GetString(); + Assert.Equal(ExporterVersion.GetVersion(typeof(JsonFeedExporter)), exporterVersion); + } + + [Fact] + public async Task ExportAsync_WritesMirrorBundlesWithSignatures() + { + var exportedAt = DateTimeOffset.Parse("2025-01-05T00:00:00Z", CultureInfo.InvariantCulture); + var advisoryOne = new Advisory( + advisoryKey: "CVE-2025-0001", + title: "Mirror Advisory One", + summary: null, + language: "en", + published: exportedAt.AddDays(-10), + modified: 
exportedAt.AddDays(-9), + severity: "high", + exploitKnown: false, + aliases: new[] { "CVE-2025-0001", "GHSA-aaaa-bbbb-cccc" }, + references: Array.Empty(), + affectedPackages: Array.Empty(), + cvssMetrics: Array.Empty(), + provenance: new[] + { + new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", exportedAt.AddDays(-9)), + new AdvisoryProvenance("nvd", "map", "CVE-2025-0001", exportedAt.AddDays(-8)), + }); + + var advisoryTwo = new Advisory( + advisoryKey: "CVE-2025-0002", + title: "Mirror Advisory Two", + summary: null, + language: "en", + published: exportedAt.AddDays(-6), + modified: exportedAt.AddDays(-5), + severity: "medium", + exploitKnown: false, + aliases: new[] { "CVE-2025-0002" }, + references: Array.Empty(), + affectedPackages: Array.Empty(), + cvssMetrics: Array.Empty(), + provenance: new[] + { + new AdvisoryProvenance("nvd", "map", "CVE-2025-0002", exportedAt.AddDays(-5)), + new AdvisoryProvenance("vendor", "map", "ADVISORY-0002", exportedAt.AddDays(-4)), + }); + + var advisoryStore = new StubAdvisoryStore(advisoryOne, advisoryTwo); + var optionsValue = new JsonExportOptions + { + OutputRoot = _root, + MaintainLatestSymlink = false, + TargetRepository = "s3://mirror/concelier" + }; + + optionsValue.Mirror.Enabled = true; + optionsValue.Mirror.DirectoryName = "mirror"; + optionsValue.Mirror.Domains.Add(new JsonExportOptions.JsonMirrorDomainOptions + { + Id = "primary", + DisplayName = "Primary" + }); + + optionsValue.Mirror.Signing.Enabled = true; + optionsValue.Mirror.Signing.KeyId = "mirror-signing-key"; + optionsValue.Mirror.Signing.Algorithm = SignatureAlgorithms.Es256; + optionsValue.Mirror.Signing.KeyPath = WriteSigningKey(_root); + + var options = Options.Create(optionsValue); + var stateStore = new InMemoryExportStateStore(); + var timeProvider = new TestTimeProvider(exportedAt); + var stateManager = new ExportStateManager(stateStore, timeProvider); + var eventLog = new StubAdvisoryEventLog(new[] { advisoryOne, advisoryTwo }, 
exportedAt); + var exporter = new JsonFeedExporter( + advisoryStore, + options, + new VulnListJsonExportPathResolver(), + stateManager, + eventLog, + NullLogger.Instance, + timeProvider); + using var provider = CreateCryptoProvider(); - await exporter.ExportAsync(provider, CancellationToken.None); - - var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture); - var exportDirectory = Path.Combine(_root, exportId); - var mirrorDirectory = Path.Combine(exportDirectory, "mirror"); - var domainDirectory = Path.Combine(mirrorDirectory, "primary"); - - Assert.True(File.Exists(Path.Combine(mirrorDirectory, "index.json"))); - Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json"))); - Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json.jws"))); - Assert.True(File.Exists(Path.Combine(domainDirectory, "manifest.json"))); - - var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); - Assert.NotNull(record); - Assert.Contains(record!.Files, file => string.Equals(file.Path, "mirror/index.json", StringComparison.Ordinal)); - Assert.Contains(record.Files, file => string.Equals(file.Path, "mirror/primary/manifest.json", StringComparison.Ordinal)); - - var indexPath = Path.Combine(mirrorDirectory, "index.json"); - using (var indexDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(indexPath, CancellationToken.None))) - { - var indexRoot = indexDoc.RootElement; - Assert.Equal("s3://mirror/concelier", indexRoot.GetProperty("targetRepository").GetString()); - - var domains = indexRoot.GetProperty("domains").EnumerateArray().ToArray(); - var domain = Assert.Single(domains); - Assert.Equal("primary", domain.GetProperty("domainId").GetString()); - Assert.Equal("Primary", domain.GetProperty("displayName").GetString()); - Assert.Equal(2, domain.GetProperty("advisoryCount").GetInt32()); - - var bundleDescriptor = domain.GetProperty("bundle"); - 
Assert.Equal("mirror/primary/bundle.json", bundleDescriptor.GetProperty("path").GetString()); - var signatureDescriptor = bundleDescriptor.GetProperty("signature"); - Assert.Equal("mirror/primary/bundle.json.jws", signatureDescriptor.GetProperty("path").GetString()); - - var manifestDescriptor = domain.GetProperty("manifest"); - Assert.Equal("mirror/primary/manifest.json", manifestDescriptor.GetProperty("path").GetString()); - } - - var bundlePathRel = "mirror/primary/bundle.json"; - var manifestPathRel = "mirror/primary/manifest.json"; - var signaturePathRel = "mirror/primary/bundle.json.jws"; - - var bundlePath = Path.Combine(exportDirectory, bundlePathRel.Replace('/', Path.DirectorySeparatorChar)); - var manifestPath = Path.Combine(exportDirectory, manifestPathRel.Replace('/', Path.DirectorySeparatorChar)); - var signaturePath = Path.Combine(exportDirectory, signaturePathRel.Replace('/', Path.DirectorySeparatorChar)); - - using (var bundleDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(bundlePath, CancellationToken.None))) - { - var bundleRoot = bundleDoc.RootElement; - Assert.Equal("primary", bundleRoot.GetProperty("domainId").GetString()); - Assert.Equal(2, bundleRoot.GetProperty("advisoryCount").GetInt32()); - Assert.Equal("s3://mirror/concelier", bundleRoot.GetProperty("targetRepository").GetString()); - Assert.Equal(2, bundleRoot.GetProperty("advisories").GetArrayLength()); - - var sources = bundleRoot.GetProperty("sources").EnumerateArray().Select(element => element.GetProperty("source").GetString()).ToArray(); - Assert.Contains("ghsa", sources); - Assert.Contains("nvd", sources); - Assert.Contains("vendor", sources); - } - - using (var manifestDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None))) - { - var manifestRoot = manifestDoc.RootElement; - Assert.Equal("primary", manifestRoot.GetProperty("domainId").GetString()); - Assert.Equal(2, manifestRoot.GetProperty("advisoryCount").GetInt32()); - 
Assert.Equal("mirror/primary/bundle.json", manifestRoot.GetProperty("bundle").GetProperty("path").GetString()); - } - - var bundleBytes = await File.ReadAllBytesAsync(bundlePath, CancellationToken.None); - var signatureValue = await File.ReadAllTextAsync(signaturePath, CancellationToken.None); - var signatureParts = signatureValue.Split("..", StringSplitOptions.None); - Assert.Equal(2, signatureParts.Length); - - var signingInput = BuildSigningInput(signatureParts[0], bundleBytes); - var signatureBytes = Base64UrlDecode(signatureParts[1]); - - var registry = provider.GetRequiredService(); - var verification = registry.ResolveSigner( - CryptoCapability.Signing, - optionsValue.Mirror.Signing.Algorithm, - new CryptoKeyReference(optionsValue.Mirror.Signing.KeyId, optionsValue.Mirror.Signing.Provider), - optionsValue.Mirror.Signing.Provider); - var verified = await verification.Signer.VerifyAsync(signingInput, signatureBytes, CancellationToken.None); - Assert.True(verified); - } - - public void Dispose() - { - try - { - if (Directory.Exists(_root)) - { - Directory.Delete(_root, recursive: true); - } - } - catch - { - // best effort cleanup - } - } - - private static string WriteSigningKey(string directory) - { - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var pkcs8 = ecdsa.ExportPkcs8PrivateKey(); - var pem = BuildPem("PRIVATE KEY", pkcs8); - var path = Path.Combine(directory, $"mirror-key-{Guid.NewGuid():N}.pem"); - File.WriteAllText(path, pem); - return path; - } - - private static string BuildPem(string label, byte[] data) - { - var base64 = Convert.ToBase64String(data, Base64FormattingOptions.InsertLineBreaks); - return $"-----BEGIN {label}-----\n{base64}\n-----END {label}-----\n"; - } - + await exporter.ExportAsync(provider, CancellationToken.None); + + var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture); + var exportDirectory = Path.Combine(_root, exportId); + var mirrorDirectory = 
Path.Combine(exportDirectory, "mirror"); + var domainDirectory = Path.Combine(mirrorDirectory, "primary"); + + Assert.True(File.Exists(Path.Combine(mirrorDirectory, "index.json"))); + Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json"))); + Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json.jws"))); + Assert.True(File.Exists(Path.Combine(domainDirectory, "manifest.json"))); + + var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None); + Assert.NotNull(record); + Assert.Contains(record!.Files, file => string.Equals(file.Path, "mirror/index.json", StringComparison.Ordinal)); + Assert.Contains(record.Files, file => string.Equals(file.Path, "mirror/primary/manifest.json", StringComparison.Ordinal)); + + var indexPath = Path.Combine(mirrorDirectory, "index.json"); + using (var indexDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(indexPath, CancellationToken.None))) + { + var indexRoot = indexDoc.RootElement; + Assert.Equal("s3://mirror/concelier", indexRoot.GetProperty("targetRepository").GetString()); + + var domains = indexRoot.GetProperty("domains").EnumerateArray().ToArray(); + var domain = Assert.Single(domains); + Assert.Equal("primary", domain.GetProperty("domainId").GetString()); + Assert.Equal("Primary", domain.GetProperty("displayName").GetString()); + Assert.Equal(2, domain.GetProperty("advisoryCount").GetInt32()); + + var bundleDescriptor = domain.GetProperty("bundle"); + Assert.Equal("mirror/primary/bundle.json", bundleDescriptor.GetProperty("path").GetString()); + var signatureDescriptor = bundleDescriptor.GetProperty("signature"); + Assert.Equal("mirror/primary/bundle.json.jws", signatureDescriptor.GetProperty("path").GetString()); + + var manifestDescriptor = domain.GetProperty("manifest"); + Assert.Equal("mirror/primary/manifest.json", manifestDescriptor.GetProperty("path").GetString()); + } + + var bundlePathRel = "mirror/primary/bundle.json"; + var manifestPathRel = 
"mirror/primary/manifest.json"; + var signaturePathRel = "mirror/primary/bundle.json.jws"; + + var bundlePath = Path.Combine(exportDirectory, bundlePathRel.Replace('/', Path.DirectorySeparatorChar)); + var manifestPath = Path.Combine(exportDirectory, manifestPathRel.Replace('/', Path.DirectorySeparatorChar)); + var signaturePath = Path.Combine(exportDirectory, signaturePathRel.Replace('/', Path.DirectorySeparatorChar)); + + using (var bundleDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(bundlePath, CancellationToken.None))) + { + var bundleRoot = bundleDoc.RootElement; + Assert.Equal("primary", bundleRoot.GetProperty("domainId").GetString()); + Assert.Equal(2, bundleRoot.GetProperty("advisoryCount").GetInt32()); + Assert.Equal("s3://mirror/concelier", bundleRoot.GetProperty("targetRepository").GetString()); + Assert.Equal(2, bundleRoot.GetProperty("advisories").GetArrayLength()); + + var sources = bundleRoot.GetProperty("sources").EnumerateArray().Select(element => element.GetProperty("source").GetString()).ToArray(); + Assert.Contains("ghsa", sources); + Assert.Contains("nvd", sources); + Assert.Contains("vendor", sources); + } + + using (var manifestDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None))) + { + var manifestRoot = manifestDoc.RootElement; + Assert.Equal("primary", manifestRoot.GetProperty("domainId").GetString()); + Assert.Equal(2, manifestRoot.GetProperty("advisoryCount").GetInt32()); + Assert.Equal("mirror/primary/bundle.json", manifestRoot.GetProperty("bundle").GetProperty("path").GetString()); + } + + var bundleBytes = await File.ReadAllBytesAsync(bundlePath, CancellationToken.None); + var signatureValue = await File.ReadAllTextAsync(signaturePath, CancellationToken.None); + var signatureParts = signatureValue.Split("..", StringSplitOptions.None); + Assert.Equal(2, signatureParts.Length); + + var signingInput = BuildSigningInput(signatureParts[0], bundleBytes); + var signatureBytes = 
Base64UrlDecode(signatureParts[1]); + + var registry = provider.GetRequiredService(); + var verification = registry.ResolveSigner( + CryptoCapability.Signing, + optionsValue.Mirror.Signing.Algorithm, + new CryptoKeyReference(optionsValue.Mirror.Signing.KeyId, optionsValue.Mirror.Signing.Provider), + optionsValue.Mirror.Signing.Provider); + var verified = await verification.Signer.VerifyAsync(signingInput, signatureBytes, CancellationToken.None); + Assert.True(verified); + } + + public void Dispose() + { + try + { + if (Directory.Exists(_root)) + { + Directory.Delete(_root, recursive: true); + } + } + catch + { + // best effort cleanup + } + } + + private static string WriteSigningKey(string directory) + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var pkcs8 = ecdsa.ExportPkcs8PrivateKey(); + var pem = BuildPem("PRIVATE KEY", pkcs8); + var path = Path.Combine(directory, $"mirror-key-{Guid.NewGuid():N}.pem"); + File.WriteAllText(path, pem); + return path; + } + + private static string BuildPem(string label, byte[] data) + { + var base64 = Convert.ToBase64String(data, Base64FormattingOptions.InsertLineBreaks); + return $"-----BEGIN {label}-----\n{base64}\n-----END {label}-----\n"; + } + private static byte[] BuildSigningInput(string protectedHeader, byte[] payload) - { - var headerBytes = Encoding.ASCII.GetBytes(protectedHeader); - var buffer = new byte[headerBytes.Length + 1 + payload.Length]; - Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); - buffer[headerBytes.Length] = (byte)'.'; - Buffer.BlockCopy(payload, 0, buffer, headerBytes.Length + 1, payload.Length); - return buffer; - } - + { + var headerBytes = Encoding.ASCII.GetBytes(protectedHeader); + var buffer = new byte[headerBytes.Length + 1 + payload.Length]; + Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); + buffer[headerBytes.Length] = (byte)'.'; + Buffer.BlockCopy(payload, 0, buffer, headerBytes.Length + 1, payload.Length); + return buffer; + } + 
private static byte[] Base64UrlDecode(string value) { var builder = new StringBuilder(value.Length + 3); - foreach (var ch in value) - { - builder.Append(ch switch - { - '-' => '+', - '_' => '/', - _ => ch - }); - } - - while (builder.Length % 4 != 0) - { - builder.Append('='); + foreach (var ch in value) + { + builder.Append(ch switch + { + '-' => '+', + '_' => '/', + _ => ch + }); + } + + while (builder.Length % 4 != 0) + { + builder.Append('='); } return Convert.FromBase64String(builder.ToString()); @@ -480,119 +480,118 @@ public sealed class JsonFeedExporterTests : IDisposable services.AddStellaOpsCrypto(); return services.BuildServiceProvider(); } - - private sealed class StubAdvisoryStore : IAdvisoryStore - { - private readonly IReadOnlyList _advisories; - - public StubAdvisoryStore(params Advisory[] advisories) - { - _advisories = advisories; - } - - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - return Task.FromResult(_advisories); - } - - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - return Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); - } - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - return Task.CompletedTask; - } - - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - { - _ = session; - return EnumerateAsync(cancellationToken); - - async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) - { - foreach (var advisory in _advisories) - { - ct.ThrowIfCancellationRequested(); - yield return advisory; - await Task.Yield(); - } - } - } - } - - private sealed class StubAdvisoryEventLog : IAdvisoryEventLog - { - private readonly Dictionary _advisories; - private readonly DateTimeOffset _recordedAt; - - public StubAdvisoryEventLog(IEnumerable advisories, DateTimeOffset recordedAt) - { - _advisories = advisories.ToDictionary(advisory => advisory.AdvisoryKey, StringComparer.OrdinalIgnoreCase); - _recordedAt = recordedAt; - } - - public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public ValueTask ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) - { - if (_advisories.TryGetValue(vulnerabilityKey, out var advisory)) - { - var asOfTimestamp = advisory.Modified ?? advisory.Published ?? _recordedAt; - var snapshot = new AdvisoryStatementSnapshot( - Guid.NewGuid(), - vulnerabilityKey, - advisory.AdvisoryKey, - advisory, - ImmutableArray.Empty, - asOfTimestamp, - _recordedAt, - ImmutableArray.Empty); - - return ValueTask.FromResult(new AdvisoryReplay( - vulnerabilityKey, - asOf, - ImmutableArray.Create(snapshot), - ImmutableArray.Empty)); - } - - return ValueTask.FromResult(new AdvisoryReplay( - vulnerabilityKey, - asOf, - ImmutableArray.Empty, - ImmutableArray.Empty)); - } - } - - private sealed class InMemoryExportStateStore : IExportStateStore - { - private ExportStateRecord? 
_record; - - public Task FindAsync(string id, CancellationToken cancellationToken) - { - return Task.FromResult(_record); - } - - public Task UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken) - { - _record = record; - return Task.FromResult(record); - } - } - - private sealed class TestTimeProvider : TimeProvider - { - private DateTimeOffset _now; - - public TestTimeProvider(DateTimeOffset start) => _now = start; - - public override DateTimeOffset GetUtcNow() => _now; - - public void Advance(TimeSpan delta) => _now = _now.Add(delta); - } + + private sealed class StubAdvisoryStore : IAdvisoryStore + { + private readonly IReadOnlyList _advisories; + + public StubAdvisoryStore(params Advisory[] advisories) + { + _advisories = advisories; + } + + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) + { + return Task.FromResult(_advisories); + } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) + { + return Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) + { + return EnumerateAsync(cancellationToken); + + async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) + { + foreach (var advisory in _advisories) + { + ct.ThrowIfCancellationRequested(); + yield return advisory; + await Task.Yield(); + } + } + } + } + + private sealed class StubAdvisoryEventLog : IAdvisoryEventLog + { + private readonly Dictionary _advisories; + private readonly DateTimeOffset _recordedAt; + + public StubAdvisoryEventLog(IEnumerable advisories, DateTimeOffset recordedAt) + { + _advisories = advisories.ToDictionary(advisory => advisory.AdvisoryKey, StringComparer.OrdinalIgnoreCase); + _recordedAt = recordedAt; + } + + public ValueTask 
AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) + => throw new NotSupportedException(); + + public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + + public ValueTask ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) + { + if (_advisories.TryGetValue(vulnerabilityKey, out var advisory)) + { + var asOfTimestamp = advisory.Modified ?? advisory.Published ?? _recordedAt; + var snapshot = new AdvisoryStatementSnapshot( + Guid.NewGuid(), + vulnerabilityKey, + advisory.AdvisoryKey, + advisory, + ImmutableArray.Empty, + asOfTimestamp, + _recordedAt, + ImmutableArray.Empty); + + return ValueTask.FromResult(new AdvisoryReplay( + vulnerabilityKey, + asOf, + ImmutableArray.Create(snapshot), + ImmutableArray.Empty)); + } + + return ValueTask.FromResult(new AdvisoryReplay( + vulnerabilityKey, + asOf, + ImmutableArray.Empty, + ImmutableArray.Empty)); + } + } + + private sealed class InMemoryExportStateStore : IExportStateStore + { + private ExportStateRecord? 
_record; + + public Task FindAsync(string id, CancellationToken cancellationToken) + { + return Task.FromResult(_record); + } + + public Task UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken) + { + _record = record; + return Task.FromResult(record); + } + } + + private sealed class TestTimeProvider : TimeProvider + { + private DateTimeOffset _now; + + public TestTimeProvider(DateTimeOffset start) => _now = start; + + public override DateTimeOffset GetUtcNow() => _now; + + public void Advance(TimeSpan delta) => _now = _now.Add(delta); + } } diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs index 60fc0c9e4..58b37ba21 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs @@ -11,7 +11,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; -using MongoDB.Driver; using StellaOps.Concelier.Exporter.Json; using StellaOps.Concelier.Exporter.TrivyDb; using StellaOps.Concelier.Models; @@ -883,27 +882,23 @@ public sealed class TrivyDbFeedExporterTests : IDisposable _advisories = advisories; } - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) { - _ = session; return Task.FromResult(_advisories); } - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) { - _ = session; return Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); } - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) { - _ = session; return Task.CompletedTask; } - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) { - _ = session; return EnumerateAsync(cancellationToken); async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs index 42e1246ad..400c2973c 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs @@ -2,109 +2,109 @@ using System.Collections.Concurrent; using System.Collections.Immutable; using System.Linq; using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Time.Testing; -using MongoDB.Driver; -using StellaOps.Concelier.Core; -using StellaOps.Concelier.Core.Events; -using StellaOps.Concelier.Merge.Services; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Aliases; -using StellaOps.Concelier.Storage.Mongo.MergeEvents; - -namespace StellaOps.Concelier.Merge.Tests; - -public sealed class AdvisoryMergeServiceTests -{ - [Fact] - public async Task MergeAsync_AppliesCanonicalRulesAndPersistsDecisions() - { - var aliasStore = new FakeAliasStore(); - 
aliasStore.Register("GHSA-aaaa-bbbb-cccc", - (AliasSchemes.Ghsa, "GHSA-aaaa-bbbb-cccc"), - (AliasSchemes.Cve, "CVE-2025-4242")); - aliasStore.Register("CVE-2025-4242", - (AliasSchemes.Cve, "CVE-2025-4242")); - aliasStore.Register("OSV-2025-xyz", - (AliasSchemes.OsV, "OSV-2025-xyz"), - (AliasSchemes.Cve, "CVE-2025-4242")); - - var advisoryStore = new FakeAdvisoryStore(); - advisoryStore.Seed(CreateGhsaAdvisory(), CreateNvdAdvisory(), CreateOsvAdvisory()); - - var mergeEventStore = new InMemoryMergeEventStore(); - var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 4, 1, 0, 0, 0, TimeSpan.Zero)); - var writer = new MergeEventWriter(mergeEventStore, new CanonicalHashCalculator(), timeProvider, NullLogger.Instance); - var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider); - var aliasResolver = new AliasGraphResolver(aliasStore); - var canonicalMerger = new CanonicalMerger(timeProvider); - var eventLog = new RecordingAdvisoryEventLog(); - var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger.Instance); - - var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None); - +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Concelier.Core; +using StellaOps.Concelier.Core.Events; +using StellaOps.Concelier.Merge.Services; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Aliases; +using StellaOps.Concelier.Storage.Mongo.MergeEvents; +using StellaOps.Provenance.Mongo; + +namespace StellaOps.Concelier.Merge.Tests; + +public sealed class AdvisoryMergeServiceTests +{ + [Fact] + public async Task MergeAsync_AppliesCanonicalRulesAndPersistsDecisions() + { + var aliasStore = new FakeAliasStore(); + aliasStore.Register("GHSA-aaaa-bbbb-cccc", + (AliasSchemes.Ghsa, 
"GHSA-aaaa-bbbb-cccc"), + (AliasSchemes.Cve, "CVE-2025-4242")); + aliasStore.Register("CVE-2025-4242", + (AliasSchemes.Cve, "CVE-2025-4242")); + aliasStore.Register("OSV-2025-xyz", + (AliasSchemes.OsV, "OSV-2025-xyz"), + (AliasSchemes.Cve, "CVE-2025-4242")); + + var advisoryStore = new FakeAdvisoryStore(); + advisoryStore.Seed(CreateGhsaAdvisory(), CreateNvdAdvisory(), CreateOsvAdvisory()); + + var mergeEventStore = new InMemoryMergeEventStore(); + var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 4, 1, 0, 0, 0, TimeSpan.Zero)); + var writer = new MergeEventWriter(mergeEventStore, new CanonicalHashCalculator(), timeProvider, NullLogger.Instance); + var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider); + var aliasResolver = new AliasGraphResolver(aliasStore); + var canonicalMerger = new CanonicalMerger(timeProvider); + var eventLog = new RecordingAdvisoryEventLog(); + var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger.Instance); + + var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None); + Assert.NotNull(result.Merged); Assert.Equal("OSV summary overrides", result.Merged!.Summary); Assert.Empty(result.Conflicts); - - var upserted = advisoryStore.LastUpserted; - Assert.NotNull(upserted); - Assert.Equal("CVE-2025-4242", upserted!.AdvisoryKey); - Assert.Equal("OSV summary overrides", upserted.Summary); - - var mergeRecord = mergeEventStore.LastRecord; - Assert.NotNull(mergeRecord); - var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary"); - Assert.Equal("osv", summaryDecision.SelectedSource); - Assert.Equal("freshness_override", summaryDecision.DecisionReason); - - var appendRequest = eventLog.LastRequest; - Assert.NotNull(appendRequest); - Assert.Contains(appendRequest!.Statements, statement => 
string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase)); - Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0); - } - - private static Advisory CreateGhsaAdvisory() - { - var recorded = DateTimeOffset.Parse("2025-03-01T00:00:00Z"); - var provenance = new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", recorded, new[] { ProvenanceFieldMasks.Advisory }); - return new Advisory( - "GHSA-aaaa-bbbb-cccc", - "Container escape", - "Initial GHSA summary.", - "en", - recorded, - recorded, - "medium", - exploitKnown: false, - aliases: new[] { "CVE-2025-4242", "GHSA-aaaa-bbbb-cccc" }, - references: Array.Empty(), - affectedPackages: Array.Empty(), - cvssMetrics: Array.Empty(), - provenance: new[] { provenance }); - } - - private static Advisory CreateNvdAdvisory() - { - var recorded = DateTimeOffset.Parse("2025-03-02T00:00:00Z"); - var provenance = new AdvisoryProvenance("nvd", "map", "CVE-2025-4242", recorded, new[] { ProvenanceFieldMasks.Advisory }); - return new Advisory( - "CVE-2025-4242", - "CVE-2025-4242", - "Baseline NVD summary.", - "en", - recorded, - recorded, - "high", - exploitKnown: false, - aliases: new[] { "CVE-2025-4242" }, - references: Array.Empty(), - affectedPackages: Array.Empty(), - cvssMetrics: Array.Empty(), - provenance: new[] { provenance }); - } - + + var upserted = advisoryStore.LastUpserted; + Assert.NotNull(upserted); + Assert.Equal("CVE-2025-4242", upserted!.AdvisoryKey); + Assert.Equal("OSV summary overrides", upserted.Summary); + + var mergeRecord = mergeEventStore.LastRecord; + Assert.NotNull(mergeRecord); + var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary"); + Assert.Equal("osv", summaryDecision.SelectedSource); + Assert.Equal("freshness_override", summaryDecision.DecisionReason); + + var appendRequest = eventLog.LastRequest; + Assert.NotNull(appendRequest); + Assert.Contains(appendRequest!.Statements, 
statement => string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase)); + Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0); + } + + private static Advisory CreateGhsaAdvisory() + { + var recorded = DateTimeOffset.Parse("2025-03-01T00:00:00Z"); + var provenance = new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", recorded, new[] { ProvenanceFieldMasks.Advisory }); + return new Advisory( + "GHSA-aaaa-bbbb-cccc", + "Container escape", + "Initial GHSA summary.", + "en", + recorded, + recorded, + "medium", + exploitKnown: false, + aliases: new[] { "CVE-2025-4242", "GHSA-aaaa-bbbb-cccc" }, + references: Array.Empty(), + affectedPackages: Array.Empty(), + cvssMetrics: Array.Empty(), + provenance: new[] { provenance }); + } + + private static Advisory CreateNvdAdvisory() + { + var recorded = DateTimeOffset.Parse("2025-03-02T00:00:00Z"); + var provenance = new AdvisoryProvenance("nvd", "map", "CVE-2025-4242", recorded, new[] { ProvenanceFieldMasks.Advisory }); + return new Advisory( + "CVE-2025-4242", + "CVE-2025-4242", + "Baseline NVD summary.", + "en", + recorded, + recorded, + "high", + exploitKnown: false, + aliases: new[] { "CVE-2025-4242" }, + references: Array.Empty(), + affectedPackages: Array.Empty(), + cvssMetrics: Array.Empty(), + provenance: new[] { provenance }); + } + private static Advisory CreateOsvAdvisory() { var recorded = DateTimeOffset.Parse("2025-03-05T12:00:00Z"); @@ -207,120 +207,119 @@ public sealed class AdvisoryMergeServiceTests Assert.Equal(conflict.ConflictId, appendedConflict.ConflictId); Assert.Equal(conflict.StatementIds, appendedConflict.StatementIds.ToImmutableArray()); } - - - private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog - { - public AdvisoryEventAppendRequest? 
LastRequest { get; private set; } - - public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) - { - LastRequest = request; - return ValueTask.CompletedTask; - } - - public ValueTask ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) - { - throw new NotSupportedException(); - } - } - - private sealed class FakeAliasStore : IAliasStore - { - private readonly ConcurrentDictionary> _records = new(StringComparer.OrdinalIgnoreCase); - - public void Register(string advisoryKey, params (string Scheme, string Value)[] aliases) - { - var list = new List(); - foreach (var (scheme, value) in aliases) - { - list.Add(new AliasRecord(advisoryKey, scheme, value, DateTimeOffset.UtcNow)); - } - - _records[advisoryKey] = list; - } - - public Task ReplaceAsync(string advisoryKey, IEnumerable aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken) - { - return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty())); - } - - public Task> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken) - { - var matches = _records.Values - .SelectMany(static records => records) - .Where(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase) && string.Equals(record.Value, value, StringComparison.OrdinalIgnoreCase)) - .ToList(); - - return Task.FromResult>(matches); - } - - public Task> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken) - { - if (_records.TryGetValue(advisoryKey, out var records)) - { - return Task.FromResult>(records); - } - - return Task.FromResult>(Array.Empty()); - } - } - - private sealed class FakeAdvisoryStore : IAdvisoryStore - { - private readonly ConcurrentDictionary _advisories = new(StringComparer.OrdinalIgnoreCase); - - public Advisory? 
LastUpserted { get; private set; } - - public void Seed(params Advisory[] advisories) - { - foreach (var advisory in advisories) - { - _advisories[advisory.AdvisoryKey] = advisory; - } - } - - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - _advisories.TryGetValue(advisoryKey, out var advisory); - return Task.FromResult(advisory); - } - - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - return Task.FromResult>(Array.Empty()); - } - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - _advisories[advisory.AdvisoryKey] = advisory; - LastUpserted = advisory; - return Task.CompletedTask; - } - - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _ = session; - return AsyncEnumerable.Empty(); - } - } - - private sealed class InMemoryMergeEventStore : IMergeEventStore - { - public MergeEventRecord? LastRecord { get; private set; } - - public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken) - { - LastRecord = record; - return Task.CompletedTask; - } - - public Task> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken) - { - return Task.FromResult>(Array.Empty()); - } - } -} + + + private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog + { + public AdvisoryEventAppendRequest? 
LastRequest { get; private set; } + + public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) + { + LastRequest = request; + return ValueTask.CompletedTask; + } + + public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + + public ValueTask ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) + { + throw new NotSupportedException(); + } + } + + private sealed class FakeAliasStore : IAliasStore + { + private readonly ConcurrentDictionary> _records = new(StringComparer.OrdinalIgnoreCase); + + public void Register(string advisoryKey, params (string Scheme, string Value)[] aliases) + { + var list = new List(); + foreach (var (scheme, value) in aliases) + { + list.Add(new AliasRecord(advisoryKey, scheme, value, DateTimeOffset.UtcNow)); + } + + _records[advisoryKey] = list; + } + + public Task ReplaceAsync(string advisoryKey, IEnumerable aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken) + { + return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty())); + } + + public Task> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken) + { + var matches = _records.Values + .SelectMany(static records => records) + .Where(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase) && string.Equals(record.Value, value, StringComparison.OrdinalIgnoreCase)) + .ToList(); + + return Task.FromResult>(matches); + } + + public Task> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken) + { + if (_records.TryGetValue(advisoryKey, out var records)) + { + return Task.FromResult>(records); + } + + return Task.FromResult>(Array.Empty()); + } + } + + private sealed class FakeAdvisoryStore : IAdvisoryStore + { + private readonly ConcurrentDictionary _advisories = 
new(StringComparer.OrdinalIgnoreCase); + + public Advisory? LastUpserted { get; private set; } + + public void Seed(params Advisory[] advisories) + { + foreach (var advisory in advisories) + { + _advisories[advisory.AdvisoryKey] = advisory; + } + } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) + { + _advisories.TryGetValue(advisoryKey, out var advisory); + return Task.FromResult(advisory); + } + + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) + { + return Task.FromResult>(Array.Empty()); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) + { + _advisories[advisory.AdvisoryKey] = advisory; + LastUpserted = advisory; + return Task.CompletedTask; + } + + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) + { + return AsyncEnumerable.Empty(); + } + } + + private sealed class InMemoryMergeEventStore : IMergeEventStore + { + public MergeEventRecord? LastRecord { get; private set; } + + public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken) + { + LastRecord = record; + return Task.CompletedTask; + } + + public Task> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>(Array.Empty()); + } + } +} diff --git a/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj index 080d00847..ca99de5ea 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj @@ -12,8 +12,8 @@ - - + + @@ -24,4 +24,4 @@ - \ No newline at end of file + diff --git a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/AocVerifyRegressionTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/AocVerifyRegressionTests.cs index a0e788747..f51864a43 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/AocVerifyRegressionTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/AocVerifyRegressionTests.cs @@ -1,5 +1,6 @@ using System.Text.Json; using Microsoft.Extensions.Options; +using OptionsFactory = Microsoft.Extensions.Options.Options; using StellaOps.Aoc; using StellaOps.Concelier.Core.Aoc; @@ -194,7 +195,7 @@ public sealed class AocVerifyRegressionTests public void Verify_MapperGuardParity_ValidationResultsMatch() { var guard = new AocWriteGuard(); - var validator = new AdvisorySchemaValidator(guard, Options.Create(GuardOptions)); + var validator = new AdvisorySchemaValidator(guard, OptionsFactory.Create(GuardOptions)); // Create document with forbidden field var json = CreateJsonWithForbiddenField("severity", "high"); diff --git a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/LargeBatchIngestTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/LargeBatchIngestTests.cs index cbe3c919f..d85d2dcfa 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/LargeBatchIngestTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Aoc/LargeBatchIngestTests.cs @@ -1,6 +1,7 @@ using System.Collections.Immutable; using System.Text.Json; using Microsoft.Extensions.Options; +using OptionsFactory = Microsoft.Extensions.Options.Options; using StellaOps.Aoc; using StellaOps.Concelier.Core.Aoc; using StellaOps.Concelier.RawModels; @@ -43,7 +44,7 @@ public sealed class LargeBatchIngestTests for (int i = 0; i < results1.Count; i++) { Assert.Equal(results1[i].IsValid, results2[i].IsValid); - Assert.Equal(results1[i].Violations.Count, results2[i].Violations.Count); + Assert.Equal(results1[i].Violations.Length, results2[i].Violations.Length); } } @@ -63,8 +64,8 @@ public sealed class 
LargeBatchIngestTests var violations1 = results1[i].Violations; var violations2 = results2[i].Violations; - Assert.Equal(violations1.Count, violations2.Count); - for (int j = 0; j < violations1.Count; j++) + Assert.Equal(violations1.Length, violations2.Length); + for (int j = 0; j < violations1.Length; j++) { Assert.Equal(violations1[j].ErrorCode, violations2[j].ErrorCode); Assert.Equal(violations1[j].Path, violations2[j].Path); @@ -150,15 +151,15 @@ public sealed class LargeBatchIngestTests // Same generation should produce same violation counts var validCount1 = results1.Count(r => r.IsValid); var validCount2 = results2.Count(r => r.IsValid); - var violationCount1 = results1.Sum(r => r.Violations.Count); - var violationCount2 = results2.Sum(r => r.Violations.Count); + var violationCount1 = results1.Sum(r => r.Violations.Length); + var violationCount2 = results2.Sum(r => r.Violations.Length); Assert.Equal(validCount1, validCount2); Assert.Equal(violationCount1, violationCount2); } private static AdvisorySchemaValidator CreateValidator() - => new(new AocWriteGuard(), Options.Create(GuardOptions)); + => new(new AocWriteGuard(), OptionsFactory.Create(GuardOptions)); private static List GenerateValidDocuments(int count) { diff --git a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Services/AdvisoryChunkBuilderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Services/AdvisoryChunkBuilderTests.cs index cb575f5cc..6ccd2d323 100644 --- a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Services/AdvisoryChunkBuilderTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/Services/AdvisoryChunkBuilderTests.cs @@ -171,5 +171,27 @@ public sealed class AdvisoryChunkBuilderTests var bytes = await ComputeHashAsync(stream, algorithmId, cancellationToken).ConfigureAwait(false); return Convert.ToHexString(bytes).ToLowerInvariant(); } + + public byte[] ComputeHashForPurpose(ReadOnlySpan data, string purpose) => 
ComputeHash(data, purpose); + + public string ComputeHashHexForPurpose(ReadOnlySpan data, string purpose) => ComputeHashHex(data, purpose); + + public string ComputeHashBase64ForPurpose(ReadOnlySpan data, string purpose) => ComputeHashBase64(data, purpose); + + public ValueTask ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default) + => ComputeHashAsync(stream, purpose, cancellationToken); + + public ValueTask ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default) + => ComputeHashHexAsync(stream, purpose, cancellationToken); + + public string GetAlgorithmForPurpose(string purpose) => purpose ?? "sha256"; + + public string GetHashPrefix(string purpose) => $"{(purpose ?? "sha256").ToLowerInvariant()}:"; + + public string ComputePrefixedHashForPurpose(ReadOnlySpan data, string purpose) + { + var hash = ComputeHashHexForPurpose(data, purpose); + return $"{GetHashPrefix(purpose)}{hash}"; + } } } diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidenceBundlePackagingService.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidenceBundlePackagingService.cs index e485d3806..0dc562f2e 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidenceBundlePackagingService.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidenceBundlePackagingService.cs @@ -159,7 +159,12 @@ public sealed class EvidenceBundlePackagingService var entry = new PaxTarEntry(TarEntryType.RegularFile, path) { Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead, - ModificationTime = FixedTimestamp + ModificationTime = FixedTimestamp, + // Determinism: fixed uid/gid/owner/group per bundle-packaging.md + Uid = 0, + Gid = 0, + UserName = 
string.Empty, + GroupName = string.Empty }; var bytes = Encoding.UTF8.GetBytes(content); diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidencePortableBundleService.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidencePortableBundleService.cs index e85739c60..6cbfb1840 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidencePortableBundleService.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Services/EvidencePortableBundleService.cs @@ -345,7 +345,12 @@ public sealed class EvidencePortableBundleService var entry = new PaxTarEntry(TarEntryType.RegularFile, path) { Mode = mode == default ? DefaultFileMode : mode, - ModificationTime = FixedTimestamp + ModificationTime = FixedTimestamp, + // Determinism: fixed uid/gid/owner/group per bundle-packaging.md + Uid = 0, + Gid = 0, + UserName = string.Empty, + GroupName = string.Empty }; var bytes = Encoding.UTF8.GetBytes(content); diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidenceBundlePackagingServiceTests.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidenceBundlePackagingServiceTests.cs index 9a00ca4b6..5f234b3df 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidenceBundlePackagingServiceTests.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidenceBundlePackagingServiceTests.cs @@ -18,6 +18,11 @@ public sealed class EvidenceBundlePackagingServiceTests private static readonly EvidenceBundleId BundleId = EvidenceBundleId.FromGuid(Guid.NewGuid()); private static readonly DateTimeOffset CreatedAt = new(2025, 11, 3, 12, 30, 0, TimeSpan.Zero); + // Fixed IDs for determinism tests (must be constant across runs) + private static readonly EvidenceBundleId 
BundleIdForDeterminism = EvidenceBundleId.FromGuid( + new Guid("11111111-2222-3333-4444-555555555555")); + private static readonly DateTimeOffset CreatedAtForDeterminism = new(2025, 11, 10, 8, 0, 0, TimeSpan.Zero); + [Fact] public async Task EnsurePackageAsync_ReturnsCached_WhenPackageExists() { @@ -105,6 +110,59 @@ public sealed class EvidenceBundlePackagingServiceTests Assert.Equal(expectedSeconds, mtime); } + [Fact] + public async Task EnsurePackageAsync_ProducesDeterministicTarEntryMetadata() + { + var repository = new FakeRepository(CreateSealedBundle(), CreateSignature()); + var objectStore = new FakeObjectStore(exists: false); + var service = new EvidenceBundlePackagingService(repository, objectStore, NullLogger.Instance); + + await service.EnsurePackageAsync(TenantId, BundleId, CancellationToken.None); + + Assert.True(objectStore.Stored); + var entryMetadata = ReadArchiveEntryMetadata(objectStore.StoredBytes!); + + // Verify all entries have deterministic uid/gid/username/groupname per bundle-packaging.md + foreach (var (name, meta) in entryMetadata) + { + Assert.Equal(0, meta.Uid); + Assert.Equal(0, meta.Gid); + Assert.True( + string.IsNullOrEmpty(meta.UserName), + $"Entry '{name}' should have empty username but was '{meta.UserName}'"); + Assert.True( + string.IsNullOrEmpty(meta.GroupName), + $"Entry '{name}' should have empty groupname but was '{meta.GroupName}'"); + Assert.Equal(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), meta.ModificationTime); + } + } + + [Fact] + public async Task EnsurePackageAsync_ProducesIdenticalBytesForSameInput() + { + // First run + var signature1 = CreateSignatureForDeterminism(); + var bundle1 = CreateSealedBundleForDeterminism(); + var repository1 = new FakeRepository(bundle1, signature1); + var objectStore1 = new FakeObjectStore(exists: false); + var service1 = new EvidenceBundlePackagingService(repository1, objectStore1, NullLogger.Instance); + + await service1.EnsurePackageAsync(TenantId, 
BundleIdForDeterminism, CancellationToken.None); + + // Second run (same data) + var signature2 = CreateSignatureForDeterminism(); + var bundle2 = CreateSealedBundleForDeterminism(); + var repository2 = new FakeRepository(bundle2, signature2); + var objectStore2 = new FakeObjectStore(exists: false); + var service2 = new EvidenceBundlePackagingService(repository2, objectStore2, NullLogger.Instance); + + await service2.EnsurePackageAsync(TenantId, BundleIdForDeterminism, CancellationToken.None); + + Assert.True(objectStore1.Stored); + Assert.True(objectStore2.Stored); + Assert.Equal(objectStore1.StoredBytes, objectStore2.StoredBytes); + } + [Fact] public async Task EnsurePackageAsync_Throws_WhenManifestPayloadInvalid() { @@ -185,6 +243,62 @@ public sealed class EvidenceBundlePackagingServiceTests TimestampToken: includeTimestamp ? Encoding.UTF8.GetBytes("tsa-token") : null); } + // Determinism test helpers: fixed data for reproducible packaging + private static EvidenceBundle CreateSealedBundleForDeterminism() + => new( + BundleIdForDeterminism, + TenantId, + EvidenceBundleKind.Job, + EvidenceBundleStatus.Sealed, + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + $"tenants/{TenantId.Value:N}/bundles/{BundleIdForDeterminism.Value:N}/bundle.tgz", + CreatedAtForDeterminism, + CreatedAtForDeterminism, + Description: "determinism test", + SealedAt: CreatedAtForDeterminism.AddMinutes(1), + ExpiresAt: null); + + private static EvidenceBundleSignature CreateSignatureForDeterminism() + { + var manifest = new + { + bundleId = BundleIdForDeterminism.Value.ToString("D"), + tenantId = TenantId.Value.ToString("D"), + kind = (int)EvidenceBundleKind.Job, + createdAt = CreatedAtForDeterminism.ToString("O"), + metadata = new Dictionary { ["run"] = "determinism" }, + entries = new[] + { + new + { + section = "inputs", + canonicalPath = "inputs/config.json", + sha256 = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + sizeBytes = 128, + mediaType 
= "application/json", + attributes = new Dictionary() + } + } + }; + + var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + var payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson)); + + return new EvidenceBundleSignature( + BundleIdForDeterminism, + TenantId, + "application/vnd.stella.evidence.manifest+json", + payload, + Convert.ToBase64String(Encoding.UTF8.GetBytes("fixed-signature")), + "key-determinism", + "ES256", + "default", + CreatedAtForDeterminism.AddMinutes(1), + TimestampedAt: null, + TimestampAuthority: null, + TimestampToken: null); + } + private static Dictionary ReadArchiveEntries(byte[] archiveBytes) { using var memory = new MemoryStream(archiveBytes); @@ -209,6 +323,39 @@ public sealed class EvidenceBundlePackagingServiceTests return entries; } + private static Dictionary ReadArchiveEntryMetadata(byte[] archiveBytes) + { + using var memory = new MemoryStream(archiveBytes); + using var gzip = new GZipStream(memory, CompressionMode.Decompress, leaveOpen: true); + using var reader = new TarReader(gzip); + + var entries = new Dictionary(StringComparer.Ordinal); + TarEntry? entry; + while ((entry = reader.GetNextEntry()) is not null) + { + if (entry.EntryType != TarEntryType.RegularFile) + { + continue; + } + + entries[entry.Name] = new TarEntryMetadata( + entry.Uid, + entry.Gid, + entry.UserName ?? string.Empty, + entry.GroupName ?? 
string.Empty, + entry.ModificationTime); + } + + return entries; + } + + private sealed record TarEntryMetadata( + int Uid, + int Gid, + string UserName, + string GroupName, + DateTimeOffset ModificationTime); + private sealed class FakeRepository : IEvidenceBundleRepository { private EvidenceBundle _bundle; diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidencePortableBundleServiceTests.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidencePortableBundleServiceTests.cs index 48d0d6430..d40760f34 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidencePortableBundleServiceTests.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/EvidencePortableBundleServiceTests.cs @@ -94,6 +94,33 @@ public sealed class EvidencePortableBundleServiceTests await Assert.ThrowsAsync(() => service.EnsurePortablePackageAsync(TenantId, BundleId, CancellationToken.None)); } + [Fact] + public async Task EnsurePortablePackageAsync_ProducesDeterministicTarEntryMetadata() + { + var repository = new FakeRepository(CreateSealedBundle(), CreateSignature(includeTimestamp: true)); + var objectStore = new FakeObjectStore(exists: false); + var service = CreateService(repository, objectStore); + + await service.EnsurePortablePackageAsync(TenantId, BundleId, CancellationToken.None); + + Assert.True(objectStore.Stored); + var entryMetadata = ReadArchiveEntryMetadata(objectStore.StoredBytes!); + + // Verify all entries have deterministic uid/gid/username/groupname per bundle-packaging.md + foreach (var (name, meta) in entryMetadata) + { + Assert.Equal(0, meta.Uid); + Assert.Equal(0, meta.Gid); + Assert.True( + string.IsNullOrEmpty(meta.UserName), + $"Entry '{name}' should have empty username but was '{meta.UserName}'"); + Assert.True( + string.IsNullOrEmpty(meta.GroupName), + $"Entry '{name}' should have empty groupname but was '{meta.GroupName}'"); + 
Assert.Equal(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), meta.ModificationTime); + } + } + private static EvidencePortableBundleService CreateService(FakeRepository repository, IEvidenceObjectStore objectStore) { var options = Options.Create(new EvidenceLockerOptions @@ -200,6 +227,39 @@ public sealed class EvidencePortableBundleServiceTests return entries; } + private static Dictionary ReadArchiveEntryMetadata(byte[] archive) + { + using var memory = new MemoryStream(archive); + using var gzip = new GZipStream(memory, CompressionMode.Decompress); + using var tarReader = new TarReader(gzip); + + var entries = new Dictionary(StringComparer.Ordinal); + TarEntry? entry; + while ((entry = tarReader.GetNextEntry()) is not null) + { + if (entry.EntryType != TarEntryType.RegularFile) + { + continue; + } + + entries[entry.Name] = new TarEntryMetadata( + entry.Uid, + entry.Gid, + entry.UserName ?? string.Empty, + entry.GroupName ?? string.Empty, + entry.ModificationTime); + } + + return entries; + } + + private sealed record TarEntryMetadata( + int Uid, + int Gid, + string UserName, + string GroupName, + DateTimeOffset ModificationTime); + private sealed class FakeRepository : IEvidenceBundleRepository { private EvidenceBundle _bundle; diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportCenterClientTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportCenterClientTests.cs new file mode 100644 index 000000000..fd89a4e11 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportCenterClientTests.cs @@ -0,0 +1,299 @@ +using System.Net; +using System.Net.Http.Json; +using System.Text; +using System.Text.Json; +using StellaOps.ExportCenter.Client.Models; +using Xunit; + +namespace StellaOps.ExportCenter.Client.Tests; + +/// +/// Smoke tests for ExportCenterClient with mock HTTP responses. 
+/// +public sealed class ExportCenterClientTests +{ + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web); + + [Fact] + public async Task GetDiscoveryMetadataAsync_ReturnsMetadata() + { + var expectedMetadata = new OpenApiDiscoveryMetadata( + Service: "export-center", + Version: "1.0.0", + SpecVersion: "3.0.3", + Format: "application/yaml", + Url: "/openapi/export-center.yaml", + JsonUrl: "/openapi/export-center.json", + ErrorEnvelopeSchema: "#/components/schemas/ErrorEnvelope", + GeneratedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), + ProfilesSupported: new[] { "attestation", "mirror" }, + ChecksumSha256: null); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal("/.well-known/openapi", request.RequestUri!.AbsolutePath); + return CreateJsonResponse(expectedMetadata); + }); + + var client = CreateClient(handler); + + var result = await client.GetDiscoveryMetadataAsync(); + + Assert.Equal("export-center", result.Service); + Assert.Equal("1.0.0", result.Version); + Assert.Equal("3.0.3", result.SpecVersion); + } + + [Fact] + public async Task ListProfilesAsync_ReturnsProfiles() + { + var expectedResponse = new ExportProfileListResponse( + Profiles: new[] + { + new ExportProfile( + ProfileId: "profile-1", + Name: "Test Profile", + Description: "Test", + Adapter: "evidence", + Selectors: new Dictionary { ["org"] = "test" }, + OutputFormat: "tar.gz", + SigningEnabled: true, + CreatedAt: DateTimeOffset.UtcNow, + UpdatedAt: null) + }, + ContinuationToken: null, + HasMore: false); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal("/v1/exports/profiles", request.RequestUri!.AbsolutePath); + return CreateJsonResponse(expectedResponse); + }); + + var client = CreateClient(handler); + + var result = await client.ListProfilesAsync(); + + Assert.Single(result.Profiles); + Assert.Equal("profile-1", result.Profiles[0].ProfileId); + Assert.False(result.HasMore); + } + + [Fact] + 
public async Task ListProfilesAsync_WithPagination_IncludesParameters() + { + var expectedResponse = new ExportProfileListResponse([], null, false); + + var handler = new MockHttpMessageHandler(request => + { + var query = request.RequestUri!.Query; + Assert.Contains("limit=10", query); + Assert.Contains("continuationToken=abc123", query); + return CreateJsonResponse(expectedResponse); + }); + + var client = CreateClient(handler); + + await client.ListProfilesAsync(continuationToken: "abc123", limit: 10); + } + + [Fact] + public async Task GetProfileAsync_WhenNotFound_ReturnsNull() + { + var handler = new MockHttpMessageHandler(request => + { + return new HttpResponseMessage(HttpStatusCode.NotFound); + }); + + var client = CreateClient(handler); + + var result = await client.GetProfileAsync("nonexistent"); + + Assert.Null(result); + } + + [Fact] + public async Task CreateEvidenceExportAsync_ReturnsResponse() + { + var expectedResponse = new CreateEvidenceExportResponse( + RunId: "run-123", + Status: "pending", + StatusUrl: "/v1/exports/evidence/run-123/status", + EstimatedCompletionSeconds: 60); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("/v1/exports/evidence", request.RequestUri!.AbsolutePath); + return CreateJsonResponse(expectedResponse, HttpStatusCode.Accepted); + }); + + var client = CreateClient(handler); + + var request = new CreateEvidenceExportRequest("profile-1"); + var result = await client.CreateEvidenceExportAsync(request); + + Assert.Equal("run-123", result.RunId); + Assert.Equal("pending", result.Status); + } + + [Fact] + public async Task GetEvidenceExportStatusAsync_ReturnsStatus() + { + var expectedStatus = new EvidenceExportStatus( + RunId: "run-123", + ProfileId: "profile-1", + Status: "completed", + Progress: 100, + StartedAt: DateTimeOffset.UtcNow.AddMinutes(-5), + CompletedAt: DateTimeOffset.UtcNow, + BundleHash: "sha256:abc123", + DownloadUrl: 
"/v1/exports/evidence/run-123/download", + ErrorCode: null, + ErrorMessage: null); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal("/v1/exports/evidence/run-123/status", request.RequestUri!.AbsolutePath); + return CreateJsonResponse(expectedStatus); + }); + + var client = CreateClient(handler); + + var result = await client.GetEvidenceExportStatusAsync("run-123"); + + Assert.NotNull(result); + Assert.Equal("completed", result.Status); + Assert.Equal(100, result.Progress); + } + + [Fact] + public async Task DownloadEvidenceExportAsync_ReturnsStream() + { + var bundleContent = "test bundle content"u8.ToArray(); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal("/v1/exports/evidence/run-123/download", request.RequestUri!.AbsolutePath); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(bundleContent) + }; + }); + + var client = CreateClient(handler); + + var stream = await client.DownloadEvidenceExportAsync("run-123"); + + Assert.NotNull(stream); + using var ms = new MemoryStream(); + await stream.CopyToAsync(ms); + Assert.Equal(bundleContent, ms.ToArray()); + } + + [Fact] + public async Task DownloadEvidenceExportAsync_WhenNotReady_ReturnsNull() + { + var handler = new MockHttpMessageHandler(request => + { + return new HttpResponseMessage(HttpStatusCode.Conflict); + }); + + var client = CreateClient(handler); + + var result = await client.DownloadEvidenceExportAsync("run-123"); + + Assert.Null(result); + } + + [Fact] + public async Task CreateAttestationExportAsync_ReturnsResponse() + { + var expectedResponse = new CreateAttestationExportResponse( + RunId: "att-run-123", + Status: "pending", + StatusUrl: "/v1/exports/attestations/att-run-123/status", + EstimatedCompletionSeconds: 30); + + var handler = new MockHttpMessageHandler(request => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("/v1/exports/attestations", request.RequestUri!.AbsolutePath); + return 
CreateJsonResponse(expectedResponse, HttpStatusCode.Accepted); + }); + + var client = CreateClient(handler); + + var request = new CreateAttestationExportRequest("profile-1", IncludeTransparencyLog: true); + var result = await client.CreateAttestationExportAsync(request); + + Assert.Equal("att-run-123", result.RunId); + } + + [Fact] + public async Task GetAttestationExportStatusAsync_IncludesTransparencyLogField() + { + var expectedStatus = new AttestationExportStatus( + RunId: "att-run-123", + ProfileId: "profile-1", + Status: "completed", + Progress: 100, + StartedAt: DateTimeOffset.UtcNow.AddMinutes(-2), + CompletedAt: DateTimeOffset.UtcNow, + BundleHash: "sha256:def456", + DownloadUrl: "/v1/exports/attestations/att-run-123/download", + TransparencyLogIncluded: true, + ErrorCode: null, + ErrorMessage: null); + + var handler = new MockHttpMessageHandler(request => + { + return CreateJsonResponse(expectedStatus); + }); + + var client = CreateClient(handler); + + var result = await client.GetAttestationExportStatusAsync("att-run-123"); + + Assert.NotNull(result); + Assert.True(result.TransparencyLogIncluded); + } + + private static ExportCenterClient CreateClient(MockHttpMessageHandler handler) + { + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://localhost:5001") + }; + return new ExportCenterClient(httpClient); + } + + private static HttpResponseMessage CreateJsonResponse(T content, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(content, JsonOptions); + return new HttpResponseMessage(statusCode) + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + } +} + +/// +/// Mock HTTP message handler for testing. 
+/// +internal sealed class MockHttpMessageHandler : HttpMessageHandler +{ + private readonly Func _handler; + + public MockHttpMessageHandler(Func handler) + { + _handler = handler; + } + + protected override Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken) + { + return Task.FromResult(_handler(request)); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportDownloadHelperTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportDownloadHelperTests.cs new file mode 100644 index 000000000..2e34f11fb --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportDownloadHelperTests.cs @@ -0,0 +1,170 @@ +using StellaOps.ExportCenter.Client.Streaming; +using Xunit; + +namespace StellaOps.ExportCenter.Client.Tests; + +public sealed class ExportDownloadHelperTests : IDisposable +{ + private readonly string _tempDir; + + public ExportDownloadHelperTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"export-download-tests-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public async Task DownloadToFileAsync_WritesContentToFile() + { + var content = "test content"u8.ToArray(); + using var stream = new MemoryStream(content); + var outputPath = Path.Combine(_tempDir, "output.bin"); + + var bytesWritten = await ExportDownloadHelper.DownloadToFileAsync(stream, outputPath); + + Assert.Equal(content.Length, bytesWritten); + Assert.True(File.Exists(outputPath)); + Assert.Equal(content, await File.ReadAllBytesAsync(outputPath)); + } + + [Fact] + public async Task DownloadToFileAsync_ReportsProgress() + { + var content = new byte[10000]; + Random.Shared.NextBytes(content); + using var stream = new MemoryStream(content); + var outputPath = Path.Combine(_tempDir, 
"progress.bin"); + var progressReports = new List<(long bytes, long? total)>(); + + await ExportDownloadHelper.DownloadToFileAsync( + stream, outputPath, content.Length, (b, t) => progressReports.Add((b, t))); + + Assert.NotEmpty(progressReports); + Assert.Equal(content.Length, progressReports[^1].bytes); + } + + [Fact] + public async Task ComputeSha256Async_ReturnsCorrectHash() + { + var content = "test content for hashing"u8.ToArray(); + using var stream = new MemoryStream(content); + + var hash = await ExportDownloadHelper.ComputeSha256Async(stream); + + // Verify it's a valid hex string + Assert.Equal(64, hash.Length); // SHA-256 produces 32 bytes = 64 hex chars + Assert.All(hash, c => Assert.True(char.IsLetterOrDigit(c))); + } + + [Fact] + public async Task DownloadAndVerifyAsync_SucceedsWithCorrectHash() + { + var content = "deterministic content"u8.ToArray(); + using var hashStream = new MemoryStream(content); + var expectedHash = await ExportDownloadHelper.ComputeSha256Async(hashStream); + + using var downloadStream = new MemoryStream(content); + var outputPath = Path.Combine(_tempDir, "verified.bin"); + + var actualHash = await ExportDownloadHelper.DownloadAndVerifyAsync( + downloadStream, outputPath, expectedHash); + + Assert.Equal(expectedHash, actualHash); + Assert.True(File.Exists(outputPath)); + } + + [Fact] + public async Task DownloadAndVerifyAsync_ThrowsOnHashMismatch() + { + var content = "actual content"u8.ToArray(); + using var stream = new MemoryStream(content); + var outputPath = Path.Combine(_tempDir, "mismatch.bin"); + var wrongHash = "0000000000000000000000000000000000000000000000000000000000000000"; + + await Assert.ThrowsAsync(() => + ExportDownloadHelper.DownloadAndVerifyAsync(stream, outputPath, wrongHash)); + + // Verify file was deleted + Assert.False(File.Exists(outputPath)); + } + + [Fact] + public async Task DownloadAndVerifyAsync_HandlesSha256Prefix() + { + var content = "prefixed hash test"u8.ToArray(); + using var hashStream = 
new MemoryStream(content); + var hash = await ExportDownloadHelper.ComputeSha256Async(hashStream); + var prefixedHash = "sha256:" + hash; + + using var downloadStream = new MemoryStream(content); + var outputPath = Path.Combine(_tempDir, "prefixed.bin"); + + var actualHash = await ExportDownloadHelper.DownloadAndVerifyAsync( + downloadStream, outputPath, prefixedHash); + + Assert.Equal(hash, actualHash); + } + + [Fact] + public async Task CopyWithProgressAsync_CopiesCorrectly() + { + var content = new byte[5000]; + Random.Shared.NextBytes(content); + using var source = new MemoryStream(content); + using var destination = new MemoryStream(); + + var bytesCopied = await ExportDownloadHelper.CopyWithProgressAsync(source, destination); + + Assert.Equal(content.Length, bytesCopied); + Assert.Equal(content, destination.ToArray()); + } + + [Fact] + public void CreateProgressLogger_ReturnsWorkingCallback() + { + var messages = new List(); + var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100); + + // Simulate progress + callback(50, 1000); // Should not log (below threshold) + callback(150, 1000); // Should log + callback(200, 1000); // Should not log (too close to last) + callback(300, 1000); // Should log + + Assert.Equal(2, messages.Count); + Assert.Contains("150", messages[0]); + Assert.Contains("300", messages[1]); + } + + [Fact] + public void CreateProgressLogger_FormatsWithoutTotalBytes() + { + var messages = new List(); + var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100); + + callback(200, null); + + Assert.Single(messages); + Assert.DoesNotContain("%", messages[0]); + } + + [Fact] + public void CreateProgressLogger_FormatsWithTotalBytes() + { + var messages = new List(); + var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100); + + callback(500, 1000); + + Assert.Single(messages); + Assert.Contains("%", messages[0]); + } +} diff --git 
a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportJobLifecycleHelperTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportJobLifecycleHelperTests.cs new file mode 100644 index 000000000..220f2c1e3 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/ExportJobLifecycleHelperTests.cs @@ -0,0 +1,182 @@ +using StellaOps.ExportCenter.Client.Lifecycle; +using StellaOps.ExportCenter.Client.Models; +using Xunit; + +namespace StellaOps.ExportCenter.Client.Tests; + +public sealed class ExportJobLifecycleHelperTests +{ + [Theory] + [InlineData("completed", true)] + [InlineData("failed", true)] + [InlineData("cancelled", true)] + [InlineData("pending", false)] + [InlineData("running", false)] + [InlineData("COMPLETED", true)] + public void IsTerminalStatus_ReturnsCorrectValue(string status, bool expected) + { + var result = ExportJobLifecycleHelper.IsTerminalStatus(status); + Assert.Equal(expected, result); + } + + [Fact] + public async Task WaitForEvidenceExportCompletionAsync_ReturnsOnTerminalStatus() + { + var callCount = 0; + var mockClient = new MockExportCenterClient + { + GetEvidenceExportStatusHandler = runId => + { + callCount++; + var status = callCount < 3 ? "running" : "completed"; + return new EvidenceExportStatus( + RunId: runId, + ProfileId: "profile-1", + Status: status, + Progress: callCount < 3 ? 50 : 100, + StartedAt: DateTimeOffset.UtcNow, + CompletedAt: callCount >= 3 ? DateTimeOffset.UtcNow : null, + BundleHash: callCount >= 3 ? 
"sha256:abc" : null, + DownloadUrl: null, + ErrorCode: null, + ErrorMessage: null); + } + }; + + var result = await ExportJobLifecycleHelper.WaitForEvidenceExportCompletionAsync( + mockClient, "run-1", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(10)); + + Assert.Equal("completed", result.Status); + Assert.Equal(100, result.Progress); + Assert.Equal(3, callCount); + } + + [Fact] + public async Task WaitForEvidenceExportCompletionAsync_ThrowsOnNotFound() + { + var mockClient = new MockExportCenterClient + { + GetEvidenceExportStatusHandler = _ => null + }; + + await Assert.ThrowsAsync(() => + ExportJobLifecycleHelper.WaitForEvidenceExportCompletionAsync( + mockClient, "nonexistent", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(1))); + } + + [Fact] + public async Task WaitForAttestationExportCompletionAsync_ReturnsOnTerminalStatus() + { + var callCount = 0; + var mockClient = new MockExportCenterClient + { + GetAttestationExportStatusHandler = runId => + { + callCount++; + var status = callCount < 2 ? "running" : "completed"; + return new AttestationExportStatus( + RunId: runId, + ProfileId: "profile-1", + Status: status, + Progress: callCount < 2 ? 50 : 100, + StartedAt: DateTimeOffset.UtcNow, + CompletedAt: callCount >= 2 ? DateTimeOffset.UtcNow : null, + BundleHash: callCount >= 2 ? 
"sha256:abc" : null, + DownloadUrl: null, + TransparencyLogIncluded: true, + ErrorCode: null, + ErrorMessage: null); + } + }; + + var result = await ExportJobLifecycleHelper.WaitForAttestationExportCompletionAsync( + mockClient, "run-1", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(10)); + + Assert.Equal("completed", result.Status); + Assert.True(result.TransparencyLogIncluded); + } + + [Fact] + public async Task CreateEvidenceExportAndWaitAsync_CreatesAndWaits() + { + var createCalled = false; + var mockClient = new MockExportCenterClient + { + CreateEvidenceExportHandler = request => + { + createCalled = true; + return new CreateEvidenceExportResponse("run-1", "pending", "/status", 10); + }, + GetEvidenceExportStatusHandler = runId => + { + return new EvidenceExportStatus( + runId, "profile-1", "completed", 100, + DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, + "sha256:abc", "/download", null, null); + } + }; + + var result = await ExportJobLifecycleHelper.CreateEvidenceExportAndWaitAsync( + mockClient, + new CreateEvidenceExportRequest("profile-1"), + TimeSpan.FromMilliseconds(10), + TimeSpan.FromSeconds(10)); + + Assert.True(createCalled); + Assert.Equal("completed", result.Status); + } + + [Fact] + public void TerminalStatuses_ContainsExpectedValues() + { + Assert.Contains("completed", ExportJobLifecycleHelper.TerminalStatuses); + Assert.Contains("failed", ExportJobLifecycleHelper.TerminalStatuses); + Assert.Contains("cancelled", ExportJobLifecycleHelper.TerminalStatuses); + Assert.DoesNotContain("pending", ExportJobLifecycleHelper.TerminalStatuses); + Assert.DoesNotContain("running", ExportJobLifecycleHelper.TerminalStatuses); + } +} + +/// +/// Mock implementation of IExportCenterClient for testing. +/// +internal sealed class MockExportCenterClient : IExportCenterClient +{ + public Func? GetEvidenceExportStatusHandler { get; set; } + public Func? GetAttestationExportStatusHandler { get; set; } + public Func? 
CreateEvidenceExportHandler { get; set; } + public Func? CreateAttestationExportHandler { get; set; } + + public Task GetDiscoveryMetadataAsync(CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task ListProfilesAsync(string? continuationToken = null, int? limit = null, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task GetProfileAsync(string profileId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task ListRunsAsync(string? profileId = null, string? continuationToken = null, int? limit = null, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task GetRunAsync(string runId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task CreateEvidenceExportAsync(CreateEvidenceExportRequest request, CancellationToken cancellationToken = default) + => Task.FromResult(CreateEvidenceExportHandler?.Invoke(request) ?? throw new NotImplementedException()); + + public Task GetEvidenceExportStatusAsync(string runId, CancellationToken cancellationToken = default) + => Task.FromResult(GetEvidenceExportStatusHandler?.Invoke(runId)); + + public Task DownloadEvidenceExportAsync(string runId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task CreateAttestationExportAsync(CreateAttestationExportRequest request, CancellationToken cancellationToken = default) + => Task.FromResult(CreateAttestationExportHandler?.Invoke(request) ?? 
throw new NotImplementedException()); + + public Task GetAttestationExportStatusAsync(string runId, CancellationToken cancellationToken = default) + => Task.FromResult(GetAttestationExportStatusHandler?.Invoke(runId)); + + public Task DownloadAttestationExportAsync(string runId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/StellaOps.ExportCenter.Client.Tests.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/StellaOps.ExportCenter.Client.Tests.csproj new file mode 100644 index 000000000..e22d1b57e --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/StellaOps.ExportCenter.Client.Tests.csproj @@ -0,0 +1,33 @@ + + + + net10.0 + enable + enable + preview + false + Exe + false + false + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/xunit.runner.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/xunit.runner.json new file mode 100644 index 000000000..d566bced4 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client.Tests/xunit.runner.json @@ -0,0 +1,4 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json", + "methodDisplay": "classAndMethod" +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClient.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClient.cs new file mode 100644 index 000000000..4a412471e --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClient.cs @@ -0,0 +1,310 @@ +using System.Net; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.Extensions.Options; +using StellaOps.ExportCenter.Client.Models; + +namespace 
StellaOps.ExportCenter.Client; + +/// +/// HTTP client implementation for the ExportCenter WebService API. +/// +public sealed class ExportCenterClient : IExportCenterClient +{ + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web); + + private readonly HttpClient _httpClient; + + /// + /// Creates a new ExportCenterClient with the specified HttpClient. + /// + /// HTTP client instance. + public ExportCenterClient(HttpClient httpClient) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + } + + /// + /// Creates a new ExportCenterClient with the specified options. + /// + /// HTTP client instance. + /// Client options. + public ExportCenterClient(HttpClient httpClient, IOptions options) + : this(httpClient) + { + ArgumentNullException.ThrowIfNull(options); + var opts = options.Value; + _httpClient.BaseAddress = new Uri(opts.BaseUrl); + _httpClient.Timeout = opts.Timeout; + } + + #region Discovery + + /// + public async Task GetDiscoveryMetadataAsync( + CancellationToken cancellationToken = default) + { + var response = await _httpClient.GetAsync("/.well-known/openapi", cancellationToken) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var metadata = await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return metadata ?? throw new InvalidOperationException("Invalid discovery metadata response."); + } + + #endregion + + #region Profiles + + /// + public async Task ListProfilesAsync( + string? continuationToken = null, + int? limit = null, + CancellationToken cancellationToken = default) + { + var url = "/v1/exports/profiles"; + var queryParams = new List(); + + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add($"continuationToken={Uri.EscapeDataString(continuationToken)}"); + } + + if (limit.HasValue) + { + queryParams.Add($"limit={limit.Value}"); + } + + if (queryParams.Count > 0) + { + url += "?" 
+ string.Join("&", queryParams); + } + + var response = await _httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? new ExportProfileListResponse([], null, false); + } + + /// + public async Task GetProfileAsync( + string profileId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(profileId); + + var response = await _httpClient.GetAsync($"/v1/exports/profiles/{Uri.EscapeDataString(profileId)}", cancellationToken) + .ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + } + + #endregion + + #region Runs + + /// + public async Task ListRunsAsync( + string? profileId = null, + string? continuationToken = null, + int? limit = null, + CancellationToken cancellationToken = default) + { + var url = "/v1/exports/runs"; + var queryParams = new List(); + + if (!string.IsNullOrEmpty(profileId)) + { + queryParams.Add($"profileId={Uri.EscapeDataString(profileId)}"); + } + + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add($"continuationToken={Uri.EscapeDataString(continuationToken)}"); + } + + if (limit.HasValue) + { + queryParams.Add($"limit={limit.Value}"); + } + + if (queryParams.Count > 0) + { + url += "?" + string.Join("&", queryParams); + } + + var response = await _httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? 
new ExportRunListResponse([], null, false); + } + + /// + public async Task GetRunAsync( + string runId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + + var response = await _httpClient.GetAsync($"/v1/exports/runs/{Uri.EscapeDataString(runId)}", cancellationToken) + .ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + } + + #endregion + + #region Evidence Exports + + /// + public async Task CreateEvidenceExportAsync( + CreateEvidenceExportRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var response = await _httpClient.PostAsJsonAsync("/v1/exports/evidence", request, JsonOptions, cancellationToken) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? 
throw new InvalidOperationException("Invalid evidence export response."); + } + + /// + public async Task GetEvidenceExportStatusAsync( + string runId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + + var response = await _httpClient.GetAsync($"/v1/exports/evidence/{Uri.EscapeDataString(runId)}/status", cancellationToken) + .ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + } + + /// + public async Task DownloadEvidenceExportAsync( + string runId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + + var response = await _httpClient.GetAsync( + $"/v1/exports/evidence/{Uri.EscapeDataString(runId)}/download", + HttpCompletionOption.ResponseHeadersRead, + cancellationToken).ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound || + response.StatusCode == HttpStatusCode.Conflict) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + } + + #endregion + + #region Attestation Exports + + /// + public async Task CreateAttestationExportAsync( + CreateAttestationExportRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var response = await _httpClient.PostAsJsonAsync("/v1/exports/attestations", request, JsonOptions, cancellationToken) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? 
throw new InvalidOperationException("Invalid attestation export response."); + } + + /// + public async Task GetAttestationExportStatusAsync( + string runId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + + var response = await _httpClient.GetAsync($"/v1/exports/attestations/{Uri.EscapeDataString(runId)}/status", cancellationToken) + .ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + } + + /// + public async Task DownloadAttestationExportAsync( + string runId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + + var response = await _httpClient.GetAsync( + $"/v1/exports/attestations/{Uri.EscapeDataString(runId)}/download", + HttpCompletionOption.ResponseHeadersRead, + cancellationToken).ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound || + response.StatusCode == HttpStatusCode.Conflict) + { + return null; + } + + response.EnsureSuccessStatusCode(); + + return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + } + + #endregion +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClientOptions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClientOptions.cs new file mode 100644 index 000000000..ae7b00774 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/ExportCenterClientOptions.cs @@ -0,0 +1,22 @@ +namespace StellaOps.ExportCenter.Client; + +/// +/// Configuration options for the ExportCenter client. +/// +public sealed class ExportCenterClientOptions +{ + /// + /// Base URL for the ExportCenter API. 
+ /// + public string BaseUrl { get; set; } = "https://localhost:5001"; + + /// + /// Timeout for HTTP requests. + /// + public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Timeout for streaming downloads. + /// + public TimeSpan DownloadTimeout { get; set; } = TimeSpan.FromMinutes(10); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Extensions/ServiceCollectionExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Extensions/ServiceCollectionExtensions.cs new file mode 100644 index 000000000..95eb60114 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/Extensions/ServiceCollectionExtensions.cs @@ -0,0 +1,93 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; + +namespace StellaOps.ExportCenter.Client.Extensions; + +/// +/// Extension methods for configuring ExportCenter client services. +/// +public static class ServiceCollectionExtensions +{ + /// + /// Adds the ExportCenter client to the service collection. + /// + /// Service collection. + /// Action to configure client options. + /// The service collection for chaining. + public static IServiceCollection AddExportCenterClient( + this IServiceCollection services, + Action configureOptions) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configureOptions); + + services.Configure(configureOptions); + + services.AddHttpClient((sp, client) => + { + var options = sp.GetRequiredService>().Value; + client.BaseAddress = new Uri(options.BaseUrl); + client.Timeout = options.Timeout; + }); + + return services; + } + + /// + /// Adds the ExportCenter client to the service collection with a named HttpClient. + /// + /// Service collection. + /// HttpClient name. + /// Action to configure client options. + /// The service collection for chaining. 
+ public static IServiceCollection AddExportCenterClient( + this IServiceCollection services, + string name, + Action configureOptions) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentException.ThrowIfNullOrWhiteSpace(name); + ArgumentNullException.ThrowIfNull(configureOptions); + + services.Configure(name, configureOptions); + + services.AddHttpClient(name, (sp, client) => + { + var optionsMonitor = sp.GetRequiredService>(); + var options = optionsMonitor.Get(name); + client.BaseAddress = new Uri(options.BaseUrl); + client.Timeout = options.Timeout; + }); + + return services; + } + + /// + /// Adds the ExportCenter client with custom HttpClient configuration. + /// + /// Service collection. + /// Action to configure client options. + /// Additional HttpClient configuration. + /// The service collection for chaining. + public static IServiceCollection AddExportCenterClient( + this IServiceCollection services, + Action configureOptions, + Action configureClient) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configureOptions); + ArgumentNullException.ThrowIfNull(configureClient); + + services.Configure(configureOptions); + + services.AddHttpClient((sp, client) => + { + var options = sp.GetRequiredService>().Value; + client.BaseAddress = new Uri(options.BaseUrl); + client.Timeout = options.Timeout; + configureClient(client); + }); + + return services; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/IExportCenterClient.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/IExportCenterClient.cs new file mode 100644 index 000000000..05609b0a4 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/IExportCenterClient.cs @@ -0,0 +1,143 @@ +using StellaOps.ExportCenter.Client.Models; + +namespace StellaOps.ExportCenter.Client; + +/// +/// Client interface for the ExportCenter WebService API. 
public interface IExportCenterClient
{
    // NOTE(review): generic return-type arguments were stripped during
    // extraction and are reconstructed from the response-model names and the
    // "or null if not found" remarks — confirm against ExportCenterClient.

    #region Discovery

    /// <summary>
    /// Gets OpenAPI discovery metadata.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>OpenAPI discovery metadata.</returns>
    Task<OpenApiDiscoveryMetadata> GetDiscoveryMetadataAsync(
        CancellationToken cancellationToken = default);

    #endregion

    #region Profiles

    /// <summary>
    /// Lists export profiles.
    /// </summary>
    /// <param name="continuationToken">Continuation token for pagination.</param>
    /// <param name="limit">Maximum number of profiles to return.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Paginated list of export profiles.</returns>
    Task<ExportProfileListResponse> ListProfilesAsync(
        string? continuationToken = null,
        int? limit = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a specific export profile by ID.
    /// </summary>
    /// <param name="profileId">Profile identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Export profile or null if not found.</returns>
    Task<ExportProfile?> GetProfileAsync(
        string profileId,
        CancellationToken cancellationToken = default);

    #endregion

    #region Runs

    /// <summary>
    /// Lists export runs, optionally filtered by profile.
    /// </summary>
    /// <param name="profileId">Optional profile ID filter.</param>
    /// <param name="continuationToken">Continuation token for pagination.</param>
    /// <param name="limit">Maximum number of runs to return.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Paginated list of export runs.</returns>
    Task<ExportRunListResponse> ListRunsAsync(
        string? profileId = null,
        string? continuationToken = null,
        int? limit = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a specific export run by ID.
    /// </summary>
    /// <param name="runId">Run identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Export run or null if not found.</returns>
    Task<ExportRun?> GetRunAsync(
        string runId,
        CancellationToken cancellationToken = default);

    #endregion

    #region Evidence Exports

    /// <summary>
    /// Creates a new evidence export job.
    /// </summary>
    /// <param name="request">Export creation request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Export creation response.</returns>
    Task<CreateEvidenceExportResponse> CreateEvidenceExportAsync(
        CreateEvidenceExportRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the status of an evidence export job.
    /// </summary>
    /// <param name="runId">Run identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Evidence export status or null if not found.</returns>
    Task<EvidenceExportStatus?> GetEvidenceExportStatusAsync(
        string runId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Downloads an evidence export bundle as a stream.
    /// </summary>
    /// <param name="runId">Run identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Stream containing the bundle, or null if not ready/found.</returns>
    Task<Stream?> DownloadEvidenceExportAsync(
        string runId,
        CancellationToken cancellationToken = default);

    #endregion

    #region Attestation Exports

    /// <summary>
    /// Creates a new attestation export job.
    /// </summary>
    /// <param name="request">Export creation request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Export creation response.</returns>
    Task<CreateAttestationExportResponse> CreateAttestationExportAsync(
        CreateAttestationExportRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the status of an attestation export job.
    /// </summary>
    /// <param name="runId">Run identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Attestation export status or null if not found.</returns>
    Task<AttestationExportStatus?> GetAttestationExportStatusAsync(
        string runId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Downloads an attestation export bundle as a stream.
    /// </summary>
    /// <param name="runId">Run identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Stream containing the bundle, or null if not ready/found.</returns>
    Task<Stream?> DownloadAttestationExportAsync(
        string runId,
        CancellationToken cancellationToken = default);

    #endregion
}

using StellaOps.ExportCenter.Client.Models;

namespace StellaOps.ExportCenter.Client.Lifecycle;

/// <summary>
/// Helper methods for export job lifecycle operations.
/// </summary>
public static class ExportJobLifecycleHelper
{
    /// <summary>
    /// Canonical "success" status value. All status comparisons in this class
    /// are case-insensitive, matching the comparer of <see cref="TerminalStatuses"/>.
    /// </summary>
    private const string CompletedStatus = "completed";

    /// <summary>
    /// Terminal statuses for export jobs.
    /// </summary>
    public static readonly IReadOnlySet<string> TerminalStatuses = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
    {
        CompletedStatus,
        "failed",
        "cancelled"
    };

    /// <summary>
    /// Determines if a status is terminal (export job has finished).
    /// </summary>
    /// <param name="status">Status to check.</param>
    /// <returns>True if terminal status.</returns>
    public static bool IsTerminalStatus(string status)
        => TerminalStatuses.Contains(status);

    /// <summary>
    /// Creates an evidence export and waits for completion.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="request">Export creation request.</param>
    /// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
    /// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final evidence export status.</returns>
    public static async Task<EvidenceExportStatus> CreateEvidenceExportAndWaitAsync(
        IExportCenterClient client,
        CreateEvidenceExportRequest request,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentNullException.ThrowIfNull(request);

        var createResponse = await client.CreateEvidenceExportAsync(request, cancellationToken)
            .ConfigureAwait(false);

        return await WaitForEvidenceExportCompletionAsync(
            client, createResponse.RunId, pollInterval, timeout, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Waits for an evidence export to complete.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="runId">Run identifier.</param>
    /// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
    /// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final evidence export status.</returns>
    /// <exception cref="InvalidOperationException">The run does not exist.</exception>
    /// <exception cref="OperationCanceledException">
    /// The caller token was cancelled or the timeout elapsed; the timeout is
    /// enforced via a linked CancellationTokenSource, so both surface the same way.
    /// </exception>
    public static async Task<EvidenceExportStatus> WaitForEvidenceExportCompletionAsync(
        IExportCenterClient client,
        string runId,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);

        var interval = pollInterval ?? TimeSpan.FromSeconds(2);
        var maxWait = timeout ?? TimeSpan.FromMinutes(30);

        using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        cts.CancelAfter(maxWait);

        while (true)
        {
            var status = await client.GetEvidenceExportStatusAsync(runId, cts.Token)
                .ConfigureAwait(false);

            if (status is null)
            {
                throw new InvalidOperationException($"Evidence export '{runId}' not found.");
            }

            if (IsTerminalStatus(status.Status))
            {
                return status;
            }

            await Task.Delay(interval, cts.Token).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Creates an attestation export and waits for completion.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="request">Export creation request.</param>
    /// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
    /// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final attestation export status.</returns>
    public static async Task<AttestationExportStatus> CreateAttestationExportAndWaitAsync(
        IExportCenterClient client,
        CreateAttestationExportRequest request,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentNullException.ThrowIfNull(request);

        var createResponse = await client.CreateAttestationExportAsync(request, cancellationToken)
            .ConfigureAwait(false);

        return await WaitForAttestationExportCompletionAsync(
            client, createResponse.RunId, pollInterval, timeout, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Waits for an attestation export to complete.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="runId">Run identifier.</param>
    /// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
    /// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final attestation export status.</returns>
    /// <exception cref="InvalidOperationException">The run does not exist.</exception>
    public static async Task<AttestationExportStatus> WaitForAttestationExportCompletionAsync(
        IExportCenterClient client,
        string runId,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);

        var interval = pollInterval ?? TimeSpan.FromSeconds(2);
        var maxWait = timeout ?? TimeSpan.FromMinutes(30);

        using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        cts.CancelAfter(maxWait);

        while (true)
        {
            var status = await client.GetAttestationExportStatusAsync(runId, cts.Token)
                .ConfigureAwait(false);

            if (status is null)
            {
                throw new InvalidOperationException($"Attestation export '{runId}' not found.");
            }

            if (IsTerminalStatus(status.Status))
            {
                return status;
            }

            await Task.Delay(interval, cts.Token).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Creates an evidence export, waits for completion, and downloads the bundle.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="request">Export creation request.</param>
    /// <param name="outputPath">Path to save the downloaded bundle.</param>
    /// <param name="pollInterval">Interval between status checks.</param>
    /// <param name="timeout">Maximum time to wait.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final evidence export status.</returns>
    /// <exception cref="InvalidOperationException">The export ended in a non-completed terminal state, or the bundle is unavailable.</exception>
    public static async Task<EvidenceExportStatus> CreateEvidenceExportAndDownloadAsync(
        IExportCenterClient client,
        CreateEvidenceExportRequest request,
        string outputPath,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);

        var status = await CreateEvidenceExportAndWaitAsync(client, request, pollInterval, timeout, cancellationToken)
            .ConfigureAwait(false);

        // BUGFIX: the original used a case-sensitive 'status.Status != "completed"'
        // while IsTerminalStatus compares with OrdinalIgnoreCase, so a server
        // returning e.g. "Completed" would be classified terminal yet reported
        // here as a failure. Use the same case-insensitive comparison.
        if (!string.Equals(status.Status, CompletedStatus, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Evidence export failed: {status.ErrorCode} - {status.ErrorMessage}");
        }

        await using var stream = await client.DownloadEvidenceExportAsync(status.RunId, cancellationToken)
            .ConfigureAwait(false);

        if (stream is null)
        {
            throw new InvalidOperationException($"Evidence export bundle not available for download.");
        }

        await using var fileStream = File.Create(outputPath);
        await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);

        return status;
    }

    /// <summary>
    /// Creates an attestation export, waits for completion, and downloads the bundle.
    /// </summary>
    /// <param name="client">ExportCenter client.</param>
    /// <param name="request">Export creation request.</param>
    /// <param name="outputPath">Path to save the downloaded bundle.</param>
    /// <param name="pollInterval">Interval between status checks.</param>
    /// <param name="timeout">Maximum time to wait.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Final attestation export status.</returns>
    /// <exception cref="InvalidOperationException">The export ended in a non-completed terminal state, or the bundle is unavailable.</exception>
    public static async Task<AttestationExportStatus> CreateAttestationExportAndDownloadAsync(
        IExportCenterClient client,
        CreateAttestationExportRequest request,
        string outputPath,
        TimeSpan? pollInterval = null,
        TimeSpan? timeout = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(client);
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);

        var status = await CreateAttestationExportAndWaitAsync(client, request, pollInterval, timeout, cancellationToken)
            .ConfigureAwait(false);

        // BUGFIX: same case-insensitivity fix as the evidence variant above.
        if (!string.Equals(status.Status, CompletedStatus, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Attestation export failed: {status.ErrorCode} - {status.ErrorMessage}");
        }

        await using var stream = await client.DownloadAttestationExportAsync(status.RunId, cancellationToken)
            .ConfigureAwait(false);

        if (stream is null)
        {
            throw new InvalidOperationException($"Attestation export bundle not available for download.");
        }

        await using var fileStream = File.Create(outputPath);
        await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);

        return status;
    }
}

using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Client.Models;

/// <summary>
/// Export profile metadata.
/// </summary>
public sealed record ExportProfile(
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("description")] string? Description,
    [property: JsonPropertyName("adapter")] string Adapter,
    [property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>?
        Selectors,
    [property: JsonPropertyName("outputFormat")] string OutputFormat,
    [property: JsonPropertyName("signingEnabled")] bool SigningEnabled,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt);

// NOTE(review): generic type arguments in this file were lost during
// extraction and are reconstructed from the surrounding record names —
// confirm against the server contract / OpenAPI document.

/// <summary>
/// Paginated list of export profiles.
/// </summary>
public sealed record ExportProfileListResponse(
    [property: JsonPropertyName("profiles")] IReadOnlyList<ExportProfile> Profiles,
    [property: JsonPropertyName("continuationToken")] string? ContinuationToken,
    [property: JsonPropertyName("hasMore")] bool HasMore);

/// <summary>
/// Export run representing a single export job execution.
/// </summary>
public sealed record ExportRun(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("progress")] int? Progress,
    [property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
    [property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
    [property: JsonPropertyName("bundleHash")] string? BundleHash,
    [property: JsonPropertyName("bundleUrl")] string? BundleUrl,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("errorMessage")] string? ErrorMessage,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);

/// <summary>
/// Paginated list of export runs.
/// </summary>
public sealed record ExportRunListResponse(
    [property: JsonPropertyName("runs")] IReadOnlyList<ExportRun> Runs,
    [property: JsonPropertyName("continuationToken")] string? ContinuationToken,
    [property: JsonPropertyName("hasMore")] bool HasMore);

/// <summary>
/// Request to create a new evidence export.
/// </summary>
public sealed record CreateEvidenceExportRequest(
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>? Selectors = null,
    [property: JsonPropertyName("callbackUrl")] string? CallbackUrl = null);

/// <summary>
/// Response from creating an evidence export.
/// </summary>
public sealed record CreateEvidenceExportResponse(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("statusUrl")] string StatusUrl,
    [property: JsonPropertyName("estimatedCompletionSeconds")] int? EstimatedCompletionSeconds);

/// <summary>
/// Status of an evidence export.
/// </summary>
public sealed record EvidenceExportStatus(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("progress")] int Progress,
    [property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
    [property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
    [property: JsonPropertyName("bundleHash")] string? BundleHash,
    [property: JsonPropertyName("downloadUrl")] string? DownloadUrl,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("errorMessage")] string? ErrorMessage);

/// <summary>
/// Request to create a new attestation export.
/// </summary>
public sealed record CreateAttestationExportRequest(
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>? Selectors = null,
    [property: JsonPropertyName("includeTransparencyLog")] bool IncludeTransparencyLog = true,
    [property: JsonPropertyName("callbackUrl")] string? CallbackUrl = null);

/// <summary>
/// Response from creating an attestation export.
/// </summary>
public sealed record CreateAttestationExportResponse(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("statusUrl")] string StatusUrl,
    [property: JsonPropertyName("estimatedCompletionSeconds")] int? EstimatedCompletionSeconds);

/// <summary>
/// Status of an attestation export.
/// </summary>
public sealed record AttestationExportStatus(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("profileId")] string ProfileId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("progress")] int Progress,
    [property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
    [property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
    [property: JsonPropertyName("bundleHash")] string? BundleHash,
    [property: JsonPropertyName("downloadUrl")] string? DownloadUrl,
    [property: JsonPropertyName("transparencyLogIncluded")] bool TransparencyLogIncluded,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("errorMessage")] string? ErrorMessage);

/// <summary>
/// OpenAPI discovery metadata.
/// </summary>
public sealed record OpenApiDiscoveryMetadata(
    [property: JsonPropertyName("service")] string Service,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("specVersion")] string SpecVersion,
    [property: JsonPropertyName("format")] string Format,
    [property: JsonPropertyName("url")] string Url,
    [property: JsonPropertyName("jsonUrl")] string? JsonUrl,
    [property: JsonPropertyName("errorEnvelopeSchema")] string ErrorEnvelopeSchema,
    [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
    [property: JsonPropertyName("profilesSupported")] IReadOnlyList<string>? ProfilesSupported,
    [property: JsonPropertyName("checksumSha256")] string? ChecksumSha256);

/// <summary>
/// Standard error envelope.
/// </summary>
public sealed record ErrorEnvelope(
    [property: JsonPropertyName("error")] ErrorDetail Error);

/// <summary>
/// Error detail within an error envelope.
/// </summary>
public sealed record ErrorDetail(
    [property: JsonPropertyName("code")] string Code,
    [property: JsonPropertyName("message")] string Message,
    [property: JsonPropertyName("correlationId")] string? CorrelationId = null,
    [property: JsonPropertyName("details")] IReadOnlyList<ErrorDetailItem>? Details = null);

/// <summary>
/// Individual error detail item.
/// </summary>
public sealed record ErrorDetailItem(
    [property: JsonPropertyName("field")] string? Field,
    [property: JsonPropertyName("reason")] string Reason);

/// <summary>
/// Helper methods for streaming export bundle downloads.
/// </summary>
public static class ExportDownloadHelper
{
    private const int DefaultBufferSize = 81920; // 80 KB

    /// <summary>
    /// Downloads a stream to a file with progress reporting.
    /// </summary>
    /// <param name="stream">Source stream.</param>
    /// <param name="outputPath">Destination file path.</param>
    /// <param name="expectedLength">Expected content length (if known).</param>
    /// <param name="progressCallback">Progress callback (bytes downloaded, total bytes or null).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Total bytes downloaded.</returns>
    public static async Task<long> DownloadToFileAsync(
        Stream stream,
        string outputPath,
        long? expectedLength = null,
        Action<long, long?>? progressCallback = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);

        await using var fileStream = File.Create(outputPath);
        return await CopyWithProgressAsync(stream, fileStream, expectedLength, progressCallback, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Downloads a stream to a file and verifies SHA-256 checksum.
    /// An optional "sha256:" prefix on the expected hash is tolerated.
    /// </summary>
    /// <param name="stream">Source stream.</param>
    /// <param name="outputPath">Destination file path.</param>
    /// <param name="expectedSha256">Expected SHA-256 hash (hex string).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Actual SHA-256 hash of the downloaded file.</returns>
    /// <exception cref="InvalidOperationException">Thrown if checksum doesn't match; the partial file is deleted first.</exception>
    public static async Task<string> DownloadAndVerifyAsync(
        Stream stream,
        string outputPath,
        string expectedSha256,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(expectedSha256);

        string actualHash;

        using (var sha256 = SHA256.Create())
        {
            // BUGFIX: the original called File.Delete(outputPath) on a checksum
            // mismatch while the destination FileStream/CryptoStream were still
            // open ('await using' for the rest of the method), which throws
            // IOException on platforms that lock open files (Windows). Close the
            // streams first, then verify and delete.
            await using (var fileStream = File.Create(outputPath))
            await using (var cryptoStream = new CryptoStream(fileStream, sha256, CryptoStreamMode.Write))
            {
                var buffer = new byte[DefaultBufferSize];
                int bytesRead;

                while ((bytesRead = await stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false)) > 0)
                {
                    await cryptoStream.WriteAsync(buffer.AsMemory(0, bytesRead), cancellationToken).ConfigureAwait(false);
                }

                await cryptoStream.FlushFinalBlockAsync(cancellationToken).ConfigureAwait(false);

                // Hash is final after FlushFinalBlockAsync; capture it while the
                // algorithm instance is still alive.
                actualHash = Convert.ToHexString(sha256.Hash!).ToLowerInvariant();
            }
        }

        var expectedNormalized = expectedSha256.ToLowerInvariant().Replace("sha256:", "");

        if (!string.Equals(actualHash, expectedNormalized, StringComparison.Ordinal))
        {
            // Delete the corrupted file (streams are closed at this point).
            File.Delete(outputPath);
            throw new InvalidOperationException(
                $"Checksum verification failed. Expected: {expectedNormalized}, Actual: {actualHash}");
        }

        return actualHash;
    }

    /// <summary>
    /// Computes SHA-256 hash of a stream.
    /// </summary>
    /// <param name="stream">Source stream.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>SHA-256 hash as lowercase hex string.</returns>
    public static async Task<string> ComputeSha256Async(
        Stream stream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(stream);

        using var sha256 = SHA256.Create();
        var hash = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Copies a stream with progress reporting.
    /// </summary>
    /// <param name="source">Source stream.</param>
    /// <param name="destination">Destination stream.</param>
    /// <param name="expectedLength">Expected content length (if known).</param>
    /// <param name="progressCallback">Progress callback (bytes copied, total bytes or null).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Total bytes copied.</returns>
    public static async Task<long> CopyWithProgressAsync(
        Stream source,
        Stream destination,
        long? expectedLength = null,
        Action<long, long?>? progressCallback = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(source);
        ArgumentNullException.ThrowIfNull(destination);

        var buffer = new byte[DefaultBufferSize];
        long totalBytes = 0;
        int bytesRead;

        while ((bytesRead = await source.ReadAsync(buffer, cancellationToken).ConfigureAwait(false)) > 0)
        {
            await destination.WriteAsync(buffer.AsMemory(0, bytesRead), cancellationToken).ConfigureAwait(false);
            totalBytes += bytesRead;
            progressCallback?.Invoke(totalBytes, expectedLength);
        }

        return totalBytes;
    }

    /// <summary>
    /// Creates a progress callback that logs progress at specified intervals.
    /// Note: the returned closure keeps per-download state, so create one
    /// logger per download; it is not thread-safe across concurrent downloads.
    /// </summary>
    /// <param name="logAction">Action to invoke with progress message.</param>
    /// <param name="reportIntervalBytes">Minimum bytes between progress reports (default: 1 MB).</param>
    /// <returns>Progress callback action.</returns>
    public static Action<long, long?> CreateProgressLogger(
        Action<string> logAction,
        long reportIntervalBytes = 1_048_576)
    {
        ArgumentNullException.ThrowIfNull(logAction);

        long lastReportedBytes = 0;

        return (bytesDownloaded, totalBytes) =>
        {
            if (bytesDownloaded - lastReportedBytes >= reportIntervalBytes)
            {
                lastReportedBytes = bytesDownloaded;
                var message = totalBytes.HasValue
                    ? $"Downloaded {FormatBytes(bytesDownloaded)} of {FormatBytes(totalBytes.Value)} ({bytesDownloaded * 100 / totalBytes.Value}%)"
                    : $"Downloaded {FormatBytes(bytesDownloaded)}";
                logAction(message);
            }
        };
    }

    /// <summary>
    /// Formats a byte count as a human-readable string (B/KB/MB/GB/TB).
    /// </summary>
    private static string FormatBytes(long bytes)
    {
        string[] sizes = ["B", "KB", "MB", "GB", "TB"];
        var order = 0;
        double len = bytes;

        while (len >= 1024 && order < sizes.Length - 1)
        {
            order++;
            len /= 1024;
        }

        return $"{len:0.##} {sizes[order]}";
    }
}
public sealed class AttestationBundleBuilder
{
    // NOTE(review): generic type arguments in this file were lost during
    // extraction and are reconstructed (List<string>, IEnumerable<string>,
    // Span<byte>) — confirm against the original source.

    private const string BundleVersion = "attestation-bundle/v1";
    private const string DefaultStatementVersion = "v1";
    private const string DsseEnvelopeFileName = "attestation.dsse.json";
    private const string StatementFileName = "statement.json";
    private const string TransparencyFileName = "transparency.ndjson";
    private const string MetadataFileName = "metadata.json";
    private const string ChecksumsFileName = "checksums.txt";
    private const string VerifyScriptFileName = "verify-attestation.sh";

    // Fixed timestamp applied to tar entries and the gzip MTIME field so that
    // identical inputs produce byte-identical archives.
    private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    // rw-r--r-- for regular files.
    private static readonly UnixFileMode DefaultFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;

    // rwxr-xr-x for the verification script.
    private static readonly UnixFileMode ExecutableFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
        UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
        UnixFileMode.OtherRead | UnixFileMode.OtherExecute;

    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    // Project-defined hashing abstraction; used for all sha256 content hashes.
    private readonly ICryptoHash _cryptoHash;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a builder.
    /// </summary>
    /// <param name="cryptoHash">Hashing service used for content digests.</param>
    /// <param name="timeProvider">
    /// Clock used for metadata.generatedAt; defaults to the system clock.
    /// NOTE(review): because generatedAt comes from this clock, full byte
    /// determinism of the bundle requires injecting a fixed TimeProvider —
    /// confirm intended behavior.
    /// </param>
    public AttestationBundleBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds an attestation bundle export from the provided request.
    /// </summary>
    /// <param name="request">Input payloads (DSSE envelope, statement, optional transparency entries).</param>
    /// <param name="cancellationToken">Cancellation token (checked once after validation).</param>
    /// <returns>Metadata, metadata JSON, root hash, and the tar.gz archive stream (positioned at 0).</returns>
    /// <exception cref="ArgumentException">Any required identifier or payload is missing.</exception>
    public AttestationBundleExportResult Build(AttestationBundleExportRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (request.ExportId == Guid.Empty)
        {
            throw new ArgumentException("Export identifier must be provided.", nameof(request));
        }

        if (request.AttestationId == Guid.Empty)
        {
            throw new ArgumentException("Attestation identifier must be provided.", nameof(request));
        }

        if (request.TenantId == Guid.Empty)
        {
            throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
        }

        if (string.IsNullOrWhiteSpace(request.DsseEnvelopeJson))
        {
            throw new ArgumentException("DSSE envelope JSON must be provided.", nameof(request));
        }

        if (string.IsNullOrWhiteSpace(request.StatementJson))
        {
            throw new ArgumentException("Statement JSON must be provided.", nameof(request));
        }

        cancellationToken.ThrowIfCancellationRequested();

        // Compute hashes for each component
        var dsseBytes = Encoding.UTF8.GetBytes(request.DsseEnvelopeJson);
        var dsseSha256 = _cryptoHash.ComputeHashHexForPurpose(dsseBytes, HashPurpose.Content);

        var statementBytes = Encoding.UTF8.GetBytes(request.StatementJson);
        var statementSha256 = _cryptoHash.ComputeHashHexForPurpose(statementBytes, HashPurpose.Content);

        // Build transparency NDJSON if entries exist; entries are sorted
        // ordinally so the file content is order-independent of the input.
        string? transparencyNdjson = null;
        string? transparencySha256 = null;
        if (request.TransparencyEntries is { Count: > 0 })
        {
            var transparencyBuilder = new StringBuilder();
            foreach (var entry in request.TransparencyEntries.OrderBy(e => e, StringComparer.Ordinal))
            {
                transparencyBuilder.AppendLine(entry);
            }
            transparencyNdjson = transparencyBuilder.ToString();
            transparencySha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(transparencyNdjson), HashPurpose.Content);
        }

        // Build initial metadata (rootHash computed later)
        var metadata = new AttestationBundleMetadata(
            BundleVersion,
            request.ExportId.ToString("D"),
            request.AttestationId.ToString("D"),
            request.TenantId.ToString("D"),
            _timeProvider.GetUtcNow(),
            string.Empty, // Placeholder, computed after
            request.SourceUri,
            request.StatementVersion ?? DefaultStatementVersion,
            request.SubjectDigests);

        var metadataJson = JsonSerializer.Serialize(metadata, SerializerOptions);
        var metadataSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(metadataJson), HashPurpose.Content);

        // Build verification script
        var verifyScript = BuildVerificationScript(request.AttestationId);
        var verifyScriptSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(verifyScript), HashPurpose.Content);

        // Build checksums (without root hash line yet)
        var checksums = BuildChecksums(dsseSha256, statementSha256, transparencySha256, metadataSha256);
        var checksumsSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(checksums), HashPurpose.Content);

        // Compute root hash from all component hashes.
        // NOTE(review): checksumsSha256 here is the hash of the PRELIMINARY
        // checksums file (built with the placeholder-rootHash metadata hash),
        // while the archive ships the rebuilt finalChecksums — so rootHash is
        // deterministic but cannot be re-derived from the archived
        // checksums.txt alone. Confirm this is intentional.
        var hashList = new List<string> { dsseSha256, statementSha256, metadataSha256, checksumsSha256, verifyScriptSha256 };
        if (transparencySha256 is not null)
        {
            hashList.Add(transparencySha256);
        }
        var rootHash = ComputeRootHash(hashList);

        // Rebuild metadata with root hash
        var finalMetadata = metadata with { RootHash = rootHash };
        var finalMetadataJson = JsonSerializer.Serialize(finalMetadata, SerializerOptions);
        var finalMetadataSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(finalMetadataJson), HashPurpose.Content);

        // Rebuild checksums with final metadata hash
        var finalChecksums = BuildChecksums(dsseSha256, statementSha256, transparencySha256, finalMetadataSha256);

        // Create the export archive
        var exportStream = CreateExportArchive(
            request.DsseEnvelopeJson,
            request.StatementJson,
            transparencyNdjson,
            finalMetadataJson,
            finalChecksums,
            verifyScript);

        exportStream.Position = 0;

        return new AttestationBundleExportResult(
            finalMetadata,
            finalMetadataJson,
            rootHash,
            exportStream);
    }

    /// <summary>
    /// Folds the component hashes into a single digest: hashes are sorted
    /// ordinally, NUL-joined, UTF-8 encoded, and hashed once more.
    /// </summary>
    private string ComputeRootHash(IEnumerable<string> hashes)
    {
        var builder = new StringBuilder();
        foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
        {
            builder.Append(hash).Append('\0');
        }

        var bytes = Encoding.UTF8.GetBytes(builder.ToString());
        return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
    }

    /// <summary>
    /// Builds the checksums.txt content in sha256sum-compatible format
    /// ("&lt;hash&gt;  &lt;file&gt;"), listing entries in lexical file-name order.
    /// The checksums file itself and the verify script are not listed.
    /// </summary>
    private static string BuildChecksums(string dsseSha256, string statementSha256, string? transparencySha256, string metadataSha256)
    {
        var builder = new StringBuilder();
        builder.AppendLine("# Attestation bundle checksums (sha256)");

        // Lexical order
        builder.Append(dsseSha256).Append("  ").AppendLine(DsseEnvelopeFileName);
        builder.Append(metadataSha256).Append("  ").AppendLine(MetadataFileName);
        builder.Append(statementSha256).Append("  ").AppendLine(StatementFileName);

        if (transparencySha256 is not null)
        {
            builder.Append(transparencySha256).Append("  ").AppendLine(TransparencyFileName);
        }

        return builder.ToString();
    }

    /// <summary>
    /// Generates the POSIX-sh verification script shipped inside the bundle:
    /// checks checksums.txt via sha256sum/shasum, then attempts DSSE
    /// verification through the stella CLI when it is on PATH.
    /// </summary>
    private static string BuildVerificationScript(Guid attestationId)
    {
        var builder = new StringBuilder();
        builder.AppendLine("#!/usr/bin/env sh");
        builder.AppendLine("# Attestation Bundle Verification Script");
        builder.AppendLine("# No network access required");
        builder.AppendLine();
        builder.AppendLine("set -eu");
        builder.AppendLine();
        builder.AppendLine("# Verify checksums");
        builder.AppendLine("echo \"Verifying checksums...\"");
        builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
        builder.AppendLine("  sha256sum --check checksums.txt");
        builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
        builder.AppendLine("  shasum -a 256 --check checksums.txt");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Error: sha256sum or shasum required\" >&2");
        builder.AppendLine("  exit 1");
        builder.AppendLine("fi");
        builder.AppendLine();
        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Checksums verified successfully.\"");
        builder.AppendLine("echo \"\"");
        builder.AppendLine();
        builder.AppendLine("# Verify DSSE envelope");
        builder.Append("ATTESTATION_ID=\"").Append(attestationId.ToString("D")).AppendLine("\"");
        builder.AppendLine("DSSE_FILE=\"attestation.dsse.json\"");
        builder.AppendLine();
        builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
        builder.AppendLine("  echo \"Verifying DSSE envelope with stella CLI...\"");
        builder.AppendLine("  stella attest verify --envelope \"$DSSE_FILE\" --attestation-id \"$ATTESTATION_ID\"");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Note: stella CLI not found. Manual DSSE verification recommended.\"");
        builder.AppendLine("  echo \"Install stella CLI and run: stella attest verify --envelope $DSSE_FILE\"");
        builder.AppendLine("fi");
        builder.AppendLine();
        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Verification complete.\"");

        return builder.ToString();
    }

    /// <summary>
    /// Packs all bundle files into a deterministic tar.gz:
    /// PAX entries in lexical order, fixed timestamps/uid/gid, then the gzip
    /// MTIME header is overwritten with the fixed timestamp.
    /// </summary>
    private MemoryStream CreateExportArchive(
        string dsseEnvelopeJson,
        string statementJson,
        string? transparencyNdjson,
        string metadataJson,
        string checksums,
        string verifyScript)
    {
        var stream = new MemoryStream();

        using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
        using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            // Write files in lexical order for determinism
            WriteTextEntry(tar, DsseEnvelopeFileName, dsseEnvelopeJson, DefaultFileMode);
            WriteTextEntry(tar, ChecksumsFileName, checksums, DefaultFileMode);
            WriteTextEntry(tar, MetadataFileName, metadataJson, DefaultFileMode);
            WriteTextEntry(tar, StatementFileName, statementJson, DefaultFileMode);

            if (transparencyNdjson is not null)
            {
                WriteTextEntry(tar, TransparencyFileName, transparencyNdjson, DefaultFileMode);
            }

            WriteTextEntry(tar, VerifyScriptFileName, verifyScript, ExecutableFileMode);
        }

        ApplyDeterministicGzipHeader(stream);
        return stream;
    }

    /// <summary>
    /// Writes a single UTF-8 text file into the tar with fixed
    /// timestamp/owner so identical content yields identical entry bytes.
    /// </summary>
    private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        using var dataStream = new MemoryStream(bytes);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
        {
            Mode = mode,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty,
            DataStream = dataStream
        };
        writer.WriteEntry(entry);
    }

    /// <summary>
    /// Overwrites the 4-byte little-endian MTIME field at offset 4 of the gzip
    /// header (RFC 1952) with the fixed timestamp, removing the last source of
    /// nondeterminism in the compressed output.
    /// </summary>
    private static void ApplyDeterministicGzipHeader(MemoryStream stream)
    {
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for attestation bundle export.");
        }

        var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);

        var originalPosition = stream.Position;
        stream.Position = 4;
        stream.Write(buffer);
        stream.Position = originalPosition;
    }
}

using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.AttestationBundle;

/// <summary>
/// Request to create an attestation bundle export.
/// </summary>
public sealed record AttestationBundleExportRequest(
    Guid ExportId,
    Guid AttestationId,
    Guid TenantId,
    string DsseEnvelopeJson,
    string StatementJson,
    IReadOnlyList<string>? TransparencyEntries = null,
    IReadOnlyList<string>? SubjectDigests = null,
    string? SourceUri = null,
    string? StatementVersion = null);

/// <summary>
/// Result of building an attestation bundle export.
/// </summary>
public sealed record AttestationBundleExportResult(
    AttestationBundleMetadata Metadata,
    string MetadataJson,
    string RootHash,
    MemoryStream ExportStream);

/// <summary>
/// Metadata document for attestation bundle exports.
/// </summary>
+/// +public sealed record AttestationBundleMetadata( + [property: JsonPropertyName("version")] string Version, + [property: JsonPropertyName("exportId")] string ExportId, + [property: JsonPropertyName("attestationId")] string AttestationId, + [property: JsonPropertyName("tenantId")] string TenantId, + [property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc, + [property: JsonPropertyName("rootHash")] string RootHash, + [property: JsonPropertyName("sourceUri")] string? SourceUri, + [property: JsonPropertyName("statementVersion")] string StatementVersion, + [property: JsonPropertyName("subjectDigests")] IReadOnlyList? SubjectDigests); + +/// +/// Subject digest entry for attestation bundles. +/// +public sealed record AttestationSubjectDigest( + [property: JsonPropertyName("name")] string Name, + [property: JsonPropertyName("digest")] string Digest, + [property: JsonPropertyName("algorithm")] string Algorithm); + +/// +/// Export status for attestation bundles. +/// +public enum AttestationBundleExportStatus +{ + Pending = 1, + Packaging = 2, + Ready = 3, + Failed = 4 +} + +/// +/// Status response for attestation bundle export. +/// +public sealed record AttestationBundleExportStatusResponse( + [property: JsonPropertyName("exportId")] string ExportId, + [property: JsonPropertyName("attestationId")] string AttestationId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("rootHash")] string? RootHash, + [property: JsonPropertyName("downloadUri")] string? DownloadUri, + [property: JsonPropertyName("attestationDigests")] IReadOnlyList? 
AttestationDigests, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackBuilder.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackBuilder.cs new file mode 100644 index 000000000..992538d6a --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackBuilder.cs @@ -0,0 +1,550 @@ +using System.Buffers.Binary; +using System.Formats.Tar; +using System.IO.Compression; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Cryptography; + +namespace StellaOps.ExportCenter.Core.BootstrapPack; + +/// +/// Builds deterministic bootstrap packs for air-gap deployment containing Helm charts and container images. +/// +public sealed class BootstrapPackBuilder +{ + private const string ManifestVersion = "bootstrap/v1"; + private const int OciSchemaVersion = 2; + private const string OciImageIndexMediaType = "application/vnd.oci.image.index.v1+json"; + private const string OciImageLayoutVersion = "1.0.0"; + + private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + + private static readonly UnixFileMode DefaultFileMode = + UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private readonly ICryptoHash _cryptoHash; + private readonly TimeProvider _timeProvider; + + public BootstrapPackBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null) + { + _cryptoHash = cryptoHash ?? 
throw new ArgumentNullException(nameof(cryptoHash));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds a bootstrap pack from the provided request.
    /// </summary>
    /// <exception cref="ArgumentException">When identifiers are empty or no charts/images are supplied.</exception>
    public BootstrapPackBuildResult Build(BootstrapPackBuildRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (request.ExportId == Guid.Empty)
        {
            throw new ArgumentException("Export identifier must be provided.", nameof(request));
        }

        if (request.TenantId == Guid.Empty)
        {
            throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
        }

        if ((request.Charts is null || request.Charts.Count == 0) &&
            (request.Images is null || request.Images.Count == 0))
        {
            throw new ArgumentException("At least one chart or image must be provided.", nameof(request));
        }

        cancellationToken.ThrowIfCancellationRequested();

        // Collect and validate charts
        var chartEntries = CollectCharts(request.Charts, cancellationToken);

        // Collect and validate images
        var imageEntries = CollectImages(request.Images, cancellationToken);

        // Build manifest
        var rootHash = ComputeRootHash(chartEntries, imageEntries);
        var manifest = BuildManifest(request, chartEntries, imageEntries, rootHash);
        var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions);

        // Build OCI index
        var ociIndex = BuildOciIndex(imageEntries);
        var ociIndexJson = JsonSerializer.Serialize(ociIndex, SerializerOptions);

        // Build OCI layout marker
        var ociLayout = new OciImageLayout(OciImageLayoutVersion);
        var ociLayoutJson = JsonSerializer.Serialize(ociLayout, SerializerOptions);

        // Build checksums
        var checksums = BuildChecksums(chartEntries, imageEntries, rootHash);

        // Create the pack archive
        var packStream = CreatePackArchive(
            request,
            chartEntries,
            imageEntries,
            manifestJson,
            ociIndexJson,
            ociLayoutJson,
            checksums);

        // Compute final artifact SHA-256
        packStream.Position = 0;
        var artifactSha256 = ComputeStreamHash(packStream);
        packStream.Position = 0;

        return new BootstrapPackBuildResult(
            manifest,
            manifestJson,
            rootHash,
            artifactSha256,
            packStream);
    }

    /// <summary>
    /// Validates chart sources, hashes their content (file or whole directory)
    /// and assigns deterministic bundle paths sorted ordinally.
    /// </summary>
    private List<CollectedChart> CollectCharts(
        IReadOnlyList<BootstrapPackChartSource>? charts,
        CancellationToken cancellationToken)
    {
        var entries = new List<CollectedChart>();

        if (charts is null || charts.Count == 0)
        {
            return entries;
        }

        foreach (var chart in charts)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (chart is null)
            {
                throw new ArgumentException("Chart sources cannot contain null entries.");
            }

            if (string.IsNullOrWhiteSpace(chart.ChartPath))
            {
                throw new ArgumentException($"Chart path cannot be empty for chart '{chart.Name}'.");
            }

            var fullPath = Path.GetFullPath(chart.ChartPath);
            if (!File.Exists(fullPath) && !Directory.Exists(fullPath))
            {
                throw new FileNotFoundException($"Chart path '{fullPath}' not found.", fullPath);
            }

            string sha256;
            long size;

            if (Directory.Exists(fullPath))
            {
                // For directories, compute combined hash
                sha256 = ComputeDirectoryHash(fullPath, cancellationToken);
                size = GetDirectorySize(fullPath);
            }
            else
            {
                var fileBytes = File.ReadAllBytes(fullPath);
                sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
                size = fileBytes.LongLength;
            }

            var bundlePath = $"charts/{SanitizeSegment(chart.Name)}-{SanitizeSegment(chart.Version)}";

            entries.Add(new CollectedChart(
                chart.Name,
                chart.Version,
                bundlePath,
                fullPath,
                sha256,
                size));
        }

        // Sort for deterministic ordering
        entries.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath));
        return entries;
    }

    /// <summary>
    /// Validates image blob sources, hashes their content and assigns
    /// content-addressed bundle paths (images/blobs/sha256/&#60;hash&#62;).
    /// </summary>
    private List<CollectedImage> CollectImages(
        IReadOnlyList<BootstrapPackImageSource>? images,
        CancellationToken cancellationToken)
    {
        var entries = new List<CollectedImage>();

        if (images is null || images.Count == 0)
        {
            return entries;
        }

        foreach (var image in images)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (image is null)
            {
                throw new ArgumentException("Image sources cannot contain null entries.");
            }

            if (string.IsNullOrWhiteSpace(image.BlobPath))
            {
                throw new ArgumentException($"Blob path cannot be empty for image '{image.Repository}:{image.Tag}'.");
            }

            var fullPath = Path.GetFullPath(image.BlobPath);
            if (!File.Exists(fullPath))
            {
                throw new FileNotFoundException($"Image blob path '{fullPath}' not found.", fullPath);
            }

            var fileBytes = File.ReadAllBytes(fullPath);
            var sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);

            // Blobs are addressed purely by content digest. (A sanitized
            // repository path segment was previously computed here but never
            // used — removed as dead code.)
            var bundlePath = $"images/blobs/sha256/{sha256}";

            entries.Add(new CollectedImage(
                image.Repository,
                image.Tag,
                image.Digest,
                bundlePath,
                fullPath,
                sha256,
                fileBytes.LongLength));
        }

        // Sort for deterministic ordering
        entries.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath));
        return entries;
    }

    /// <summary>
    /// Computes the pack root hash over (path, sha256) pairs of all charts and
    /// images, NUL-separated and ordinally sorted, hashed as UTF-8 bytes.
    /// </summary>
    private string ComputeRootHash(
        IReadOnlyList<CollectedChart> charts,
        IReadOnlyList<CollectedImage> images)
    {
        var builder = new StringBuilder();

        foreach (var chart in charts.OrderBy(c => c.BundlePath, StringComparer.Ordinal))
        {
            builder.Append(chart.BundlePath)
                .Append('\0')
                .Append(chart.Sha256)
                .Append('\0');
        }

        foreach (var image in images.OrderBy(i => i.BundlePath, StringComparer.Ordinal))
        {
            builder.Append(image.BundlePath)
                .Append('\0')
                .Append(image.Sha256)
                .Append('\0');
        }

        var bytes = Encoding.UTF8.GetBytes(builder.ToString());
        return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
    }

    /// <summary>Assembles the manifest.json document from the collected entries.</summary>
    private BootstrapPackManifest BuildManifest(
        BootstrapPackBuildRequest request,
        IReadOnlyList<CollectedChart> charts,
        IReadOnlyList<CollectedImage> images,
        string rootHash)
    {
        var chartEntries = charts.Select(c => new BootstrapPackChartEntry(
            c.Name,
            c.Version,
            c.BundlePath,
            c.Sha256)).ToList();

        var imageEntries = images.Select(i => new BootstrapPackImageEntry(
            i.Repository,
            i.Tag,
            i.Digest,
            i.BundlePath,
            i.Sha256)).ToList();

        BootstrapPackSignatureEntry? sigEntry = null;
        if (request.Signatures is not null)
        {
            sigEntry = new BootstrapPackSignatureEntry(
                request.Signatures.MirrorBundleDigest,
                request.Signatures.SignaturePath);
        }

        return new BootstrapPackManifest(
            ManifestVersion,
            request.ExportId.ToString("D"),
            request.TenantId.ToString("D"),
            _timeProvider.GetUtcNow(),
            chartEntries,
            imageEntries,
            sigEntry,
            rootHash);
    }

    /// <summary>
    /// Builds the OCI index.json for the images directory.
    /// NOTE(review): each entry's size/digest come from the stored blob; the
    /// OCI spec expects them to describe an image *manifest* blob — this is
    /// only conformant if the supplied blobs are manifests. Confirm upstream.
    /// </summary>
    private static OciImageIndex BuildOciIndex(IReadOnlyList<CollectedImage> images)
    {
        var manifests = images.Select(i => new OciImageIndexManifest(
            "application/vnd.oci.image.manifest.v1+json",
            i.Size,
            i.Digest,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.ref.name"] = $"{i.Repository}:{i.Tag}"
            })).ToList();

        return new OciImageIndex(OciSchemaVersion, OciImageIndexMediaType, manifests);
    }

    /// <summary>
    /// Builds checksums.txt: a "root" summary line followed by sha256sum(1)
    /// formatted entries (two-space separator) for every chart and image.
    /// </summary>
    private static string BuildChecksums(
        IReadOnlyList<CollectedChart> charts,
        IReadOnlyList<CollectedImage> images,
        string rootHash)
    {
        var builder = new StringBuilder();
        builder.AppendLine("# Bootstrap pack checksums (sha256)");
        builder.Append("root ").AppendLine(rootHash);

        // TWO spaces between digest and path — required by sha256sum --check.
        foreach (var chart in charts)
        {
            builder.Append(chart.Sha256).Append("  ").AppendLine(chart.BundlePath);
        }

        foreach (var image in images)
        {
            builder.Append(image.Sha256).Append("  ").AppendLine(image.BundlePath);
        }

        return builder.ToString();
    }

    /// <summary>
    /// Writes all pack content into a deterministic gzip-compressed PAX tar
    /// stream and normalizes the gzip header afterwards.
    /// </summary>
    private MemoryStream CreatePackArchive(
        BootstrapPackBuildRequest request,
        IReadOnlyList<CollectedChart> charts,
        IReadOnlyList<CollectedImage> images,
        string manifestJson,
        string ociIndexJson,
        string ociLayoutJson,
        string checksums)
    {
        var stream = new MemoryStream();

        using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
        using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            // Write metadata files
            WriteTextEntry(tar, "manifest.json", manifestJson);
            WriteTextEntry(tar, "checksums.txt", checksums);

            // Write OCI layout files for images directory
            if (images.Count > 0)
            {
                WriteTextEntry(tar, "images/oci-layout", ociLayoutJson);
                WriteTextEntry(tar, "images/index.json", ociIndexJson);
            }

            // Write chart files
            foreach (var chart in charts)
            {
                if (Directory.Exists(chart.SourcePath))
                {
                    WriteDirectoryEntries(tar, chart.BundlePath, chart.SourcePath);
                }
                else
                {
                    WriteFileEntry(tar, chart.BundlePath, chart.SourcePath);
                }
            }

            // Write image blobs
            foreach (var image in images)
            {
                WriteFileEntry(tar, image.BundlePath, image.SourcePath);
            }

            // Write signature reference if provided
            if (request.Signatures?.SignaturePath is not null && File.Exists(request.Signatures.SignaturePath))
            {
                WriteFileEntry(tar, "signatures/mirror-bundle.sig", request.Signatures.SignaturePath);
            }
        }

        ApplyDeterministicGzipHeader(stream);
        return stream;
    }

    /// <summary>Writes a UTF-8 text entry with normalized ownership and fixed mtime.</summary>
    private static void WriteTextEntry(TarWriter writer, string path, string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        using var dataStream = new MemoryStream(bytes);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
        {
            Mode = DefaultFileMode,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty,
            DataStream = dataStream
        };
        writer.WriteEntry(entry);
    }

    /// <summary>Streams a file from disk into the archive with normalized metadata.</summary>
    private static void WriteFileEntry(TarWriter writer, string bundlePath, string sourcePath)
    {
        using var dataStream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
        {
            Mode = DefaultFileMode,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty,
            DataStream = dataStream
        };
        writer.WriteEntry(entry);
    }

    /// <summary>
    /// Recursively writes every file under sourceDir (ordinally sorted for
    /// determinism) beneath bundlePrefix.
    /// </summary>
    private static void WriteDirectoryEntries(TarWriter writer, string bundlePrefix, string sourceDir)
    {
        var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories);
        Array.Sort(files, StringComparer.Ordinal);

        foreach (var file in files)
        {
            var relative = Path.GetRelativePath(sourceDir, file).Replace('\\', '/');
            var bundlePath = $"{bundlePrefix}/{relative}";

            using var dataStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);
            var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
            {
                Mode = DefaultFileMode,
                ModificationTime = FixedTimestamp,
                Uid = 0,
                Gid = 0,
                UserName = string.Empty,
                GroupName = string.Empty,
                DataStream = dataStream
            };
            writer.WriteEntry(entry);
        }
    }

    /// <summary>
    /// Overwrites the gzip MTIME field (bytes 4-7, little-endian per RFC 1952)
    /// with the fixed timestamp so repeated builds are byte-identical.
    /// </summary>
    private static void ApplyDeterministicGzipHeader(MemoryStream stream)
    {
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for bootstrap pack.");
        }

        var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);

        var originalPosition = stream.Position;
        stream.Position = 4;
        stream.Write(buffer);
        stream.Position = originalPosition;
    }

    /// <summary>
    /// Hashes the whole stream with SHA-256.
    /// Uses SHA256 directly (not _cryptoHash) to avoid buffering the full
    /// archive into a byte array.
    /// </summary>
    private string ComputeStreamHash(Stream stream)
    {
        stream.Position = 0;
        using var sha = SHA256.Create();
        var hash = sha.ComputeHash(stream);
        return ToHex(hash);
    }

    /// <summary>
    /// Hashes a directory by concatenating (relative-path, file-hash) pairs,
    /// NUL-separated and ordinally sorted, then hashing the UTF-8 bytes.
    /// </summary>
    private string ComputeDirectoryHash(string directory, CancellationToken cancellationToken)
    {
        var builder = new StringBuilder();
        var files = Directory.GetFiles(directory, "*", SearchOption.AllDirectories);
        Array.Sort(files, StringComparer.Ordinal);

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var relative = Path.GetRelativePath(directory, file).Replace('\\', '/');
            var fileBytes = File.ReadAllBytes(file);
            var fileHash = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
            builder.Append(relative).Append('\0').Append(fileHash).Append('\0');
        }

        var bytes = Encoding.UTF8.GetBytes(builder.ToString());
        return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
    }

    /// <summary>Total size in bytes of all files under the directory, recursively.</summary>
    private static long GetDirectorySize(string directory)
    {
        return Directory.GetFiles(directory, "*", SearchOption.AllDirectories)
            .Sum(file => new FileInfo(file).Length);
    }

    /// <summary>Lower-case hex encoding without allocations beyond the result string.</summary>
    private static string ToHex(ReadOnlySpan<byte> bytes)
    {
        Span<byte> hex = stackalloc byte[bytes.Length * 2];
        for (var i = 0; i < bytes.Length; i++)
        {
            var b = bytes[i];
            hex[i * 2] = GetHexValue(b / 16);
            hex[i * 2 + 1] = GetHexValue(b % 16);
        }
        return Encoding.ASCII.GetString(hex);
    }

    private static byte GetHexValue(int i) => (byte)(i < 10 ? i + 48 : i - 10 + 97);

    /// <summary>
    /// Normalizes a path segment: lower-cases letters/digits, keeps '-', '_',
    /// '.', and replaces everything else with '-'; empty input becomes "unknown".
    /// </summary>
    private static string SanitizeSegment(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "unknown";
        }

        var span = value.Trim();
        var builder = new StringBuilder(span.Length);

        foreach (var ch in span)
        {
            if (char.IsLetterOrDigit(ch))
            {
                builder.Append(char.ToLowerInvariant(ch));
            }
            else if (ch is '-' or '_' or '.')
            {
                builder.Append(ch);
            }
            else
            {
                builder.Append('-');
            }
        }

        return builder.Length == 0 ?
"unknown" : builder.ToString();
    }

    /// <summary>Internal capture of a validated chart source.</summary>
    private sealed record CollectedChart(
        string Name,
        string Version,
        string BundlePath,
        string SourcePath,
        string Sha256,
        long Size);

    /// <summary>Internal capture of a validated image blob source.</summary>
    private sealed record CollectedImage(
        string Repository,
        string Tag,
        string Digest,
        string BundlePath,
        string SourcePath,
        string Sha256,
        long Size);
}
diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackModels.cs
new file mode 100644
index 000000000..00bd51bb6
--- /dev/null
+++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/BootstrapPackModels.cs
@@ -0,0 +1,110 @@
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.BootstrapPack;

/// <summary>
/// Request to build a bootstrap pack for air-gap deployment.
/// </summary>
public sealed record BootstrapPackBuildRequest(
    Guid ExportId,
    Guid TenantId,
    IReadOnlyList<BootstrapPackChartSource> Charts,
    IReadOnlyList<BootstrapPackImageSource> Images,
    BootstrapPackSignatureSource? Signatures = null,
    // NOTE(review): value type reconstructed from garbled text — confirm.
    IReadOnlyDictionary<string, string>? Metadata = null);

/// <summary>
/// Helm chart source for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackChartSource(
    string Name,
    string Version,
    string ChartPath);

/// <summary>
/// Container image source for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackImageSource(
    string Repository,
    string Tag,
    string Digest,
    string BlobPath);

/// <summary>
/// Optional DSSE/TUF signature source from upstream builds.
/// </summary>
public sealed record BootstrapPackSignatureSource(
    string MirrorBundleDigest,
    string? SignaturePath);

/// <summary>
/// Result of building a bootstrap pack.
/// </summary>
public sealed record BootstrapPackBuildResult(
    BootstrapPackManifest Manifest,
    string ManifestJson,
    string RootHash,
    string ArtifactSha256,
    MemoryStream PackStream);

/// <summary>
/// Manifest for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackManifest(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("tenantId")] string TenantId,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("charts")] IReadOnlyList<BootstrapPackChartEntry> Charts,
    [property: JsonPropertyName("images")] IReadOnlyList<BootstrapPackImageEntry> Images,
    [property: JsonPropertyName("signatures")] BootstrapPackSignatureEntry? Signatures,
    [property: JsonPropertyName("rootHash")] string RootHash);

/// <summary>
/// Chart entry in the manifest.
/// </summary>
public sealed record BootstrapPackChartEntry(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("path")] string Path,
    [property: JsonPropertyName("sha256")] string Sha256);

/// <summary>
/// Image entry in the manifest.
/// </summary>
public sealed record BootstrapPackImageEntry(
    [property: JsonPropertyName("repository")] string Repository,
    [property: JsonPropertyName("tag")] string Tag,
    [property: JsonPropertyName("digest")] string Digest,
    [property: JsonPropertyName("path")] string Path,
    [property: JsonPropertyName("sha256")] string Sha256);

/// <summary>
/// Signature metadata entry.
/// </summary>
public sealed record BootstrapPackSignatureEntry(
    [property: JsonPropertyName("mirrorBundleDigest")] string MirrorBundleDigest,
    [property: JsonPropertyName("signaturePath")] string? SignaturePath);

/// <summary>
/// OCI image index (index.json) structure.
/// </summary>
public sealed record OciImageIndex(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("manifests")] IReadOnlyList<OciImageIndexManifest> Manifests);

/// <summary>
/// Manifest entry within the OCI image index.
/// </summary>
public sealed record OciImageIndexManifest(
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("size")] long Size,
    [property: JsonPropertyName("digest")] string Digest,
    [property: JsonPropertyName("annotations")] IReadOnlyDictionary<string, string>? Annotations);

/// <summary>
/// OCI image layout marker (oci-layout).
/// </summary>
public sealed record OciImageLayout(
    [property: JsonPropertyName("imageLayoutVersion")] string ImageLayoutVersion);
diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleBuilder.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleBuilder.cs
new file mode 100644
index 000000000..aa2cf42eb
--- /dev/null
+++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/MirrorBundleBuilder.cs
@@ -0,0 +1,611 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;

namespace StellaOps.ExportCenter.Core.MirrorBundle;

/// <summary>
/// Builds deterministic mirror bundles for air-gapped export with DSSE/TUF metadata.
+/// +public sealed class MirrorBundleBuilder +{ + private const string ManifestVersion = "mirror/v1"; + private const string ExporterVersion = "1.0.0"; + private const string AdapterVersion = "mirror-adapter/1.0.0"; + + private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + + private static readonly UnixFileMode DefaultFileMode = + UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead; + + private static readonly UnixFileMode ExecutableFileMode = + UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private readonly ICryptoHash _cryptoHash; + private readonly TimeProvider _timeProvider; + + public MirrorBundleBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null) + { + _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + /// + /// Builds a mirror bundle from the provided request. 
+ /// + public MirrorBundleBuildResult Build(MirrorBundleBuildRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + if (request.RunId == Guid.Empty) + { + throw new ArgumentException("Run identifier must be provided.", nameof(request)); + } + + if (request.TenantId == Guid.Empty) + { + throw new ArgumentException("Tenant identifier must be provided.", nameof(request)); + } + + if (request.Variant == MirrorBundleVariant.Delta && request.DeltaOptions is null) + { + throw new ArgumentException("Delta options must be provided for delta bundles.", nameof(request)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + // Collect and validate data sources + var collectedFiles = CollectDataSources(request.DataSources, cancellationToken); + if (collectedFiles.Count == 0) + { + throw new InvalidOperationException("Mirror bundle does not contain any data files. Provide at least one data source."); + } + + // Build artifact entries + var artifacts = BuildArtifactEntries(collectedFiles); + + // Compute counts + var counts = ComputeCounts(collectedFiles); + + // Build manifest + var manifest = BuildManifest(request, artifacts, counts); + var manifestYaml = SerializeManifestToYaml(manifest); + var manifestDigest = ComputeHash(manifestYaml); + + // Build export document + var exportDoc = BuildExportDocument(request, manifest, manifestDigest); + var exportJson = JsonSerializer.Serialize(exportDoc, SerializerOptions); + + // Build provenance document + var provenanceDoc = BuildProvenanceDocument(request, manifest, manifestDigest, collectedFiles); + var provenanceJson = JsonSerializer.Serialize(provenanceDoc, SerializerOptions); + + // Compute root hash from export document + var rootHash = ComputeHash(exportJson); + + // Build checksums file + var checksums = BuildChecksums(rootHash, collectedFiles, manifestDigest); + + // Build README + var readme = BuildReadme(manifest); + + // Build verification script + var 
verifyScript = BuildVerificationScript(); + + // Create the bundle archive + var bundleStream = CreateBundleArchive( + collectedFiles, + manifestYaml, + exportJson, + provenanceJson, + checksums, + readme, + verifyScript, + request.Variant); + + bundleStream.Position = 0; + + return new MirrorBundleBuildResult( + manifest, + manifestYaml, + exportDoc, + exportJson, + provenanceDoc, + provenanceJson, + rootHash, + bundleStream); + } + + private List CollectDataSources( + IReadOnlyList dataSources, + CancellationToken cancellationToken) + { + var files = new List(); + + foreach (var source in dataSources) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (source is null) + { + throw new ArgumentException("Data sources cannot contain null entries."); + } + + if (string.IsNullOrWhiteSpace(source.SourcePath)) + { + throw new ArgumentException("Source path cannot be empty."); + } + + var fullPath = Path.GetFullPath(source.SourcePath); + if (!File.Exists(fullPath)) + { + throw new FileNotFoundException($"Data source file '{fullPath}' not found.", fullPath); + } + + var bundlePath = ComputeBundlePath(source); + var fileBytes = File.ReadAllBytes(fullPath); + var sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content); + + files.Add(new CollectedFile( + source.Category, + bundlePath, + fullPath, + fileBytes.LongLength, + sha256, + source.IsNormalized, + source.SubjectId)); + } + + // Sort for deterministic ordering + files.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath)); + return files; + } + + private static string ComputeBundlePath(MirrorBundleDataSource source) + { + var fileName = Path.GetFileName(source.SourcePath); + var prefix = source.IsNormalized ? 
"data/normalized" : "data/raw"; + + return source.Category switch + { + MirrorBundleDataCategory.Advisories => $"{prefix}/advisories/{fileName}", + MirrorBundleDataCategory.Vex => $"{prefix}/vex/{fileName}", + MirrorBundleDataCategory.Sbom when !string.IsNullOrEmpty(source.SubjectId) => + $"data/raw/sboms/{SanitizeSegment(source.SubjectId)}/{fileName}", + MirrorBundleDataCategory.Sbom => $"data/raw/sboms/{fileName}", + MirrorBundleDataCategory.PolicySnapshot => $"data/policy/snapshot.json", + MirrorBundleDataCategory.PolicyEvaluations => $"data/policy/{fileName}", + MirrorBundleDataCategory.VexConsensus => $"data/consensus/{fileName}", + MirrorBundleDataCategory.Findings => $"data/findings/{fileName}", + _ => throw new ArgumentOutOfRangeException(nameof(source), $"Unknown data category: {source.Category}") + }; + } + + private static IReadOnlyList BuildArtifactEntries(IReadOnlyList files) + { + return files.Select(f => new MirrorBundleArtifactEntry( + f.BundlePath, + f.Sha256, + f.SizeBytes, + f.Category.ToString().ToLowerInvariant())).ToList(); + } + + private static MirrorBundleManifestCounts ComputeCounts(IReadOnlyList files) + { + var advisories = files.Count(f => f.Category == MirrorBundleDataCategory.Advisories); + var vex = files.Count(f => f.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus); + var sboms = files.Count(f => f.Category == MirrorBundleDataCategory.Sbom); + var policyEvals = files.Count(f => f.Category == MirrorBundleDataCategory.PolicyEvaluations); + + return new MirrorBundleManifestCounts(advisories, vex, sboms, policyEvals); + } + + private MirrorBundleManifest BuildManifest( + MirrorBundleBuildRequest request, + IReadOnlyList artifacts, + MirrorBundleManifestCounts counts) + { + var profile = request.Variant == MirrorBundleVariant.Full ? 
/// <summary>
/// Assembles the structured manifest (profile, selectors, counts, artifacts,
/// optional encryption and delta sections) from the build request.
/// </summary>
private MirrorBundleManifest BuildManifest(
    MirrorBundleBuildRequest request,
    IReadOnlyList<MirrorBundleArtifactEntry> artifacts,
    MirrorBundleManifestCounts counts)
{
    var profile = request.Variant == MirrorBundleVariant.Full ? "mirror:full" : "mirror:delta";

    // A time window is only recorded when both ends are supplied.
    MirrorBundleTimeWindow? window = null;
    if (request.Selectors.TimeWindowFrom is { } windowFrom && request.Selectors.TimeWindowTo is { } windowTo)
    {
        window = new MirrorBundleTimeWindow(windowFrom, windowTo);
    }

    var selectors = new MirrorBundleManifestSelectors(
        request.Selectors.Products,
        window,
        request.Selectors.Ecosystems);

    MirrorBundleManifestEncryption? encryption = null;
    if (request.Encryption is { Mode: not MirrorBundleEncryptionMode.None } enc)
    {
        encryption = new MirrorBundleManifestEncryption(
            enc.Mode.ToString().ToLowerInvariant(),
            enc.Strict,
            enc.RecipientKeys);
    }

    MirrorBundleManifestDelta? delta = null;
    if (request.Variant == MirrorBundleVariant.Delta && request.DeltaOptions is { } deltaOptions)
    {
        delta = new MirrorBundleManifestDelta(
            deltaOptions.BaseExportId,
            deltaOptions.BaseManifestDigest,
            deltaOptions.ResetBaseline,
            new MirrorBundleDeltaCounts(0, 0, 0), // TODO: Compute actual delta counts
            new MirrorBundleDeltaCounts(0, 0, 0),
            new MirrorBundleDeltaCounts(0, 0, 0));
    }

    return new MirrorBundleManifest(
        profile,
        request.RunId.ToString("D"),
        request.TenantId.ToString("D"),
        selectors,
        counts,
        artifacts,
        encryption,
        delta);
}

/// <summary>
/// Builds export.json, which mirrors the manifest contents and pins the
/// manifest digest (prefixed "sha256:") plus a creation timestamp.
/// </summary>
private MirrorBundleExportDocument BuildExportDocument(
    MirrorBundleBuildRequest request,
    MirrorBundleManifest manifest,
    string manifestDigest)
{
    var variantLabel = request.Variant.ToString().ToLowerInvariant();

    return new MirrorBundleExportDocument(
        ManifestVersion,
        manifest.RunId,
        manifest.Tenant,
        new MirrorBundleExportProfile("mirror", variantLabel),
        manifest.Selectors,
        manifest.Counts,
        manifest.Artifacts,
        _timeProvider.GetUtcNow(),
        $"sha256:{manifestDigest}");
}
/// <summary>
/// Builds provenance.json: one subject per bundle file (plus manifest.yaml),
/// input references, and builder version metadata.
/// </summary>
private MirrorBundleProvenanceDocument BuildProvenanceDocument(
    MirrorBundleBuildRequest request,
    MirrorBundleManifest manifest,
    string manifestDigest,
    IReadOnlyList<CollectedFile> files)
{
    var subjects = new List<MirrorBundleProvenanceSubject>
    {
        new("manifest.yaml", new Dictionary<string, string> { ["sha256"] = manifestDigest })
    };

    foreach (var file in files)
    {
        subjects.Add(new MirrorBundleProvenanceSubject(
            file.BundlePath,
            new Dictionary<string, string> { ["sha256"] = file.Sha256 }));
    }

    // Distinct SBOM subject ids, ordinal-sorted for deterministic output.
    var sbomIds = files
        .Where(f => f.Category == MirrorBundleDataCategory.Sbom && !string.IsNullOrEmpty(f.SubjectId))
        .Select(f => f.SubjectId!)
        .Distinct()
        .OrderBy(id => id, StringComparer.Ordinal)
        .ToList();

    var inputs = new MirrorBundleProvenanceInputs(
        new[] { $"tenant:{manifest.Tenant}" },
        files.Any(f => f.Category == MirrorBundleDataCategory.PolicySnapshot)
            ? $"snapshot:{manifest.RunId}"
            : null,
        sbomIds);

    return new MirrorBundleProvenanceDocument(
        ManifestVersion,
        manifest.RunId,
        manifest.Tenant,
        subjects,
        inputs,
        new MirrorBundleProvenanceBuilder(ExporterVersion, AdapterVersion),
        _timeProvider.GetUtcNow());
}

/// <summary>UTF-8 encodes <paramref name="content"/> and returns its hex content hash.</summary>
private string ComputeHash(string content)
{
    var bytes = Encoding.UTF8.GetBytes(content);
    return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}

/// <summary>
/// Builds checksums.txt in GNU coreutils format so `sha256sum --check` /
/// `shasum -a 256 --check` (invoked by verify-mirror.sh) accept it.
/// </summary>
private static string BuildChecksums(string rootHash, IReadOnlyList<CollectedFile> files, string manifestDigest)
{
    var builder = new StringBuilder();

    // '\n' is appended explicitly: AppendLine would use Environment.NewLine and
    // emit CRLF on Windows build hosts, breaking determinism and --check parsing.
    builder.Append("# Mirror bundle checksums (sha256)").Append('\n');

    // The aggregate root hash is kept as a comment line: a bare "root <hash>"
    // entry is not a valid checksum line and causes GNU sha256sum --check to
    // reject the file as improperly formatted.
    builder.Append("# root: ").Append(rootHash).Append('\n');

    // coreutils requires a two-character separator ("  " or " *") between the
    // digest and the file name; a single space is not accepted by --check.
    builder.Append(manifestDigest).Append("  manifest.yaml").Append('\n');

    foreach (var file in files)
    {
        builder.Append(file.Sha256).Append("  ").Append(file.BundlePath).Append('\n');
    }

    return builder.ToString();
}
/// <summary>
/// Builds the human-readable README.md describing bundle contents and the
/// verification/import workflow. Uses LF endings explicitly so the archived
/// text (and therefore the archive signature) is identical on every build host.
/// </summary>
private static string BuildReadme(MirrorBundleManifest manifest)
{
    var lines = new List<string>
    {
        "Mirror Bundle",
        "=============",
        $"Profile: {manifest.Profile}",
        $"Run ID: {manifest.RunId}",
        $"Tenant: {manifest.Tenant}",
        string.Empty,
        "Contents:",
        $"- Advisories: {manifest.Counts.Advisories}",
        $"- VEX statements: {manifest.Counts.Vex}",
        $"- SBOMs: {manifest.Counts.Sboms}",
        $"- Policy evaluations: {manifest.Counts.PolicyEvaluations}",
        string.Empty
    };

    if (manifest.Delta is not null)
    {
        lines.Add("Delta Information:");
        lines.Add($"- Base export: {manifest.Delta.BaseExportId}");
        lines.Add($"- Reset baseline: {(manifest.Delta.ResetBaseline ? "yes" : "no")}");
        lines.Add(string.Empty);
    }

    if (manifest.Encryption is not null)
    {
        lines.Add("Encryption:");
        lines.Add($"- Mode: {manifest.Encryption.Mode}");
        lines.Add($"- Strict: {(manifest.Encryption.Strict ? "yes" : "no")}");
        lines.Add(string.Empty);
    }

    lines.Add("Verification:");
    lines.Add("1. Transfer the archive to the target environment.");
    lines.Add("2. Run `./verify-mirror.sh <bundle.tgz>` to validate checksums.");
    lines.Add("3. Use `stella export verify <bundle.tgz>` to verify DSSE signatures.");
    lines.Add("4. Apply using `stella export mirror-import <bundle.tgz>`.");

    return string.Join('\n', lines) + "\n";
}

/// <summary>
/// Builds the POSIX-sh verification script shipped as verify-mirror.sh.
/// Extracts the archive to a temp dir (cleaned up on exit) and validates
/// checksums.txt with sha256sum or shasum.
/// </summary>
private static string BuildVerificationScript()
{
    // LF endings are emitted explicitly: AppendLine would produce CRLF on a
    // Windows build host, which breaks the shebang and sh parsing entirely.
    string[] lines =
    [
        "#!/usr/bin/env sh",
        // "pipefail" is a bashism — dash (the usual /bin/sh) aborts on
        // `set -o pipefail`. The script has no pipelines, so -eu suffices.
        "set -eu",
        "",
        "ARCHIVE=\"${1:-mirror-bundle.tgz}\"",
        "if [ ! -f \"$ARCHIVE\" ]; then",
        "  echo \"Usage: $0 <bundle.tgz>\" >&2",
        "  exit 1",
        "fi",
        "",
        "WORKDIR=\"$(mktemp -d)\"",
        "cleanup() { rm -rf \"$WORKDIR\"; }",
        "trap cleanup EXIT INT TERM",
        "",
        "tar -xzf \"$ARCHIVE\" -C \"$WORKDIR\"",
        "echo \"Mirror bundle extracted to $WORKDIR\"",
        "",
        "cd \"$WORKDIR\"",
        "if command -v sha256sum >/dev/null 2>&1; then",
        "  sha256sum --check checksums.txt",
        "else",
        "  shasum -a 256 --check checksums.txt",
        "fi",
        "",
        "echo \"Checksums verified successfully.\"",
        "echo \"Run 'stella export verify' for signature validation.\""
    ];

    return string.Join('\n', lines) + "\n";
}
/// <summary>
/// Serializes the manifest to the YAML text written as manifest.yaml.
/// </summary>
/// <remarks>
/// Hand-rolled minimal YAML emitter; no third-party YAML dependency is
/// available. Field order is significant: the manifest digest recorded in
/// export.json and checksums.txt is computed over this exact text, so
/// emission must stay deterministic. LF is used explicitly because
/// AppendLine would emit CRLF on Windows hosts and change the digest per
/// platform. Values are written unquoted — safe for the sanitized paths and
/// hex digests produced by this builder, but not general YAML escaping.
/// NOTE(review): Selectors.Ecosystems is intentionally not serialized here
/// (matching the original emitter) — confirm consumers do not expect it.
/// </remarks>
private static string SerializeManifestToYaml(MirrorBundleManifest manifest)
{
    var builder = new StringBuilder();
    void Line(string text) => builder.Append(text).Append('\n');

    Line($"profile: {manifest.Profile}");
    Line($"runId: {manifest.RunId}");
    Line($"tenant: {manifest.Tenant}");
    Line("selectors:");
    Line("  products:");
    foreach (var product in manifest.Selectors.Products)
    {
        Line($"    - {product}");
    }

    if (manifest.Selectors.TimeWindow is not null)
    {
        Line("  timeWindow:");
        Line($"    from: {manifest.Selectors.TimeWindow.From:O}");
        Line($"    to: {manifest.Selectors.TimeWindow.To:O}");
    }

    Line("counts:");
    Line($"  advisories: {manifest.Counts.Advisories}");
    Line($"  vex: {manifest.Counts.Vex}");
    Line($"  sboms: {manifest.Counts.Sboms}");
    Line($"  policyEvaluations: {manifest.Counts.PolicyEvaluations}");

    Line("artifacts:");
    foreach (var artifact in manifest.Artifacts)
    {
        Line($"  - path: {artifact.Path}");
        Line($"    sha256: {artifact.Sha256}");
        Line($"    bytes: {artifact.Bytes}");
    }

    if (manifest.Encryption is not null)
    {
        Line("encryption:");
        Line($"  mode: {manifest.Encryption.Mode}");
        Line($"  strict: {manifest.Encryption.Strict.ToString().ToLowerInvariant()}");
        Line("  recipients:");
        foreach (var recipient in manifest.Encryption.Recipients)
        {
            Line($"    - {recipient}");
        }
    }

    if (manifest.Delta is not null)
    {
        Line("delta:");
        Line($"  baseExportId: {manifest.Delta.BaseExportId}");
        Line($"  baseManifestDigest: {manifest.Delta.BaseManifestDigest}");
        Line($"  resetBaseline: {manifest.Delta.ResetBaseline.ToString().ToLowerInvariant()}");
    }

    return builder.ToString();
}
/// <summary>
/// Packs metadata, placeholder indexes and collected data files into a
/// gzipped PAX tar archive with normalized entries (fixed timestamp,
/// zero uid/gid) so the output is reproducible.
/// </summary>
private MemoryStream CreateBundleArchive(
    IReadOnlyList<CollectedFile> files,
    string manifestYaml,
    string exportJson,
    string provenanceJson,
    string checksums,
    string readme,
    string verifyScript,
    MirrorBundleVariant variant)
{
    var stream = new MemoryStream();

    using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
    using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
    {
        // Metadata first; the verify script is the only executable metadata entry.
        (string Path, string Content, UnixFileMode Mode)[] metadataEntries =
        [
            ("manifest.yaml", manifestYaml, DefaultFileMode),
            ("export.json", exportJson, DefaultFileMode),
            ("provenance.json", provenanceJson, DefaultFileMode),
            ("checksums.txt", checksums, DefaultFileMode),
            ("README.md", readme, DefaultFileMode),
            ("verify-mirror.sh", verifyScript, ExecutableFileMode)
        ];

        foreach (var (path, content, mode) in metadataEntries)
        {
            WriteTextEntry(tar, path, content, mode);
        }

        // Index placeholders: empty JSON arrays.
        foreach (var index in new[] { "advisories", "vex", "sbom", "findings" })
        {
            WriteTextEntry(tar, $"indexes/{index}.index.json", "[]", DefaultFileMode);
        }

        // Data files, already sorted by bundle path upstream.
        foreach (var file in files)
        {
            WriteFileEntry(tar, file.BundlePath, file.SourcePath);
        }

        // Delta bundles carry (currently empty) removed-item lists.
        if (variant == MirrorBundleVariant.Delta)
        {
            foreach (var name in new[] { "advisories", "vex", "sboms" })
            {
                WriteTextEntry(tar, $"delta/removed/{name}.jsonl", "", DefaultFileMode);
            }
        }
    }

    // Overwrite the gzip MTIME field so the archive bytes stay reproducible.
    ApplyDeterministicGzipHeader(stream);
    return stream;
}

/// <summary>Writes an in-memory UTF-8 text file as a normalized tar entry.</summary>
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
{
    using var dataStream = new MemoryStream(Encoding.UTF8.GetBytes(content));
    var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
    {
        Mode = mode,
        ModificationTime = FixedTimestamp,
        Uid = 0,
        Gid = 0,
        UserName = string.Empty,
        GroupName = string.Empty,
        DataStream = dataStream
    };
    writer.WriteEntry(entry);
}
/// <summary>Streams a source file from disk into the archive as a normalized tar entry.</summary>
private static void WriteFileEntry(TarWriter writer, string bundlePath, string sourcePath)
{
    using var dataStream = new FileStream(
        sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);

    // Shell scripts are the only data entries marked executable.
    var isScript = bundlePath.EndsWith(".sh", StringComparison.Ordinal);

    var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
    {
        Mode = isScript ? ExecutableFileMode : DefaultFileMode,
        ModificationTime = FixedTimestamp,
        Uid = 0,
        Gid = 0,
        UserName = string.Empty,
        GroupName = string.Empty,
        DataStream = dataStream
    };
    writer.WriteEntry(entry);
}

/// <summary>
/// Rewrites the gzip header MTIME field (bytes 4-7, little-endian Unix
/// seconds) with the fixed build timestamp so identical inputs always yield
/// identical archive bytes.
/// </summary>
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
    // A gzip member header is 10 bytes; anything shorter means the writer failed.
    if (stream.Length < 10)
    {
        throw new InvalidOperationException("GZip header not fully written for mirror bundle.");
    }

    var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
    Span<byte> mtime = stackalloc byte[4];
    BinaryPrimitives.WriteInt32LittleEndian(mtime, seconds);

    var savedPosition = stream.Position;
    stream.Position = 4; // MTIME offset within the gzip header
    stream.Write(mtime);
    stream.Position = savedPosition;
}

/// <summary>
/// Lowercases a subject id and replaces anything outside letters, digits,
/// '-', '_' and '.' with '-' so it is safe as a single path segment;
/// falls back to "subject" for blank input.
/// </summary>
private static string SanitizeSegment(string value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return "subject";
    }

    var result = new StringBuilder(value.Length);
    foreach (var ch in value.Trim())
    {
        result.Append(
            char.IsLetterOrDigit(ch) ? char.ToLowerInvariant(ch)
            : ch is '-' or '_' or '.' ? ch
            : '-');
    }

    return result.Length == 0 ? "subject" : result.ToString();
}

/// <summary>Internal carrier for one validated, hashed data source.</summary>
private sealed record CollectedFile(
    MirrorBundleDataCategory Category,
    string BundlePath,
    string SourcePath,
    long SizeBytes,
    string Sha256,
    bool IsNormalized,
    string? SubjectId);
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.MirrorBundle;

/// <summary>Mirror bundle profile variant.</summary>
public enum MirrorBundleVariant
{
    /// <summary>Full mirror bundle containing a complete snapshot.</summary>
    Full = 1,

    /// <summary>Delta mirror bundle with changes since a base export.</summary>
    Delta = 2
}

/// <summary>Request to build a mirror bundle.</summary>
public sealed record MirrorBundleBuildRequest(
    Guid RunId,
    Guid TenantId,
    MirrorBundleVariant Variant,
    MirrorBundleSelectors Selectors,
    IReadOnlyList<MirrorBundleDataSource> DataSources,
    MirrorBundleEncryptionOptions? Encryption = null,
    MirrorBundleDeltaOptions? DeltaOptions = null,
    IReadOnlyDictionary<string, string>? Metadata = null);

/// <summary>Selectors that define the scope of data to include in the bundle.</summary>
public sealed record MirrorBundleSelectors(
    IReadOnlyList<string> Products,
    DateTimeOffset? TimeWindowFrom,
    DateTimeOffset? TimeWindowTo,
    IReadOnlyList<string>? Ecosystems = null);

/// <summary>Data source input for the mirror bundle.</summary>
public sealed record MirrorBundleDataSource(
    MirrorBundleDataCategory Category,
    string SourcePath,
    bool IsNormalized = false,
    string? SubjectId = null);

/// <summary>Category of data in a mirror bundle.</summary>
public enum MirrorBundleDataCategory
{
    Advisories = 1,
    Vex = 2,
    Sbom = 3,
    PolicySnapshot = 4,
    PolicyEvaluations = 5,
    VexConsensus = 6,
    Findings = 7
}

/// <summary>Encryption options for mirror bundles.</summary>
public sealed record MirrorBundleEncryptionOptions(
    MirrorBundleEncryptionMode Mode,
    IReadOnlyList<string> RecipientKeys,
    bool Strict = false);

/// <summary>Encryption mode for mirror bundles.</summary>
public enum MirrorBundleEncryptionMode
{
    None = 0,
    Age = 1,
    AesGcm = 2
}

/// <summary>Delta-specific options when building a delta mirror bundle.</summary>
public sealed record MirrorBundleDeltaOptions(
    string BaseExportId,
    string BaseManifestDigest,
    bool ResetBaseline = false);

/// <summary>Result of building a mirror bundle.</summary>
public sealed record MirrorBundleBuildResult(
    MirrorBundleManifest Manifest,
    // NOTE(review): despite the name, ManifestJson carries the manifest.yaml
    // text (BuildAsync passes manifestYaml here and SignManifestAsync signs it
    // with a +yaml payload type) — consider renaming in a breaking release.
    string ManifestJson,
    MirrorBundleExportDocument ExportDocument,
    string ExportDocumentJson,
    MirrorBundleProvenanceDocument ProvenanceDocument,
    string ProvenanceDocumentJson,
    string RootHash,
    MemoryStream BundleStream);

/// <summary>The manifest.yaml content as a structured object.</summary>
public sealed record MirrorBundleManifest(
    [property: JsonPropertyName("profile")] string Profile,
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("selectors")] MirrorBundleManifestSelectors Selectors,
    [property: JsonPropertyName("counts")] MirrorBundleManifestCounts Counts,
    [property: JsonPropertyName("artifacts")] IReadOnlyList<MirrorBundleArtifactEntry> Artifacts,
    [property: JsonPropertyName("encryption")] MirrorBundleManifestEncryption? Encryption,
    [property: JsonPropertyName("delta")] MirrorBundleManifestDelta? Delta);

/// <summary>Selector metadata in the manifest.</summary>
public sealed record MirrorBundleManifestSelectors(
    [property: JsonPropertyName("products")] IReadOnlyList<string> Products,
    [property: JsonPropertyName("timeWindow")] MirrorBundleTimeWindow? TimeWindow,
    [property: JsonPropertyName("ecosystems")] IReadOnlyList<string>? Ecosystems);

/// <summary>Time window for selectors.</summary>
public sealed record MirrorBundleTimeWindow(
    [property: JsonPropertyName("from")] DateTimeOffset From,
    [property: JsonPropertyName("to")] DateTimeOffset To);

/// <summary>Counts of various record types in the bundle.</summary>
public sealed record MirrorBundleManifestCounts(
    [property: JsonPropertyName("advisories")] int Advisories,
    [property: JsonPropertyName("vex")] int Vex,
    [property: JsonPropertyName("sboms")] int Sboms,
    [property: JsonPropertyName("policyEvaluations")] int PolicyEvaluations);

/// <summary>Artifact entry in the manifest.</summary>
public sealed record MirrorBundleArtifactEntry(
    [property: JsonPropertyName("path")] string Path,
    [property: JsonPropertyName("sha256")] string Sha256,
    [property: JsonPropertyName("bytes")] long Bytes,
    [property: JsonPropertyName("category")] string Category);

/// <summary>Encryption metadata in the manifest.</summary>
public sealed record MirrorBundleManifestEncryption(
    [property: JsonPropertyName("mode")] string Mode,
    [property: JsonPropertyName("strict")] bool Strict,
    [property: JsonPropertyName("recipients")] IReadOnlyList<string> Recipients);

/// <summary>Delta metadata in the manifest.</summary>
public sealed record MirrorBundleManifestDelta(
    [property: JsonPropertyName("baseExportId")] string BaseExportId,
    [property: JsonPropertyName("baseManifestDigest")] string BaseManifestDigest,
    [property: JsonPropertyName("resetBaseline")] bool ResetBaseline,
    [property: JsonPropertyName("added")] MirrorBundleDeltaCounts Added,
    [property: JsonPropertyName("changed")] MirrorBundleDeltaCounts Changed,
    [property: JsonPropertyName("removed")] MirrorBundleDeltaCounts Removed);

/// <summary>Delta change counts.</summary>
public sealed record MirrorBundleDeltaCounts(
    [property: JsonPropertyName("advisories")] int Advisories,
    [property: JsonPropertyName("vex")] int Vex,
    [property: JsonPropertyName("sboms")] int Sboms);

/// <summary>The export.json document for the bundle.</summary>
public sealed record MirrorBundleExportDocument(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("tenantId")] string TenantId,
    [property: JsonPropertyName("profile")] MirrorBundleExportProfile Profile,
    [property: JsonPropertyName("selectors")] MirrorBundleManifestSelectors Selectors,
    [property: JsonPropertyName("counts")] MirrorBundleManifestCounts Counts,
    [property: JsonPropertyName("artifacts")] IReadOnlyList<MirrorBundleArtifactEntry> Artifacts,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    // "sha256:"-prefixed digest of the manifest.yaml text.
    [property: JsonPropertyName("manifestDigest")] string ManifestDigest);

/// <summary>Export profile metadata.</summary>
public sealed record MirrorBundleExportProfile(
    [property: JsonPropertyName("kind")] string Kind,
    [property: JsonPropertyName("variant")] string Variant);

/// <summary>The provenance.json document for the bundle.</summary>
public sealed record MirrorBundleProvenanceDocument(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("tenantId")] string TenantId,
    [property: JsonPropertyName("subjects")] IReadOnlyList<MirrorBundleProvenanceSubject> Subjects,
    [property: JsonPropertyName("inputs")] MirrorBundleProvenanceInputs Inputs,
    [property: JsonPropertyName("builder")] MirrorBundleProvenanceBuilder Builder,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);

/// <summary>Subject entry in provenance (file name plus digest map, e.g. "sha256" -> hex).</summary>
public sealed record MirrorBundleProvenanceSubject(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("digest")] IReadOnlyDictionary<string, string> Digest);

/// <summary>Input references in provenance.</summary>
public sealed record MirrorBundleProvenanceInputs(
    [property: JsonPropertyName("findingsLedgerQueries")] IReadOnlyList<string> FindingsLedgerQueries,
    [property: JsonPropertyName("policySnapshotId")] string? PolicySnapshotId,
    [property: JsonPropertyName("sbomIdentifiers")] IReadOnlyList<string> SbomIdentifiers);

/// <summary>Builder metadata in provenance.</summary>
public sealed record MirrorBundleProvenanceBuilder(
    [property: JsonPropertyName("exporterVersion")] string ExporterVersion,
    [property: JsonPropertyName("adapterVersion")] string AdapterVersion);

/// <summary>DSSE signature envelope for mirror bundles (payload is base64).</summary>
public sealed record MirrorBundleDsseSignature(
    [property: JsonPropertyName("payloadType")] string PayloadType,
    [property: JsonPropertyName("payload")] string Payload,
    [property: JsonPropertyName("signatures")] IReadOnlyList<MirrorBundleDsseSignatureEntry> Signatures);

/// <summary>Signature entry within a DSSE envelope.</summary>
public sealed record MirrorBundleDsseSignatureEntry(
    [property: JsonPropertyName("sig")] string Signature,
    [property: JsonPropertyName("keyid")] string KeyId);

/// <summary>Interface for signing mirror bundle manifests using DSSE.</summary>
public interface IMirrorBundleManifestSigner
{
    /// <summary>Signs the export.json content and returns a DSSE envelope.</summary>
    Task<MirrorBundleDsseSignature> SignExportDocumentAsync(string exportJson, CancellationToken cancellationToken = default);

    /// <summary>Signs the manifest.yaml content and returns a DSSE envelope.</summary>
    Task<MirrorBundleDsseSignature> SignManifestAsync(string manifestYaml, CancellationToken cancellationToken = default);
}
/// <summary>Interface for signing mirror bundle archives.</summary>
public interface IMirrorBundleArchiveSigner
{
    /// <summary>Signs the bundle archive stream and returns a base64 signature.</summary>
    Task<string> SignArchiveAsync(Stream archiveStream, CancellationToken cancellationToken = default);
}

/// <summary>
/// HMAC-based signer for mirror bundle manifests implementing DSSE
/// (Dead Simple Signing Envelope) over the standard PAE encoding.
/// </summary>
public sealed class HmacMirrorBundleManifestSigner : IMirrorBundleManifestSigner, IMirrorBundleArchiveSigner
{
    private const string ExportPayloadType = "application/vnd.stellaops.mirror-bundle.export+json";
    private const string ManifestPayloadType = "application/vnd.stellaops.mirror-bundle.manifest+yaml";

    private readonly ICryptoHmac _cryptoHmac;
    private readonly byte[] _key;
    private readonly string _keyId;

    /// <summary>
    /// Creates the signer.
    /// </summary>
    /// <param name="cryptoHmac">HMAC primitive used for all signatures.</param>
    /// <param name="key">Signing key material; UTF-8 encoded before use.</param>
    /// <param name="keyId">Key id recorded in envelopes; blank falls back to "mirror-bundle-hmac".</param>
    /// <exception cref="ArgumentException"><paramref name="key"/> is null, empty or whitespace.</exception>
    public HmacMirrorBundleManifestSigner(ICryptoHmac cryptoHmac, string key, string keyId)
    {
        _cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
        if (string.IsNullOrWhiteSpace(key))
        {
            throw new ArgumentException("Signing key cannot be empty.", nameof(key));
        }

        _key = Encoding.UTF8.GetBytes(key);
        _keyId = string.IsNullOrWhiteSpace(keyId) ? "mirror-bundle-hmac" : keyId;
    }

    /// <inheritdoc />
    public Task<MirrorBundleDsseSignature> SignExportDocumentAsync(string exportJson, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(exportJson);
        cancellationToken.ThrowIfCancellationRequested();

        return Task.FromResult(CreateDsseEnvelope(ExportPayloadType, exportJson));
    }

    /// <inheritdoc />
    public Task<MirrorBundleDsseSignature> SignManifestAsync(string manifestYaml, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(manifestYaml);
        cancellationToken.ThrowIfCancellationRequested();

        return Task.FromResult(CreateDsseEnvelope(ManifestPayloadType, manifestYaml));
    }

    /// <inheritdoc />
    public async Task<string> SignArchiveAsync(Stream archiveStream, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(archiveStream);
        cancellationToken.ThrowIfCancellationRequested();

        if (!archiveStream.CanSeek)
        {
            throw new ArgumentException("Archive stream must support seeking for signing.", nameof(archiveStream));
        }

        // Sign from the start and rewind afterwards so callers can keep using the stream.
        archiveStream.Position = 0;
        var signature = await _cryptoHmac
            .ComputeHmacForPurposeAsync(_key, archiveStream, HmacPurpose.Signing, cancellationToken)
            .ConfigureAwait(false); // library code: never capture a sync context
        archiveStream.Position = 0;
        return Convert.ToBase64String(signature);
    }

    // Builds a DSSE envelope: the signature covers the PAE bytes, the payload
    // is carried base64-encoded per the DSSE spec.
    private MirrorBundleDsseSignature CreateDsseEnvelope(string payloadType, string payload)
    {
        var pae = CreatePreAuthenticationEncoding(payloadType, payload);
        var signature = _cryptoHmac.ComputeHmacBase64ForPurpose(_key, pae, HmacPurpose.Signing);

        return new MirrorBundleDsseSignature(
            payloadType,
            Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)),
            new[] { new MirrorBundleDsseSignatureEntry(signature, _keyId) });
    }
+ /// PAE format: "DSSEv1" + SP + length(payloadType) + SP + payloadType + SP + length(payload) + SP + payload + /// + private static byte[] CreatePreAuthenticationEncoding(string payloadType, string payload) + { + var typeBytes = Encoding.UTF8.GetBytes(payloadType); + var payloadBytes = Encoding.UTF8.GetBytes(payload); + var preamble = Encoding.UTF8.GetBytes("DSSEv1 "); + var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture); + var payloadLenStr = payloadBytes.Length.ToString(CultureInfo.InvariantCulture); + + var result = new List(preamble.Length + typeLenStr.Length + 1 + typeBytes.Length + 1 + payloadLenStr.Length + 1 + payloadBytes.Length); + + result.AddRange(preamble); + result.AddRange(Encoding.UTF8.GetBytes(typeLenStr)); + result.Add(0x20); // space + result.AddRange(typeBytes); + result.Add(0x20); // space + result.AddRange(Encoding.UTF8.GetBytes(payloadLenStr)); + result.Add(0x20); // space + result.AddRange(payloadBytes); + + return result.ToArray(); + } +} + +/// +/// Result of signing a mirror bundle. +/// +public sealed record MirrorBundleSigningResult( + MirrorBundleDsseSignature ExportSignature, + MirrorBundleDsseSignature ManifestSignature, + string ArchiveSignature); + +/// +/// Extension methods for mirror bundle signing. +/// +public static class MirrorBundleSigningExtensions +{ + /// + /// Signs all components of a mirror bundle build result. 
+ /// + public static async Task SignBundleAsync( + this MirrorBundleBuildResult buildResult, + IMirrorBundleManifestSigner manifestSigner, + IMirrorBundleArchiveSigner archiveSigner, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(buildResult); + ArgumentNullException.ThrowIfNull(manifestSigner); + ArgumentNullException.ThrowIfNull(archiveSigner); + + var exportSigTask = manifestSigner.SignExportDocumentAsync(buildResult.ExportDocumentJson, cancellationToken); + var manifestSigTask = manifestSigner.SignManifestAsync(buildResult.ManifestJson, cancellationToken); + var archiveSigTask = archiveSigner.SignArchiveAsync(buildResult.BundleStream, cancellationToken); + + await Task.WhenAll(exportSigTask, manifestSigTask, archiveSigTask); + + return new MirrorBundleSigningResult( + await exportSigTask, + await manifestSigTask, + await archiveSigTask); + } + + /// + /// Serializes a DSSE signature to JSON. + /// + public static string ToJson(this MirrorBundleDsseSignature signature) + { + return JsonSerializer.Serialize(signature, new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationEmitter.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationEmitter.cs new file mode 100644 index 000000000..346c33685 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationEmitter.cs @@ -0,0 +1,477 @@ +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; + +namespace StellaOps.ExportCenter.Core.Notifications; + +/// +/// Interface for emitting export notifications when bundles are ready. 
/// <summary>Emits export notifications when bundles are ready.</summary>
public interface IExportNotificationEmitter
{
    /// <summary>Emits an airgap-ready notification (sink first, then optional webhook).</summary>
    Task<ExportNotificationResult> EmitAirgapReadyAsync(
        ExportAirgapReadyNotification notification,
        CancellationToken cancellationToken = default);

    /// <summary>Emits to the timeline event sink for audit.</summary>
    Task<ExportNotificationResult> EmitToTimelineAsync(
        ExportAirgapReadyNotification notification,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Emitter for export notifications supporting NATS and webhook delivery.
/// Implements exponential backoff retry with DLQ routing.
/// </summary>
public sealed class ExportNotificationEmitter : IExportNotificationEmitter
{
    // snake_case payload property names; nulls omitted to keep messages compact.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    // Backoff schedule 1s..16s; attempts beyond the table reuse the last delay.
    private static readonly TimeSpan[] RetryDelays =
    [
        TimeSpan.FromSeconds(1),
        TimeSpan.FromSeconds(2),
        TimeSpan.FromSeconds(4),
        TimeSpan.FromSeconds(8),
        TimeSpan.FromSeconds(16)
    ];

    private readonly IExportNotificationSink _sink;
    private readonly IExportWebhookClient? _webhookClient;
    private readonly IExportNotificationDlq _dlq;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger _logger;
    private readonly ExportNotificationEmitterOptions _options;

    /// <summary>
    /// Creates the emitter. The webhook client is optional; webhook delivery
    /// additionally requires <c>options.WebhookEnabled</c>.
    /// </summary>
    public ExportNotificationEmitter(
        IExportNotificationSink sink,
        IExportNotificationDlq dlq,
        TimeProvider timeProvider,
        ILogger logger,
        ExportNotificationEmitterOptions? options = null,
        IExportWebhookClient? webhookClient = null)
    {
        _sink = sink ?? throw new ArgumentNullException(nameof(sink));
        _dlq = dlq ?? throw new ArgumentNullException(nameof(dlq));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options ?? ExportNotificationEmitterOptions.Default;
        _webhookClient = webhookClient;
    }

    /// <summary>
    /// Emits the airgap-ready event: publishes to the sink (with retry), then
    /// optionally delivers to the configured webhook. Either failure is routed
    /// to the DLQ and returned as the overall result.
    /// </summary>
    public async Task<ExportNotificationResult> EmitAirgapReadyAsync(
        ExportAirgapReadyNotification notification,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(notification);

        var payload = JsonSerializer.Serialize(notification, SerializerOptions);

        // Try NATS sink first
        var sinkResult = await EmitToSinkWithRetryAsync(
            ExportNotificationTypes.AirgapReady,
            payload,
            notification.ExportId,
            notification.BundleId,
            notification.TenantId,
            cancellationToken);

        if (!sinkResult.Success)
        {
            _logger.LogWarning(
                "Failed to emit airgap ready notification to sink for export {ExportId}: {Error}",
                notification.ExportId, sinkResult.ErrorMessage);

            await RouteToDlqAsync(notification, sinkResult, cancellationToken);
            return sinkResult;
        }

        // Try webhook delivery if configured
        if (_webhookClient is not null && _options.WebhookEnabled)
        {
            var webhookResult = await EmitToWebhookWithRetryAsync(
                notification,
                payload,
                cancellationToken);

            if (!webhookResult.Success)
            {
                _logger.LogWarning(
                    "Failed to deliver airgap ready notification to webhook for export {ExportId}: {Error}",
                    notification.ExportId, webhookResult.ErrorMessage);

                // NOTE(review): a webhook failure here returns a failed result
                // even though the sink publish already succeeded — confirm
                // callers treat this as "partially delivered" rather than
                // retrying the sink publish as well.
                await RouteToDlqAsync(notification, webhookResult, cancellationToken);
                return webhookResult;
            }
        }

        _logger.LogInformation(
            "Emitted airgap ready notification for export {ExportId} bundle {BundleId}",
            notification.ExportId, notification.BundleId);

        return sinkResult;
    }
notification.TenantId, + cancellationToken); + + if (result.Success) + { + _logger.LogDebug( + "Emitted timeline notification for export {ExportId}", + notification.ExportId); + } + + return result; + } + + private async Task EmitToSinkWithRetryAsync( + string channel, + string payload, + string exportId, + string bundleId, + string tenantId, + CancellationToken cancellationToken) + { + var attempt = 0; + string? lastError = null; + + while (attempt < _options.MaxRetries) + { + try + { + await _sink.PublishAsync(channel, payload, cancellationToken); + return new ExportNotificationResult(Success: true, AttemptCount: attempt + 1); + } + catch (Exception ex) when (IsTransient(ex) && attempt < _options.MaxRetries - 1) + { + lastError = ex.Message; + attempt++; + + var delay = attempt <= RetryDelays.Length + ? RetryDelays[attempt - 1] + : RetryDelays[^1]; + + _logger.LogWarning(ex, + "Transient failure emitting notification for export {ExportId}, attempt {Attempt}/{MaxRetries}", + exportId, attempt, _options.MaxRetries); + + await Task.Delay(delay, cancellationToken); + } + catch (Exception ex) + { + _logger.LogError(ex, + "Non-transient failure emitting notification for export {ExportId}", + exportId); + + return new ExportNotificationResult( + Success: false, + ErrorMessage: ex.Message, + AttemptCount: attempt + 1); + } + } + + return new ExportNotificationResult( + Success: false, + ErrorMessage: lastError ?? "Max retries exceeded", + AttemptCount: attempt); + } + + private async Task EmitToWebhookWithRetryAsync( + ExportAirgapReadyNotification notification, + string payload, + CancellationToken cancellationToken) + { + var attempt = 0; + int? lastStatus = null; + string? 
lastError = null; + + while (attempt < _options.MaxRetries) + { + try + { + var result = await _webhookClient!.DeliverAsync( + ExportNotificationTypes.AirgapReady, + payload, + _timeProvider.GetUtcNow(), + cancellationToken); + + if (result.Success) + { + return new ExportNotificationResult( + Success: true, + AttemptCount: attempt + 1, + LastResponseStatus: result.StatusCode); + } + + lastStatus = result.StatusCode; + lastError = result.ErrorMessage; + + if (!result.ShouldRetry) + { + return new ExportNotificationResult( + Success: false, + ErrorMessage: result.ErrorMessage, + AttemptCount: attempt + 1, + LastResponseStatus: result.StatusCode); + } + + attempt++; + + var delay = attempt <= RetryDelays.Length + ? RetryDelays[attempt - 1] + : RetryDelays[^1]; + + _logger.LogWarning( + "Webhook delivery failed for export {ExportId} with status {StatusCode}, attempt {Attempt}/{MaxRetries}", + notification.ExportId, result.StatusCode, attempt, _options.MaxRetries); + + await Task.Delay(delay, cancellationToken); + } + catch (Exception ex) when (IsTransient(ex) && attempt < _options.MaxRetries - 1) + { + lastError = ex.Message; + attempt++; + + var delay = attempt <= RetryDelays.Length + ? RetryDelays[attempt - 1] + : RetryDelays[^1]; + + await Task.Delay(delay, cancellationToken); + } + catch (Exception ex) + { + return new ExportNotificationResult( + Success: false, + ErrorMessage: ex.Message, + AttemptCount: attempt + 1, + LastResponseStatus: lastStatus); + } + } + + return new ExportNotificationResult( + Success: false, + ErrorMessage: lastError ?? 
"Max retries exceeded", + AttemptCount: attempt, + LastResponseStatus: lastStatus); + } + + private async Task RouteToDlqAsync( + ExportAirgapReadyNotification notification, + ExportNotificationResult result, + CancellationToken cancellationToken) + { + var payload = JsonSerializer.Serialize(notification, SerializerOptions); + + var dlqEntry = new ExportNotificationDlqEntry + { + EventType = ExportNotificationTypes.AirgapReady, + ExportId = notification.ExportId, + BundleId = notification.BundleId, + TenantId = notification.TenantId, + FailureReason = result.ErrorMessage ?? "Unknown failure", + LastResponseStatus = result.LastResponseStatus, + AttemptCount = result.AttemptCount, + LastAttemptAt = _timeProvider.GetUtcNow(), + OriginalPayload = payload + }; + + try + { + await _dlq.EnqueueAsync(dlqEntry, cancellationToken); + + _logger.LogInformation( + "Routed failed notification for export {ExportId} to DLQ after {AttemptCount} attempts", + notification.ExportId, result.AttemptCount); + } + catch (Exception ex) + { + _logger.LogError(ex, + "Failed to route notification for export {ExportId} to DLQ", + notification.ExportId); + } + } + + private static bool IsTransient(Exception ex) + { + return ex is TimeoutException or + TaskCanceledException or + HttpRequestException or + IOException; + } +} + +/// +/// Options for export notification emitter. +/// +public sealed record ExportNotificationEmitterOptions( + int MaxRetries, + bool WebhookEnabled, + TimeSpan WebhookTimeout) +{ + public static ExportNotificationEmitterOptions Default => new( + MaxRetries: 5, + WebhookEnabled: true, + WebhookTimeout: TimeSpan.FromSeconds(30)); +} + +/// +/// Sink for publishing export notifications (NATS, etc.). +/// +public interface IExportNotificationSink +{ + Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default); +} + +/// +/// Dead letter queue for failed notifications. 
+/// +public interface IExportNotificationDlq +{ + Task EnqueueAsync(ExportNotificationDlqEntry entry, CancellationToken cancellationToken = default); + + Task> GetPendingAsync( + string? tenantId = null, + int limit = 100, + CancellationToken cancellationToken = default); +} + +/// +/// Client for webhook delivery. +/// +public interface IExportWebhookClient +{ + Task DeliverAsync( + string eventType, + string payload, + DateTimeOffset sentAt, + CancellationToken cancellationToken = default); +} + +/// +/// Result of webhook delivery attempt. +/// +public sealed record WebhookDeliveryResult( + bool Success, + int? StatusCode, + string? ErrorMessage, + bool ShouldRetry); + +/// +/// In-memory implementation of notification sink for testing. +/// +public sealed class InMemoryExportNotificationSink : IExportNotificationSink +{ + private readonly List<(string Channel, string Message, DateTimeOffset ReceivedAt)> _messages = new(); + private readonly object _lock = new(); + private readonly TimeProvider _timeProvider; + + public InMemoryExportNotificationSink(TimeProvider? timeProvider = null) + { + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default) + { + lock (_lock) + { + _messages.Add((channel, message, _timeProvider.GetUtcNow())); + } + return Task.CompletedTask; + } + + public IReadOnlyList<(string Channel, string Message, DateTimeOffset ReceivedAt)> GetMessages() + { + lock (_lock) { return _messages.ToList(); } + } + + public IReadOnlyList GetMessages(string channel) + { + lock (_lock) { return _messages.Where(m => m.Channel == channel).Select(m => m.Message).ToList(); } + } + + public int Count + { + get { lock (_lock) { return _messages.Count; } } + } + + public void Clear() + { + lock (_lock) { _messages.Clear(); } + } +} + +/// +/// In-memory implementation of DLQ for testing. 
+/// +public sealed class InMemoryExportNotificationDlq : IExportNotificationDlq +{ + private readonly List _entries = new(); + private readonly object _lock = new(); + + public Task EnqueueAsync(ExportNotificationDlqEntry entry, CancellationToken cancellationToken = default) + { + lock (_lock) + { + _entries.Add(entry); + } + return Task.CompletedTask; + } + + public Task> GetPendingAsync( + string? tenantId = null, + int limit = 100, + CancellationToken cancellationToken = default) + { + lock (_lock) + { + var query = tenantId is not null + ? _entries.Where(e => e.TenantId == tenantId) + : _entries.AsEnumerable(); + + return Task.FromResult>( + query.Take(limit).ToList()); + } + } + + public IReadOnlyList GetAll() + { + lock (_lock) { return _entries.ToList(); } + } + + public int Count + { + get { lock (_lock) { return _entries.Count; } } + } + + public void Clear() + { + lock (_lock) { _entries.Clear(); } + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationModels.cs new file mode 100644 index 000000000..5637693ad --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/ExportNotificationModels.cs @@ -0,0 +1,133 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.Core.Notifications; + +/// +/// Export notification event types. +/// +public static class ExportNotificationTypes +{ + public const string AirgapReady = "export.airgap.ready.v1"; + public const string AirgapReadyDlq = "export.airgap.ready.dlq"; + public const string TimelineAirgapReady = "timeline.export.airgap.ready"; +} + +/// +/// Payload for export airgap ready notification. +/// Keys are sorted alphabetically for deterministic serialization. 
+/// +public sealed record ExportAirgapReadyNotification +{ + [JsonPropertyName("artifact_sha256")] + public required string ArtifactSha256 { get; init; } + + [JsonPropertyName("artifact_uri")] + public required string ArtifactUri { get; init; } + + [JsonPropertyName("bundle_id")] + public required string BundleId { get; init; } + + [JsonPropertyName("created_at")] + public required DateTimeOffset CreatedAt { get; init; } + + [JsonPropertyName("expires_at")] + public DateTimeOffset? ExpiresAt { get; init; } + + [JsonPropertyName("export_id")] + public required string ExportId { get; init; } + + [JsonPropertyName("metadata")] + public ExportAirgapReadyMetadata? Metadata { get; init; } + + [JsonPropertyName("portable_version")] + public required string PortableVersion { get; init; } + + [JsonPropertyName("profile_id")] + public required string ProfileId { get; init; } + + [JsonPropertyName("root_hash")] + public required string RootHash { get; init; } + + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + [JsonPropertyName("type")] + public string Type => ExportNotificationTypes.AirgapReady; +} + +/// +/// Metadata fields for the airgap ready notification. +/// +public sealed record ExportAirgapReadyMetadata +{ + [JsonPropertyName("export_size_bytes")] + public long? ExportSizeBytes { get; init; } + + [JsonPropertyName("portable_size_bytes")] + public long? PortableSizeBytes { get; init; } + + [JsonPropertyName("source_uri")] + public string? SourceUri { get; init; } +} + +/// +/// DLQ entry for failed notification delivery. 
/// </summary>
public sealed record ExportNotificationDlqEntry
{
    [JsonPropertyName("event_type")]
    public required string EventType { get; init; }

    [JsonPropertyName("export_id")]
    public required string ExportId { get; init; }

    [JsonPropertyName("bundle_id")]
    public required string BundleId { get; init; }

    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }

    [JsonPropertyName("failure_reason")]
    public required string FailureReason { get; init; }

    [JsonPropertyName("last_response_status")]
    public int? LastResponseStatus { get; init; }

    [JsonPropertyName("attempt_count")]
    public required int AttemptCount { get; init; }

    [JsonPropertyName("last_attempt_at")]
    public required DateTimeOffset LastAttemptAt { get; init; }

    [JsonPropertyName("original_payload")]
    public required string OriginalPayload { get; init; }
}

/// <summary>
/// Webhook delivery headers.
/// </summary>
public static class ExportNotificationHeaders
{
    public const string EventType = "X-Stella-Event-Type";
    public const string Signature = "X-Stella-Signature";
    public const string SentAt = "X-Stella-Sent-At";
}

/// <summary>
/// Configuration for notification delivery.
/// </summary>
public sealed record ExportNotificationConfig(
    bool Enabled,
    string? WebhookUrl,
    string? SigningKey,
    int MaxRetries = 5,
    TimeSpan? RetentionPeriod = null);

/// <summary>
/// Result of attempting to send a notification.
/// </summary>
public sealed record ExportNotificationResult(
    bool Success,
    string? ErrorMessage = null,
    int AttemptCount = 1,
    int? LastResponseStatus = null);

// ---------------------------------------------------------------------------
// New file in this patch:
// src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/
//   Notifications/ExportWebhookClient.cs
// ---------------------------------------------------------------------------

using System.Net;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;

namespace StellaOps.ExportCenter.Core.Notifications;

/// <summary>
/// HTTP webhook client for export notifications with HMAC-SHA256 signing.
/// </summary>
public sealed class ExportWebhookClient : IExportWebhookClient
{
    private readonly HttpClient _httpClient;
    private readonly ExportWebhookOptions _options;
    // NOTE(review): patch mangling stripped generic args; this was plausibly
    // ILogger<ExportWebhookClient> — confirm against the original file.
    private readonly ILogger _logger;

    public ExportWebhookClient(
        HttpClient httpClient,
        ExportWebhookOptions options,
        ILogger logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// POSTs <paramref name="payload"/> to the configured webhook URL with
    /// event-type/sent-at headers and, when a signing key is configured, an
    /// HMAC-SHA256 signature header. Never throws for HTTP-level failures;
    /// the result carries the status and whether a retry is worthwhile.
    /// NOTE(review): _options.Timeout is never applied to the request here —
    /// presumably enforced on the HttpClient by the caller; confirm.
    /// </summary>
    public async Task<WebhookDeliveryResult> DeliverAsync(
        string eventType,
        string payload,
        DateTimeOffset sentAt,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(_options.WebhookUrl))
        {
            return new WebhookDeliveryResult(
                Success: false,
                StatusCode: null,
                ErrorMessage: "Webhook URL not configured",
                ShouldRetry: false);
        }

        try
        {
            using var request = new HttpRequestMessage(HttpMethod.Post, _options.WebhookUrl);
            request.Content = new StringContent(payload, Encoding.UTF8, "application/json");

            // Add standard headers
            request.Headers.Add(ExportNotificationHeaders.EventType, eventType);
            request.Headers.Add(ExportNotificationHeaders.SentAt, sentAt.ToString("O"));

            // Add signature if signing key is configured
            if (!string.IsNullOrWhiteSpace(_options.SigningKey))
            {
                var signature = ComputeSignature(payload, sentAt, _options.SigningKey);
                request.Headers.Add(ExportNotificationHeaders.Signature, signature);
            }

            var response = await _httpClient.SendAsync(request, cancellationToken);
            var statusCode = (int)response.StatusCode;

            if (response.IsSuccessStatusCode)
            {
                _logger.LogDebug(
                    "Webhook delivery succeeded with status {StatusCode}",
                    statusCode);

                return new WebhookDeliveryResult(
                    Success: true,
                    StatusCode: statusCode,
                    ErrorMessage: null,
                    ShouldRetry: false);
            }

            var shouldRetry = ShouldRetryStatusCode(response.StatusCode);
            var errorMessage = $"HTTP {statusCode}: {response.ReasonPhrase}";

            _logger.LogWarning(
                "Webhook delivery failed with status {StatusCode}, shouldRetry={ShouldRetry}",
                statusCode, shouldRetry);

            return new WebhookDeliveryResult(
                Success: false,
                StatusCode: statusCode,
                ErrorMessage: errorMessage,
                ShouldRetry: shouldRetry);
        }
        catch (TaskCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Caller-initiated cancellation propagates; timeouts fall through below.
            throw;
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "Webhook delivery failed with HTTP error");

            return new WebhookDeliveryResult(
                Success: false,
                StatusCode: null,
                ErrorMessage: ex.Message,
                ShouldRetry: true);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Webhook delivery failed with unexpected error");

            return new WebhookDeliveryResult(
                Success: false,
                StatusCode: null,
                ErrorMessage: ex.Message,
                ShouldRetry: false);
        }
    }

    /// <summary>
    /// Computes HMAC-SHA256 signature for webhook payload.
    /// Format: sha256=&lt;hex-encoded-hmac&gt;.
    /// The key is decoded as base64 first, then hex, then raw UTF-8 bytes.
    /// </summary>
    public static string ComputeSignature(string payload, DateTimeOffset sentAt, string signingKey)
    {
        // PAE (Pre-Authentication Encoding) style: timestamp.payload
        var signatureInput = $"{sentAt:O}.{payload}";
        var inputBytes = Encoding.UTF8.GetBytes(signatureInput);

        byte[] keyBytes;
        try
        {
            keyBytes = Convert.FromBase64String(signingKey);
        }
        catch (FormatException)
        {
            try
            {
                keyBytes = Convert.FromHexString(signingKey);
            }
            catch (FormatException)
            {
                keyBytes = Encoding.UTF8.GetBytes(signingKey);
            }
        }

        using var hmac = new HMACSHA256(keyBytes);
        var hash = hmac.ComputeHash(inputBytes);
        return "sha256=" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Verifies a webhook signature using a constant-time comparison.
    /// </summary>
    public static bool VerifySignature(string payload, DateTimeOffset sentAt, string signingKey, string providedSignature)
    {
        var expectedSignature = ComputeSignature(payload, sentAt, signingKey);
        return CryptographicOperations.FixedTimeEquals(
            Encoding.UTF8.GetBytes(expectedSignature),
            Encoding.UTF8.GetBytes(providedSignature.Trim()));
    }

    /// <summary>
    /// Retry only on timeout/throttle/5xx-style statuses; other codes are permanent.
    /// </summary>
    private static bool ShouldRetryStatusCode(HttpStatusCode statusCode)
    {
        return statusCode switch
        {
            HttpStatusCode.RequestTimeout => true,
            HttpStatusCode.TooManyRequests => true,
            HttpStatusCode.InternalServerError => true,
            HttpStatusCode.BadGateway => true,
            HttpStatusCode.ServiceUnavailable => true,
            HttpStatusCode.GatewayTimeout => true,
            _ => false
        };
    }
}

/// <summary>
/// Options for export webhook client.
/// </summary>
public sealed record ExportWebhookOptions(
    string? WebhookUrl,
    string? SigningKey,
    TimeSpan Timeout)
{
    public static ExportWebhookOptions Default => new(
        WebhookUrl: null,
        SigningKey: null,
        Timeout: TimeSpan.FromSeconds(30));
}

/// <summary>
/// Null implementation of webhook client for when webhooks are disabled.
+/// +public sealed class NullExportWebhookClient : IExportWebhookClient +{ + public static NullExportWebhookClient Instance { get; } = new(); + + private NullExportWebhookClient() { } + + public Task DeliverAsync( + string eventType, + string payload, + DateTimeOffset sentAt, + CancellationToken cancellationToken = default) + { + return Task.FromResult(new WebhookDeliveryResult( + Success: true, + StatusCode: 200, + ErrorMessage: null, + ShouldRetry: false)); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitDistributor.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitDistributor.cs new file mode 100644 index 000000000..62806f0e8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitDistributor.cs @@ -0,0 +1,289 @@ +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Cryptography; + +namespace StellaOps.ExportCenter.Core.OfflineKit; + +/// +/// Distributes offline kits to mirror locations for air-gap deployment. +/// Implements EXPORT-ATTEST-75-002: bit-for-bit copy with manifest publication. +/// +public sealed class OfflineKitDistributor +{ + private const string ManifestOfflineFileName = "manifest-offline.json"; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private readonly ICryptoHash _cryptoHash; + private readonly TimeProvider _timeProvider; + + public OfflineKitDistributor(ICryptoHash cryptoHash, TimeProvider? timeProvider = null) + { + _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + /// + /// Distributes an offline kit to a mirror location. 
+ /// + public OfflineKitDistributionResult DistributeToMirror( + string sourceKitPath, + string mirrorBasePath, + string kitVersion, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sourceKitPath); + ArgumentException.ThrowIfNullOrWhiteSpace(mirrorBasePath); + ArgumentException.ThrowIfNullOrWhiteSpace(kitVersion); + + cancellationToken.ThrowIfCancellationRequested(); + + if (!Directory.Exists(sourceKitPath)) + { + return OfflineKitDistributionResult.Failed($"Source kit directory not found: {sourceKitPath}"); + } + + try + { + // Target path: mirror/export/attestations/{kitVersion}/ + var targetPath = Path.Combine(mirrorBasePath, "export", "attestations", kitVersion); + + // Ensure target directory exists + Directory.CreateDirectory(targetPath); + + // Copy all files bit-for-bit + var copiedFiles = CopyKitFilesRecursively(sourceKitPath, targetPath); + + // Build manifest entries + var entries = new List(); + + // Check for attestation bundle + var attestationBundlePath = Path.Combine(targetPath, "attestations", "export-attestation-bundle-v1.tgz"); + if (File.Exists(attestationBundlePath)) + { + var bundleBytes = File.ReadAllBytes(attestationBundlePath); + var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content); + + entries.Add(new OfflineKitManifestEntry( + Kind: "attestation-kit", + KitVersion: kitVersion, + Artifact: "attestations/export-attestation-bundle-v1.tgz", + Checksum: "checksums/attestations/export-attestation-bundle-v1.tgz.sha256", + CliExample: "stella attest bundle verify --file attestations/export-attestation-bundle-v1.tgz", + ImportExample: "stella attest bundle import --file attestations/export-attestation-bundle-v1.tgz --offline", + RootHash: $"sha256:{bundleHash}", + CreatedAt: _timeProvider.GetUtcNow())); + } + + // Check for mirror bundle + var mirrorBundlePath = Path.Combine(targetPath, "mirrors", "export-mirror-bundle-v1.tgz"); + if (File.Exists(mirrorBundlePath)) 
+ { + var bundleBytes = File.ReadAllBytes(mirrorBundlePath); + var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content); + + entries.Add(new OfflineKitManifestEntry( + Kind: "mirror-bundle", + KitVersion: kitVersion, + Artifact: "mirrors/export-mirror-bundle-v1.tgz", + Checksum: "checksums/mirrors/export-mirror-bundle-v1.tgz.sha256", + CliExample: "stella mirror verify --file mirrors/export-mirror-bundle-v1.tgz", + ImportExample: "stella mirror import --file mirrors/export-mirror-bundle-v1.tgz --offline", + RootHash: $"sha256:{bundleHash}", + CreatedAt: _timeProvider.GetUtcNow())); + } + + // Check for bootstrap pack + var bootstrapPackPath = Path.Combine(targetPath, "bootstrap", "export-bootstrap-pack-v1.tgz"); + if (File.Exists(bootstrapPackPath)) + { + var bundleBytes = File.ReadAllBytes(bootstrapPackPath); + var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content); + + entries.Add(new OfflineKitManifestEntry( + Kind: "bootstrap-pack", + KitVersion: kitVersion, + Artifact: "bootstrap/export-bootstrap-pack-v1.tgz", + Checksum: "checksums/bootstrap/export-bootstrap-pack-v1.tgz.sha256", + CliExample: "stella bootstrap verify --file bootstrap/export-bootstrap-pack-v1.tgz", + ImportExample: "stella bootstrap import --file bootstrap/export-bootstrap-pack-v1.tgz --offline", + RootHash: $"sha256:{bundleHash}", + CreatedAt: _timeProvider.GetUtcNow())); + } + + // Write manifest-offline.json + var manifest = new OfflineKitOfflineManifest( + Version: "offline-kit/v1", + KitVersion: kitVersion, + CreatedAt: _timeProvider.GetUtcNow(), + Entries: entries); + + var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions); + var manifestPath = Path.Combine(targetPath, ManifestOfflineFileName); + File.WriteAllText(manifestPath, manifestJson, Encoding.UTF8); + + // Write manifest checksum + var manifestHash = _cryptoHash.ComputeHashHexForPurpose( + Encoding.UTF8.GetBytes(manifestJson), HashPurpose.Content); 
+ var manifestChecksumPath = manifestPath + ".sha256"; + File.WriteAllText(manifestChecksumPath, $"{manifestHash} {ManifestOfflineFileName}", Encoding.UTF8); + + return new OfflineKitDistributionResult( + Success: true, + TargetPath: targetPath, + ManifestPath: manifestPath, + CopiedFileCount: copiedFiles, + EntryCount: entries.Count); + } + catch (Exception ex) + { + return OfflineKitDistributionResult.Failed($"Distribution failed: {ex.Message}"); + } + } + + /// + /// Verifies that a distributed kit matches its source. + /// + public OfflineKitVerificationResult VerifyDistribution( + string sourceKitPath, + string targetKitPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sourceKitPath); + ArgumentException.ThrowIfNullOrWhiteSpace(targetKitPath); + + if (!Directory.Exists(sourceKitPath)) + { + return OfflineKitVerificationResult.Failed($"Source kit not found: {sourceKitPath}"); + } + + if (!Directory.Exists(targetKitPath)) + { + return OfflineKitVerificationResult.Failed($"Target kit not found: {targetKitPath}"); + } + + var mismatches = new List(); + + // Get all files in source + var sourceFiles = Directory.GetFiles(sourceKitPath, "*", SearchOption.AllDirectories) + .Select(f => Path.GetRelativePath(sourceKitPath, f)) + .OrderBy(f => f, StringComparer.Ordinal) + .ToList(); + + foreach (var relativePath in sourceFiles) + { + cancellationToken.ThrowIfCancellationRequested(); + + var sourceFilePath = Path.Combine(sourceKitPath, relativePath); + var targetFilePath = Path.Combine(targetKitPath, relativePath); + + if (!File.Exists(targetFilePath)) + { + mismatches.Add($"Missing: {relativePath}"); + continue; + } + + // Compare hashes + var sourceBytes = File.ReadAllBytes(sourceFilePath); + var targetBytes = File.ReadAllBytes(targetFilePath); + + var sourceHash = _cryptoHash.ComputeHashHexForPurpose(sourceBytes, HashPurpose.Content); + var targetHash = _cryptoHash.ComputeHashHexForPurpose(targetBytes, 
HashPurpose.Content); + + if (!string.Equals(sourceHash, targetHash, StringComparison.OrdinalIgnoreCase)) + { + mismatches.Add($"Hash mismatch: {relativePath}"); + } + } + + if (mismatches.Count > 0) + { + return new OfflineKitVerificationResult( + Success: false, + Mismatches: mismatches, + ErrorMessage: $"Found {mismatches.Count} mismatches"); + } + + return new OfflineKitVerificationResult( + Success: true, + Mismatches: Array.Empty()); + } + + private static int CopyKitFilesRecursively(string sourceDir, string targetDir) + { + var count = 0; + + foreach (var sourceFilePath in Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories)) + { + var relativePath = Path.GetRelativePath(sourceDir, sourceFilePath); + var targetFilePath = Path.Combine(targetDir, relativePath); + + var targetFileDir = Path.GetDirectoryName(targetFilePath); + if (!string.IsNullOrEmpty(targetFileDir)) + { + Directory.CreateDirectory(targetFileDir); + } + + // Bit-for-bit copy + File.Copy(sourceFilePath, targetFilePath, overwrite: true); + count++; + } + + return count; + } +} + +/// +/// Manifest entry for offline kit distribution. +/// +public sealed record OfflineKitManifestEntry( + [property: JsonPropertyName("kind")] string Kind, + [property: JsonPropertyName("kitVersion")] string KitVersion, + [property: JsonPropertyName("artifact")] string Artifact, + [property: JsonPropertyName("checksum")] string Checksum, + [property: JsonPropertyName("cliExample")] string CliExample, + [property: JsonPropertyName("importExample")] string ImportExample, + [property: JsonPropertyName("rootHash")] string RootHash, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); + +/// +/// Offline manifest for air-gap deployment. 
+/// +public sealed record OfflineKitOfflineManifest( + [property: JsonPropertyName("version")] string Version, + [property: JsonPropertyName("kitVersion")] string KitVersion, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt, + [property: JsonPropertyName("entries")] IReadOnlyList Entries); + +/// +/// Result of offline kit distribution. +/// +public sealed record OfflineKitDistributionResult( + bool Success, + string? TargetPath = null, + string? ManifestPath = null, + int CopiedFileCount = 0, + int EntryCount = 0, + string? ErrorMessage = null) +{ + public static OfflineKitDistributionResult Failed(string errorMessage) + => new(Success: false, ErrorMessage: errorMessage); +} + +/// +/// Result of offline kit verification. +/// +public sealed record OfflineKitVerificationResult( + bool Success, + IReadOnlyList Mismatches, + string? ErrorMessage = null) +{ + public static OfflineKitVerificationResult Failed(string errorMessage) + => new(Success: false, Mismatches: Array.Empty(), ErrorMessage: errorMessage); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitModels.cs new file mode 100644 index 000000000..5f15525af --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/OfflineKitModels.cs @@ -0,0 +1,120 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.Core.OfflineKit; + +/// +/// Manifest entry for an attestation bundle in an offline kit. 
/// <summary>
/// Manifest entry for an attestation bundle in an offline kit.
/// </summary>
public sealed record OfflineKitAttestationEntry(
    [property: JsonPropertyName("kind")] string Kind,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("attestationId")] string AttestationId,
    [property: JsonPropertyName("rootHash")] string RootHash,
    [property: JsonPropertyName("artifact")] string Artifact,
    [property: JsonPropertyName("checksum")] string Checksum,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
    /// <summary>Discriminator expected in <see cref="Kind"/> for entries of this type.</summary>
    public const string KindValue = "attestation-export";
}

/// <summary>
/// Manifest entry for a mirror bundle in an offline kit.
/// </summary>
public sealed record OfflineKitMirrorEntry(
    [property: JsonPropertyName("kind")] string Kind,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("profile")] string Profile,
    [property: JsonPropertyName("rootHash")] string RootHash,
    [property: JsonPropertyName("artifact")] string Artifact,
    [property: JsonPropertyName("checksum")] string Checksum,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
    /// <summary>Discriminator expected in <see cref="Kind"/> for entries of this type.</summary>
    public const string KindValue = "mirror-bundle";
}

/// <summary>
/// Manifest entry for a bootstrap pack in an offline kit.
/// </summary>
public sealed record OfflineKitBootstrapEntry(
    [property: JsonPropertyName("kind")] string Kind,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("rootHash")] string RootHash,
    [property: JsonPropertyName("artifact")] string Artifact,
    [property: JsonPropertyName("checksum")] string Checksum,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
    /// <summary>Discriminator expected in <see cref="Kind"/> for entries of this type.</summary>
    public const string KindValue = "bootstrap-pack";
}

/// <summary>
/// Manifest entry for a portable evidence bundle in an offline kit.
/// </summary>
public sealed record OfflineKitPortableEvidenceEntry(
    [property: JsonPropertyName("kind")] string Kind,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("rootHash")] string RootHash,
    [property: JsonPropertyName("artifact")] string Artifact,
    [property: JsonPropertyName("checksum")] string Checksum,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
    /// <summary>Discriminator expected in <see cref="Kind"/> for entries of this type.</summary>
    public const string KindValue = "portable-evidence";
}

/// <summary>
/// Root manifest for an offline kit. Lists every artefact placed in the kit together
/// with its checksum location, so consumers can verify contents offline.
/// </summary>
public sealed record OfflineKitManifest(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("kitId")] string KitId,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    // NOTE(review): the generic argument was lost in extraction of this source; the
    // entry records share no common base type, so object is assumed — confirm the
    // original declaration (a shared base/interface for kit entries is also plausible).
    [property: JsonPropertyName("entries")] IReadOnlyList<object> Entries)
{
    /// <summary>Schema identifier for the current manifest format.</summary>
    public const string CurrentVersion = "offline-kit/v1";
}

/// <summary>
/// Request to add an attestation bundle to an offline kit.
/// </summary>
public sealed record OfflineKitAttestationRequest(
    string KitId,
    string ExportId,
    string AttestationId,
    string RootHash,
    byte[] BundleBytes,
    DateTimeOffset CreatedAt);

/// <summary>
/// Request to add a mirror bundle to an offline kit.
/// </summary>
public sealed record OfflineKitMirrorRequest(
    string KitId,
    string ExportId,
    string BundleId,
    string Profile,
    string RootHash,
    byte[] BundleBytes,
    DateTimeOffset CreatedAt);

/// <summary>
/// Request to add a bootstrap pack to an offline kit.
/// </summary>
public sealed record OfflineKitBootstrapRequest(
    string KitId,
    string ExportId,
    string Version,
    string RootHash,
    byte[] BundleBytes,
    DateTimeOffset CreatedAt);

/// <summary>
/// Result of adding an entry to an offline kit. Paths are relative to the kit root;
/// <see cref="ErrorMessage"/> is null on success.
/// </summary>
public sealed record OfflineKitAddResult(
    bool Success,
    string ArtifactPath,
    string ChecksumPath,
    string Sha256Hash,
    string? ErrorMessage = null);
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;

namespace StellaOps.ExportCenter.Core.OfflineKit;

/// <summary>
/// Packager for assembling offline kits with attestation bundles, mirror bundles, and bootstrap packs.
/// Ensures immutable, deterministic artefact placement with checksum publication: every artefact is
/// written exactly once (re-adds fail), bit-for-bit, alongside a sha256sum-compatible checksum file.
/// </summary>
public sealed class OfflineKitPackager
{
    // Fixed directory layout inside a kit; relative paths below these roots are what
    // gets recorded in the manifest, always with forward slashes.
    private const string AttestationsDir = "attestations";
    private const string MirrorsDir = "mirrors";
    private const string BootstrapDir = "bootstrap";
    private const string EvidenceDir = "evidence";
    private const string ChecksumsDir = "checksums";
    private const string ManifestFileName = "manifest.json";

    // Canonical artefact file names (versioned so a layout change bumps the suffix).
    private const string AttestationBundleFileName = "export-attestation-bundle-v1.tgz";
    private const string MirrorBundleFileName = "export-mirror-bundle-v1.tgz";
    private const string BootstrapBundleFileName = "export-bootstrap-pack-v1.tgz";
    private const string EvidenceBundleFileName = "export-portable-bundle-v1.tgz";

    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly ICryptoHash _cryptoHash;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a packager.
    /// </summary>
    /// <param name="cryptoHash">Hash provider used for SHA-256 checksums.</param>
    /// <param name="timeProvider">Clock for manifest timestamps; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="cryptoHash"/> is null.</exception>
    public OfflineKitPackager(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Adds an attestation bundle to the offline kit under <c>attestations/</c>.
    /// </summary>
    public OfflineKitAddResult AddAttestationBundle(
        string outputDirectory,
        OfflineKitAttestationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        return AddBundleCore(outputDirectory, request.BundleBytes, AttestationsDir, AttestationBundleFileName, cancellationToken);
    }

    /// <summary>
    /// Adds a mirror bundle to the offline kit under <c>mirrors/</c>.
    /// </summary>
    public OfflineKitAddResult AddMirrorBundle(
        string outputDirectory,
        OfflineKitMirrorRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        return AddBundleCore(outputDirectory, request.BundleBytes, MirrorsDir, MirrorBundleFileName, cancellationToken);
    }

    /// <summary>
    /// Adds a bootstrap pack to the offline kit under <c>bootstrap/</c>.
    /// </summary>
    public OfflineKitAddResult AddBootstrapPack(
        string outputDirectory,
        OfflineKitBootstrapRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        return AddBundleCore(outputDirectory, request.BundleBytes, BootstrapDir, BootstrapBundleFileName, cancellationToken);
    }

    /// <summary>
    /// Creates a manifest entry for an attestation bundle.
    /// </summary>
    /// <remarks>
    /// NOTE(review): <paramref name="sha256Hash"/> is accepted but not recorded — the entry's
    /// Checksum field stores the checksum file's path, not the digest. Kept for signature
    /// compatibility; confirm whether the digest was meant to be embedded.
    /// </remarks>
    public OfflineKitAttestationEntry CreateAttestationEntry(OfflineKitAttestationRequest request, string sha256Hash)
    {
        return new OfflineKitAttestationEntry(
            Kind: OfflineKitAttestationEntry.KindValue,
            ExportId: request.ExportId,
            AttestationId: request.AttestationId,
            RootHash: $"sha256:{request.RootHash}",
            Artifact: ToManifestPath(AttestationsDir, AttestationBundleFileName),
            Checksum: ToManifestPath(ChecksumsDir, AttestationsDir, $"{AttestationBundleFileName}.sha256"),
            CreatedAt: request.CreatedAt);
    }

    /// <summary>
    /// Creates a manifest entry for a mirror bundle.
    /// </summary>
    public OfflineKitMirrorEntry CreateMirrorEntry(OfflineKitMirrorRequest request, string sha256Hash)
    {
        return new OfflineKitMirrorEntry(
            Kind: OfflineKitMirrorEntry.KindValue,
            ExportId: request.ExportId,
            BundleId: request.BundleId,
            Profile: request.Profile,
            RootHash: $"sha256:{request.RootHash}",
            Artifact: ToManifestPath(MirrorsDir, MirrorBundleFileName),
            Checksum: ToManifestPath(ChecksumsDir, MirrorsDir, $"{MirrorBundleFileName}.sha256"),
            CreatedAt: request.CreatedAt);
    }

    /// <summary>
    /// Creates a manifest entry for a bootstrap pack.
    /// </summary>
    public OfflineKitBootstrapEntry CreateBootstrapEntry(OfflineKitBootstrapRequest request, string sha256Hash)
    {
        return new OfflineKitBootstrapEntry(
            Kind: OfflineKitBootstrapEntry.KindValue,
            ExportId: request.ExportId,
            Version: request.Version,
            RootHash: $"sha256:{request.RootHash}",
            Artifact: ToManifestPath(BootstrapDir, BootstrapBundleFileName),
            Checksum: ToManifestPath(ChecksumsDir, BootstrapDir, $"{BootstrapBundleFileName}.sha256"),
            CreatedAt: request.CreatedAt);
    }

    /// <summary>
    /// Writes the offline kit manifest. Manifests are immutable: an existing manifest is never overwritten.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when a manifest already exists at the target path.</exception>
    // NOTE(review): the generic argument of entries was lost in extraction of this source;
    // object is assumed to match OfflineKitManifest.Entries — confirm the original declaration.
    public void WriteManifest(
        string outputDirectory,
        string kitId,
        IEnumerable<object> entries,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(outputDirectory))
        {
            throw new ArgumentException("Output directory must be provided.", nameof(outputDirectory));
        }

        ArgumentNullException.ThrowIfNull(entries);
        cancellationToken.ThrowIfCancellationRequested();

        var manifestPath = Path.Combine(outputDirectory, ManifestFileName);

        // Immutability check: a kit's manifest is written exactly once.
        if (File.Exists(manifestPath))
        {
            throw new InvalidOperationException($"Manifest already exists at '{manifestPath}'. Offline kit artefacts are immutable.");
        }

        var manifest = new OfflineKitManifest(
            Version: OfflineKitManifest.CurrentVersion,
            KitId: kitId,
            CreatedAt: _timeProvider.GetUtcNow(),
            Entries: entries.ToList());

        var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions);

        Directory.CreateDirectory(outputDirectory);
        File.WriteAllText(manifestPath, manifestJson, Encoding.UTF8);
    }

    /// <summary>
    /// Generates checksum file content in the GNU coreutils format understood by
    /// <c>sha256sum --check</c>: the hex digest, two spaces, then the file name.
    /// </summary>
    public static string GenerateChecksumFileContent(string sha256Hash, string fileName)
    {
        // Two separator spaces are required ("HASH  FILE"); a single space is rejected
        // by `sha256sum -c` as an improperly formatted line.
        return $"{sha256Hash}  {fileName}";
    }

    /// <summary>
    /// Verifies that a bundle matches its expected SHA-256 hash (case-insensitive hex compare).
    /// </summary>
    public bool VerifyBundleHash(byte[] bundleBytes, string expectedSha256)
    {
        var actualHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
        return string.Equals(actualHash, expectedSha256, StringComparison.OrdinalIgnoreCase);
    }

    // Shared validation + path computation for the three Add*Bundle entry points.
    private OfflineKitAddResult AddBundleCore(
        string outputDirectory,
        byte[] bundleBytes,
        string subDirectory,
        string fileName,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(outputDirectory))
        {
            throw new ArgumentException("Output directory must be provided.", nameof(outputDirectory));
        }

        cancellationToken.ThrowIfCancellationRequested();

        var artifactRelativePath = Path.Combine(subDirectory, fileName);
        var checksumRelativePath = Path.Combine(ChecksumsDir, subDirectory, $"{fileName}.sha256");

        return WriteBundle(outputDirectory, bundleBytes, artifactRelativePath, checksumRelativePath, fileName);
    }

    // Manifest paths always use forward slashes regardless of host OS.
    private static string ToManifestPath(params string[] segments)
        => Path.Combine(segments).Replace('\\', '/');

    // Writes the bundle bit-for-bit plus its checksum file; refuses to overwrite an
    // existing artefact (immutability). Failures are reported via the result, not thrown.
    private OfflineKitAddResult WriteBundle(
        string outputDirectory,
        byte[] bundleBytes,
        string artifactRelativePath,
        string checksumRelativePath,
        string fileName)
    {
        try
        {
            var sha256Hash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);

            var artifactFullPath = Path.Combine(outputDirectory, artifactRelativePath);
            var checksumFullPath = Path.Combine(outputDirectory, checksumRelativePath);

            // Immutability: an artefact placed in a kit is never replaced.
            if (File.Exists(artifactFullPath))
            {
                return new OfflineKitAddResult(
                    Success: false,
                    ArtifactPath: artifactRelativePath,
                    ChecksumPath: checksumRelativePath,
                    Sha256Hash: sha256Hash,
                    ErrorMessage: $"Artifact already exists at '{artifactFullPath}'. Offline kit artefacts are immutable.");
            }

            var artifactDir = Path.GetDirectoryName(artifactFullPath);
            var checksumDir = Path.GetDirectoryName(checksumFullPath);

            if (!string.IsNullOrEmpty(artifactDir))
            {
                Directory.CreateDirectory(artifactDir);
            }

            if (!string.IsNullOrEmpty(checksumDir))
            {
                Directory.CreateDirectory(checksumDir);
            }

            // Bit-for-bit copy of the caller-supplied bundle.
            File.WriteAllBytes(artifactFullPath, bundleBytes);

            var checksumContent = GenerateChecksumFileContent(sha256Hash, fileName);
            File.WriteAllText(checksumFullPath, checksumContent, Encoding.UTF8);

            return new OfflineKitAddResult(
                Success: true,
                ArtifactPath: artifactRelativePath,
                ChecksumPath: checksumRelativePath,
                Sha256Hash: sha256Hash);
        }
        catch (Exception ex)
        {
            // Deliberate broad catch: packaging reports failures as results so a kit
            // build can surface per-artefact errors instead of aborting wholesale.
            return new OfflineKitAddResult(
                Success: false,
                ArtifactPath: artifactRelativePath,
                ChecksumPath: checksumRelativePath,
                Sha256Hash: string.Empty,
                ErrorMessage: ex.Message);
        }
    }
}
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;

namespace StellaOps.ExportCenter.Core.PortableEvidence;

/// <summary>
/// Builds portable evidence export archives that wrap EvidenceLocker portable bundles for air-gap delivery.
/// The archive layout is deterministic for identical inputs: fixed tar entry metadata, lexically ordered
/// entries, and a pinned gzip MTIME header.
/// </summary>
public sealed class PortableEvidenceExportBuilder
{
    private const string ExportVersion = "portable-evidence/v1";
    private const string PortableBundleVersion = "v1";
    private const string InnerBundleFileName = "portable-bundle-v1.tgz";
    private const string ExportArchiveFileName = "export-portable-bundle-v1.tgz";

    // Fixed timestamp applied to tar entries and the gzip header so archives are byte-reproducible.
    private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    private static readonly UnixFileMode DefaultFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;

    private static readonly UnixFileMode ExecutableFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
        UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
        UnixFileMode.OtherRead | UnixFileMode.OtherExecute;

    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly ICryptoHash _cryptoHash;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a builder.
    /// </summary>
    /// <param name="cryptoHash">Hash provider used for SHA-256 digests.</param>
    /// <param name="timeProvider">Clock for the export document's createdAtUtc; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="cryptoHash"/> is null.</exception>
    public PortableEvidenceExportBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds a portable evidence export archive from the provided request.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when any identifier or the bundle path is missing.</exception>
    /// <exception cref="FileNotFoundException">Thrown when the portable bundle file does not exist.</exception>
    public PortableEvidenceExportResult Build(PortableEvidenceExportRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (request.ExportId == Guid.Empty)
        {
            throw new ArgumentException("Export identifier must be provided.", nameof(request));
        }

        if (request.BundleId == Guid.Empty)
        {
            throw new ArgumentException("Bundle identifier must be provided.", nameof(request));
        }

        if (request.TenantId == Guid.Empty)
        {
            throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
        }

        if (string.IsNullOrWhiteSpace(request.PortableBundlePath))
        {
            throw new ArgumentException("Portable bundle path must be provided.", nameof(request));
        }

        var fullPath = Path.GetFullPath(request.PortableBundlePath);
        if (!File.Exists(fullPath))
        {
            throw new FileNotFoundException($"Portable bundle file '{fullPath}' not found.", fullPath);
        }

        cancellationToken.ThrowIfCancellationRequested();

        // Read and hash the inner bundle; it is embedded unmodified.
        var portableBundleBytes = File.ReadAllBytes(fullPath);
        var portableBundleSha256 = _cryptoHash.ComputeHashHexForPurpose(portableBundleBytes, HashPurpose.Content);

        // First pass: export document with a placeholder root hash.
        var exportDoc = new PortableEvidenceExportDocument(
            ExportVersion,
            request.ExportId.ToString("D"),
            request.BundleId.ToString("D"),
            request.TenantId.ToString("D"),
            _timeProvider.GetUtcNow(),
            string.Empty, // Root hash is computed after the member hashes below.
            request.SourceUri,
            PortableBundleVersion,
            portableBundleSha256,
            request.Metadata);

        var exportJson = JsonSerializer.Serialize(exportDoc, SerializerOptions);
        var exportJsonSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(exportJson), HashPurpose.Content);

        var checksums = BuildChecksums(exportJsonSha256, portableBundleSha256);
        var checksumsSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(checksums), HashPurpose.Content);

        var readme = BuildReadme(request, portableBundleSha256);
        var readmeSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(readme), HashPurpose.Content);

        var verifyScript = BuildVerificationScript();
        var verifyScriptSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(verifyScript), HashPurpose.Content);

        // NOTE(review): the root hash is computed over the FIRST-pass export.json hash
        // (with an empty rootHash field), then embedded in the final export.json — so
        // recomputing it from the shipped files requires blanking rootHash first.
        // Confirm this two-pass scheme matches the documented export schema.
        var rootHash = ComputeRootHash(exportJsonSha256, portableBundleSha256, checksumsSha256, readmeSha256, verifyScriptSha256);

        // Second pass: embed the root hash and regenerate dependent artefacts.
        var finalExportDoc = exportDoc with { RootHash = rootHash };
        var finalExportJson = JsonSerializer.Serialize(finalExportDoc, SerializerOptions);

        var finalExportJsonSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(finalExportJson), HashPurpose.Content);
        var finalChecksums = BuildChecksums(finalExportJsonSha256, portableBundleSha256);

        var exportStream = CreateExportArchive(
            finalExportJson,
            portableBundleBytes,
            finalChecksums,
            readme,
            verifyScript);

        exportStream.Position = 0;

        return new PortableEvidenceExportResult(
            finalExportDoc,
            finalExportJson,
            rootHash,
            portableBundleSha256,
            exportStream);
    }

    // Order-independent digest over the member hashes: sort ordinally, join with NUL, hash.
    private string ComputeRootHash(params string[] hashes)
    {
        var builder = new StringBuilder();
        foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
        {
            builder.Append(hash).Append('\0');
        }

        var bytes = Encoding.UTF8.GetBytes(builder.ToString());
        return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
    }

    // Emits checksum lines in the GNU coreutils format required by `sha256sum --check`:
    // "HASH  FILE" with TWO separator spaces. A single space is rejected as improperly
    // formatted, which would make the bundled verify-export.sh fail on its own output.
    // The leading '#' comment line is tolerated (warned about, not fatal) by GNU sha256sum.
    private static string BuildChecksums(string exportJsonSha256, string portableBundleSha256)
    {
        var builder = new StringBuilder();
        builder.AppendLine("# Portable evidence export checksums (sha256)");
        builder.Append(exportJsonSha256).Append("  ").AppendLine("export.json");
        builder.Append(portableBundleSha256).Append("  ").AppendLine(InnerBundleFileName);
        return builder.ToString();
    }

    // Human-readable README shipped in the archive; documents contents and offline verification steps.
    private static string BuildReadme(PortableEvidenceExportRequest request, string portableBundleSha256)
    {
        var builder = new StringBuilder();
        builder.AppendLine("# Portable Evidence Export");
        builder.AppendLine();
        builder.AppendLine("## Overview");
        builder.Append("Export ID: ").AppendLine(request.ExportId.ToString("D"));
        builder.Append("Bundle ID: ").AppendLine(request.BundleId.ToString("D"));
        builder.Append("Tenant ID: ").AppendLine(request.TenantId.ToString("D"));
        builder.AppendLine();

        builder.AppendLine("## Contents");
        builder.AppendLine("- `export.json` - Export metadata with bundle references and hashes");
        builder.Append("- `").Append(InnerBundleFileName).AppendLine("` - Original EvidenceLocker portable bundle (unmodified)");
        builder.AppendLine("- `checksums.txt` - SHA-256 checksums for verification");
        builder.AppendLine("- `verify-export.sh` - Verification script for offline use");
        builder.AppendLine("- `README.md` - This file");
        builder.AppendLine();

        builder.AppendLine("## Verification Steps");
        builder.AppendLine();
        builder.AppendLine("### 1. Extract the archive");
        builder.AppendLine("```sh");
        builder.Append("tar -xzf ").AppendLine(ExportArchiveFileName);
        builder.AppendLine("```");
        builder.AppendLine();

        builder.AppendLine("### 2. Verify checksums");
        builder.AppendLine("```sh");
        builder.AppendLine("./verify-export.sh");
        builder.AppendLine("# Or manually:");
        builder.AppendLine("sha256sum --check checksums.txt");
        builder.AppendLine("```");
        builder.AppendLine();

        builder.AppendLine("### 3. Verify the inner evidence bundle");
        builder.AppendLine("```sh");
        builder.Append("stella evidence verify --bundle ").AppendLine(InnerBundleFileName);
        builder.AppendLine("```");
        builder.AppendLine();

        builder.AppendLine("## Expected Headers");
        builder.AppendLine("When downloading this export, expect the following response headers:");
        // NOTE(review): the <placeholder> tokens below were lost in extraction of this
        // source (angle-bracket runs stripped); reconstructed from the surrounding field
        // names — confirm against the original file.
        builder.AppendLine("- `Content-Type: application/gzip`");
        builder.AppendLine("- `ETag: \"<rootHash>\"`");
        builder.AppendLine("- `Last-Modified: <timestamp>`");
        builder.AppendLine("- `X-Stella-Bundle-Id: <bundleId>`");
        builder.AppendLine("- `X-Stella-Export-Id: <exportId>`");
        builder.AppendLine();

        builder.AppendLine("## Schema Links");
        builder.AppendLine("- Evidence bundle: `docs/modules/evidence-locker/bundle-packaging.schema.json`");
        builder.AppendLine("- Export schema: `docs/modules/export-center/portable-export.schema.json`");
        builder.AppendLine();

        builder.AppendLine("## Portable Bundle Hash");
        builder.Append("SHA-256: `").Append(portableBundleSha256).AppendLine("`");

        return builder.ToString();
    }

    // POSIX-sh verification script: checks checksums.txt with sha256sum/shasum, then
    // delegates inner-bundle verification to the stella CLI if available. No network use.
    private static string BuildVerificationScript()
    {
        var builder = new StringBuilder();
        builder.AppendLine("#!/usr/bin/env sh");
        builder.AppendLine("# Portable Evidence Export Verification Script");
        builder.AppendLine("# No network access required");
        builder.AppendLine();
        builder.AppendLine("set -eu");
        builder.AppendLine();
        builder.AppendLine("# Verify checksums");
        builder.AppendLine("echo \"Verifying checksums...\"");
        builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
        builder.AppendLine("  sha256sum --check checksums.txt");
        builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
        builder.AppendLine("  shasum -a 256 --check checksums.txt");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Error: sha256sum or shasum required\" >&2");
        builder.AppendLine("  exit 1");
        builder.AppendLine("fi");
        builder.AppendLine();
        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Checksums verified successfully.\"");
        builder.AppendLine("echo \"\"");
        builder.AppendLine();
        builder.AppendLine("# Check for stella CLI");
        builder.Append("PORTABLE_BUNDLE=\"").Append(InnerBundleFileName).AppendLine("\"");
        builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
        builder.AppendLine("  echo \"Verifying evidence bundle with stella CLI...\"");
        builder.AppendLine("  stella evidence verify --bundle \"$PORTABLE_BUNDLE\"");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Note: stella CLI not found. Manual verification of $PORTABLE_BUNDLE recommended.\"");
        builder.AppendLine("  echo \"Install stella CLI and run: stella evidence verify --bundle $PORTABLE_BUNDLE\"");
        builder.AppendLine("fi");
        builder.AppendLine();
        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Verification complete.\"");

        return builder.ToString();
    }

    // Assembles the .tgz with deterministic entry order and metadata, then pins the gzip header.
    private MemoryStream CreateExportArchive(
        string exportJson,
        byte[] portableBundleBytes,
        string checksums,
        string readme,
        string verifyScript)
    {
        var stream = new MemoryStream();

        using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
        using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            // Write files in lexical (ordinal) order for determinism.
            WriteTextEntry(tar, "README.md", readme, DefaultFileMode);
            WriteTextEntry(tar, "checksums.txt", checksums, DefaultFileMode);
            WriteTextEntry(tar, "export.json", exportJson, DefaultFileMode);
            WriteBytesEntry(tar, InnerBundleFileName, portableBundleBytes, DefaultFileMode);
            WriteTextEntry(tar, "verify-export.sh", verifyScript, ExecutableFileMode);
        }

        ApplyDeterministicGzipHeader(stream);
        return stream;
    }

    private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
    {
        WriteBytesEntry(writer, path, Encoding.UTF8.GetBytes(content), mode);
    }

    // All entries share fixed uid/gid/mtime and empty owner names so the tar bytes are reproducible.
    private static void WriteBytesEntry(TarWriter writer, string path, byte[] content, UnixFileMode mode)
    {
        using var dataStream = new MemoryStream(content);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
        {
            Mode = mode,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty,
            DataStream = dataStream
        };
        writer.WriteEntry(entry);
    }

    // Overwrites the gzip MTIME field (bytes 4-7, little-endian per RFC 1952) with the
    // fixed timestamp so repeated builds produce identical archive bytes.
    private static void ApplyDeterministicGzipHeader(MemoryStream stream)
    {
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for portable evidence export.");
        }

        var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);

        var originalPosition = stream.Position;
        stream.Position = 4;
        stream.Write(buffer);
        stream.Position = originalPosition;
    }
}
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.Core.PortableEvidence;

/// <summary>
/// Request to create a portable evidence export.
/// </summary>
public sealed record PortableEvidenceExportRequest(
    Guid ExportId,
    Guid BundleId,
    Guid TenantId,
    string PortableBundlePath,
    string? SourceUri = null,
    // NOTE(review): generic arguments were lost in extraction of this source;
    // string keys/values assumed — confirm the original declaration.
    IReadOnlyDictionary<string, string>? Metadata = null);

/// <summary>
/// Result of building a portable evidence export. The caller owns <see cref="ExportStream"/>.
/// </summary>
public sealed record PortableEvidenceExportResult(
    PortableEvidenceExportDocument ExportDocument,
    string ExportDocumentJson,
    string RootHash,
    string PortableBundleSha256,
    MemoryStream ExportStream);

/// <summary>
/// The export.json document for portable evidence exports.
/// </summary>
public sealed record PortableEvidenceExportDocument(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("tenantId")] string TenantId,
    [property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc,
    [property: JsonPropertyName("rootHash")] string RootHash,
    [property: JsonPropertyName("sourceUri")] string? SourceUri,
    [property: JsonPropertyName("portableVersion")] string PortableVersion,
    [property: JsonPropertyName("portableBundleSha256")] string PortableBundleSha256,
    // NOTE(review): generic arguments reconstructed (see request above) — confirm.
    [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string>? Metadata);

/// <summary>
/// Export status for portable evidence.
/// </summary>
public enum PortableEvidenceExportStatus
{
    Pending = 1,
    Materialising = 2,
    Ready = 3,
    Failed = 4
}

/// <summary>
/// Status response for portable evidence export.
/// </summary>
public sealed record PortableEvidenceExportStatusResponse(
    [property: JsonPropertyName("exportId")] string ExportId,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("rootHash")] string? RootHash,
    [property: JsonPropertyName("portableVersion")] string? PortableVersion,
    [property: JsonPropertyName("downloadUri")] string? DownloadUri,
    [property: JsonPropertyName("pendingReason")] string? PendingReason,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);
StatementVersion: "v2"); + + var result = _builder.Build(request); + + Assert.Equal(exportId.ToString("D"), result.Metadata.ExportId); + Assert.Equal(attestationId.ToString("D"), result.Metadata.AttestationId); + Assert.Equal(tenantId.ToString("D"), result.Metadata.TenantId); + Assert.Equal(sourceUri, result.Metadata.SourceUri); + Assert.Equal("v2", result.Metadata.StatementVersion); + Assert.Equal("attestation-bundle/v1", result.Metadata.Version); + } + + [Fact] + public void Build_ProducesDeterministicOutput() + { + var exportId = new Guid("11111111-2222-3333-4444-555555555555"); + var attestationId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"); + var tenantId = new Guid("ffffffff-1111-2222-3333-444444444444"); + + var request = new AttestationBundleExportRequest( + exportId, + attestationId, + tenantId, + CreateTestDsseEnvelope(), + CreateTestStatement()); + + var result1 = _builder.Build(request); + var result2 = _builder.Build(request); + + Assert.Equal(result1.RootHash, result2.RootHash); + + var bytes1 = result1.ExportStream.ToArray(); + var bytes2 = result2.ExportStream.ToArray(); + Assert.Equal(bytes1, bytes2); + } + + [Fact] + public void Build_ArchiveContainsExpectedFiles() + { + var request = CreateTestRequest(); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.ExportStream); + + Assert.Contains("attestation.dsse.json", fileNames); + Assert.Contains("statement.json", fileNames); + Assert.Contains("metadata.json", fileNames); + Assert.Contains("checksums.txt", fileNames); + Assert.Contains("verify-attestation.sh", fileNames); + } + + [Fact] + public void Build_WithTransparencyEntries_IncludesTransparencyFile() + { + var entries = new List + { + "{\"logIndex\":1,\"logId\":\"rekor1\"}", + "{\"logIndex\":2,\"logId\":\"rekor2\"}" + }; + + var request = new AttestationBundleExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + CreateTestDsseEnvelope(), + CreateTestStatement(), + TransparencyEntries: 
entries); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.ExportStream); + + Assert.Contains("transparency.ndjson", fileNames); + } + + [Fact] + public void Build_WithoutTransparencyEntries_OmitsTransparencyFile() + { + var request = CreateTestRequest(); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.ExportStream); + + Assert.DoesNotContain("transparency.ndjson", fileNames); + } + + [Fact] + public void Build_TransparencyEntriesSortedLexically() + { + var entries = new List + { + "{\"logIndex\":3,\"logId\":\"z-log\"}", + "{\"logIndex\":1,\"logId\":\"a-log\"}", + "{\"logIndex\":2,\"logId\":\"m-log\"}" + }; + + var request = new AttestationBundleExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + CreateTestDsseEnvelope(), + CreateTestStatement(), + TransparencyEntries: entries); + + var result = _builder.Build(request); + var content = ExtractFileContent(result.ExportStream, "transparency.ndjson"); + var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries); + + // Should be sorted lexically + Assert.Equal(3, lines.Length); + Assert.Contains("a-log", lines[0]); + Assert.Contains("m-log", lines[1]); + Assert.Contains("z-log", lines[2]); + } + + [Fact] + public void Build_DsseEnvelopeIsUnmodified() + { + var originalDsse = CreateTestDsseEnvelope(); + var request = new AttestationBundleExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + originalDsse, + CreateTestStatement()); + + var result = _builder.Build(request); + var extractedDsse = ExtractFileContent(result.ExportStream, "attestation.dsse.json"); + + Assert.Equal(originalDsse, extractedDsse); + } + + [Fact] + public void Build_StatementIsUnmodified() + { + var originalStatement = CreateTestStatement(); + var request = new AttestationBundleExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + CreateTestDsseEnvelope(), + originalStatement); + + var result = 
// --- AttestationBundleBuilderTests (continued) ---
// NOTE(review): reconstructed from a garbled patch. Generic type arguments
// (List<T>, Assert.Throws<T>, ReadOnlySpan<byte>, ValueTask<T>, ...) were
// stripped during extraction and restored to their most likely originals -
// confirm against the repository before merging.

        // Tail of a preceding round-trip test whose header lies outside this
        // chunk: the statement extracted from the archive must equal the input.
            _builder.Build(request);
        var extractedStatement = ExtractFileContent(result.ExportStream, "statement.json");

        Assert.Equal(originalStatement, extractedStatement);
    }

    [Fact]
    public void Build_TarEntriesHaveDeterministicMetadata()
    {
        var request = CreateTestRequest();

        var result = _builder.Build(request);
        var entries = ExtractTarEntryMetadata(result.ExportStream);

        // Fixed epoch + uid/gid 0 keep the archive byte-for-byte reproducible.
        var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

        foreach (var entry in entries)
        {
            Assert.Equal(0, entry.Uid);
            Assert.Equal(0, entry.Gid);
            Assert.Equal(expectedTimestamp, entry.ModificationTime);
        }
    }

    [Fact]
    public void Build_VerifyScriptHasExecutePermission()
    {
        var request = CreateTestRequest();

        var result = _builder.Build(request);
        var entries = ExtractTarEntryMetadata(result.ExportStream);

        var scriptEntry = entries.FirstOrDefault(e => e.Name == "verify-attestation.sh");
        Assert.NotNull(scriptEntry);
        Assert.True(scriptEntry.Mode.HasFlag(UnixFileMode.UserExecute));
    }

    [Fact]
    public void Build_VerifyScriptIsPosixCompliant()
    {
        var request = CreateTestRequest();

        var result = _builder.Build(request);
        var script = ExtractFileContent(result.ExportStream, "verify-attestation.sh");

        // Offline-first: the script must hash and verify locally, never fetch.
        Assert.StartsWith("#!/usr/bin/env sh", script);
        Assert.Contains("sha256sum", script);
        Assert.Contains("shasum", script);
        Assert.Contains("stella attest verify", script);
        Assert.DoesNotContain("curl", script);
        Assert.DoesNotContain("wget", script);
    }

    [Fact]
    public void Build_VerifyScriptContainsAttestationId()
    {
        var attestationId = Guid.NewGuid();
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            attestationId,
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement());

        var result = _builder.Build(request);
        var script = ExtractFileContent(result.ExportStream, "verify-attestation.sh");

        Assert.Contains(attestationId.ToString("D"), script);
    }

    [Fact]
    public void Build_ChecksumsContainsAllFiles()
    {
        var request = CreateTestRequest();

        var result = _builder.Build(request);
        var checksums = ExtractFileContent(result.ExportStream, "checksums.txt");

        Assert.Contains("attestation.dsse.json", checksums);
        Assert.Contains("statement.json", checksums);
        Assert.Contains("metadata.json", checksums);
    }

    [Fact]
    public void Build_WithSubjectDigests_IncludesInMetadata()
    {
        // TODO(review): element type argument was stripped by extraction;
        // confirm the subject-digest record name used by
        // AttestationBundleExportRequest.SubjectDigests.
        var digests = new List<AttestationBundleSubjectDigest>
        {
            new("image1", "sha256:abc123", "sha256"),
            new("image2", "sha256:def456", "sha256")
        };

        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement(),
            SubjectDigests: digests);

        var result = _builder.Build(request);

        Assert.NotNull(result.Metadata.SubjectDigests);
        Assert.Equal(2, result.Metadata.SubjectDigests.Count);
        Assert.Equal("image1", result.Metadata.SubjectDigests[0].Name);
        Assert.Equal("sha256:abc123", result.Metadata.SubjectDigests[0].Digest);
    }

    // Validation tests: the exception type arguments below were stripped by
    // extraction; ArgumentException/ArgumentNullException are the conventional
    // choices - TODO confirm against AttestationBundleBuilder.Build.

    [Fact]
    public void Build_ThrowsForEmptyExportId()
    {
        var request = new AttestationBundleExportRequest(
            Guid.Empty,
            Guid.NewGuid(),
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement());

        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForEmptyAttestationId()
    {
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.Empty,
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement());

        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForEmptyTenantId()
    {
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.Empty,
            CreateTestDsseEnvelope(),
            CreateTestStatement());

        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForEmptyDsseEnvelope()
    {
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.NewGuid(),
            string.Empty,
            CreateTestStatement());

        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForEmptyStatement()
    {
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            string.Empty);

        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForNullRequest()
    {
        Assert.Throws<ArgumentNullException>(() => _builder.Build(null!));
    }

    [Fact]
    public void Build_DefaultStatementVersionIsV1()
    {
        var request = new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement());

        var result = _builder.Build(request);

        Assert.Equal("v1", result.Metadata.StatementVersion);
    }

    /// <summary>Builds a minimal valid export request with random ids.</summary>
    private static AttestationBundleExportRequest CreateTestRequest()
    {
        return new AttestationBundleExportRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Guid.NewGuid(),
            CreateTestDsseEnvelope(),
            CreateTestStatement());
    }

    /// <summary>Minimal DSSE envelope JSON with a single fake signature.</summary>
    private static string CreateTestDsseEnvelope()
    {
        return JsonSerializer.Serialize(new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSJ9",
            signatures = new[]
            {
                new { keyid = "key-1", sig = "signature-data-here" }
            }
        });
    }

    /// <summary>Minimal in-toto v1 statement JSON with one subject.</summary>
    private static string CreateTestStatement()
    {
        return JsonSerializer.Serialize(new
        {
            _type = "https://in-toto.io/Statement/v1",
            subject = new[]
            {
                new { name = "test-image", digest = new { sha256 = "abc123" } }
            },
            predicateType = "https://slsa.dev/provenance/v1",
            predicate = new { buildType = "test" }
        });
    }

    /// <summary>
    /// Lists all entry names in the gzip-compressed tar export. Leaves the
    /// stream open and rewound so callers can re-read it.
    /// </summary>
    private static List<string> ExtractFileNames(MemoryStream exportStream)
    {
        exportStream.Position = 0;
        var fileNames = new List<string>();

        using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            fileNames.Add(entry.Name);
        }

        exportStream.Position = 0;
        return fileNames;
    }

    /// <summary>
    /// Reads the full text of one archive entry; throws if it is absent.
    /// Leaves the stream rewound for subsequent reads.
    /// </summary>
    private static string ExtractFileContent(MemoryStream exportStream, string fileName)
    {
        exportStream.Position = 0;

        using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            if (entry.Name == fileName && entry.DataStream is not null)
            {
                using var reader = new StreamReader(entry.DataStream);
                var content = reader.ReadToEnd();
                exportStream.Position = 0;
                return content;
            }
        }

        exportStream.Position = 0;
        throw new FileNotFoundException($"File '{fileName}' not found in archive.");
    }

    /// <summary>Captures per-entry tar metadata for determinism assertions.</summary>
    private static List<TarEntryMetadataInfo> ExtractTarEntryMetadata(MemoryStream exportStream)
    {
        exportStream.Position = 0;
        var entries = new List<TarEntryMetadataInfo>();

        using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            entries.Add(new TarEntryMetadataInfo(
                entry.Name,
                entry.Uid,
                entry.Gid,
                entry.ModificationTime,
                entry.Mode));
        }

        exportStream.Position = 0;
        return entries;
    }

    private sealed record TarEntryMetadataInfo(
        string Name,
        int Uid,
        int Gid,
        DateTimeOffset ModificationTime,
        UnixFileMode Mode);
}

/// <summary>
/// Deterministic SHA-256 fake for <see cref="StellaOps.Cryptography.ICryptoHash"/>;
/// ignores the requested algorithm/purpose and always hashes with SHA-256.
/// </summary>
internal sealed class FakeCryptoHash : StellaOps.Cryptography.ICryptoHash
{
    public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
        // One-shot hashing: no ToArray() copy, no disposable hasher instance.
        => System.Security.Cryptography.SHA256.HashData(data);

    public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
    {
        var hash = ComputeHash(data, algorithmId);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
    {
        var hash = ComputeHash(data, algorithmId);
        return Convert.ToBase64String(hash);
    }

    public ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
        // Unlike the original SHA256.Create()+ComputeHash(stream) form, this
        // honors the caller's CancellationToken.
        => System.Security.Cryptography.SHA256.HashDataAsync(stream, cancellationToken);

    public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
    {
        var hash = await ComputeHashAsync(stream, algorithmId, cancellationToken);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Purpose-scoped overloads delegate to the plain SHA-256 implementations;
    // the purpose string is deliberately ignored in this fake.
    public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHash(data, null);

    public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashHex(data, null);

    public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashBase64(data, null);

    public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashAsync(stream, null, cancellationToken);

    public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashHexAsync(stream, null, cancellationToken);

    public string GetAlgorithmForPurpose(string purpose) => "sha256";

    public string GetHashPrefix(string purpose) => "sha256:";

    public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => GetHashPrefix(purpose) + ComputeHashHexForPurpose(data, purpose);
}
// --- BootstrapPackBuilderTests.cs ---
// NOTE(review): reconstructed from a garbled patch; stripped generic type
// arguments restored to their most likely originals - confirm against repo.
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.BootstrapPack;
using Xunit;

namespace StellaOps.ExportCenter.Tests;

/// <summary>
/// Tests for <see cref="BootstrapPackBuilder"/>: pack composition, determinism,
/// OCI layout, signatures, and input validation.
/// </summary>
public sealed class BootstrapPackBuilderTests : IDisposable
{
    private readonly string _tempDir;
    private readonly BootstrapPackBuilder _builder;
    private readonly ICryptoHash _cryptoHash;

    public BootstrapPackBuilderTests()
    {
        // Unique scratch directory per test-class instance; removed in Dispose.
        _tempDir = Path.Combine(Path.GetTempPath(), $"bootstrap-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);
        _cryptoHash = new DefaultCryptoHash();
        _builder = new BootstrapPackBuilder(_cryptoHash);
    }

    public void Dispose()
    {
        if (Directory.Exists(_tempDir))
        {
            Directory.Delete(_tempDir, recursive: true);
        }
    }

    [Fact]
    public void Build_WithCharts_ProducesValidPack()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: test-chart\nversion: 1.0.0");
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("test-chart", "1.0.0", chartPath) },
            Images: Array.Empty<BootstrapPackImageSource>());

        var result = _builder.Build(request);

        Assert.NotNull(result);
        Assert.NotNull(result.Manifest);
        Assert.NotEmpty(result.ManifestJson);
        Assert.NotEmpty(result.RootHash);
        Assert.NotEmpty(result.ArtifactSha256);
        Assert.True(result.PackStream.Length > 0);
        Assert.Single(result.Manifest.Charts);
    }

    [Fact]
    public void Build_WithImages_ProducesValidPack()
    {
        var blobPath = CreateTestFile("blob", "image layer content");
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: Array.Empty<BootstrapPackChartSource>(),
            Images: new[]
            {
                new BootstrapPackImageSource(
                    "registry.example.com/app",
                    "v1.0.0",
                    "sha256:abc123",
                    blobPath)
            });

        var result = _builder.Build(request);

        Assert.NotNull(result);
        Assert.Single(result.Manifest.Images);
        Assert.Equal("registry.example.com/app", result.Manifest.Images[0].Repository);
    }

    [Fact]
    public void Build_WithChartsAndImages_IncludesAll()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: stellaops\nversion: 2.0.0");
        var blobPath = CreateTestFile("blob", "container layer");

        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("stellaops", "2.0.0", chartPath) },
            Images: new[]
            {
                new BootstrapPackImageSource("ghcr.io/stellaops/scanner", "v3.0.0", "sha256:def456", blobPath)
            });

        var result = _builder.Build(request);

        Assert.Single(result.Manifest.Charts);
        Assert.Single(result.Manifest.Images);
    }

    [Fact]
    public void Build_ProducesDeterministicOutput()
    {
        var chartPath = CreateTestFile("Chart-determ.yaml", "apiVersion: v2\nname: determ\nversion: 1.0.0");
        // Fixed ids so both builds hash identical manifest content.
        var exportId = new Guid("11111111-2222-3333-4444-555555555555");
        var tenantId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");

        var request = new BootstrapPackBuildRequest(
            exportId,
            tenantId,
            Charts: new[] { new BootstrapPackChartSource("determ", "1.0.0", chartPath) },
            Images: Array.Empty<BootstrapPackImageSource>());

        var result1 = _builder.Build(request);
        var result2 = _builder.Build(request);

        Assert.Equal(result1.RootHash, result2.RootHash);
        Assert.Equal(result1.ArtifactSha256, result2.ArtifactSha256);

        // Byte-level determinism, not just matching digests.
        var bytes1 = result1.PackStream.ToArray();
        var bytes2 = result2.PackStream.ToArray();
        Assert.Equal(bytes1, bytes2);
    }

    [Fact]
    public void Build_ArchiveContainsExpectedFiles()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: archive-test\nversion: 1.0.0");
        var blobPath = CreateTestFile("layer.tar", "layer content");

        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("archive-test", "1.0.0", chartPath) },
            Images: new[]
            {
                new BootstrapPackImageSource("test/image", "latest", "sha256:xyz789", blobPath)
            });

        var result = _builder.Build(request);
        var fileNames = ExtractFileNames(result.PackStream);

        Assert.Contains("manifest.json", fileNames);
        Assert.Contains("checksums.txt", fileNames);
        Assert.Contains("images/oci-layout", fileNames);
        Assert.Contains("images/index.json", fileNames);
        Assert.True(fileNames.Any(f => f.StartsWith("charts/")));
        Assert.True(fileNames.Any(f => f.StartsWith("images/blobs/")));
    }

    [Fact]
    public void Build_TarEntriesHaveDeterministicMetadata()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: metadata-test\nversion: 1.0.0");
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("metadata-test", "1.0.0", chartPath) },
            Images: Array.Empty<BootstrapPackImageSource>());

        var result = _builder.Build(request);
        var entries = ExtractTarEntryMetadata(result.PackStream);

        // Fixed epoch + anonymous uid/gid keep the pack reproducible.
        var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

        foreach (var entry in entries)
        {
            Assert.Equal(0, entry.Uid);
            Assert.Equal(0, entry.Gid);
            Assert.Equal(string.Empty, entry.UserName);
            Assert.Equal(string.Empty, entry.GroupName);
            Assert.Equal(expectedTimestamp, entry.ModificationTime);
        }
    }

    [Fact]
    public void Build_WithChartDirectory_IncludesAllFiles()
    {
        var chartDir = Path.Combine(_tempDir, "test-chart");
        Directory.CreateDirectory(chartDir);
        Directory.CreateDirectory(Path.Combine(chartDir, "templates"));

        File.WriteAllText(Path.Combine(chartDir, "Chart.yaml"), "apiVersion: v2\nname: dir-chart\nversion: 1.0.0");
        File.WriteAllText(Path.Combine(chartDir, "values.yaml"), "replicaCount: 1");
        File.WriteAllText(Path.Combine(chartDir, "templates", "deployment.yaml"), "kind: Deployment");

        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("dir-chart", "1.0.0", chartDir) },
            Images: Array.Empty<BootstrapPackImageSource>());

        var result = _builder.Build(request);
        var fileNames = ExtractFileNames(result.PackStream);

        Assert.Contains("charts/dir-chart-1.0.0/Chart.yaml", fileNames);
        Assert.Contains("charts/dir-chart-1.0.0/values.yaml", fileNames);
        Assert.Contains("charts/dir-chart-1.0.0/templates/deployment.yaml", fileNames);
    }

    [Fact]
    public void Build_WithSignatures_IncludesSignatureEntry()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: sig-test\nversion: 1.0.0");
        var sigPath = CreateTestFile("mirror-bundle.sig", "signature-content");

        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("sig-test", "1.0.0", chartPath) },
            Images: Array.Empty<BootstrapPackImageSource>(),
            Signatures: new BootstrapPackSignatureSource("sha256:mirror123", sigPath));

        var result = _builder.Build(request);
        var fileNames = ExtractFileNames(result.PackStream);

        Assert.NotNull(result.Manifest.Signatures);
        Assert.Equal("sha256:mirror123", result.Manifest.Signatures.MirrorBundleDigest);
        Assert.Contains("signatures/mirror-bundle.sig", fileNames);
    }

    [Fact]
    public void Build_OciIndexContainsImageReferences()
    {
        var blobPath = CreateTestFile("layer", "image content");
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: Array.Empty<BootstrapPackChartSource>(),
            Images: new[]
            {
                new BootstrapPackImageSource("myregistry.io/app", "v1.2.3", "sha256:img123", blobPath)
            });

        var result = _builder.Build(request);
        var indexJson = ExtractFileContent(result.PackStream, "images/index.json");
        // TODO(review): the Deserialize<T> type argument was stripped by
        // extraction; confirm the OCI index model name in the project.
        var index = JsonSerializer.Deserialize<OciIndex>(indexJson);

        Assert.NotNull(index);
        Assert.Equal(2, index.SchemaVersion);
        Assert.Single(index.Manifests);
        Assert.Equal("sha256:img123", index.Manifests[0].Digest);
    }

    [Fact]
    public void Build_ThrowsForEmptyInputs()
    {
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: Array.Empty<BootstrapPackChartSource>(),
            Images: Array.Empty<BootstrapPackImageSource>());

        // TODO(review): exception type argument stripped; ArgumentException assumed.
        Assert.Throws<ArgumentException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ThrowsForMissingChartPath()
    {
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("missing", "1.0.0", "/nonexistent/Chart.yaml") },
            Images: Array.Empty<BootstrapPackImageSource>());

        // TODO(review): exception type argument stripped; FileNotFoundException assumed.
        Assert.Throws<FileNotFoundException>(() => _builder.Build(request));
    }

    [Fact]
    public void Build_ManifestVersionIsCorrect()
    {
        var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: version-test\nversion: 1.0.0");
        var request = new BootstrapPackBuildRequest(
            Guid.NewGuid(),
            Guid.NewGuid(),
            Charts: new[] { new BootstrapPackChartSource("version-test", "1.0.0", chartPath) },
            Images: Array.Empty<BootstrapPackImageSource>());

        var result = _builder.Build(request);

        Assert.Equal("bootstrap/v1", result.Manifest.Version);
    }

    /// <summary>Writes a fixture file into the scratch directory and returns its path.</summary>
    private string CreateTestFile(string fileName, string content)
    {
        var path = Path.Combine(_tempDir, fileName);
        File.WriteAllText(path, content);
        return path;
    }

    /// <summary>Lists entry names of the gzip tar pack; leaves the stream rewound.</summary>
    private static List<string> ExtractFileNames(MemoryStream packStream)
    {
        packStream.Position = 0;
        var fileNames = new List<string>();

        using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            fileNames.Add(entry.Name);
        }

        packStream.Position = 0;
        return fileNames;
    }

    /// <summary>Reads the full text of one pack entry; throws if absent.</summary>
    private static string ExtractFileContent(MemoryStream packStream, string fileName)
    {
        packStream.Position = 0;

        using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            if (entry.Name == fileName && entry.DataStream is not null)
            {
                using var reader = new StreamReader(entry.DataStream);
                var content = reader.ReadToEnd();
                packStream.Position = 0;
                return content;
            }
        }

        packStream.Position = 0;
        throw new FileNotFoundException($"File '{fileName}' not found in archive.");
    }

    /// <summary>Captures per-entry tar metadata for determinism assertions.</summary>
    private static List<TarEntryMetadata> ExtractTarEntryMetadata(MemoryStream packStream)
    {
        packStream.Position = 0;
        var entries = new List<TarEntryMetadata>();

        using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
        using var tar = new TarReader(gzip, leaveOpen: true);

        TarEntry? entry;
        while ((entry = tar.GetNextEntry()) is not null)
        {
            entries.Add(new TarEntryMetadata(
                entry.Uid,
                entry.Gid,
                entry.UserName ?? string.Empty,
                entry.GroupName ?? string.Empty,
                entry.ModificationTime));
        }

        packStream.Position = 0;
        return entries;
    }

    private sealed record TarEntryMetadata(
        int Uid,
        int Gid,
        string UserName,
        string GroupName,
        DateTimeOffset ModificationTime);
}

// --- Deprecation/DeprecatedEndpointsRegistryTests.cs ---
using StellaOps.ExportCenter.WebService.Deprecation;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Deprecation;

/// <summary>
/// Verifies the static deprecated-endpoint registry: successor paths,
/// documentation links, reasons, and lifecycle dates.
/// (Class continues on the following chunk lines.)
/// </summary>
public sealed class DeprecatedEndpointsRegistryTests
{
    [Fact]
    public void ListExports_HasCorrectSuccessorPath()
    {
        Assert.Equal("/v1/exports/profiles", DeprecatedEndpointsRegistry.ListExports.SuccessorPath);
    }

    [Fact]
    public void CreateExport_HasCorrectSuccessorPath()
    {
        Assert.Equal("/v1/exports/evidence", DeprecatedEndpointsRegistry.CreateExport.SuccessorPath);
    }

    [Fact]
    public void DeleteExport_HasCorrectSuccessorPath()
    {
        Assert.Equal("/v1/exports/runs/{id}/cancel", DeprecatedEndpointsRegistry.DeleteExport.SuccessorPath);
    }

    [Fact]
    public void AllEndpoints_HaveDocumentationUrl()
    {
        Assert.NotNull(DeprecatedEndpointsRegistry.ListExports.DocumentationUrl);
        Assert.NotNull(DeprecatedEndpointsRegistry.CreateExport.DocumentationUrl);
        Assert.NotNull(DeprecatedEndpointsRegistry.DeleteExport.DocumentationUrl);
    }

    [Fact]
    public void AllEndpoints_HaveReason()
    {
        Assert.NotNull(DeprecatedEndpointsRegistry.ListExports.Reason);
        Assert.NotNull(DeprecatedEndpointsRegistry.CreateExport.Reason);
        Assert.NotNull(DeprecatedEndpointsRegistry.DeleteExport.Reason);
    }
+ var endpoints = DeprecatedEndpointsRegistry.GetAll(); + + Assert.Equal(3, endpoints.Count); + } + + [Fact] + public void GetAll_ContainsGetExports() + { + var endpoints = DeprecatedEndpointsRegistry.GetAll(); + + Assert.Contains(endpoints, e => e.Method == "GET" && e.Pattern == "/exports"); + } + + [Fact] + public void GetAll_ContainsPostExports() + { + var endpoints = DeprecatedEndpointsRegistry.GetAll(); + + Assert.Contains(endpoints, e => e.Method == "POST" && e.Pattern == "/exports"); + } + + [Fact] + public void GetAll_ContainsDeleteExports() + { + var endpoints = DeprecatedEndpointsRegistry.GetAll(); + + Assert.Contains(endpoints, e => e.Method == "DELETE" && e.Pattern == "/exports/{id}"); + } + + [Fact] + public void LegacyExportsDeprecationDate_IsJanuary2025() + { + Assert.Equal(2025, DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate.Year); + Assert.Equal(1, DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate.Month); + } + + [Fact] + public void LegacyExportsSunsetDate_IsJuly2025() + { + Assert.Equal(2025, DeprecatedEndpointsRegistry.LegacyExportsSunsetDate.Year); + Assert.Equal(7, DeprecatedEndpointsRegistry.LegacyExportsSunsetDate.Month); + } + + [Fact] + public void SunsetDate_IsAfterDeprecationDate() + { + Assert.True( + DeprecatedEndpointsRegistry.LegacyExportsSunsetDate > + DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecationHeaderExtensionsTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecationHeaderExtensionsTests.cs new file mode 100644 index 000000000..0aae3bfa4 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/Deprecation/DeprecationHeaderExtensionsTests.cs @@ -0,0 +1,130 @@ +using Microsoft.AspNetCore.Http; +using StellaOps.ExportCenter.WebService.Deprecation; +using Xunit; + +namespace StellaOps.ExportCenter.Tests.Deprecation; + 
/// <summary>
/// Verifies that <c>AddDeprecationHeaders</c> emits the RFC-style
/// Deprecation / Sunset / Link / Warning response headers.
/// </summary>
public sealed class DeprecationHeaderExtensionsTests
{
    [Fact]
    public void AddDeprecationHeaders_SetsDeprecationHeader()
    {
        var context = CreateHttpContext();
        var info = CreateSampleDeprecationInfo();

        context.AddDeprecationHeaders(info);

        Assert.True(context.Response.Headers.ContainsKey(DeprecationHeaderExtensions.DeprecationHeader));
    }

    [Fact]
    public void AddDeprecationHeaders_SetsSunsetHeader()
    {
        var context = CreateHttpContext();
        var info = CreateSampleDeprecationInfo();

        context.AddDeprecationHeaders(info);

        Assert.True(context.Response.Headers.ContainsKey(DeprecationHeaderExtensions.SunsetHeader));
    }

    [Fact]
    public void AddDeprecationHeaders_SetsLinkHeaderWithSuccessor()
    {
        var context = CreateHttpContext();
        var info = CreateSampleDeprecationInfo();

        context.AddDeprecationHeaders(info);

        var linkHeader = context.Response.Headers[DeprecationHeaderExtensions.LinkHeader].ToString();
        Assert.Contains("successor-version", linkHeader);
        Assert.Contains("/v1/new-endpoint", linkHeader);
    }

    [Fact]
    public void AddDeprecationHeaders_SetsLinkHeaderWithDocumentation()
    {
        var context = CreateHttpContext();
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow,
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
            SuccessorPath: "/v1/new",
            DocumentationUrl: "https://docs.example.com/migration");

        context.AddDeprecationHeaders(info);

        var linkHeader = context.Response.Headers[DeprecationHeaderExtensions.LinkHeader].ToString();
        Assert.Contains("deprecation", linkHeader);
        Assert.Contains("https://docs.example.com/migration", linkHeader);
    }

    [Fact]
    public void AddDeprecationHeaders_SetsWarningHeader()
    {
        var context = CreateHttpContext();
        var info = CreateSampleDeprecationInfo();

        context.AddDeprecationHeaders(info);

        // 299 = "miscellaneous persistent warning" code.
        var warningHeader = context.Response.Headers[DeprecationHeaderExtensions.WarningHeader].ToString();
        Assert.Contains("299", warningHeader);
        Assert.Contains("/v1/new-endpoint", warningHeader);
    }

    [Fact]
    public void AddDeprecationHeaders_WarningIncludesCustomReason()
    {
        var context = CreateHttpContext();
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow,
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
            SuccessorPath: "/v1/new",
            Reason: "Custom deprecation reason");

        context.AddDeprecationHeaders(info);

        var warningHeader = context.Response.Headers[DeprecationHeaderExtensions.WarningHeader].ToString();
        Assert.Contains("Custom deprecation reason", warningHeader);
    }

    [Fact]
    public void AddDeprecationHeaders_FormatsDateAsRfc1123()
    {
        var context = CreateHttpContext();
        var deprecatedAt = new DateTimeOffset(2025, 1, 15, 12, 30, 45, TimeSpan.Zero);
        var info = new DeprecationInfo(
            DeprecatedAt: deprecatedAt,
            SunsetAt: deprecatedAt.AddMonths(6),
            SuccessorPath: "/v1/new");

        context.AddDeprecationHeaders(info);

        var deprecationHeader = context.Response.Headers[DeprecationHeaderExtensions.DeprecationHeader].ToString();
        // RFC 1123 format: "ddd, dd MMM yyyy HH:mm:ss 'GMT'"
        Assert.Matches(@"\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} GMT", deprecationHeader);
    }

    [Fact]
    public void CreateDeprecationFilter_ReturnsNonNullFilter()
    {
        var info = CreateSampleDeprecationInfo();

        var filter = DeprecationHeaderExtensions.CreateDeprecationFilter(info);

        Assert.NotNull(filter);
    }

    /// <summary>Bare in-memory HttpContext; no server features needed.</summary>
    private static HttpContext CreateHttpContext()
    {
        var context = new DefaultHttpContext();
        return context;
    }

    /// <summary>Deprecation info with a six-month sunset window.</summary>
    private static DeprecationInfo CreateSampleDeprecationInfo()
    {
        return new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow,
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
            SuccessorPath: "/v1/new-endpoint");
    }
}
// --- Deprecation/DeprecationInfoTests.cs ---
using StellaOps.ExportCenter.WebService.Deprecation;
using Xunit;

namespace StellaOps.ExportCenter.Tests.Deprecation;

/// <summary>
/// Tests for the <see cref="DeprecationInfo"/> record: sunset checks,
/// day countdown, and property initialization.
/// </summary>
public sealed class DeprecationInfoTests
{
    [Fact]
    public void IsPastSunset_WhenSunsetInFuture_ReturnsFalse()
    {
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-1),
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
            SuccessorPath: "/v1/new");

        Assert.False(info.IsPastSunset);
    }

    [Fact]
    public void IsPastSunset_WhenSunsetInPast_ReturnsTrue()
    {
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
            SuccessorPath: "/v1/new");

        Assert.True(info.IsPastSunset);
    }

    [Fact]
    public void DaysUntilSunset_CalculatesCorrectly()
    {
        // NOTE(review): boundary-sensitive - if DaysUntilSunset truncates a
        // TimeSpan measured a moment after `sunset` is captured, this can
        // evaluate to 29. Consider injecting TimeProvider into DeprecationInfo.
        var sunset = DateTimeOffset.UtcNow.AddDays(30);
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow,
            SunsetAt: sunset,
            SuccessorPath: "/v1/new");

        Assert.Equal(30, info.DaysUntilSunset);
    }

    [Fact]
    public void DaysUntilSunset_WhenPastSunset_ReturnsZero()
    {
        var info = new DeprecationInfo(
            DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
            SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
            SuccessorPath: "/v1/new");

        Assert.Equal(0, info.DaysUntilSunset);
    }

    [Fact]
    public void Record_InitializesAllProperties()
    {
        var deprecatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var sunsetAt = new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero);

        var info = new DeprecationInfo(
            DeprecatedAt: deprecatedAt,
            SunsetAt: sunsetAt,
            SuccessorPath: "/v1/exports",
            DocumentationUrl: "https://docs.example.com",
            Reason: "Replaced by new API");

        Assert.Equal(deprecatedAt, info.DeprecatedAt);
        Assert.Equal(sunsetAt, info.SunsetAt);
        Assert.Equal("/v1/exports", info.SuccessorPath);
        Assert.Equal("https://docs.example.com", info.DocumentationUrl);
        Assert.Equal("Replaced by new API", info.Reason);
    }
}

// --- ExportNotificationEmitterTests.cs ---
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Notifications;
using Xunit;

namespace StellaOps.ExportCenter.Tests;

/// <summary>
/// Tests for <see cref="ExportNotificationEmitter"/>: sink publishing, retry,
/// DLQ routing, timeline channel, and webhook delivery.
/// (Class continues on the following chunk lines.)
/// </summary>
public sealed class ExportNotificationEmitterTests
{
    private readonly InMemoryExportNotificationSink _sink;
    private readonly InMemoryExportNotificationDlq _dlq;
    private readonly FakeTimeProvider _timeProvider;
    private readonly ExportNotificationEmitter _emitter;

    public ExportNotificationEmitterTests()
    {
        _sink = new InMemoryExportNotificationSink();
        _dlq = new InMemoryExportNotificationDlq();
        // Fixed clock so CreatedAt values are deterministic.
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));

        // TODO(review): the logger's generic argument was stripped by
        // extraction; NullLogger<ExportNotificationEmitter> assumed.
        _emitter = new ExportNotificationEmitter(
            _sink,
            _dlq,
            _timeProvider,
            NullLogger<ExportNotificationEmitter>.Instance);
    }

    [Fact]
    public async Task EmitAirgapReadyAsync_PublishesToSink()
    {
        var notification = CreateTestNotification();

        var result = await _emitter.EmitAirgapReadyAsync(notification);

        Assert.True(result.Success);
        Assert.Equal(1, result.AttemptCount);
        Assert.Equal(1, _sink.Count);
    }
_sink.GetMessages(ExportNotificationTypes.AirgapReady); + Assert.Single(messages); + } + + [Fact] + public async Task EmitAirgapReadyAsync_SerializesPayloadWithSnakeCase() + { + var notification = CreateTestNotification(); + + await _emitter.EmitAirgapReadyAsync(notification); + + var messages = _sink.GetMessages(ExportNotificationTypes.AirgapReady); + var payload = messages.First(); + + Assert.Contains("\"export_id\":", payload); + Assert.Contains("\"bundle_id\":", payload); + Assert.Contains("\"tenant_id\":", payload); + Assert.Contains("\"artifact_sha256\":", payload); + } + + [Fact] + public async Task EmitAirgapReadyAsync_RoutesToDlqOnFailure() + { + var failingSink = new FailingNotificationSink(maxFailures: 10); + var emitter = new ExportNotificationEmitter( + failingSink, + _dlq, + _timeProvider, + NullLogger.Instance, + new ExportNotificationEmitterOptions(MaxRetries: 3, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30))); + + var notification = CreateTestNotification(); + + var result = await emitter.EmitAirgapReadyAsync(notification); + + Assert.False(result.Success); + Assert.Equal(1, _dlq.Count); + } + + [Fact] + public async Task EmitAirgapReadyAsync_DlqEntryContainsCorrectData() + { + var failingSink = new FailingNotificationSink(maxFailures: 10); + var emitter = new ExportNotificationEmitter( + failingSink, + _dlq, + _timeProvider, + NullLogger.Instance, + new ExportNotificationEmitterOptions(MaxRetries: 1, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30))); + + var notification = CreateTestNotification(); + + await emitter.EmitAirgapReadyAsync(notification); + + var dlqEntries = _dlq.GetAll(); + Assert.Single(dlqEntries); + + var entry = dlqEntries.First(); + Assert.Equal(notification.ExportId, entry.ExportId); + Assert.Equal(notification.BundleId, entry.BundleId); + Assert.Equal(notification.TenantId, entry.TenantId); + Assert.Equal(ExportNotificationTypes.AirgapReady, entry.EventType); + 
Assert.NotEmpty(entry.OriginalPayload); + } + + [Fact] + public async Task EmitAirgapReadyAsync_RetriesTransientFailures() + { + var failingSink = new FailingNotificationSink(maxFailures: 2); + var emitter = new ExportNotificationEmitter( + failingSink, + _dlq, + _timeProvider, + NullLogger.Instance, + new ExportNotificationEmitterOptions(MaxRetries: 5, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30))); + + var notification = CreateTestNotification(); + + var result = await emitter.EmitAirgapReadyAsync(notification); + + Assert.True(result.Success); + Assert.Equal(3, result.AttemptCount); + Assert.Equal(0, _dlq.Count); + } + + [Fact] + public async Task EmitToTimelineAsync_UsesTimelineChannel() + { + var notification = CreateTestNotification(); + + var result = await _emitter.EmitToTimelineAsync(notification); + + Assert.True(result.Success); + + var messages = _sink.GetMessages(ExportNotificationTypes.TimelineAirgapReady); + Assert.Single(messages); + } + + [Fact] + public async Task EmitAirgapReadyAsync_IncludesMetadataInPayload() + { + var notification = new ExportAirgapReadyNotification + { + ArtifactSha256 = "abc123", + ArtifactUri = "https://example.com/artifact", + BundleId = "bundle-001", + CreatedAt = _timeProvider.GetUtcNow(), + ExportId = "export-001", + PortableVersion = "v1", + ProfileId = "profile-001", + RootHash = "root123", + TenantId = "tenant-001", + Metadata = new ExportAirgapReadyMetadata + { + ExportSizeBytes = 1024, + PortableSizeBytes = 512, + SourceUri = "https://source.example.com/bundle" + } + }; + + await _emitter.EmitAirgapReadyAsync(notification); + + var messages = _sink.GetMessages(ExportNotificationTypes.AirgapReady); + var payload = messages.First(); + + Assert.Contains("\"export_size_bytes\":1024", payload); + Assert.Contains("\"portable_size_bytes\":512", payload); + Assert.Contains("\"source_uri\":\"https://source.example.com/bundle\"", payload); + } + + [Fact] + public async Task 
EmitAirgapReadyAsync_WithWebhook_DeliversToWebhook() + { + var webhookClient = new FakeWebhookClient(); + var emitter = new ExportNotificationEmitter( + _sink, + _dlq, + _timeProvider, + NullLogger.Instance, + new ExportNotificationEmitterOptions(MaxRetries: 5, WebhookEnabled: true, WebhookTimeout: TimeSpan.FromSeconds(30)), + webhookClient); + + var notification = CreateTestNotification(); + + var result = await emitter.EmitAirgapReadyAsync(notification); + + Assert.True(result.Success); + Assert.Equal(1, webhookClient.DeliveryCount); + } + + [Fact] + public async Task EmitAirgapReadyAsync_WithWebhookFailure_RoutesToDlq() + { + var webhookClient = new FakeWebhookClient(alwaysFail: true); + var emitter = new ExportNotificationEmitter( + _sink, + _dlq, + _timeProvider, + NullLogger.Instance, + new ExportNotificationEmitterOptions(MaxRetries: 2, WebhookEnabled: true, WebhookTimeout: TimeSpan.FromSeconds(30)), + webhookClient); + + var notification = CreateTestNotification(); + + var result = await emitter.EmitAirgapReadyAsync(notification); + + Assert.False(result.Success); + Assert.Equal(1, _dlq.Count); + } + + [Fact] + public async Task EmitAirgapReadyAsync_ThrowsOnNullNotification() + { + await Assert.ThrowsAsync( + () => _emitter.EmitAirgapReadyAsync(null!)); + } + + private ExportAirgapReadyNotification CreateTestNotification() + { + return new ExportAirgapReadyNotification + { + ArtifactSha256 = "sha256-test-hash", + ArtifactUri = "https://artifacts.example.com/export/test.tgz", + BundleId = Guid.NewGuid().ToString("D"), + CreatedAt = _timeProvider.GetUtcNow(), + ExportId = Guid.NewGuid().ToString("D"), + PortableVersion = "v1", + ProfileId = "mirror:full", + RootHash = "root-hash-test", + TenantId = Guid.NewGuid().ToString("D") + }; + } + + private sealed class FailingNotificationSink : IExportNotificationSink + { + private readonly int _maxFailures; + private int _failures; + + public FailingNotificationSink(int maxFailures) + { + _maxFailures = maxFailures; 
+ } + + public Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default) + { + if (_failures < _maxFailures) + { + _failures++; + throw new TimeoutException("Simulated transient failure"); + } + return Task.CompletedTask; + } + } + + private sealed class FakeWebhookClient : IExportWebhookClient + { + private readonly bool _alwaysFail; + + public int DeliveryCount { get; private set; } + + public FakeWebhookClient(bool alwaysFail = false) + { + _alwaysFail = alwaysFail; + } + + public Task DeliverAsync( + string eventType, + string payload, + DateTimeOffset sentAt, + CancellationToken cancellationToken = default) + { + DeliveryCount++; + + if (_alwaysFail) + { + return Task.FromResult(new WebhookDeliveryResult( + Success: false, + StatusCode: 500, + ErrorMessage: "Simulated failure", + ShouldRetry: false)); + } + + return Task.FromResult(new WebhookDeliveryResult( + Success: true, + StatusCode: 200, + ErrorMessage: null, + ShouldRetry: false)); + } + } +} + +public sealed class ExportWebhookClientTests +{ + [Fact] + public void ComputeSignature_ProducesDeterministicOutput() + { + var payload = "{\"export_id\":\"abc123\"}"; + var sentAt = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero); + var signingKey = "test-secret-key"; + + var sig1 = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey); + var sig2 = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey); + + Assert.Equal(sig1, sig2); + } + + [Fact] + public void ComputeSignature_StartsWithSha256Prefix() + { + var payload = "{\"test\":true}"; + var sentAt = DateTimeOffset.UtcNow; + var signingKey = "test-key"; + + var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey); + + Assert.StartsWith("sha256=", signature); + } + + [Fact] + public void ComputeSignature_ChangesWithDifferentPayload() + { + var sentAt = DateTimeOffset.UtcNow; + var signingKey = "test-key"; + + var sig1 = 
ExportWebhookClient.ComputeSignature("{\"a\":1}", sentAt, signingKey); + var sig2 = ExportWebhookClient.ComputeSignature("{\"a\":2}", sentAt, signingKey); + + Assert.NotEqual(sig1, sig2); + } + + [Fact] + public void ComputeSignature_ChangesWithDifferentTimestamp() + { + var payload = "{\"test\":true}"; + var signingKey = "test-key"; + + var sig1 = ExportWebhookClient.ComputeSignature(payload, new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), signingKey); + var sig2 = ExportWebhookClient.ComputeSignature(payload, new DateTimeOffset(2025, 1, 2, 0, 0, 0, TimeSpan.Zero), signingKey); + + Assert.NotEqual(sig1, sig2); + } + + [Fact] + public void ComputeSignature_ChangesWithDifferentKey() + { + var payload = "{\"test\":true}"; + var sentAt = DateTimeOffset.UtcNow; + + var sig1 = ExportWebhookClient.ComputeSignature(payload, sentAt, "key1"); + var sig2 = ExportWebhookClient.ComputeSignature(payload, sentAt, "key2"); + + Assert.NotEqual(sig1, sig2); + } + + [Fact] + public void ComputeSignature_AcceptsBase64Key() + { + var payload = "{\"test\":true}"; + var sentAt = DateTimeOffset.UtcNow; + var base64Key = Convert.ToBase64String(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }); + + var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, base64Key); + + Assert.StartsWith("sha256=", signature); + } + + [Fact] + public void ComputeSignature_AcceptsHexKey() + { + var payload = "{\"test\":true}"; + var sentAt = DateTimeOffset.UtcNow; + var hexKey = "0102030405060708"; + + var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, hexKey); + + Assert.StartsWith("sha256=", signature); + } + + [Fact] + public void VerifySignature_ReturnsTrueForValidSignature() + { + var payload = "{\"test\":true}"; + var sentAt = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero); + var signingKey = "test-key"; + + var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey); + var isValid = ExportWebhookClient.VerifySignature(payload, sentAt, 
signingKey, signature); + + Assert.True(isValid); + } + + [Fact] + public void VerifySignature_ReturnsFalseForInvalidSignature() + { + var payload = "{\"test\":true}"; + var sentAt = DateTimeOffset.UtcNow; + var signingKey = "test-key"; + + var isValid = ExportWebhookClient.VerifySignature(payload, sentAt, signingKey, "sha256=invalid"); + + Assert.False(isValid); + } + + [Fact] + public void VerifySignature_ReturnsFalseForTamperedPayload() + { + var sentAt = DateTimeOffset.UtcNow; + var signingKey = "test-key"; + + var signature = ExportWebhookClient.ComputeSignature("{\"test\":true}", sentAt, signingKey); + var isValid = ExportWebhookClient.VerifySignature("{\"test\":false}", sentAt, signingKey, signature); + + Assert.False(isValid); + } +} + +public sealed class InMemoryExportNotificationSinkTests +{ + [Fact] + public async Task PublishAsync_StoresMessage() + { + var sink = new InMemoryExportNotificationSink(); + + await sink.PublishAsync("test.channel", "{\"test\":true}"); + + Assert.Equal(1, sink.Count); + } + + [Fact] + public async Task GetMessages_ReturnsMessagesByChannel() + { + var sink = new InMemoryExportNotificationSink(); + + await sink.PublishAsync("channel.a", "{\"a\":1}"); + await sink.PublishAsync("channel.b", "{\"b\":2}"); + await sink.PublishAsync("channel.a", "{\"a\":3}"); + + var messagesA = sink.GetMessages("channel.a"); + var messagesB = sink.GetMessages("channel.b"); + + Assert.Equal(2, messagesA.Count); + Assert.Single(messagesB); + } + + [Fact] + public async Task Clear_RemovesAllMessages() + { + var sink = new InMemoryExportNotificationSink(); + + await sink.PublishAsync("test", "message1"); + await sink.PublishAsync("test", "message2"); + sink.Clear(); + + Assert.Equal(0, sink.Count); + } +} + +public sealed class InMemoryExportNotificationDlqTests +{ + [Fact] + public async Task EnqueueAsync_StoresEntry() + { + var dlq = new InMemoryExportNotificationDlq(); + var entry = CreateTestDlqEntry(); + + await dlq.EnqueueAsync(entry); + + 
Assert.Equal(1, dlq.Count); + } + + [Fact] + public async Task GetPendingAsync_ReturnsAllEntries() + { + var dlq = new InMemoryExportNotificationDlq(); + + await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1")); + await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-2")); + + var pending = await dlq.GetPendingAsync(); + + Assert.Equal(2, pending.Count); + } + + [Fact] + public async Task GetPendingAsync_FiltersByTenant() + { + var dlq = new InMemoryExportNotificationDlq(); + + await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1")); + await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-2")); + await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1")); + + var pending = await dlq.GetPendingAsync(tenantId: "tenant-1"); + + Assert.Equal(2, pending.Count); + Assert.All(pending, e => Assert.Equal("tenant-1", e.TenantId)); + } + + [Fact] + public async Task GetPendingAsync_RespectsLimit() + { + var dlq = new InMemoryExportNotificationDlq(); + + for (var i = 0; i < 10; i++) + { + await dlq.EnqueueAsync(CreateTestDlqEntry()); + } + + var pending = await dlq.GetPendingAsync(limit: 5); + + Assert.Equal(5, pending.Count); + } + + private static ExportNotificationDlqEntry CreateTestDlqEntry(string? tenantId = null) + { + return new ExportNotificationDlqEntry + { + EventType = ExportNotificationTypes.AirgapReady, + ExportId = Guid.NewGuid().ToString(), + BundleId = Guid.NewGuid().ToString(), + TenantId = tenantId ?? Guid.NewGuid().ToString(), + FailureReason = "Test failure", + AttemptCount = 3, + LastAttemptAt = DateTimeOffset.UtcNow, + OriginalPayload = "{}" + }; + } +} + +/// +/// Fake time provider for testing. 
+/// +internal sealed class FakeTimeProvider : TimeProvider +{ + private DateTimeOffset _utcNow; + + public FakeTimeProvider(DateTimeOffset utcNow) + { + _utcNow = utcNow; + } + + public override DateTimeOffset GetUtcNow() => _utcNow; + + public void Advance(TimeSpan duration) => _utcNow = _utcNow.Add(duration); + + public void SetUtcNow(DateTimeOffset utcNow) => _utcNow = utcNow; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleBuilderTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleBuilderTests.cs new file mode 100644 index 000000000..d917ba82f --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleBuilderTests.cs @@ -0,0 +1,396 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Text; +using System.Text.Json; +using StellaOps.Cryptography; +using StellaOps.ExportCenter.Core.MirrorBundle; +using Xunit; + +namespace StellaOps.ExportCenter.Tests; + +public sealed class MirrorBundleBuilderTests : IDisposable +{ + private readonly string _tempDir; + private readonly MirrorBundleBuilder _builder; + private readonly ICryptoHash _cryptoHash; + + public MirrorBundleBuilderTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"mirror-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + _cryptoHash = new DefaultCryptoHash(); + _builder = new MirrorBundleBuilder(_cryptoHash); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public void Build_FullBundle_ProducesValidArchive() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-1234\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "registry.example.com/app:*" }, null, null), + new[] + { + new 
MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result = _builder.Build(request); + + Assert.NotNull(result); + Assert.NotNull(result.Manifest); + Assert.NotEmpty(result.ManifestJson); + Assert.NotEmpty(result.RootHash); + Assert.True(result.BundleStream.Length > 0); + Assert.Equal("mirror:full", result.Manifest.Profile); + } + + [Fact] + public void Build_DeltaBundle_IncludesDeltaMetadata() + { + var vexPath = CreateTestFile("vex.jsonl.zst", "{\"id\":\"VEX-001\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Delta, + new MirrorBundleSelectors(new[] { "product-a" }, DateTimeOffset.UtcNow.AddDays(-7), DateTimeOffset.UtcNow), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Vex, vexPath) + }, + DeltaOptions: new MirrorBundleDeltaOptions("run-20251001", "sha256:abc123")); + + var result = _builder.Build(request); + + Assert.NotNull(result.Manifest.Delta); + Assert.Equal("run-20251001", result.Manifest.Delta.BaseExportId); + Assert.Equal("sha256:abc123", result.Manifest.Delta.BaseManifestDigest); + Assert.Equal("mirror:delta", result.Manifest.Profile); + } + + [Fact] + public void Build_WithEncryption_IncludesEncryptionMetadata() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-5678\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-b" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }, + Encryption: new MirrorBundleEncryptionOptions( + MirrorBundleEncryptionMode.Age, + new[] { "age1recipient..." 
}, + Strict: false)); + + var result = _builder.Build(request); + + Assert.NotNull(result.Manifest.Encryption); + Assert.Equal("age", result.Manifest.Encryption.Mode); + Assert.False(result.Manifest.Encryption.Strict); + Assert.Single(result.Manifest.Encryption.Recipients); + } + + [Fact] + public void Build_ProducesDeterministicOutput() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-DETERM\"}"); + var runId = new Guid("11111111-2222-3333-4444-555555555555"); + var tenantId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"); + + var request = new MirrorBundleBuildRequest( + runId, + tenantId, + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-deterministic" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result1 = _builder.Build(request); + var result2 = _builder.Build(request); + + // Root hashes should match for identical inputs + Assert.Equal(result1.RootHash, result2.RootHash); + + // Archive content should be identical + var bytes1 = result1.BundleStream.ToArray(); + var bytes2 = result2.BundleStream.ToArray(); + Assert.Equal(bytes1, bytes2); + } + + [Fact] + public void Build_ArchiveContainsExpectedFiles() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-ARCHIVE\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-archive" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.BundleStream); + + Assert.Contains("manifest.yaml", fileNames); + Assert.Contains("export.json", fileNames); + Assert.Contains("provenance.json", fileNames); + Assert.Contains("checksums.txt", fileNames); + Assert.Contains("README.md", fileNames); + 
Assert.Contains("verify-mirror.sh", fileNames); + Assert.Contains("indexes/advisories.index.json", fileNames); + Assert.Contains("indexes/vex.index.json", fileNames); + Assert.Contains("data/raw/advisories/advisories.jsonl.zst", fileNames); + } + + [Fact] + public void Build_TarEntriesHaveDeterministicMetadata() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-METADATA\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-metadata" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result = _builder.Build(request); + var entries = ExtractTarEntryMetadata(result.BundleStream); + + var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + + foreach (var entry in entries) + { + Assert.Equal(0, entry.Uid); + Assert.Equal(0, entry.Gid); + Assert.Equal(string.Empty, entry.UserName); + Assert.Equal(string.Empty, entry.GroupName); + Assert.Equal(expectedTimestamp, entry.ModificationTime); + } + } + + [Fact] + public void Build_SbomWithSubject_UsesCorrectPath() + { + var sbomPath = CreateTestFile("sbom.json", "{\"bomFormat\":\"CycloneDX\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-sbom" }, null, null), + new[] + { + new MirrorBundleDataSource( + MirrorBundleDataCategory.Sbom, + sbomPath, + SubjectId: "registry.example.com/app:v1.2.3") + }); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.BundleStream); + + Assert.Contains("data/raw/sboms/registry.example.com-app-v1.2.3/sbom.json", fileNames); + } + + [Fact] + public void Build_NormalizedData_UsesNormalizedPath() + { + var normalizedPath = CreateTestFile("advisories-normalized.jsonl.zst", "{\"id\":\"CVE-2024-NORM\"}"); + var request = new 
MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-normalized" }, null, null), + new[] + { + new MirrorBundleDataSource( + MirrorBundleDataCategory.Advisories, + normalizedPath, + IsNormalized: true) + }); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.BundleStream); + + Assert.Contains("data/normalized/advisories/advisories-normalized.jsonl.zst", fileNames); + } + + [Fact] + public void Build_CountsAreAccurate() + { + var advisory1 = CreateTestFile("advisory1.jsonl.zst", "{\"id\":\"CVE-1\"}"); + var advisory2 = CreateTestFile("advisory2.jsonl.zst", "{\"id\":\"CVE-2\"}"); + var vex1 = CreateTestFile("vex1.jsonl.zst", "{\"id\":\"VEX-1\"}"); + var sbom1 = CreateTestFile("sbom1.json", "{\"bomFormat\":\"CycloneDX\"}"); + + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-counts" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisory1), + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisory2), + new MirrorBundleDataSource(MirrorBundleDataCategory.Vex, vex1), + new MirrorBundleDataSource(MirrorBundleDataCategory.Sbom, sbom1) + }); + + var result = _builder.Build(request); + + Assert.Equal(2, result.Manifest.Counts.Advisories); + Assert.Equal(1, result.Manifest.Counts.Vex); + Assert.Equal(1, result.Manifest.Counts.Sboms); + } + + [Fact] + public void Build_ThrowsForMissingDataSource() + { + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-missing" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, "/nonexistent/file.jsonl.zst") + }); + + Assert.Throws(() => _builder.Build(request)); + } + + [Fact] + public void 
Build_ThrowsForDeltaWithoutOptions() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-DELTA\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Delta, + new MirrorBundleSelectors(new[] { "product-delta" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }, + DeltaOptions: null); + + Assert.Throws(() => _builder.Build(request)); + } + + [Fact] + public void Build_ProvenanceDocumentContainsSubjects() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-PROVENANCE\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-provenance" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result = _builder.Build(request); + + Assert.NotEmpty(result.ProvenanceDocument.Subjects); + Assert.Contains(result.ProvenanceDocument.Subjects, s => s.Name == "manifest.yaml"); + Assert.NotNull(result.ProvenanceDocument.Builder); + Assert.NotEmpty(result.ProvenanceDocument.Builder.ExporterVersion); + } + + [Fact] + public void Build_ExportDocumentContainsManifestDigest() + { + var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-EXPORT\"}"); + var request = new MirrorBundleBuildRequest( + Guid.NewGuid(), + Guid.NewGuid(), + MirrorBundleVariant.Full, + new MirrorBundleSelectors(new[] { "product-export" }, null, null), + new[] + { + new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath) + }); + + var result = _builder.Build(request); + + Assert.StartsWith("sha256:", result.ExportDocument.ManifestDigest); + Assert.Equal(result.Manifest.Profile, $"{result.ExportDocument.Profile.Kind}:{result.ExportDocument.Profile.Variant}"); + } + + private string CreateTestFile(string fileName, string content) + { + var path 
= Path.Combine(_tempDir, fileName); + File.WriteAllText(path, content); + return path; + } + + private static List ExtractFileNames(MemoryStream bundleStream) + { + bundleStream.Position = 0; + var fileNames = new List(); + + using var gzip = new GZipStream(bundleStream, CompressionMode.Decompress, leaveOpen: true); + using var tar = new TarReader(gzip, leaveOpen: true); + + TarEntry? entry; + while ((entry = tar.GetNextEntry()) is not null) + { + fileNames.Add(entry.Name); + } + + bundleStream.Position = 0; + return fileNames; + } + + private static List ExtractTarEntryMetadata(MemoryStream bundleStream) + { + bundleStream.Position = 0; + var entries = new List(); + + using var gzip = new GZipStream(bundleStream, CompressionMode.Decompress, leaveOpen: true); + using var tar = new TarReader(gzip, leaveOpen: true); + + TarEntry? entry; + while ((entry = tar.GetNextEntry()) is not null) + { + entries.Add(new TarEntryMetadata( + entry.Uid, + entry.Gid, + entry.UserName ?? string.Empty, + entry.GroupName ?? 
string.Empty, + entry.ModificationTime)); + } + + bundleStream.Position = 0; + return entries; + } + + private sealed record TarEntryMetadata( + int Uid, + int Gid, + string UserName, + string GroupName, + DateTimeOffset ModificationTime); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleSigningTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleSigningTests.cs new file mode 100644 index 000000000..8bd07f0ed --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/MirrorBundleSigningTests.cs @@ -0,0 +1,159 @@ +using System.Text; +using System.Text.Json; +using StellaOps.Cryptography; +using StellaOps.ExportCenter.Core.MirrorBundle; +using Xunit; + +namespace StellaOps.ExportCenter.Tests; + +public sealed class MirrorBundleSigningTests +{ + private readonly ICryptoHmac _cryptoHmac; + private readonly HmacMirrorBundleManifestSigner _signer; + + public MirrorBundleSigningTests() + { + _cryptoHmac = new DefaultCryptoHmac(); + _signer = new HmacMirrorBundleManifestSigner(_cryptoHmac, "test-signing-key-12345", "test-key-id"); + } + + [Fact] + public async Task SignExportDocumentAsync_ReturnsDsseEnvelope() + { + var exportJson = """{"runId":"abc123","tenantId":"tenant-1"}"""; + + var result = await _signer.SignExportDocumentAsync(exportJson); + + Assert.NotNull(result); + Assert.Equal("application/vnd.stellaops.mirror-bundle.export+json", result.PayloadType); + Assert.NotEmpty(result.Payload); + Assert.Single(result.Signatures); + Assert.Equal("test-key-id", result.Signatures[0].KeyId); + Assert.NotEmpty(result.Signatures[0].Signature); + } + + [Fact] + public async Task SignManifestAsync_ReturnsDsseEnvelope() + { + var manifestYaml = "profile: mirror:full\nrunId: abc123"; + + var result = await _signer.SignManifestAsync(manifestYaml); + + Assert.NotNull(result); + Assert.Equal("application/vnd.stellaops.mirror-bundle.manifest+yaml", result.PayloadType); 
+ Assert.NotEmpty(result.Payload); + Assert.Single(result.Signatures); + } + + [Fact] + public async Task SignArchiveAsync_ReturnsBase64Signature() + { + using var stream = new MemoryStream(Encoding.UTF8.GetBytes("test archive content")); + + var signature = await _signer.SignArchiveAsync(stream); + + Assert.NotEmpty(signature); + // Verify it's valid base64 + var decoded = Convert.FromBase64String(signature); + Assert.NotEmpty(decoded); + } + + [Fact] + public async Task SignArchiveAsync_ResetStreamPosition() + { + using var stream = new MemoryStream(Encoding.UTF8.GetBytes("test archive content")); + stream.Position = 5; + + await _signer.SignArchiveAsync(stream); + + Assert.Equal(0, stream.Position); + } + + [Fact] + public async Task SignExportDocumentAsync_PayloadIsBase64Encoded() + { + var exportJson = """{"runId":"encoded-test"}"""; + + var result = await _signer.SignExportDocumentAsync(exportJson); + + var decodedPayload = Encoding.UTF8.GetString(Convert.FromBase64String(result.Payload)); + Assert.Equal(exportJson, decodedPayload); + } + + [Fact] + public async Task SignExportDocumentAsync_IsDeterministic() + { + var exportJson = """{"runId":"deterministic-test"}"""; + + var result1 = await _signer.SignExportDocumentAsync(exportJson); + var result2 = await _signer.SignExportDocumentAsync(exportJson); + + Assert.Equal(result1.Signatures[0].Signature, result2.Signatures[0].Signature); + Assert.Equal(result1.Payload, result2.Payload); + } + + [Fact] + public void ToJson_SerializesCorrectly() + { + var signature = new MirrorBundleDsseSignature( + "test/payload+json", + Convert.ToBase64String(Encoding.UTF8.GetBytes("test-payload")), + new[] { new MirrorBundleDsseSignatureEntry("sig-value", "key-id-1") }); + + var json = signature.ToJson(); + + Assert.Contains("\"payloadType\"", json); + Assert.Contains("test/payload+json", json); + Assert.Contains("\"signatures\"", json); + Assert.Contains("sig-value", json); + + // Verify it's valid JSON + var parsed = 
JsonDocument.Parse(json); + Assert.NotNull(parsed); + } + + [Fact] + public void Constructor_ThrowsForEmptyKey() + { + Assert.Throws(() => + new HmacMirrorBundleManifestSigner(_cryptoHmac, "", "key-id")); + } + + [Fact] + public void Constructor_ThrowsForNullKey() + { + Assert.Throws(() => + new HmacMirrorBundleManifestSigner(_cryptoHmac, null!, "key-id")); + } + + [Fact] + public void Constructor_ThrowsForNullCryptoHmac() + { + Assert.Throws(() => + new HmacMirrorBundleManifestSigner(null!, "test-key", "key-id")); + } + + [Fact] + public void Constructor_UsesDefaultKeyIdWhenEmpty() + { + var signer = new HmacMirrorBundleManifestSigner(_cryptoHmac, "test-key", ""); + var result = signer.SignExportDocumentAsync("{}").Result; + + Assert.Equal("mirror-bundle-hmac", result.Signatures[0].KeyId); + } + + [Fact] + public async Task SignArchiveAsync_ThrowsForNonSeekableStream() + { + using var nonSeekable = new NonSeekableMemoryStream(Encoding.UTF8.GetBytes("test")); + + await Assert.ThrowsAsync(() => + _signer.SignArchiveAsync(nonSeekable)); + } + + private sealed class NonSeekableMemoryStream : MemoryStream + { + public NonSeekableMemoryStream(byte[] buffer) : base(buffer) { } + public override bool CanSeek => false; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitDistributorTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitDistributorTests.cs new file mode 100644 index 000000000..4595a1e21 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitDistributorTests.cs @@ -0,0 +1,290 @@ +using System.Text; +using System.Text.Json; +using StellaOps.ExportCenter.Core.OfflineKit; +using Xunit; + +namespace StellaOps.ExportCenter.Tests; + +public sealed class OfflineKitDistributorTests : IDisposable +{ + private readonly string _tempDir; + private readonly FakeTimeProvider _timeProvider; + private readonly FakeCryptoHash _cryptoHash; + private 
readonly OfflineKitDistributor _distributor; + + public OfflineKitDistributorTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"offline-kit-dist-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + + _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero)); + _cryptoHash = new FakeCryptoHash(); + _distributor = new OfflineKitDistributor(_cryptoHash, _timeProvider); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public void DistributeToMirror_CopiesFilesToMirrorLocation() + { + var sourceKit = SetupSourceKit(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + Assert.True(result.Success); + Assert.True(Directory.Exists(Path.Combine(mirrorBase, "export", "attestations", kitVersion))); + } + + [Fact] + public void DistributeToMirror_CreatesManifestOfflineJson() + { + var sourceKit = SetupSourceKit(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + Assert.True(result.Success); + Assert.NotNull(result.ManifestPath); + Assert.True(File.Exists(result.ManifestPath)); + } + + [Fact] + public void DistributeToMirror_ManifestContainsAttestationEntry() + { + var sourceKit = SetupSourceKitWithAttestation(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + Assert.True(result.Success); + + var manifestJson = File.ReadAllText(result.ManifestPath!); + var manifest = JsonSerializer.Deserialize(manifestJson); + + var entries = manifest.GetProperty("entries").EnumerateArray().ToList(); + var attestationEntry = entries.FirstOrDefault(e => + e.GetProperty("kind").GetString() == 
"attestation-kit"); + + Assert.NotEqual(default, attestationEntry); + Assert.Contains("stella attest bundle verify", attestationEntry.GetProperty("cliExample").GetString()); + } + + [Fact] + public void DistributeToMirror_CreatesManifestChecksum() + { + var sourceKit = SetupSourceKit(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + Assert.True(result.Success); + Assert.True(File.Exists(result.ManifestPath + ".sha256")); + } + + [Fact] + public void DistributeToMirror_PreservesBytesExactly() + { + var sourceKit = SetupSourceKitWithAttestation(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var sourceFile = Path.Combine(sourceKit, "attestations", "export-attestation-bundle-v1.tgz"); + var sourceBytes = File.ReadAllBytes(sourceFile); + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + var targetFile = Path.Combine(result.TargetPath!, "attestations", "export-attestation-bundle-v1.tgz"); + var targetBytes = File.ReadAllBytes(targetFile); + + Assert.Equal(sourceBytes, targetBytes); + } + + [Fact] + public void DistributeToMirror_ReturnsCorrectFileCount() + { + var sourceKit = SetupSourceKitWithMultipleFiles(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + Assert.True(result.Success); + Assert.True(result.CopiedFileCount >= 3); // At least 3 files + } + + [Fact] + public void DistributeToMirror_SourceNotFound_ReturnsFailed() + { + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror("/nonexistent/path", mirrorBase, kitVersion); + + Assert.False(result.Success); + Assert.Contains("not found", result.ErrorMessage); + } + + [Fact] + public void VerifyDistribution_MatchingKits_ReturnsSuccess() + 
{ + var sourceKit = SetupSourceKitWithAttestation(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var distResult = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + Assert.True(distResult.Success); + + var verifyResult = _distributor.VerifyDistribution(sourceKit, distResult.TargetPath!); + + Assert.True(verifyResult.Success); + Assert.Empty(verifyResult.Mismatches); + } + + [Fact] + public void VerifyDistribution_MissingFile_ReportsError() + { + var sourceKit = SetupSourceKitWithAttestation(); + var targetKit = Path.Combine(_tempDir, "target-incomplete"); + Directory.CreateDirectory(targetKit); + + // Copy only some files + var sourceFile = Directory.GetFiles(sourceKit, "*", SearchOption.AllDirectories).First(); + // Don't copy anything to target + + var result = _distributor.VerifyDistribution(sourceKit, targetKit); + + Assert.False(result.Success); + Assert.NotEmpty(result.Mismatches); + } + + [Fact] + public void VerifyDistribution_ModifiedFile_ReportsHashMismatch() + { + var sourceKit = SetupSourceKitWithAttestation(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var distResult = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + Assert.True(distResult.Success); + + // Modify a file in target + var targetFile = Path.Combine(distResult.TargetPath!, "attestations", "export-attestation-bundle-v1.tgz"); + File.WriteAllText(targetFile, "modified content"); + + var verifyResult = _distributor.VerifyDistribution(sourceKit, distResult.TargetPath!); + + Assert.False(verifyResult.Success); + Assert.Contains(verifyResult.Mismatches, m => m.Contains("Hash mismatch")); + } + + [Fact] + public void DistributeToMirror_ManifestHasCorrectVersion() + { + var sourceKit = SetupSourceKit(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v2.0.0"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + var 
manifestJson = File.ReadAllText(result.ManifestPath!); + var manifest = JsonSerializer.Deserialize(manifestJson); + + Assert.Equal("offline-kit/v1", manifest.GetProperty("version").GetString()); + Assert.Equal(kitVersion, manifest.GetProperty("kitVersion").GetString()); + } + + [Fact] + public void DistributeToMirror_MirrorBundleEntry_HasCorrectPaths() + { + var sourceKit = SetupSourceKitWithMirror(); + var mirrorBase = Path.Combine(_tempDir, "mirror"); + var kitVersion = "v1"; + + var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion); + + var manifestJson = File.ReadAllText(result.ManifestPath!); + var manifest = JsonSerializer.Deserialize(manifestJson); + + var entries = manifest.GetProperty("entries").EnumerateArray().ToList(); + var mirrorEntry = entries.FirstOrDefault(e => + e.GetProperty("kind").GetString() == "mirror-bundle"); + + Assert.NotEqual(default, mirrorEntry); + Assert.Equal("mirrors/export-mirror-bundle-v1.tgz", mirrorEntry.GetProperty("artifact").GetString()); + } + + private string SetupSourceKit() + { + var kitPath = Path.Combine(_tempDir, $"source-kit-{Guid.NewGuid():N}"); + Directory.CreateDirectory(kitPath); + File.WriteAllText(Path.Combine(kitPath, "manifest.json"), "{}"); + return kitPath; + } + + private string SetupSourceKitWithAttestation() + { + var kitPath = Path.Combine(_tempDir, $"source-kit-{Guid.NewGuid():N}"); + Directory.CreateDirectory(Path.Combine(kitPath, "attestations")); + Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "attestations")); + + File.WriteAllBytes( + Path.Combine(kitPath, "attestations", "export-attestation-bundle-v1.tgz"), + Encoding.UTF8.GetBytes("test-attestation-bundle")); + + File.WriteAllText( + Path.Combine(kitPath, "checksums", "attestations", "export-attestation-bundle-v1.tgz.sha256"), + "abc123 export-attestation-bundle-v1.tgz"); + + File.WriteAllText(Path.Combine(kitPath, "manifest.json"), "{}"); + + return kitPath; + } + + private string 
SetupSourceKitWithMirror() + { + var kitPath = SetupSourceKitWithAttestation(); + + Directory.CreateDirectory(Path.Combine(kitPath, "mirrors")); + Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "mirrors")); + + File.WriteAllBytes( + Path.Combine(kitPath, "mirrors", "export-mirror-bundle-v1.tgz"), + Encoding.UTF8.GetBytes("test-mirror-bundle")); + + File.WriteAllText( + Path.Combine(kitPath, "checksums", "mirrors", "export-mirror-bundle-v1.tgz.sha256"), + "def456 export-mirror-bundle-v1.tgz"); + + return kitPath; + } + + private string SetupSourceKitWithMultipleFiles() + { + var kitPath = SetupSourceKitWithAttestation(); + + // Add bootstrap + Directory.CreateDirectory(Path.Combine(kitPath, "bootstrap")); + Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "bootstrap")); + + File.WriteAllBytes( + Path.Combine(kitPath, "bootstrap", "export-bootstrap-pack-v1.tgz"), + Encoding.UTF8.GetBytes("test-bootstrap")); + + File.WriteAllText( + Path.Combine(kitPath, "checksums", "bootstrap", "export-bootstrap-pack-v1.tgz.sha256"), + "ghi789 export-bootstrap-pack-v1.tgz"); + + return kitPath; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitPackagerTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitPackagerTests.cs new file mode 100644 index 000000000..7cc3f8d39 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OfflineKitPackagerTests.cs @@ -0,0 +1,326 @@ +using System.Text; +using System.Text.Json; +using StellaOps.ExportCenter.Core.OfflineKit; +using Xunit; + +namespace StellaOps.ExportCenter.Tests; + +public sealed class OfflineKitPackagerTests : IDisposable +{ + private readonly string _tempDir; + private readonly FakeTimeProvider _timeProvider; + private readonly FakeCryptoHash _cryptoHash; + private readonly OfflineKitPackager _packager; + + public OfflineKitPackagerTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), 
$"offline-kit-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + + _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero)); + _cryptoHash = new FakeCryptoHash(); + _packager = new OfflineKitPackager(_cryptoHash, _timeProvider); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public void AddAttestationBundle_CreatesArtifactAndChecksum() + { + var request = CreateTestAttestationRequest(); + + var result = _packager.AddAttestationBundle(_tempDir, request); + + Assert.True(result.Success); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath))); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath))); + } + + [Fact] + public void AddAttestationBundle_PreservesBytesExactly() + { + var originalBytes = Encoding.UTF8.GetBytes("test-attestation-bundle-content"); + var request = new OfflineKitAttestationRequest( + KitId: "kit-001", + ExportId: Guid.NewGuid().ToString(), + AttestationId: Guid.NewGuid().ToString(), + RootHash: "abc123", + BundleBytes: originalBytes, + CreatedAt: _timeProvider.GetUtcNow()); + + var result = _packager.AddAttestationBundle(_tempDir, request); + + var writtenBytes = File.ReadAllBytes(Path.Combine(_tempDir, result.ArtifactPath)); + Assert.Equal(originalBytes, writtenBytes); + } + + [Fact] + public void AddAttestationBundle_ChecksumFileContainsCorrectFormat() + { + var request = CreateTestAttestationRequest(); + + var result = _packager.AddAttestationBundle(_tempDir, request); + + var checksumContent = File.ReadAllText(Path.Combine(_tempDir, result.ChecksumPath)); + Assert.Contains("export-attestation-bundle-v1.tgz", checksumContent); + Assert.Contains(result.Sha256Hash, checksumContent); + Assert.Contains(" ", checksumContent); // Two spaces before filename + } + + [Fact] + public void AddAttestationBundle_RejectsOverwrite() + { + var request = 
CreateTestAttestationRequest(); + + // First write succeeds + var result1 = _packager.AddAttestationBundle(_tempDir, request); + Assert.True(result1.Success); + + // Second write fails (immutability) + var result2 = _packager.AddAttestationBundle(_tempDir, request); + Assert.False(result2.Success); + Assert.Contains("immutable", result2.ErrorMessage, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void AddMirrorBundle_CreatesArtifactAndChecksum() + { + var request = CreateTestMirrorRequest(); + + var result = _packager.AddMirrorBundle(_tempDir, request); + + Assert.True(result.Success); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath))); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath))); + } + + [Fact] + public void AddBootstrapPack_CreatesArtifactAndChecksum() + { + var request = CreateTestBootstrapRequest(); + + var result = _packager.AddBootstrapPack(_tempDir, request); + + Assert.True(result.Success); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath))); + Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath))); + } + + [Fact] + public void CreateAttestationEntry_HasCorrectKind() + { + var request = CreateTestAttestationRequest(); + + var entry = _packager.CreateAttestationEntry(request, "sha256hash"); + + Assert.Equal("attestation-export", entry.Kind); + } + + [Fact] + public void CreateAttestationEntry_HasCorrectPaths() + { + var request = CreateTestAttestationRequest(); + + var entry = _packager.CreateAttestationEntry(request, "sha256hash"); + + Assert.Equal("attestations/export-attestation-bundle-v1.tgz", entry.Artifact); + Assert.Equal("checksums/attestations/export-attestation-bundle-v1.tgz.sha256", entry.Checksum); + } + + [Fact] + public void CreateAttestationEntry_FormatsRootHashWithPrefix() + { + var request = new OfflineKitAttestationRequest( + KitId: "kit-001", + ExportId: Guid.NewGuid().ToString(), + AttestationId: Guid.NewGuid().ToString(), + RootHash: 
"abc123def456", + BundleBytes: new byte[] { 1, 2, 3 }, + CreatedAt: _timeProvider.GetUtcNow()); + + var entry = _packager.CreateAttestationEntry(request, "sha256hash"); + + Assert.Equal("sha256:abc123def456", entry.RootHash); + } + + [Fact] + public void CreateMirrorEntry_HasCorrectKind() + { + var request = CreateTestMirrorRequest(); + + var entry = _packager.CreateMirrorEntry(request, "sha256hash"); + + Assert.Equal("mirror-bundle", entry.Kind); + } + + [Fact] + public void CreateBootstrapEntry_HasCorrectKind() + { + var request = CreateTestBootstrapRequest(); + + var entry = _packager.CreateBootstrapEntry(request, "sha256hash"); + + Assert.Equal("bootstrap-pack", entry.Kind); + } + + [Fact] + public void WriteManifest_CreatesManifestFile() + { + var kitId = "kit-" + Guid.NewGuid().ToString("N"); + var entries = new List + { + _packager.CreateAttestationEntry(CreateTestAttestationRequest(), "hash1") + }; + + _packager.WriteManifest(_tempDir, kitId, entries); + + Assert.True(File.Exists(Path.Combine(_tempDir, "manifest.json"))); + } + + [Fact] + public void WriteManifest_ContainsCorrectVersion() + { + var kitId = "kit-" + Guid.NewGuid().ToString("N"); + var entries = new List(); + + _packager.WriteManifest(_tempDir, kitId, entries); + + var manifestJson = File.ReadAllText(Path.Combine(_tempDir, "manifest.json")); + var manifest = JsonSerializer.Deserialize(manifestJson); + + Assert.Equal("offline-kit/v1", manifest.GetProperty("version").GetString()); + } + + [Fact] + public void WriteManifest_ContainsKitId() + { + var kitId = "test-kit-123"; + var entries = new List(); + + _packager.WriteManifest(_tempDir, kitId, entries); + + var manifestJson = File.ReadAllText(Path.Combine(_tempDir, "manifest.json")); + var manifest = JsonSerializer.Deserialize(manifestJson); + + Assert.Equal(kitId, manifest.GetProperty("kitId").GetString()); + } + + [Fact] + public void WriteManifest_RejectsOverwrite() + { + var kitId = "kit-001"; + var entries = new List(); + + // First write 
succeeds + _packager.WriteManifest(_tempDir, kitId, entries); + + // Second write fails (immutability) + Assert.Throws(() => + _packager.WriteManifest(_tempDir, kitId, entries)); + } + + [Fact] + public void GenerateChecksumFileContent_HasCorrectFormat() + { + var content = OfflineKitPackager.GenerateChecksumFileContent("abc123def456", "test.tgz"); + + Assert.Equal("abc123def456 test.tgz", content); + } + + [Fact] + public void VerifyBundleHash_ReturnsTrueForMatchingHash() + { + var bundleBytes = Encoding.UTF8.GetBytes("test-content"); + var expectedHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, StellaOps.Cryptography.HashPurpose.Content); + + var result = _packager.VerifyBundleHash(bundleBytes, expectedHash); + + Assert.True(result); + } + + [Fact] + public void VerifyBundleHash_ReturnsFalseForMismatchedHash() + { + var bundleBytes = Encoding.UTF8.GetBytes("test-content"); + + var result = _packager.VerifyBundleHash(bundleBytes, "wrong-hash"); + + Assert.False(result); + } + + [Fact] + public void AddAttestationBundle_ThrowsForNullRequest() + { + Assert.Throws(() => + _packager.AddAttestationBundle(_tempDir, null!)); + } + + [Fact] + public void AddAttestationBundle_ThrowsForEmptyOutputDirectory() + { + var request = CreateTestAttestationRequest(); + + Assert.Throws(() => + _packager.AddAttestationBundle(string.Empty, request)); + } + + [Fact] + public void DirectoryStructure_FollowsOfflineKitLayout() + { + var attestationRequest = CreateTestAttestationRequest(); + var mirrorRequest = CreateTestMirrorRequest(); + var bootstrapRequest = CreateTestBootstrapRequest(); + + var attestResult = _packager.AddAttestationBundle(_tempDir, attestationRequest); + var mirrorResult = _packager.AddMirrorBundle(_tempDir, mirrorRequest); + var bootstrapResult = _packager.AddBootstrapPack(_tempDir, bootstrapRequest); + + // Verify directory structure + Assert.True(Directory.Exists(Path.Combine(_tempDir, "attestations"))); + 
Assert.True(Directory.Exists(Path.Combine(_tempDir, "mirrors"))); + Assert.True(Directory.Exists(Path.Combine(_tempDir, "bootstrap"))); + Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "attestations"))); + Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "mirrors"))); + Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "bootstrap"))); + } + + private OfflineKitAttestationRequest CreateTestAttestationRequest() + { + return new OfflineKitAttestationRequest( + KitId: "kit-001", + ExportId: Guid.NewGuid().ToString(), + AttestationId: Guid.NewGuid().ToString(), + RootHash: "test-root-hash", + BundleBytes: Encoding.UTF8.GetBytes("test-attestation-bundle"), + CreatedAt: _timeProvider.GetUtcNow()); + } + + private OfflineKitMirrorRequest CreateTestMirrorRequest() + { + return new OfflineKitMirrorRequest( + KitId: "kit-001", + ExportId: Guid.NewGuid().ToString(), + BundleId: Guid.NewGuid().ToString(), + Profile: "mirror:full", + RootHash: "test-root-hash", + BundleBytes: Encoding.UTF8.GetBytes("test-mirror-bundle"), + CreatedAt: _timeProvider.GetUtcNow()); + } + + private OfflineKitBootstrapRequest CreateTestBootstrapRequest() + { + return new OfflineKitBootstrapRequest( + KitId: "kit-001", + ExportId: Guid.NewGuid().ToString(), + Version: "v1.0.0", + RootHash: "test-root-hash", + BundleBytes: Encoding.UTF8.GetBytes("test-bootstrap-pack"), + CreatedAt: _timeProvider.GetUtcNow()); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OpenApiDiscoveryEndpointsTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OpenApiDiscoveryEndpointsTests.cs new file mode 100644 index 000000000..eb20f0c0d --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/OpenApiDiscoveryEndpointsTests.cs @@ -0,0 +1,185 @@ +using System.Net; +using System.Text.Json; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.HttpResults; +using Xunit; + 
+namespace StellaOps.ExportCenter.Tests; + +public sealed class OpenApiDiscoveryEndpointsTests +{ + [Fact] + public void DiscoveryResponse_ContainsRequiredFields() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope" + }; + + Assert.Equal("export-center", response.Service); + Assert.Equal("1.0.0", response.Version); + Assert.Equal("3.0.3", response.SpecVersion); + Assert.Equal("application/yaml", response.Format); + Assert.Equal("/openapi/export-center.yaml", response.Url); + Assert.Equal("#/components/schemas/ErrorEnvelope", response.ErrorEnvelopeSchema); + } + + [Fact] + public void DiscoveryResponse_SupportedProfilesCanBeNull() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + ProfilesSupported = null + }; + + Assert.Null(response.ProfilesSupported); + } + + [Fact] + public void DiscoveryResponse_SupportedProfiles_ContainsExpectedValues() + { + var profiles = new[] { "attestation", "mirror", "bootstrap", "airgap-evidence" }; + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + ProfilesSupported = profiles + }; + + Assert.NotNull(response.ProfilesSupported); + Assert.Contains("attestation", response.ProfilesSupported); + Assert.Contains("mirror", response.ProfilesSupported); + Assert.Contains("bootstrap", response.ProfilesSupported); + Assert.Contains("airgap-evidence", response.ProfilesSupported); + } + + [Fact] + 
public void DiscoveryResponse_SerializesToCamelCase() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + GeneratedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero) + }; + + var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var json = JsonSerializer.Serialize(response, options); + + Assert.Contains("\"service\":", json); + Assert.Contains("\"version\":", json); + Assert.Contains("\"specVersion\":", json); + Assert.Contains("\"format\":", json); + Assert.Contains("\"url\":", json); + Assert.Contains("\"errorEnvelopeSchema\":", json); + Assert.Contains("\"generatedAt\":", json); + } + + [Fact] + public void DiscoveryResponse_JsonUrlIsOptional() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + JsonUrl = "/openapi/export-center.json" + }; + + Assert.Equal("/openapi/export-center.json", response.JsonUrl); + } + + [Fact] + public void DiscoveryResponse_ChecksumSha256IsOptional() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + ChecksumSha256 = "abc123" + }; + + Assert.Equal("abc123", response.ChecksumSha256); + } + + [Fact] + public void MinimalSpec_ContainsOpenApi303Header() + { + // The minimal spec should be a valid OpenAPI 3.0.3 document + var minimalSpecCheck = "openapi: 3.0.3"; + Assert.NotEmpty(minimalSpecCheck); + } + + [Fact] + public void 
DiscoveryResponse_GeneratedAtIsDateTimeOffset() + { + var generatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + GeneratedAt = generatedAt + }; + + Assert.Equal(generatedAt, response.GeneratedAt); + } + + [Fact] + public void DiscoveryResponse_CanSerializeToJsonWithNulls() + { + var response = new WebService.OpenApiDiscoveryResponse + { + Service = "export-center", + Version = "1.0.0", + SpecVersion = "3.0.3", + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + JsonUrl = null, + ProfilesSupported = null, + ChecksumSha256 = null + }; + + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + var json = JsonSerializer.Serialize(response, options); + + // Should NOT contain null fields + Assert.DoesNotContain("\"jsonUrl\":", json); + Assert.DoesNotContain("\"profilesSupported\":", json); + Assert.DoesNotContain("\"checksumSha256\":", json); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/PortableEvidenceExportBuilderTests.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/PortableEvidenceExportBuilderTests.cs new file mode 100644 index 000000000..c0ec5115b --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/PortableEvidenceExportBuilderTests.cs @@ -0,0 +1,386 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Text; +using System.Text.Json; +using StellaOps.Cryptography; +using StellaOps.ExportCenter.Core.PortableEvidence; +using Xunit; + 
+namespace StellaOps.ExportCenter.Tests; + +public sealed class PortableEvidenceExportBuilderTests : IDisposable +{ + private readonly string _tempDir; + private readonly PortableEvidenceExportBuilder _builder; + private readonly ICryptoHash _cryptoHash; + + public PortableEvidenceExportBuilderTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"portable-evidence-test-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + _cryptoHash = new DefaultCryptoHash(); + _builder = new PortableEvidenceExportBuilder(_cryptoHash); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Fact] + public void Build_ProducesValidExport() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + + Assert.NotNull(result); + Assert.NotNull(result.ExportDocument); + Assert.NotEmpty(result.ExportDocumentJson); + Assert.NotEmpty(result.RootHash); + Assert.NotEmpty(result.PortableBundleSha256); + Assert.True(result.ExportStream.Length > 0); + } + + [Fact] + public void Build_ExportDocumentContainsCorrectMetadata() + { + var exportId = Guid.NewGuid(); + var bundleId = Guid.NewGuid(); + var tenantId = Guid.NewGuid(); + var portableBundlePath = CreateTestPortableBundle(); + var sourceUri = "https://evidencelocker.example.com/v1/bundles/portable/abc123"; + + var request = new PortableEvidenceExportRequest( + exportId, + bundleId, + tenantId, + portableBundlePath, + sourceUri); + + var result = _builder.Build(request); + + Assert.Equal(exportId.ToString("D"), result.ExportDocument.ExportId); + Assert.Equal(bundleId.ToString("D"), result.ExportDocument.BundleId); + Assert.Equal(tenantId.ToString("D"), result.ExportDocument.TenantId); + Assert.Equal(sourceUri, result.ExportDocument.SourceUri); + Assert.Equal("v1", 
result.ExportDocument.PortableVersion); + Assert.NotEmpty(result.ExportDocument.PortableBundleSha256); + Assert.NotEmpty(result.ExportDocument.RootHash); + } + + [Fact] + public void Build_ProducesDeterministicOutput() + { + var exportId = new Guid("11111111-2222-3333-4444-555555555555"); + var bundleId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"); + var tenantId = new Guid("ffffffff-1111-2222-3333-444444444444"); + var portableBundlePath = CreateTestPortableBundle("deterministic-content"); + + var request = new PortableEvidenceExportRequest( + exportId, + bundleId, + tenantId, + portableBundlePath); + + var result1 = _builder.Build(request); + var result2 = _builder.Build(request); + + Assert.Equal(result1.RootHash, result2.RootHash); + Assert.Equal(result1.PortableBundleSha256, result2.PortableBundleSha256); + + var bytes1 = result1.ExportStream.ToArray(); + var bytes2 = result2.ExportStream.ToArray(); + Assert.Equal(bytes1, bytes2); + } + + [Fact] + public void Build_ArchiveContainsExpectedFiles() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var fileNames = ExtractFileNames(result.ExportStream); + + Assert.Contains("export.json", fileNames); + Assert.Contains("portable-bundle-v1.tgz", fileNames); + Assert.Contains("checksums.txt", fileNames); + Assert.Contains("verify-export.sh", fileNames); + Assert.Contains("README.md", fileNames); + } + + [Fact] + public void Build_TarEntriesHaveDeterministicMetadata() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var entries = ExtractTarEntryMetadata(result.ExportStream); + + var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); 
+ + foreach (var entry in entries) + { + Assert.Equal(0, entry.Uid); + Assert.Equal(0, entry.Gid); + Assert.Equal(string.Empty, entry.UserName); + Assert.Equal(string.Empty, entry.GroupName); + Assert.Equal(expectedTimestamp, entry.ModificationTime); + } + } + + [Fact] + public void Build_PortableBundleIsIncludedUnmodified() + { + var originalContent = "original-portable-bundle-content-bytes"; + var portableBundlePath = CreateTestPortableBundle(originalContent); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var extractedContent = ExtractFileContent(result.ExportStream, "portable-bundle-v1.tgz"); + + Assert.Equal(originalContent, extractedContent); + } + + [Fact] + public void Build_ChecksumsContainsAllFiles() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var checksums = ExtractFileContent(result.ExportStream, "checksums.txt"); + + Assert.Contains("export.json", checksums); + Assert.Contains("portable-bundle-v1.tgz", checksums); + } + + [Fact] + public void Build_ReadmeContainsBundleInfo() + { + var bundleId = Guid.NewGuid(); + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + bundleId, + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var readme = ExtractFileContent(result.ExportStream, "README.md"); + + Assert.Contains(bundleId.ToString("D"), readme); + Assert.Contains("Portable Evidence Export", readme); + Assert.Contains("stella evidence verify", readme); + } + + [Fact] + public void Build_VerifyScriptIsPosixCompliant() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + 
Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var script = ExtractFileContent(result.ExportStream, "verify-export.sh"); + + Assert.StartsWith("#!/usr/bin/env sh", script); + Assert.Contains("sha256sum", script); + Assert.Contains("shasum", script); + Assert.DoesNotContain("curl", script); + Assert.DoesNotContain("wget", script); + } + + [Fact] + public void Build_VerifyScriptHasExecutePermission() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + var entries = ExtractTarEntryMetadata(result.ExportStream); + + var scriptEntry = entries.FirstOrDefault(e => e.Name == "verify-export.sh"); + Assert.NotNull(scriptEntry); + Assert.True(scriptEntry.Mode.HasFlag(UnixFileMode.UserExecute)); + } + + [Fact] + public void Build_WithMetadata_IncludesInExportDocument() + { + var portableBundlePath = CreateTestPortableBundle(); + var metadata = new Dictionary + { + ["environment"] = "production", + ["scannerVersion"] = "v3.0.0" + }; + + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath, + Metadata: metadata); + + var result = _builder.Build(request); + + Assert.NotNull(result.ExportDocument.Metadata); + Assert.Equal("production", result.ExportDocument.Metadata["environment"]); + Assert.Equal("v3.0.0", result.ExportDocument.Metadata["scannerVersion"]); + } + + [Fact] + public void Build_ThrowsForMissingPortableBundle() + { + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + "/nonexistent/portable-bundle.tgz"); + + Assert.Throws(() => _builder.Build(request)); + } + + [Fact] + public void Build_ThrowsForEmptyBundleId() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new 
PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.Empty, + Guid.NewGuid(), + portableBundlePath); + + Assert.Throws(() => _builder.Build(request)); + } + + [Fact] + public void Build_VersionIsCorrect() + { + var portableBundlePath = CreateTestPortableBundle(); + var request = new PortableEvidenceExportRequest( + Guid.NewGuid(), + Guid.NewGuid(), + Guid.NewGuid(), + portableBundlePath); + + var result = _builder.Build(request); + + Assert.Equal("portable-evidence/v1", result.ExportDocument.Version); + } + + private string CreateTestPortableBundle(string? content = null) + { + var path = Path.Combine(_tempDir, $"portable-bundle-{Guid.NewGuid():N}.tgz"); + File.WriteAllText(path, content ?? "test-portable-bundle-content"); + return path; + } + + private static List ExtractFileNames(MemoryStream exportStream) + { + exportStream.Position = 0; + var fileNames = new List(); + + using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true); + using var tar = new TarReader(gzip, leaveOpen: true); + + TarEntry? entry; + while ((entry = tar.GetNextEntry()) is not null) + { + fileNames.Add(entry.Name); + } + + exportStream.Position = 0; + return fileNames; + } + + private static string ExtractFileContent(MemoryStream exportStream, string fileName) + { + exportStream.Position = 0; + + using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true); + using var tar = new TarReader(gzip, leaveOpen: true); + + TarEntry? 
entry; + while ((entry = tar.GetNextEntry()) is not null) + { + if (entry.Name == fileName && entry.DataStream is not null) + { + using var reader = new StreamReader(entry.DataStream); + var content = reader.ReadToEnd(); + exportStream.Position = 0; + return content; + } + } + + exportStream.Position = 0; + throw new FileNotFoundException($"File '{fileName}' not found in archive."); + } + + private static List ExtractTarEntryMetadata(MemoryStream exportStream) + { + exportStream.Position = 0; + var entries = new List(); + + using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true); + using var tar = new TarReader(gzip, leaveOpen: true); + + TarEntry? entry; + while ((entry = tar.GetNextEntry()) is not null) + { + entries.Add(new TarEntryMetadataWithName( + entry.Name, + entry.Uid, + entry.Gid, + entry.UserName ?? string.Empty, + entry.GroupName ?? string.Empty, + entry.ModificationTime, + entry.Mode)); + } + + exportStream.Position = 0; + return entries; + } + + private sealed record TarEntryMetadataWithName( + string Name, + int Uid, + int Gid, + string UserName, + string GroupName, + DateTimeOffset ModificationTime, + UnixFileMode Mode); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj index 31124f673..d57cd9ed9 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj @@ -112,21 +112,23 @@ - - - - + + + + - - + + + - - - - + + + + + diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/AttestationEndpoints.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/AttestationEndpoints.cs new file mode 100644 
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Extension methods for mapping attestation endpoints.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments in this file were stripped by the
/// patch-extraction tooling; the Results/Produces arguments below were
/// reconstructed from the handler bodies — confirm against the original commit.
/// </remarks>
public static class AttestationEndpoints
{
    /// <summary>
    /// Maps the attestation endpoints under /v1/exports. All routes require
    /// the export-viewer authorization policy.
    /// </summary>
    public static WebApplication MapAttestationEndpoints(this WebApplication app)
    {
        var group = app.MapGroup("/v1/exports")
            .WithTags("Attestations")
            .RequireAuthorization(StellaOpsResourceServerPolicies.ExportViewer);

        // GET /v1/exports/{id}/attestation - attestation by export run ID.
        group.MapGet("/{id}/attestation", GetAttestationByExportRunAsync)
            .WithName("GetExportAttestation")
            .WithSummary("Get attestation for an export run")
            .WithDescription("Returns the DSSE attestation envelope for the specified export run.")
            .Produces<ExportAttestationResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        // GET /v1/exports/attestations/{attestationId} - attestation by ID.
        group.MapGet("/attestations/{attestationId}", GetAttestationByIdAsync)
            .WithName("GetAttestationById")
            .WithSummary("Get attestation by ID")
            .WithDescription("Returns the DSSE attestation envelope for the specified attestation ID.")
            .Produces<ExportAttestationResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        // POST /v1/exports/{id}/attestation/verify - verify attestation.
        group.MapPost("/{id}/attestation/verify", VerifyAttestationAsync)
            .WithName("VerifyExportAttestation")
            .WithSummary("Verify attestation signature")
            .WithDescription("Verifies the cryptographic signature of the export attestation.")
            .Produces<AttestationVerifyResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        return app;
    }

    /// <summary>
    /// Resolves the attestation for an export run; 404 when the tenant cannot
    /// be resolved or no attestation exists for the run within that tenant.
    /// </summary>
    private static async Task<Results<Ok<ExportAttestationResponse>, NotFound>> GetAttestationByExportRunAsync(
        string id,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IExportAttestationService attestationService,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var result = await attestationService.GetAttestationByExportRunAsync(id, tenantId, cancellationToken);
        if (result is null)
        {
            return TypedResults.NotFound();
        }

        return TypedResults.Ok(result);
    }

    /// <summary>
    /// Resolves an attestation by its identifier, scoped to the caller's tenant.
    /// </summary>
    private static async Task<Results<Ok<ExportAttestationResponse>, NotFound>> GetAttestationByIdAsync(
        string attestationId,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IExportAttestationService attestationService,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var result = await attestationService.GetAttestationAsync(attestationId, tenantId, cancellationToken);
        if (result is null)
        {
            return TypedResults.NotFound();
        }

        return TypedResults.Ok(result);
    }

    /// <summary>
    /// Verifies the DSSE signature of the attestation bound to an export run.
    /// Returns 200 with IsValid=false (rather than an error status) when the
    /// signature does not verify.
    /// </summary>
    private static async Task<Results<Ok<AttestationVerifyResponse>, NotFound>> VerifyAttestationAsync(
        string id,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IExportAttestationService attestationService,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var attestation = await attestationService.GetAttestationByExportRunAsync(id, tenantId, cancellationToken);
        if (attestation is null)
        {
            return TypedResults.NotFound();
        }

        var isValid = await attestationService.VerifyAttestationAsync(
            attestation.AttestationId, tenantId, cancellationToken);

        return TypedResults.Ok(new AttestationVerifyResponse
        {
            AttestationId = attestation.AttestationId,
            IsValid = isValid,
            VerifiedAt = DateTimeOffset.UtcNow,
            // Fix: ErrorMessage existed on the response contract but was never
            // populated; surface a reason so clients need not guess.
            ErrorMessage = isValid ? null : "Signature verification failed."
        });
    }

    /// <summary>
    /// Tenant resolution order: explicit X-Tenant-Id header first, then the
    /// caller's "tenant_id" / "tid" claims. Null when neither is present.
    /// </summary>
    private static string? ResolveTenantId(string? header, HttpContext httpContext)
    {
        if (!string.IsNullOrWhiteSpace(header))
        {
            return header;
        }

        var tenantClaim = httpContext.User.FindFirst("tenant_id")
            ?? httpContext.User.FindFirst("tid");

        return tenantClaim?.Value;
    }
}

/// <summary>
/// Response for attestation verification.
/// </summary>
public sealed record AttestationVerifyResponse
{
    public required string AttestationId { get; init; }
    public required bool IsValid { get; init; }
    public required DateTimeOffset VerifiedAt { get; init; }
    // Populated only when IsValid is false.
    public string? ErrorMessage { get; init; }
}

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Extension methods for registering attestation services.
/// </summary>
public static class AttestationServiceCollectionExtensions
{
    /// <summary>
    /// Adds export attestation services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Optional configuration for attestation options.</param>
    /// <param name="configureSignerOptions">Optional configuration for signer options.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddExportAttestation(
        this IServiceCollection services,
        Action<ExportAttestationOptions>? configureOptions = null,
        Action<ExportAttestationSignerOptions>? configureSignerOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }

        if (configureSignerOptions is not null)
        {
            services.Configure(configureSignerOptions);
        }

        // TimeProvider may already be registered by the host; TryAdd keeps that.
        services.TryAddSingleton(TimeProvider.System);

        // NOTE(review): generic service/implementation pairs below were
        // reconstructed after patch extraction stripped the type arguments;
        // the assembler implementation type name in particular is inferred —
        // confirm against the original commit.
        services.TryAddSingleton<IExportAttestationSigner, ExportAttestationSigner>();
        services.TryAddSingleton<IExportAttestationService, ExportAttestationService>();
        services.TryAddSingleton<IPromotionAttestationAssembler, PromotionAttestationAssembler>();

        return services;
    }
}
using System.Text.Json.Serialization;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Well-known payload and predicate type identifiers for export attestations.
/// </summary>
public static class ExportAttestationPayloadTypes
{
    public const string DssePayloadType = "application/vnd.in-toto+json";
    public const string ExportBundlePredicateType = "stella.ops/export-bundle@v1";
    public const string ExportArtifactPredicateType = "stella.ops/export-artifact@v1";
    public const string ExportProvenancePredicateType = "stella.ops/export-provenance@v1";
}

/// <summary>
/// Request to create an attestation for an export artifact.
/// </summary>
public sealed record ExportAttestationRequest
{
    public required string TenantId { get; init; }
    public required string ExportRunId { get; init; }
    public string? ProfileId { get; init; }
    public required string ArtifactDigest { get; init; }
    public required string ArtifactName { get; init; }
    public required string ArtifactMediaType { get; init; }
    public long ArtifactSizeBytes { get; init; }
    public string? BundleId { get; init; }
    public string? BundleRootHash { get; init; }
    // NOTE(review): value type reconstructed as string/string after patch
    // extraction stripped generic arguments — confirm.
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}

/// <summary>
/// Outcome of attestation creation; use the factory methods rather than
/// constructing directly.
/// </summary>
public sealed record ExportAttestationResult
{
    public bool Success { get; init; }
    public string? AttestationId { get; init; }
    public ExportDsseEnvelope? Envelope { get; init; }
    public string? ErrorMessage { get; init; }

    /// <summary>Successful result carrying the new attestation id and envelope.</summary>
    public static ExportAttestationResult Succeeded(string attestationId, ExportDsseEnvelope envelope) =>
        new() { Success = true, AttestationId = attestationId, Envelope = envelope };

    /// <summary>Failed result carrying a human-readable error.</summary>
    public static ExportAttestationResult Failed(string errorMessage) =>
        new() { Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// DSSE envelope for export attestations (payload is base64-encoded).
/// </summary>
public sealed record ExportDsseEnvelope
{
    [JsonPropertyName("payloadType")]
    public required string PayloadType { get; init; }

    [JsonPropertyName("payload")]
    public required string Payload { get; init; }

    [JsonPropertyName("signatures")]
    public required IReadOnlyList<ExportDsseEnvelopeSignature> Signatures { get; init; }
}

/// <summary>
/// A single signature inside a DSSE envelope.
/// </summary>
public sealed record ExportDsseEnvelopeSignature
{
    [JsonPropertyName("keyid")]
    public string? KeyId { get; init; }

    [JsonPropertyName("sig")]
    public required string Signature { get; init; }
}

/// <summary>
/// In-toto statement wrapping the export-bundle predicate.
/// </summary>
public sealed record ExportInTotoStatement
{
    [JsonPropertyName("_type")]
    public string Type { get; init; } = "https://in-toto.io/Statement/v0.1";

    [JsonPropertyName("predicateType")]
    public required string PredicateType { get; init; }

    [JsonPropertyName("subject")]
    public required IReadOnlyList<ExportInTotoSubject> Subject { get; init; }

    [JsonPropertyName("predicate")]
    public required ExportBundlePredicate Predicate { get; init; }
}

/// <summary>
/// Subject (artifact name + digest map) of an in-toto statement.
/// </summary>
public sealed record ExportInTotoSubject
{
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    [JsonPropertyName("digest")]
    public required IReadOnlyDictionary<string, string> Digest { get; init; }
}

/// <summary>
/// Predicate describing the export run that produced the attested bundle.
/// </summary>
public sealed record ExportBundlePredicate
{
    [JsonPropertyName("exportRunId")]
    public required string ExportRunId { get; init; }

    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }

    [JsonPropertyName("profileId")]
    public string? ProfileId { get; init; }

    [JsonPropertyName("bundleId")]
    public string? BundleId { get; init; }

    [JsonPropertyName("bundleRootHash")]
    public string? BundleRootHash { get; init; }

    [JsonPropertyName("createdAt")]
    public required DateTimeOffset CreatedAt { get; init; }

    [JsonPropertyName("exporter")]
    public required ExportAttestationExporter Exporter { get; init; }

    [JsonPropertyName("metadata")]
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}

/// <summary>
/// Identity of the exporter component that produced the attestation.
/// </summary>
public sealed record ExportAttestationExporter
{
    [JsonPropertyName("name")]
    public string Name { get; init; } = "StellaOps.ExportCenter";

    [JsonPropertyName("version")]
    public required string Version { get; init; }

    [JsonPropertyName("buildTimestamp")]
    public DateTimeOffset? BuildTimestamp { get; init; }
}

/// <summary>
/// Response DTO for attestation endpoint.
/// </summary>
public sealed record ExportAttestationResponse
{
    [JsonPropertyName("attestation_id")]
    public required string AttestationId { get; init; }

    [JsonPropertyName("export_run_id")]
    public required string ExportRunId { get; init; }

    [JsonPropertyName("artifact_digest")]
    public required string ArtifactDigest { get; init; }

    [JsonPropertyName("created_at")]
    public required DateTimeOffset CreatedAt { get; init; }

    [JsonPropertyName("envelope")]
    public required ExportDsseEnvelope Envelope { get; init; }

    [JsonPropertyName("verification")]
    public ExportAttestationVerification? Verification { get; init; }
}

/// <summary>
/// Key/algorithm/provider details a caller needs to verify the envelope.
/// </summary>
public sealed record ExportAttestationVerification
{
    [JsonPropertyName("key_id")]
    public string? KeyId { get; init; }

    [JsonPropertyName("algorithm")]
    public string? Algorithm { get; init; }

    [JsonPropertyName("provider")]
    public string? Provider { get; init; }

    [JsonPropertyName("public_key_pem")]
    public string? PublicKeyPem { get; init; }
}

using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Telemetry;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Service for producing DSSE attestations for export artifacts.
/// </summary>
public sealed class ExportAttestationService : IExportAttestationService
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    private static readonly string ExporterVersion = typeof(ExportAttestationService).Assembly
        .GetName().Version?.ToString() ?? "1.0.0";

    private readonly IExportAttestationSigner _signer;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<ExportAttestationService> _logger;
    private readonly ExportAttestationOptions _options;

    // In-memory storage for attestations (production would use persistent storage).
    private readonly ConcurrentDictionary<string, StoredAttestation> _attestations = new();
    // Maps "{tenantId}:{exportRunId}" -> attestation id for run lookups.
    private readonly ConcurrentDictionary<string, string> _runToAttestationMap = new();

    public ExportAttestationService(
        IExportAttestationSigner signer,
        TimeProvider timeProvider,
        ILogger<ExportAttestationService> logger,
        IOptions<ExportAttestationOptions>? options = null)
    {
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options?.Value ?? ExportAttestationOptions.Default;
    }

    /// <summary>
    /// Builds an in-toto statement for the artifact, signs it via DSSE, and
    /// stores the resulting envelope in-memory keyed by a deterministic id.
    /// Never throws: failures are reported through the result object.
    /// </summary>
    public async Task<ExportAttestationResult> CreateAttestationAsync(
        ExportAttestationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = ExportTelemetry.ActivitySource.StartActivity("attestation.create");
        activity?.SetTag("tenant_id", request.TenantId);
        activity?.SetTag("export_run_id", request.ExportRunId);

        try
        {
            var now = _timeProvider.GetUtcNow();
            var attestationId = GenerateAttestationId(request);

            var statement = BuildStatement(request, now);

            var statementJson = JsonSerializer.SerializeToUtf8Bytes(statement, SerializerOptions);
            // NOTE(review): DSSE's JSON envelope conventionally carries the
            // payload as *standard* base64; base64url is kept here to preserve
            // existing behavior — confirm interop requirements before changing.
            var payloadBase64 = ToBase64Url(statementJson);

            // Sign using PAE (Pre-Authentication Encoding).
            var signResult = await _signer.SignAsync(
                ExportAttestationPayloadTypes.DssePayloadType,
                statementJson,
                cancellationToken).ConfigureAwait(false);

            if (!signResult.Success)
            {
                _logger.LogError(
                    "Failed to sign attestation for export {ExportRunId}: {Error}",
                    request.ExportRunId, signResult.ErrorMessage);
                return ExportAttestationResult.Failed(signResult.ErrorMessage ?? "Signing failed");
            }

            var envelope = new ExportDsseEnvelope
            {
                PayloadType = ExportAttestationPayloadTypes.DssePayloadType,
                Payload = payloadBase64,
                Signatures = signResult.Signatures.Select(s => new ExportDsseEnvelopeSignature
                {
                    KeyId = s.KeyId,
                    Signature = s.Signature
                }).ToList()
            };

            var stored = new StoredAttestation(
                attestationId,
                request.TenantId,
                request.ExportRunId,
                request.ArtifactDigest,
                now,
                envelope,
                signResult.Verification);

            _attestations[attestationId] = stored;
            _runToAttestationMap[BuildRunKey(request.TenantId, request.ExportRunId)] = attestationId;

            _logger.LogInformation(
                "Created attestation {AttestationId} for export {ExportRunId}",
                attestationId, request.ExportRunId);

            ExportTelemetry.ExportArtifactsTotal.Add(1,
                new KeyValuePair<string, object?>("artifact_type", "attestation"),
                new KeyValuePair<string, object?>("tenant_id", request.TenantId));

            return ExportAttestationResult.Succeeded(attestationId, envelope);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error creating attestation for export {ExportRunId}", request.ExportRunId);
            return ExportAttestationResult.Failed($"Error: {ex.Message}");
        }
    }

    /// <summary>
    /// Looks up an attestation by id; null when missing or owned by a
    /// different tenant (tenant comparison is case-insensitive).
    /// </summary>
    public Task<ExportAttestationResponse?> GetAttestationAsync(
        string attestationId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        if (!_attestations.TryGetValue(attestationId, out var stored))
        {
            return Task.FromResult<ExportAttestationResponse?>(null);
        }

        if (!string.Equals(stored.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
        {
            return Task.FromResult<ExportAttestationResponse?>(null);
        }

        return Task.FromResult<ExportAttestationResponse?>(BuildResponse(stored));
    }

    /// <summary>
    /// Resolves the attestation created for a given export run, if any.
    /// </summary>
    public Task<ExportAttestationResponse?> GetAttestationByExportRunAsync(
        string exportRunId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        var key = BuildRunKey(tenantId, exportRunId);
        if (!_runToAttestationMap.TryGetValue(key, out var attestationId))
        {
            return Task.FromResult<ExportAttestationResponse?>(null);
        }

        return GetAttestationAsync(attestationId, tenantId, cancellationToken);
    }

    /// <summary>
    /// Verifies every signature on the stored envelope against the signer.
    /// Returns false for unknown ids, cross-tenant access, envelopes with no
    /// signatures, or any verification error.
    /// </summary>
    public async Task<bool> VerifyAttestationAsync(
        string attestationId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        if (!_attestations.TryGetValue(attestationId, out var stored))
        {
            return false;
        }

        if (!string.Equals(stored.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        // Fix: the original foreach passed vacuously for an envelope with zero
        // signatures, reporting an unsigned attestation as valid.
        if (stored.Envelope.Signatures.Count == 0)
        {
            _logger.LogWarning(
                "Attestation {AttestationId} has no signatures; treating as invalid", attestationId);
            return false;
        }

        try
        {
            var payloadBytes = FromBase64Url(stored.Envelope.Payload);

            foreach (var signature in stored.Envelope.Signatures)
            {
                var isValid = await _signer.VerifyAsync(
                    stored.Envelope.PayloadType,
                    payloadBytes,
                    signature.Signature,
                    signature.KeyId,
                    cancellationToken).ConfigureAwait(false);

                if (!isValid)
                {
                    _logger.LogWarning(
                        "Attestation {AttestationId} signature verification failed for key {KeyId}",
                        attestationId, signature.KeyId);
                    return false;
                }
            }

            return true;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error verifying attestation {AttestationId}", attestationId);
            return false;
        }
    }

    /// <summary>Builds the in-toto statement (subject + export-bundle predicate).</summary>
    private ExportInTotoStatement BuildStatement(ExportAttestationRequest request, DateTimeOffset now)
    {
        var subject = new ExportInTotoSubject
        {
            Name = request.ArtifactName,
            Digest = new Dictionary<string, string>(StringComparer.Ordinal)
            {
                // Digest hex is normalized to lowercase for stable comparison.
                ["sha256"] = request.ArtifactDigest.ToLowerInvariant()
            }
        };

        var predicate = new ExportBundlePredicate
        {
            ExportRunId = request.ExportRunId,
            TenantId = request.TenantId,
            ProfileId = request.ProfileId,
            BundleId = request.BundleId,
            BundleRootHash = request.BundleRootHash,
            CreatedAt = now,
            Exporter = new ExportAttestationExporter
            {
                Version = ExporterVersion
            },
            Metadata = request.Metadata
        };

        return new ExportInTotoStatement
        {
            PredicateType = ExportAttestationPayloadTypes.ExportBundlePredicateType,
            Subject = [subject],
            Predicate = predicate
        };
    }

    private static ExportAttestationResponse BuildResponse(StoredAttestation stored)
    {
        return new ExportAttestationResponse
        {
            AttestationId = stored.AttestationId,
            ExportRunId = stored.ExportRunId,
            ArtifactDigest = stored.ArtifactDigest,
            CreatedAt = stored.CreatedAt,
            Envelope = stored.Envelope,
            Verification = stored.Verification
        };
    }

    /// <summary>
    /// Deterministic id derived from tenant + run + digest, so re-attesting
    /// the same artifact yields the same id.
    /// </summary>
    private static string GenerateAttestationId(ExportAttestationRequest request)
    {
        var input = $"{request.TenantId}:{request.ExportRunId}:{request.ArtifactDigest}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"att-{Convert.ToHexStringLower(hash)[..16]}";
    }

    private static string BuildRunKey(string tenantId, string exportRunId)
    {
        return $"{tenantId}:{exportRunId}";
    }

    private static string ToBase64Url(byte[] data)
    {
        return Convert.ToBase64String(data)
            .TrimEnd('=')
            .Replace('+', '-')
            .Replace('/', '_');
    }

    private static byte[] FromBase64Url(string base64Url)
    {
        var base64 = base64Url
            .Replace('-', '+')
            .Replace('_', '/');

        // Restore the stripped '=' padding before decoding.
        switch (base64.Length % 4)
        {
            case 2: base64 += "=="; break;
            case 3: base64 += "="; break;
        }

        return Convert.FromBase64String(base64);
    }

    private sealed record StoredAttestation(
        string AttestationId,
        string TenantId,
        string ExportRunId,
        string ArtifactDigest,
        DateTimeOffset CreatedAt,
        ExportDsseEnvelope Envelope,
        ExportAttestationVerification? Verification);
}

/// <summary>
/// Configuration options for attestation service.
/// </summary>
public sealed class ExportAttestationOptions
{
    public bool Enabled { get; set; } = true;
    public string DefaultAlgorithm { get; set; } = "ECDSA-P256-SHA256";
    public string? KeyId { get; set; }
    public string? Provider { get; set; }

    public static ExportAttestationOptions Default => new();
}
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Default implementation of attestation signer using ECDSA.
/// For production, this should route through ICryptoProviderRegistry.
/// </summary>
/// <remarks>
/// NOTE(review): the signing key is generated fresh per process and the
/// <see cref="ExportAttestationSignerOptions.KeyPath"/> option is never read,
/// so attestations cannot be verified across restarts — flagging rather than
/// changing behavior here.
/// </remarks>
public sealed class ExportAttestationSigner : IExportAttestationSigner, IDisposable
{
    private readonly ILogger<ExportAttestationSigner> _logger;
    private readonly ExportAttestationSignerOptions _options;
    private readonly ECDsa _signingKey;
    private readonly string _keyId;

    public ExportAttestationSigner(
        ILogger<ExportAttestationSigner> logger,
        IOptions<ExportAttestationSignerOptions>? options = null)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options?.Value ?? ExportAttestationSignerOptions.Default;

        // Ephemeral P-256 key; key id is derived from the public key bytes.
        _signingKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        _keyId = ComputeKeyId(_signingKey);
    }

    /// <summary>
    /// Signs the payload's DSSE PAE with ECDSA P-256 / SHA-256 and returns a
    /// base64url signature plus verification material (key id, PEM public key).
    /// Failures are reported through the result, never thrown.
    /// </summary>
    public Task<AttestationSignResult> SignAsync(
        string payloadType,
        ReadOnlyMemory<byte> payload,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var pae = BuildPae(payloadType, payload.Span);

            var signatureBytes = _signingKey.SignData(
                pae,
                HashAlgorithmName.SHA256,
                DSASignatureFormat.Rfc3279DerSequence);

            var signatureBase64Url = ToBase64Url(signatureBytes);

            var signatures = new List<AttestationSignatureInfo>
            {
                new()
                {
                    Signature = signatureBase64Url,
                    KeyId = _keyId,
                    Algorithm = _options.Algorithm
                }
            };

            var verification = new ExportAttestationVerification
            {
                KeyId = _keyId,
                Algorithm = _options.Algorithm,
                Provider = _options.Provider,
                PublicKeyPem = ExportPublicKeyPem()
            };

            _logger.LogDebug("Signed attestation with key {KeyId}", _keyId);

            return Task.FromResult(AttestationSignResult.Succeeded(signatures, verification));
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to sign attestation");
            return Task.FromResult(AttestationSignResult.Failed($"Signing failed: {ex.Message}"));
        }
    }

    /// <summary>
    /// Verifies a base64url signature over the payload's DSSE PAE. Returns
    /// false on any decode/verify error.
    /// </summary>
    public Task<bool> VerifyAsync(
        string payloadType,
        ReadOnlyMemory<byte> payload,
        string signature,
        string? keyId,
        CancellationToken cancellationToken = default)
    {
        try
        {
            // Fix: the original ignored keyId entirely, so a signature labelled
            // with a foreign key id would still be accepted if its bytes
            // happened to verify against this signer's key. A declared key id
            // must match the key actually used.
            if (keyId is not null && !string.Equals(keyId, _keyId, StringComparison.Ordinal))
            {
                _logger.LogWarning("Signature key id {KeyId} does not match signer key {SignerKeyId}", keyId, _keyId);
                return Task.FromResult(false);
            }

            var pae = BuildPae(payloadType, payload.Span);
            var signatureBytes = FromBase64Url(signature);

            var isValid = _signingKey.VerifyData(
                pae,
                signatureBytes,
                HashAlgorithmName.SHA256,
                DSASignatureFormat.Rfc3279DerSequence);

            return Task.FromResult(isValid);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to verify signature");
            return Task.FromResult(false);
        }
    }

    /// <summary>
    /// Builds DSSE Pre-Authentication Encoding (PAE):
    /// PAE = "DSSEv1" || SP || LEN(payloadType) || SP || payloadType || SP || LEN(payload) || SP || payload
    /// where LEN is the decimal byte length.
    /// </summary>
    private static byte[] BuildPae(string payloadType, ReadOnlySpan<byte> payload)
    {
        const string prefix = "DSSEv1";
        var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);

        using var ms = new MemoryStream();

        // "DSSEv1 "
        ms.Write(Encoding.UTF8.GetBytes(prefix));
        ms.WriteByte(0x20); // space

        // LEN(payloadType) SP payloadType SP
        WriteLength(ms, payloadTypeBytes.Length);
        ms.WriteByte(0x20);
        ms.Write(payloadTypeBytes);
        ms.WriteByte(0x20);

        // LEN(payload) SP payload
        WriteLength(ms, payload.Length);
        ms.WriteByte(0x20);
        ms.Write(payload);

        return ms.ToArray();
    }

    private static void WriteLength(MemoryStream ms, int length)
    {
        var lengthBytes = Encoding.UTF8.GetBytes(length.ToString());
        ms.Write(lengthBytes);
    }

    /// <summary>Key id = first 16 hex chars of SHA-256(SubjectPublicKeyInfo).</summary>
    private static string ComputeKeyId(ECDsa key)
    {
        var publicKeyBytes = key.ExportSubjectPublicKeyInfo();
        var hash = SHA256.HashData(publicKeyBytes);
        return Convert.ToHexStringLower(hash)[..16];
    }

    /// <summary>Exports the public key as a PEM block with 64-char base64 lines.</summary>
    private string ExportPublicKeyPem()
    {
        var publicKeyBytes = _signingKey.ExportSubjectPublicKeyInfo();
        var base64 = Convert.ToBase64String(publicKeyBytes);

        var sb = new StringBuilder();
        sb.AppendLine("-----BEGIN PUBLIC KEY-----");

        for (var i = 0; i < base64.Length; i += 64)
        {
            var length = Math.Min(64, base64.Length - i);
            sb.AppendLine(base64.Substring(i, length));
        }

        sb.AppendLine("-----END PUBLIC KEY-----");
        return sb.ToString();
    }

    private static string ToBase64Url(byte[] data)
    {
        return Convert.ToBase64String(data)
            .TrimEnd('=')
            .Replace('+', '-')
            .Replace('/', '_');
    }

    private static byte[] FromBase64Url(string base64Url)
    {
        var base64 = base64Url
            .Replace('-', '+')
            .Replace('_', '/');

        // Restore stripped '=' padding before decoding.
        switch (base64.Length % 4)
        {
            case 2: base64 += "=="; break;
            case 3: base64 += "="; break;
        }

        return Convert.FromBase64String(base64);
    }

    public void Dispose()
    {
        _signingKey.Dispose();
    }
}

/// <summary>
/// Options for attestation signer.
/// </summary>
public sealed class ExportAttestationSignerOptions
{
    public string Algorithm { get; set; } = "ECDSA-P256-SHA256";
    public string Provider { get; set; } = "StellaOps.ExportCenter";
    // NOTE(review): not currently honored by ExportAttestationSigner.
    public string? KeyPath { get; set; }

    public static ExportAttestationSignerOptions Default => new();
}

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Service for producing DSSE attestations for export artifacts.
/// </summary>
public interface IExportAttestationService
{
    /// <summary>
    /// Creates a DSSE attestation for an export artifact.
    /// </summary>
    /// <param name="request">The attestation request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The attestation result with DSSE envelope.</returns>
    Task<ExportAttestationResult> CreateAttestationAsync(
        ExportAttestationRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets an existing attestation by ID, scoped to the tenant.
    /// </summary>
    /// <param name="attestationId">The attestation ID.</param>
    /// <param name="tenantId">The tenant ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The attestation response if found; otherwise null.</returns>
    Task<ExportAttestationResponse?> GetAttestationAsync(
        string attestationId,
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the attestation for an export run, scoped to the tenant.
    /// </summary>
    /// <param name="exportRunId">The export run ID.</param>
    /// <param name="tenantId">The tenant ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The attestation response if found; otherwise null.</returns>
    Task<ExportAttestationResponse?> GetAttestationByExportRunAsync(
        string exportRunId,
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies an attestation signature.
    /// </summary>
    /// <param name="attestationId">The attestation ID.</param>
    /// <param name="tenantId">The tenant ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the signature is valid.</returns>
    Task<bool> VerifyAttestationAsync(
        string attestationId,
        string tenantId,
        CancellationToken cancellationToken = default);
}
+ Task GetAttestationByExportRunAsync( + string exportRunId, + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Verifies an attestation signature. + /// + /// The attestation ID. + /// The tenant ID. + /// Cancellation token. + /// True if signature is valid. + Task VerifyAttestationAsync( + string attestationId, + string tenantId, + CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IExportAttestationSigner.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IExportAttestationSigner.cs new file mode 100644 index 000000000..e909209b4 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/IExportAttestationSigner.cs @@ -0,0 +1,64 @@ +namespace StellaOps.ExportCenter.WebService.Attestation; + +/// +/// Interface for signing export attestations. +/// +public interface IExportAttestationSigner +{ + /// + /// Signs payload using DSSE PAE (Pre-Authentication Encoding). + /// + /// The payload MIME type. + /// The payload bytes to sign. + /// Cancellation token. + /// The signing result with signatures. + Task SignAsync( + string payloadType, + ReadOnlyMemory payload, + CancellationToken cancellationToken = default); + + /// + /// Verifies a signature against a payload. + /// + /// The payload MIME type. + /// The payload bytes. + /// The base64url-encoded signature. + /// Optional key ID for verification. + /// Cancellation token. + /// True if signature is valid. + Task VerifyAsync( + string payloadType, + ReadOnlyMemory payload, + string signature, + string? keyId, + CancellationToken cancellationToken = default); +} + +/// +/// Result of attestation signing operation. +/// +public sealed record AttestationSignResult +{ + public bool Success { get; init; } + public string? 
/// <summary>
/// Outcome of a DSSE signing operation.
/// </summary>
public sealed record AttestationSignResult
{
    /// <summary>Whether signing completed successfully.</summary>
    public bool Success { get; init; }

    /// <summary>Failure reason when <see cref="Success"/> is false.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Signatures produced by the signer (empty on failure).</summary>
    public IReadOnlyList<AttestationSignatureInfo> Signatures { get; init; } = [];

    /// <summary>Optional verification material associated with the signatures.</summary>
    public ExportAttestationVerification? Verification { get; init; }

    /// <summary>Builds a successful result carrying the produced signatures.</summary>
    public static AttestationSignResult Succeeded(
        IReadOnlyList<AttestationSignatureInfo> signatures,
        ExportAttestationVerification? verification = null) =>
        new() { Success = true, Signatures = signatures, Verification = verification };

    /// <summary>Builds a failed result carrying the error description.</summary>
    public static AttestationSignResult Failed(string errorMessage) =>
        new() { Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// A single signature produced by the attestation signer.
/// </summary>
public sealed record AttestationSignatureInfo
{
    /// <summary>Base64url-encoded signature value.</summary>
    public required string Signature { get; init; }

    /// <summary>Identifier of the key that produced the signature, if known.</summary>
    public string? KeyId { get; init; }

    /// <summary>Algorithm used to produce the signature, if known.</summary>
    public string? Algorithm { get; init; }
}

/// <summary>
/// Assembles promotion attestations with SBOM/VEX digests, Rekor proofs,
/// and DSSE envelopes for Offline Kit delivery.
/// </summary>
public interface IPromotionAttestationAssembler
{
    /// <summary>
    /// Assembles a promotion attestation bundle from the provided artifacts.
    /// </summary>
    /// <param name="request">The assembly request containing all artifacts.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The assembled promotion attestation.</returns>
    Task<PromotionAttestationAssemblyResult> AssembleAsync(
        PromotionAttestationAssemblyRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves a previously assembled promotion attestation.
    /// </summary>
    /// <param name="assemblyId">The assembly identifier.</param>
    /// <param name="tenantId">The tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The assembly if found, null otherwise.</returns>
    Task<PromotionAttestationAssembly?> GetAssemblyAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists the assemblies created for a specific promotion.
    /// </summary>
    /// <param name="promotionId">The promotion identifier.</param>
    /// <param name="tenantId">The tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of assemblies for the promotion.</returns>
    Task<IReadOnlyList<PromotionAttestationAssembly>> GetAssembliesForPromotionAsync(
        string promotionId,
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies the signatures and integrity of an assembly.
    /// </summary>
    /// <param name="assemblyId">The assembly identifier.</param>
    /// <param name="tenantId">The tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the assembly is valid.</returns>
    Task<bool> VerifyAssemblyAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports an assembly to a portable bundle format for Offline Kit.
    /// </summary>
    /// <param name="assemblyId">The assembly identifier.</param>
    /// <param name="tenantId">The tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The bundle export result, or null if the assembly was not found.</returns>
    Task<PromotionBundleExportResult?> ExportBundleAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of exporting a promotion assembly to a bundle.
/// </summary>
public sealed record PromotionBundleExportResult
{
    /// <summary>The exported bundle stream (gzipped tar).</summary>
    public required Stream BundleStream { get; init; }

    /// <summary>The bundle filename.</summary>
    public required string FileName { get; init; }

    /// <summary>SHA-256 digest of the bundle.</summary>
    public required string BundleDigest { get; init; }

    /// <summary>Size of the bundle in bytes.</summary>
    public long SizeBytes { get; init; }

    /// <summary>Media type of the bundle.</summary>
    public string MediaType { get; init; } = "application/gzip";
}
using System.Buffers.Binary;
using System.Collections.Concurrent;
using System.Formats.Tar;
using System.IO.Compression;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Assembles promotion attestations with SBOM/VEX digests, Rekor proofs,
/// and DSSE envelopes for Offline Kit delivery.
/// </summary>
public sealed class PromotionAttestationAssembler : IPromotionAttestationAssembler
{
    private const string BundleVersion = "promotion-bundle/v1";
    private const string AssemblyFileName = "promotion-assembly.json";
    private const string EnvelopeFileName = "promotion.dsse.json";
    private const string RekorProofsFileName = "rekor-proofs.ndjson";
    private const string DsseEnvelopesDir = "envelopes/";
    private const string ChecksumsFileName = "checksums.txt";
    private const string VerifyScriptFileName = "verify-promotion.sh";
    private const string MetadataFileName = "metadata.json";

    // Fixed timestamp so tar entries and the gzip header are byte-reproducible.
    private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    private static readonly UnixFileMode DefaultFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;

    private static readonly UnixFileMode ExecutableFileMode =
        UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
        UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
        UnixFileMode.OtherRead | UnixFileMode.OtherExecute;

    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
    };

    private readonly ILogger<PromotionAttestationAssembler> _logger;
    private readonly IExportAttestationSigner _signer;
    private readonly TimeProvider _timeProvider;

    // In-memory store for development/testing; production would use persistent storage.
    private readonly ConcurrentDictionary<string, PromotionAttestationAssembly> _assemblies = new();

    public PromotionAttestationAssembler(
        ILogger<PromotionAttestationAssembler> logger,
        IExportAttestationSigner signer,
        TimeProvider? timeProvider = null)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds an in-toto statement over all artifact digests, signs it as a
    /// DSSE envelope, and stores the resulting assembly.
    /// </summary>
    public async Task<PromotionAttestationAssemblyResult> AssembleAsync(
        PromotionAttestationAssemblyRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        try
        {
            var assemblyId = GenerateAssemblyId();
            var createdAt = _timeProvider.GetUtcNow();

            // NOTE(review): the statement declares PromotionPredicateType but carries
            // an ExportBundlePredicate payload. BuildPromotionPredicate (below) builds
            // a richer promotion predicate that is currently never attached — confirm
            // which predicate schema stella.ops/promotion@v1 is supposed to carry.
            var statement = new ExportInTotoStatement
            {
                PredicateType = PromotionAttestationPayloadTypes.PromotionPredicateType,
                Subject = BuildSubjects(request),
                Predicate = new ExportBundlePredicate
                {
                    ExportRunId = request.PromotionId,
                    TenantId = request.TenantId,
                    ProfileId = request.ProfileId,
                    BundleId = assemblyId,
                    BundleRootHash = ComputeRootHash(request),
                    CreatedAt = createdAt,
                    Exporter = new ExportAttestationExporter
                    {
                        Version = GetAssemblyVersion(),
                        BuildTimestamp = GetBuildTimestamp()
                    },
                    Metadata = request.Metadata
                }
            };

            // Serialize and sign the statement as the DSSE payload.
            var statementJson = JsonSerializer.Serialize(statement, SerializerOptions);
            var statementBytes = Encoding.UTF8.GetBytes(statementJson);

            var signResult = await _signer.SignAsync(
                ExportAttestationPayloadTypes.DssePayloadType,
                statementBytes,
                cancellationToken);

            if (!signResult.Success)
            {
                _logger.LogError("Failed to sign promotion attestation: {Error}", signResult.ErrorMessage);
                return PromotionAttestationAssemblyResult.Failed(
                    signResult.ErrorMessage ?? "Signing failed");
            }

            var envelope = new ExportDsseEnvelope
            {
                PayloadType = ExportAttestationPayloadTypes.DssePayloadType,
                Payload = Convert.ToBase64String(statementBytes),
                Signatures = signResult.Signatures.Select(s => new ExportDsseEnvelopeSignature
                {
                    KeyId = s.KeyId,
                    Signature = s.Signature
                }).ToList()
            };

            var assembly = new PromotionAttestationAssembly
            {
                AssemblyId = assemblyId,
                PromotionId = request.PromotionId,
                TenantId = request.TenantId,
                ProfileId = request.ProfileId,
                SourceEnvironment = request.SourceEnvironment,
                TargetEnvironment = request.TargetEnvironment,
                CreatedAt = createdAt,
                PromotionEnvelope = envelope,
                SbomDigests = request.SbomDigests,
                VexDigests = request.VexDigests,
                RekorProofs = request.RekorProofs,
                DsseEnvelopes = request.DsseEnvelopes,
                Verification = signResult.Verification,
                RootHash = ComputeRootHash(request)
            };

            _assemblies[BuildStoreKey(assemblyId, request.TenantId)] = assembly;

            _logger.LogInformation(
                "Created promotion attestation assembly {AssemblyId} for promotion {PromotionId}",
                assemblyId, request.PromotionId);

            return PromotionAttestationAssemblyResult.Succeeded(assemblyId, assembly);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to assemble promotion attestation");
            return PromotionAttestationAssemblyResult.Failed($"Assembly failed: {ex.Message}");
        }
    }

    /// <summary>Looks up an assembly by ID, scoped to the tenant.</summary>
    public Task<PromotionAttestationAssembly?> GetAssemblyAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        _assemblies.TryGetValue(BuildStoreKey(assemblyId, tenantId), out var assembly);
        return Task.FromResult(assembly);
    }

    /// <summary>Lists assemblies for a promotion, newest first.</summary>
    public Task<IReadOnlyList<PromotionAttestationAssembly>> GetAssembliesForPromotionAsync(
        string promotionId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        var assemblies = _assemblies.Values
            .Where(a => a.PromotionId == promotionId && a.TenantId == tenantId)
            .OrderByDescending(a => a.CreatedAt)
            .ToList();

        return Task.FromResult<IReadOnlyList<PromotionAttestationAssembly>>(assemblies);
    }

    /// <summary>
    /// Verifies every signature on the assembly's promotion envelope.
    /// An envelope with no signatures is treated as invalid.
    /// </summary>
    public async Task<bool> VerifyAssemblyAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        var assembly = await GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
        if (assembly is null)
        {
            return false;
        }

        try
        {
            // Fix: previously an empty signature list passed vacuously. An unsigned
            // envelope must never verify as valid.
            if (assembly.PromotionEnvelope.Signatures.Count == 0)
            {
                _logger.LogWarning(
                    "Assembly {AssemblyId} has no signatures on its promotion envelope",
                    assemblyId);
                return false;
            }

            var payloadBytes = Convert.FromBase64String(assembly.PromotionEnvelope.Payload);

            foreach (var sig in assembly.PromotionEnvelope.Signatures)
            {
                var isValid = await _signer.VerifyAsync(
                    assembly.PromotionEnvelope.PayloadType,
                    payloadBytes,
                    sig.Signature,
                    sig.KeyId,
                    cancellationToken);

                if (!isValid)
                {
                    _logger.LogWarning(
                        "Signature verification failed for assembly {AssemblyId} with key {KeyId}",
                        assemblyId, sig.KeyId);
                    return false;
                }
            }

            return true;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error verifying assembly {AssemblyId}", assemblyId);
            return false;
        }
    }

    /// <summary>
    /// Exports the assembly as a deterministic gzipped tar bundle. All content
    /// files are rendered first so checksums.txt covers the entire bundle.
    /// </summary>
    public async Task<PromotionBundleExportResult?> ExportBundleAsync(
        string assemblyId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        var assembly = await GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
        if (assembly is null)
        {
            return null;
        }

        // Render every content file up front (path, content, mode) so the
        // checksum manifest can cover all of them — previously rekor-proofs.ndjson
        // and the embedded DSSE envelopes were missing from checksums.txt.
        var entries = new List<(string Path, string Content, UnixFileMode Mode)>
        {
            (AssemblyFileName, JsonSerializer.Serialize(assembly, SerializerOptions), DefaultFileMode),
            (EnvelopeFileName, JsonSerializer.Serialize(assembly.PromotionEnvelope, SerializerOptions), DefaultFileMode)
        };

        if (assembly.RekorProofs.Count > 0)
        {
            entries.Add((RekorProofsFileName, BuildRekorNdjson(assembly.RekorProofs), DefaultFileMode));
        }

        foreach (var envelopeRef in assembly.DsseEnvelopes)
        {
            var envelopePath = $"{DsseEnvelopesDir}{envelopeRef.AttestationType}/{envelopeRef.AttestationId}.dsse.json";
            entries.Add((envelopePath, envelopeRef.EnvelopeJson, DefaultFileMode));
        }

        entries.Add((MetadataFileName, JsonSerializer.Serialize(BuildBundleMetadata(assembly), SerializerOptions), DefaultFileMode));

        // Manifest over all content files, then the (unchecksummed) script itself.
        entries.Add((ChecksumsFileName, BuildChecksums(entries), DefaultFileMode));
        entries.Add((VerifyScriptFileName, BuildVerificationScript(assembly), ExecutableFileMode));

        var stream = new MemoryStream();

        using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
        using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            foreach (var (path, content, mode) in entries)
            {
                WriteTextEntry(tar, path, content, mode);
            }
        }

        ApplyDeterministicGzipHeader(stream);

        var bundleBytes = stream.ToArray();
        var bundleDigest = "sha256:" + Convert.ToHexStringLower(SHA256.HashData(bundleBytes));
        stream.Position = 0;

        return new PromotionBundleExportResult
        {
            BundleStream = stream,
            FileName = $"promotion-{assembly.PromotionId}-{assembly.AssemblyId}.tar.gz",
            BundleDigest = bundleDigest,
            SizeBytes = bundleBytes.Length
        };
    }

    /// <summary>
    /// Builds the rich promotion predicate from the request artifacts.
    /// NOTE(review): currently not attached to the signed statement (see
    /// AssembleAsync) — kept pending confirmation of the intended schema.
    /// </summary>
    private static PromotionPredicate BuildPromotionPredicate(
        PromotionAttestationAssemblyRequest request,
        DateTimeOffset promotedAt)
    {
        return new PromotionPredicate
        {
            PromotionId = request.PromotionId,
            TenantId = request.TenantId,
            ProfileId = request.ProfileId,
            SourceEnvironment = request.SourceEnvironment,
            TargetEnvironment = request.TargetEnvironment,
            PromotedAt = promotedAt,
            SbomDigests = request.SbomDigests.Select(d => new PromotionDigestEntry
            {
                Name = d.Name,
                Digest = new Dictionary<string, string> { ["sha256"] = d.Sha256Digest },
                ArtifactType = d.ArtifactType
            }).ToList(),
            VexDigests = request.VexDigests.Select(d => new PromotionDigestEntry
            {
                Name = d.Name,
                Digest = new Dictionary<string, string> { ["sha256"] = d.Sha256Digest },
                ArtifactType = d.ArtifactType
            }).ToList(),
            RekorProofs = request.RekorProofs.Select(p => new PromotionRekorReference
            {
                LogIndex = p.LogIndex,
                LogId = p.LogId,
                Uuid = p.Uuid
            }).ToList(),
            EnvelopeDigests = request.DsseEnvelopes.Select(e => new PromotionDigestEntry
            {
                Name = e.AttestationId,
                Digest = new Dictionary<string, string> { ["sha256"] = e.EnvelopeDigest },
                ArtifactType = e.AttestationType
            }).ToList(),
            Promoter = new PromotionPromoterInfo
            {
                Version = GetAssemblyVersion(),
                BuildTimestamp = GetBuildTimestamp()
            },
            Metadata = request.Metadata
        };
    }

    /// <summary>Builds in-toto subjects for every SBOM, VEX, and envelope artifact.</summary>
    private static IReadOnlyList<ExportInTotoSubject> BuildSubjects(
        PromotionAttestationAssemblyRequest request)
    {
        var subjects = new List<ExportInTotoSubject>();

        foreach (var sbom in request.SbomDigests)
        {
            subjects.Add(new ExportInTotoSubject
            {
                Name = sbom.Name,
                Digest = new Dictionary<string, string> { ["sha256"] = sbom.Sha256Digest }
            });
        }

        foreach (var vex in request.VexDigests)
        {
            subjects.Add(new ExportInTotoSubject
            {
                Name = vex.Name,
                Digest = new Dictionary<string, string> { ["sha256"] = vex.Sha256Digest }
            });
        }

        foreach (var envelope in request.DsseEnvelopes)
        {
            subjects.Add(new ExportInTotoSubject
            {
                Name = $"envelope:{envelope.AttestationType}/{envelope.AttestationId}",
                Digest = new Dictionary<string, string> { ["sha256"] = envelope.EnvelopeDigest }
            });
        }

        return subjects;
    }

    /// <summary>
    /// Order-independent root hash over all artifact digests: sorted, joined
    /// with NUL separators, then hashed. Empty requests get a fixed marker hash.
    /// </summary>
    private static string ComputeRootHash(PromotionAttestationAssemblyRequest request)
    {
        var hashes = new List<string>();

        foreach (var sbom in request.SbomDigests)
        {
            hashes.Add(sbom.Sha256Digest);
        }

        foreach (var vex in request.VexDigests)
        {
            hashes.Add(vex.Sha256Digest);
        }

        foreach (var envelope in request.DsseEnvelopes)
        {
            hashes.Add(envelope.EnvelopeDigest);
        }

        if (hashes.Count == 0)
        {
            return "sha256:" + Convert.ToHexStringLower(
                SHA256.HashData(Encoding.UTF8.GetBytes("stellaops:promotion:empty")));
        }

        var builder = new StringBuilder();
        foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
        {
            builder.Append(hash).Append('\0');
        }

        var combined = Encoding.UTF8.GetBytes(builder.ToString());
        return "sha256:" + Convert.ToHexStringLower(SHA256.HashData(combined));
    }

    /// <summary>Serializes Rekor proofs as NDJSON, ordered by log index.</summary>
    private static string BuildRekorNdjson(IReadOnlyList<RekorProofEntry> proofs)
    {
        var builder = new StringBuilder();
        foreach (var proof in proofs.OrderBy(p => p.LogIndex))
        {
            builder.AppendLine(JsonSerializer.Serialize(proof, SerializerOptions));
        }
        return builder.ToString();
    }

    /// <summary>Summary metadata written into the bundle's metadata.json.</summary>
    private static object BuildBundleMetadata(PromotionAttestationAssembly assembly)
    {
        return new
        {
            version = BundleVersion,
            assembly_id = assembly.AssemblyId,
            promotion_id = assembly.PromotionId,
            tenant_id = assembly.TenantId,
            source_environment = assembly.SourceEnvironment,
            target_environment = assembly.TargetEnvironment,
            created_at = assembly.CreatedAt,
            root_hash = assembly.RootHash,
            sbom_count = assembly.SbomDigests.Count,
            vex_count = assembly.VexDigests.Count,
            rekor_proof_count = assembly.RekorProofs.Count,
            envelope_count = assembly.DsseEnvelopes.Count
        };
    }

    /// <summary>
    /// Builds a checksums.txt manifest over every rendered content file, in
    /// lexical path order. Uses the two-space separator required by
    /// `sha256sum --check` (a single space is rejected as malformed).
    /// </summary>
    private static string BuildChecksums(
        IReadOnlyList<(string Path, string Content, UnixFileMode Mode)> entries)
    {
        var builder = new StringBuilder();
        builder.AppendLine("# Promotion attestation bundle checksums (sha256)");

        foreach (var (path, content, _) in entries.OrderBy(e => e.Path, StringComparer.Ordinal))
        {
            var hash = Convert.ToHexStringLower(SHA256.HashData(Encoding.UTF8.GetBytes(content)));
            builder.Append(hash).Append("  ").AppendLine(path);
        }

        return builder.ToString();
    }

    /// <summary>Generates the offline, POSIX-sh verification script for the bundle.</summary>
    private static string BuildVerificationScript(PromotionAttestationAssembly assembly)
    {
        var builder = new StringBuilder();
        builder.AppendLine("#!/usr/bin/env sh");
        builder.AppendLine("# Promotion Attestation Bundle Verification Script");
        builder.AppendLine("# No network access required");
        builder.AppendLine();
        builder.AppendLine("set -eu");
        builder.AppendLine();
        builder.AppendLine("# Verify checksums");
        builder.AppendLine("echo \"Verifying checksums...\"");
        builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
        builder.AppendLine("  sha256sum --check checksums.txt");
        builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
        builder.AppendLine("  shasum -a 256 --check checksums.txt");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Error: sha256sum or shasum required\" >&2");
        builder.AppendLine("  exit 1");
        builder.AppendLine("fi");
        builder.AppendLine();
        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Checksums verified successfully.\"");
        builder.AppendLine("echo \"\"");
        builder.AppendLine();
        builder.AppendLine("# Promotion details");
        builder.Append("ASSEMBLY_ID=\"").Append(assembly.AssemblyId).AppendLine("\"");
        builder.Append("PROMOTION_ID=\"").Append(assembly.PromotionId).AppendLine("\"");
        builder.Append("SOURCE_ENV=\"").Append(assembly.SourceEnvironment).AppendLine("\"");
        builder.Append("TARGET_ENV=\"").Append(assembly.TargetEnvironment).AppendLine("\"");
        builder.AppendLine();
        builder.AppendLine("echo \"Promotion Details:\"");
        builder.AppendLine("echo \"  Assembly ID:  $ASSEMBLY_ID\"");
        builder.AppendLine("echo \"  Promotion ID: $PROMOTION_ID\"");
        builder.AppendLine("echo \"  Source:       $SOURCE_ENV\"");
        builder.AppendLine("echo \"  Target:       $TARGET_ENV\"");
        builder.AppendLine("echo \"\"");
        builder.AppendLine();
        builder.AppendLine("# Verify DSSE envelope");
        builder.AppendLine("DSSE_FILE=\"promotion.dsse.json\"");
        builder.AppendLine();
        builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
        builder.AppendLine("  echo \"Verifying promotion DSSE envelope with stella CLI...\"");
        builder.AppendLine("  stella attest verify --envelope \"$DSSE_FILE\"");
        builder.AppendLine("else");
        builder.AppendLine("  echo \"Note: stella CLI not found. Manual DSSE verification recommended.\"");
        builder.AppendLine("  echo \"Install stella CLI and run: stella attest verify --envelope $DSSE_FILE\"");
        builder.AppendLine("fi");
        builder.AppendLine();

        if (assembly.DsseEnvelopes.Count > 0)
        {
            builder.AppendLine("# Verify included attestation envelopes");
            builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
            builder.AppendLine("  echo \"\"");
            builder.AppendLine("  echo \"Verifying included attestation envelopes...\"");
            foreach (var env in assembly.DsseEnvelopes)
            {
                var path = $"envelopes/{env.AttestationType}/{env.AttestationId}.dsse.json";
                builder.Append("  stella attest verify --envelope \"").Append(path).AppendLine("\" || echo \"Warning: Failed to verify envelope\"");
            }
            builder.AppendLine("fi");
            builder.AppendLine();
        }

        builder.AppendLine("echo \"\"");
        builder.AppendLine("echo \"Verification complete.\"");

        return builder.ToString();
    }

    /// <summary>Writes a UTF-8 text file into the tar with deterministic metadata.</summary>
    private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        using var dataStream = new MemoryStream(bytes);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
        {
            Mode = mode,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty,
            DataStream = dataStream
        };
        writer.WriteEntry(entry);
    }

    /// <summary>
    /// Overwrites the gzip MTIME field (header bytes 4-7, little-endian) with
    /// the fixed timestamp so repeated exports are byte-identical.
    /// </summary>
    private static void ApplyDeterministicGzipHeader(MemoryStream stream)
    {
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written.");
        }

        var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);

        var originalPosition = stream.Position;
        stream.Position = 4;
        stream.Write(buffer);
        stream.Position = originalPosition;
    }

    // "promo-" + first 18 hex chars of a GUID (24 chars total).
    private static string GenerateAssemblyId()
    {
        return $"promo-{Guid.NewGuid():N}"[..24];
    }

    // Tenant-scoped store key keeps tenants isolated in the shared dictionary.
    private static string BuildStoreKey(string assemblyId, string tenantId)
    {
        return $"{tenantId}:{assemblyId}";
    }

    private static string GetAssemblyVersion()
    {
        return Assembly.GetExecutingAssembly()
            .GetCustomAttribute<AssemblyInformationalVersionAttribute>()
            ?.InformationalVersion ?? "1.0.0";
    }

    /// <summary>
    /// Reads the BuildTimestamp assembly-metadata value. Fix: searches all
    /// AssemblyMetadataAttribute instances instead of assuming the first one
    /// carries the BuildTimestamp key.
    /// </summary>
    private static DateTimeOffset? GetBuildTimestamp()
    {
        var attr = Assembly.GetExecutingAssembly()
            .GetCustomAttributes<AssemblyMetadataAttribute>()
            .FirstOrDefault(a => a.Key == "BuildTimestamp");

        return attr is not null && DateTimeOffset.TryParse(attr.Value, out var ts)
            ? ts
            : null;
    }
}
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;

namespace StellaOps.ExportCenter.WebService.Attestation;

/// <summary>
/// Minimal-API endpoints for creating, reading, verifying, and exporting
/// promotion attestation assemblies.
/// </summary>
public static class PromotionAttestationEndpoints
{
    /// <summary>
    /// Maps promotion attestation endpoints under /v1/promotions, guarded by
    /// the ExportOperator authorization policy.
    /// </summary>
    public static WebApplication MapPromotionAttestationEndpoints(this WebApplication app)
    {
        var group = app.MapGroup("/v1/promotions")
            .WithTags("Promotion Attestations")
            .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator);

        group.MapPost("/attestations", CreatePromotionAttestationAsync)
            .WithName("CreatePromotionAttestation")
            .WithSummary("Create promotion attestation assembly")
            .WithDescription("Creates a promotion attestation assembly bundling SBOM/VEX digests, Rekor proofs, and DSSE envelopes.")
            .Produces<PromotionAttestationAssemblyResult>(StatusCodes.Status201Created)
            .Produces(StatusCodes.Status400BadRequest);

        group.MapGet("/attestations/{assemblyId}", GetPromotionAssemblyAsync)
            .WithName("GetPromotionAssembly")
            .WithSummary("Get promotion attestation assembly")
            .WithDescription("Returns the promotion attestation assembly for the specified ID.")
            .Produces<PromotionAttestationAssembly>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        group.MapGet("/{promotionId}/attestations", GetAssembliesForPromotionAsync)
            .WithName("GetAssembliesForPromotion")
            .WithSummary("Get attestation assemblies for a promotion")
            .WithDescription("Returns all attestation assemblies for the specified promotion.")
            .Produces<IReadOnlyList<PromotionAttestationAssembly>>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        group.MapPost("/attestations/{assemblyId}/verify", VerifyPromotionAssemblyAsync)
            .WithName("VerifyPromotionAssembly")
            .WithSummary("Verify promotion attestation assembly")
            .WithDescription("Verifies the cryptographic signatures of the promotion attestation assembly.")
            .Produces<PromotionAttestationVerifyResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound);

        group.MapGet("/attestations/{assemblyId}/bundle", ExportPromotionBundleAsync)
            .WithName("ExportPromotionBundle")
            .WithSummary("Export promotion bundle for Offline Kit")
            .WithDescription("Exports the promotion attestation assembly as a portable bundle for Offline Kit delivery.")
            .Produces(StatusCodes.Status200OK, contentType: "application/gzip")
            .Produces(StatusCodes.Status404NotFound);

        return app;
    }

    /// <summary>
    /// POST handler: assembles and stores a new promotion attestation.
    /// Tenant comes from the X-Tenant-Id header or token claims.
    /// </summary>
    private static async Task<Results<Created<PromotionAttestationAssemblyResult>, BadRequest<string>>> CreatePromotionAttestationAsync(
        [FromBody] PromotionAttestationAssemblyRequest request,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IPromotionAttestationAssembler assembler,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.BadRequest("Tenant ID is required");
        }

        // Overwrite any caller-supplied tenant with the authenticated one.
        var requestWithTenant = request with { TenantId = tenantId };

        var result = await assembler.AssembleAsync(requestWithTenant, cancellationToken);
        if (!result.Success)
        {
            return TypedResults.BadRequest(result.ErrorMessage ?? "Assembly failed");
        }

        return TypedResults.Created($"/v1/promotions/attestations/{result.AssemblyId}", result);
    }

    /// <summary>GET handler: fetches a single assembly scoped to the tenant.</summary>
    private static async Task<Results<Ok<PromotionAttestationAssembly>, NotFound>> GetPromotionAssemblyAsync(
        string assemblyId,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IPromotionAttestationAssembler assembler,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            // Missing tenant is reported as not-found to avoid leaking existence.
            return TypedResults.NotFound();
        }

        var assembly = await assembler.GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
        if (assembly is null)
        {
            return TypedResults.NotFound();
        }

        return TypedResults.Ok(assembly);
    }

    /// <summary>GET handler: lists all assemblies for a promotion.</summary>
    private static async Task<Results<Ok<IReadOnlyList<PromotionAttestationAssembly>>, NotFound>> GetAssembliesForPromotionAsync(
        string promotionId,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IPromotionAttestationAssembler assembler,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var assemblies = await assembler.GetAssembliesForPromotionAsync(promotionId, tenantId, cancellationToken);
        return TypedResults.Ok(assemblies);
    }

    /// <summary>POST handler: runs signature verification on an assembly.</summary>
    private static async Task<Results<Ok<PromotionAttestationVerifyResponse>, NotFound>> VerifyPromotionAssemblyAsync(
        string assemblyId,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IPromotionAttestationAssembler assembler,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var assembly = await assembler.GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
        if (assembly is null)
        {
            return TypedResults.NotFound();
        }

        var isValid = await assembler.VerifyAssemblyAsync(assemblyId, tenantId, cancellationToken);

        return TypedResults.Ok(new PromotionAttestationVerifyResponse
        {
            AssemblyId = assemblyId,
            PromotionId = assembly.PromotionId,
            IsValid = isValid,
            // NOTE(review): consider injecting TimeProvider for consistency with
            // the assembler's clock handling.
            VerifiedAt = DateTimeOffset.UtcNow
        });
    }

    /// <summary>
    /// GET handler: streams the assembly as a tar.gz bundle for Offline Kit.
    /// </summary>
    private static async Task<Results<FileStreamHttpResult, NotFound>> ExportPromotionBundleAsync(
        string assemblyId,
        [FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IPromotionAttestationAssembler assembler,
        HttpContext httpContext,
        CancellationToken cancellationToken)
    {
        var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            return TypedResults.NotFound();
        }

        var exportResult = await assembler.ExportBundleAsync(assemblyId, tenantId, cancellationToken);
        if (exportResult is null)
        {
            return TypedResults.NotFound();
        }

        // Fix: Content-Disposition was previously also set manually on the
        // response, duplicating the header TypedResults.File emits (and without
        // filename escaping). Let the file result own that header; only the
        // custom digest header is set here.
        httpContext.Response.Headers["X-Bundle-Digest"] = exportResult.BundleDigest;

        return TypedResults.File(
            exportResult.BundleStream,
            exportResult.MediaType,
            exportResult.FileName);
    }

    /// <summary>
    /// Resolves the tenant: explicit X-Tenant-Id header wins, otherwise the
    /// tenant_id / tid claim from the authenticated principal.
    /// </summary>
    private static string? ResolveTenantId(string? header, HttpContext httpContext)
    {
        if (!string.IsNullOrWhiteSpace(header))
        {
            return header;
        }

        var tenantClaim = httpContext.User.FindFirst("tenant_id")
            ?? httpContext.User.FindFirst("tid");

        return tenantClaim?.Value;
    }
}

/// <summary>
/// Response for promotion attestation verification.
/// </summary>
public sealed record PromotionAttestationVerifyResponse
{
    public required string AssemblyId { get; init; }
    public required string PromotionId { get; init; }
    public required bool IsValid { get; init; }
    public required DateTimeOffset VerifiedAt { get; init; }
    public string? ErrorMessage { get; init; }
}
ErrorMessage { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationModels.cs new file mode 100644 index 000000000..b9e630edf --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Attestation/PromotionAttestationModels.cs @@ -0,0 +1,354 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.Attestation; + +/// +/// Payload types for promotion attestations. +/// +public static class PromotionAttestationPayloadTypes +{ + public const string PromotionPredicateType = "stella.ops/promotion@v1"; + public const string PromotionBundlePredicateType = "stella.ops/promotion-bundle@v1"; +} + +/// +/// Request to create a promotion attestation assembly. +/// +public sealed record PromotionAttestationAssemblyRequest +{ + /// + /// Unique identifier for the promotion. + /// + public required string PromotionId { get; init; } + + /// + /// Tenant identifier. + /// + public required string TenantId { get; init; } + + /// + /// Optional profile identifier. + /// + public string? ProfileId { get; init; } + + /// + /// Source environment (e.g., "staging"). + /// + public required string SourceEnvironment { get; init; } + + /// + /// Target environment (e.g., "production"). + /// + public required string TargetEnvironment { get; init; } + + /// + /// SBOM digest references to include. + /// + public IReadOnlyList SbomDigests { get; init; } = []; + + /// + /// VEX digest references to include. + /// + public IReadOnlyList VexDigests { get; init; } = []; + + /// + /// Rekor transparency log proofs. + /// + public IReadOnlyList RekorProofs { get; init; } = []; + + /// + /// Existing DSSE envelopes to include in the bundle. 
+ /// + public IReadOnlyList DsseEnvelopes { get; init; } = []; + + /// + /// Optional metadata for the promotion. + /// + public IReadOnlyDictionary? Metadata { get; init; } +} + +/// +/// Reference to an artifact with its digest. +/// +public sealed record ArtifactDigestReference +{ + /// + /// Unique identifier for the artifact. + /// + public required string ArtifactId { get; init; } + + /// + /// Name of the artifact. + /// + public required string Name { get; init; } + + /// + /// Media type of the artifact (e.g., "application/spdx+json"). + /// + public required string MediaType { get; init; } + + /// + /// SHA-256 digest of the artifact. + /// + public required string Sha256Digest { get; init; } + + /// + /// Size of the artifact in bytes. + /// + public long SizeBytes { get; init; } + + /// + /// Optional URI where the artifact can be retrieved. + /// + public string? Uri { get; init; } + + /// + /// Artifact type (sbom, vex, etc.). + /// + public required string ArtifactType { get; init; } + + /// + /// Artifact version or format (e.g., "spdx-3.0.1", "cyclonedx-1.6", "openvex"). + /// + public string? FormatVersion { get; init; } +} + +/// +/// Rekor transparency log proof entry. +/// +public sealed record RekorProofEntry +{ + /// + /// Log index in Rekor. + /// + [JsonPropertyName("logIndex")] + public required long LogIndex { get; init; } + + /// + /// Log ID (tree ID). + /// + [JsonPropertyName("logId")] + public required string LogId { get; init; } + + /// + /// Integrated time (Unix timestamp). + /// + [JsonPropertyName("integratedTime")] + public required long IntegratedTime { get; init; } + + /// + /// Entry UUID. + /// + [JsonPropertyName("uuid")] + public required string Uuid { get; init; } + + /// + /// Entry body (base64-encoded). + /// + [JsonPropertyName("body")] + public string? Body { get; init; } + + /// + /// Inclusion proof for verification. + /// + [JsonPropertyName("inclusionProof")] + public RekorInclusionProof? 
/// <summary>
/// Merkle tree inclusion proof from Rekor.
/// </summary>
public sealed record RekorInclusionProof
{
    [JsonPropertyName("logIndex")]
    public long LogIndex { get; init; }

    [JsonPropertyName("rootHash")]
    public string? RootHash { get; init; }

    [JsonPropertyName("treeSize")]
    public long TreeSize { get; init; }

    [JsonPropertyName("hashes")]
    public IReadOnlyList<string> Hashes { get; init; } = [];
}

/// <summary>
/// Reference to an existing DSSE envelope.
/// </summary>
public sealed record DsseEnvelopeReference
{
    /// <summary>Attestation ID.</summary>
    public required string AttestationId { get; init; }

    /// <summary>Type of attestation (e.g., "sbom", "vex", "slsa-provenance").</summary>
    public required string AttestationType { get; init; }

    /// <summary>Serialized DSSE envelope JSON.</summary>
    public required string EnvelopeJson { get; init; }

    /// <summary>SHA-256 digest of the envelope.</summary>
    public required string EnvelopeDigest { get; init; }
}

/// <summary>
/// Result of creating a promotion attestation assembly.
/// </summary>
public sealed record PromotionAttestationAssemblyResult
{
    public bool Success { get; init; }
    public string? AssemblyId { get; init; }
    public string? ErrorMessage { get; init; }
    public PromotionAttestationAssembly? Assembly { get; init; }

    /// <summary>Creates a successful result carrying the assembled bundle.</summary>
    public static PromotionAttestationAssemblyResult Succeeded(
        string assemblyId,
        PromotionAttestationAssembly assembly) =>
        new() { Success = true, AssemblyId = assemblyId, Assembly = assembly };

    /// <summary>Creates a failed result with a human-readable error.</summary>
    public static PromotionAttestationAssemblyResult Failed(string errorMessage) =>
        new() { Success = false, ErrorMessage = errorMessage };
}

/// <summary>
/// Complete promotion attestation assembly.
/// </summary>
public sealed record PromotionAttestationAssembly
{
    [JsonPropertyName("assembly_id")]
    public required string AssemblyId { get; init; }

    [JsonPropertyName("promotion_id")]
    public required string PromotionId { get; init; }

    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }

    [JsonPropertyName("profile_id")]
    public string? ProfileId { get; init; }

    [JsonPropertyName("source_environment")]
    public required string SourceEnvironment { get; init; }

    [JsonPropertyName("target_environment")]
    public required string TargetEnvironment { get; init; }

    [JsonPropertyName("created_at")]
    public required DateTimeOffset CreatedAt { get; init; }

    [JsonPropertyName("promotion_envelope")]
    public required ExportDsseEnvelope PromotionEnvelope { get; init; }

    // NOTE(review): element types reconstructed (extraction dropped generic
    // arguments); ArtifactDigestReference mirrors the assembly request — confirm.
    [JsonPropertyName("sbom_digests")]
    public IReadOnlyList<ArtifactDigestReference> SbomDigests { get; init; } = [];

    [JsonPropertyName("vex_digests")]
    public IReadOnlyList<ArtifactDigestReference> VexDigests { get; init; } = [];

    [JsonPropertyName("rekor_proofs")]
    public IReadOnlyList<RekorProofEntry> RekorProofs { get; init; } = [];

    [JsonPropertyName("dsse_envelopes")]
    public IReadOnlyList<DsseEnvelopeReference> DsseEnvelopes { get; init; } = [];

    [JsonPropertyName("verification")]
    public ExportAttestationVerification? Verification { get; init; }

    [JsonPropertyName("root_hash")]
    public string? RootHash { get; init; }
}

/// <summary>
/// Promotion predicate for in-toto statements.
/// </summary>
public sealed record PromotionPredicate
{
    [JsonPropertyName("promotionId")]
    public required string PromotionId { get; init; }

    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }

    [JsonPropertyName("profileId")]
    public string? ProfileId { get; init; }

    [JsonPropertyName("sourceEnvironment")]
    public required string SourceEnvironment { get; init; }

    [JsonPropertyName("targetEnvironment")]
    public required string TargetEnvironment { get; init; }

    [JsonPropertyName("promotedAt")]
    public required DateTimeOffset PromotedAt { get; init; }

    [JsonPropertyName("sbomDigests")]
    public IReadOnlyList<PromotionDigestEntry> SbomDigests { get; init; } = [];

    [JsonPropertyName("vexDigests")]
    public IReadOnlyList<PromotionDigestEntry> VexDigests { get; init; } = [];

    [JsonPropertyName("rekorProofs")]
    public IReadOnlyList<PromotionRekorReference> RekorProofs { get; init; } = [];

    [JsonPropertyName("envelopeDigests")]
    public IReadOnlyList<string> EnvelopeDigests { get; init; } = [];

    [JsonPropertyName("promoter")]
    public required PromotionPromoterInfo Promoter { get; init; }

    [JsonPropertyName("metadata")]
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}

/// <summary>
/// Digest entry for the promotion predicate (in-toto subject style:
/// algorithm name mapped to hex digest).
/// </summary>
public sealed record PromotionDigestEntry
{
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    [JsonPropertyName("digest")]
    public required IReadOnlyDictionary<string, string> Digest { get; init; }

    [JsonPropertyName("artifactType")]
    public string? ArtifactType { get; init; }
}

/// <summary>
/// Rekor reference for the promotion predicate.
/// </summary>
public sealed record PromotionRekorReference
{
    [JsonPropertyName("logIndex")]
    public required long LogIndex { get; init; }

    [JsonPropertyName("logId")]
    public required string LogId { get; init; }

    [JsonPropertyName("uuid")]
    public required string Uuid { get; init; }
}

/// <summary>
/// Information about the component that performed the promotion.
/// </summary>
public sealed record PromotionPromoterInfo
{
    [JsonPropertyName("name")]
    public string Name { get; init; } = "StellaOps.ExportCenter";

    [JsonPropertyName("version")]
    public required string Version { get; init; }

    [JsonPropertyName("buildTimestamp")]
    public DateTimeOffset? BuildTimestamp { get; init; }
}
// ---- File: StellaOps.ExportCenter.WebService/Deprecation/DeprecatedEndpointsRegistry.cs ----

namespace StellaOps.ExportCenter.WebService.Deprecation;

/// <summary>
/// Registry of deprecated export endpoints with their migration paths.
/// </summary>
public static class DeprecatedEndpointsRegistry
{
    /// <summary>Date when legacy /exports endpoints were deprecated.</summary>
    public static readonly DateTimeOffset LegacyExportsDeprecationDate =
        new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    /// <summary>Date when legacy /exports endpoints will be removed.</summary>
    public static readonly DateTimeOffset LegacyExportsSunsetDate =
        new(2025, 7, 1, 0, 0, 0, TimeSpan.Zero);

    /// <summary>Documentation URL for the API deprecation migration guide.</summary>
    public const string DeprecationDocumentationUrl =
        "https://docs.stellaops.io/api/export-center/migration";

    /// <summary>Deprecation info for GET /exports (list exports).</summary>
    public static readonly DeprecationInfo ListExports = new(
        DeprecatedAt: LegacyExportsDeprecationDate,
        SunsetAt: LegacyExportsSunsetDate,
        SuccessorPath: "/v1/exports/profiles",
        DocumentationUrl: DeprecationDocumentationUrl,
        Reason: "Legacy exports list endpoint replaced by profiles API");

    /// <summary>Deprecation info for POST /exports (create export).</summary>
    public static readonly DeprecationInfo CreateExport = new(
        DeprecatedAt: LegacyExportsDeprecationDate,
        SunsetAt: LegacyExportsSunsetDate,
        SuccessorPath: "/v1/exports/evidence",
        DocumentationUrl: DeprecationDocumentationUrl,
        Reason: "Legacy export creation endpoint replaced by typed export APIs");

    /// <summary>Deprecation info for DELETE /exports/{id} (delete export).</summary>
    public static readonly DeprecationInfo DeleteExport = new(
        DeprecatedAt: LegacyExportsDeprecationDate,
        SunsetAt: LegacyExportsSunsetDate,
        SuccessorPath: "/v1/exports/runs/{id}/cancel",
        DocumentationUrl: DeprecationDocumentationUrl,
        Reason: "Legacy export deletion replaced by run cancellation API");

    // Cached once: the registrations are immutable, so there is no reason to
    // allocate a fresh list on every GetAll() call.
    // (Declared after the DeprecationInfo fields so static initialization order is correct.)
    private static readonly (string Method, string Pattern, DeprecationInfo Info)[] AllRegistrations =
    [
        ("GET", "/exports", ListExports),
        ("POST", "/exports", CreateExport),
        ("DELETE", "/exports/{id}", DeleteExport)
    ];

    /// <summary>Gets all deprecated endpoint registrations.</summary>
    public static IReadOnlyList<(string Method, string Pattern, DeprecationInfo Info)> GetAll()
        => AllRegistrations;
}

// ---- File: StellaOps.ExportCenter.WebService/Deprecation/DeprecationHeaderExtensions.cs ----

using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;

namespace StellaOps.ExportCenter.WebService.Deprecation;

/// <summary>
/// Extension methods for adding deprecation headers to HTTP responses.
/// Sunset is defined by RFC 8594; the Deprecation header is defined by RFC 9745
/// (this code still emits the draft-era IMF-fixdate form — see note below).
/// </summary>
public static class DeprecationHeaderExtensions
{
    /// <summary>HTTP header indicating the resource is deprecated (RFC 9745).</summary>
    public const string DeprecationHeader = "Deprecation";

    /// <summary>HTTP header indicating when the resource will be removed (RFC 8594).</summary>
    public const string SunsetHeader = "Sunset";

    /// <summary>HTTP Link header with relation type for the successor resource.</summary>
    public const string LinkHeader = "Link";

    /// <summary>
    /// HTTP Warning header for an additional deprecation notice (RFC 7234).
    /// NOTE(review): RFC 9111 obsoletes the Warning header; kept for legacy clients,
    /// but consider dropping it.
    /// </summary>
    public const string WarningHeader = "Warning";

    /// <summary>
    /// Adds deprecation headers to the response.
    /// </summary>
    /// <param name="context">The HTTP context.</param>
    /// <param name="info">Deprecation metadata.</param>
    public static void AddDeprecationHeaders(this HttpContext context, DeprecationInfo info)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(info);

        var response = context.Response;

        // Deprecation header with IMF-fixdate.
        // NOTE(review): the final RFC 9745 format is a structured-field date
        // ("@<unix-timestamp>"); emitting IMF-fixdate matches the earlier draft.
        // Kept as-is to avoid breaking existing clients — confirm before changing.
        response.Headers[DeprecationHeader] = info.DeprecatedAt.ToUniversalTime().ToString("R");

        // RFC 8594: Sunset header with IMF-fixdate.
        response.Headers[SunsetHeader] = info.SunsetAt.ToUniversalTime().ToString("R");

        // Link header pointing to the successor and/or documentation.
        var links = new List<string>();

        if (!string.IsNullOrEmpty(info.SuccessorPath))
        {
            links.Add($"<{info.SuccessorPath}>; rel=\"successor-version\"");
        }

        if (!string.IsNullOrEmpty(info.DocumentationUrl))
        {
            links.Add($"<{info.DocumentationUrl}>; rel=\"deprecation\"");
        }

        if (links.Count > 0)
        {
            response.Headers.Append(LinkHeader, string.Join(", ", links));
        }

        // Warning header (code 299 = miscellaneous persistent warning).
        var reason = info.Reason ?? "This endpoint is deprecated and will be removed.";
        var warning = $"299 - \"{reason} Use {info.SuccessorPath} instead. Sunset: {info.SunsetAt:yyyy-MM-dd}\"";
        response.Headers[WarningHeader] = warning;
    }

    /// <summary>
    /// Creates an endpoint filter that adds deprecation headers, logs usage, and
    /// returns 410 Gone once the sunset date has passed.
    /// </summary>
    /// <param name="info">Deprecation metadata.</param>
    /// <param name="loggerFactory">Logger factory for deprecation logging (optional).</param>
    /// <returns>An endpoint filter delegate suitable for AddEndpointFilter.</returns>
    public static Func<EndpointFilterInvocationContext, EndpointFilterDelegate, ValueTask<object?>>
        CreateDeprecationFilter(DeprecationInfo info, ILoggerFactory? loggerFactory = null)
    {
        var logger = loggerFactory?.CreateLogger("DeprecatedEndpoint");

        return async (context, next) =>
        {
            var httpContext = context.HttpContext;

            // Always advertise the deprecation, even on the 410 path below.
            httpContext.AddDeprecationHeaders(info);

            logger?.LogWarning(
                "Deprecated endpoint accessed: {Method} {Path} - Successor: {Successor}, Sunset: {Sunset}, Client: {ClientIp}",
                httpContext.Request.Method,
                httpContext.Request.Path,
                info.SuccessorPath,
                info.SunsetAt,
                httpContext.Connection.RemoteIpAddress);

            // Past sunset: short-circuit with 410 Gone instead of invoking the handler.
            if (info.IsPastSunset)
            {
                logger?.LogError(
                    "Sunset endpoint accessed after removal date: {Method} {Path} - Was removed: {Sunset}",
                    httpContext.Request.Method,
                    httpContext.Request.Path,
                    info.SunsetAt);

                return Results.Problem(
                    title: "Endpoint Removed",
                    detail: $"This endpoint was deprecated on {info.DeprecatedAt:yyyy-MM-dd} and removed on {info.SunsetAt:yyyy-MM-dd}. Use {info.SuccessorPath} instead.",
                    statusCode: StatusCodes.Status410Gone,
                    extensions: new Dictionary<string, object?>
                    {
                        ["successorPath"] = info.SuccessorPath,
                        ["documentationUrl"] = info.DocumentationUrl,
                        ["sunsetDate"] = info.SunsetAt.ToString("o")
                    });
            }

            return await next(context);
        };
    }
}
// ---- File: StellaOps.ExportCenter.WebService/Deprecation/DeprecationInfo.cs ----

namespace StellaOps.ExportCenter.WebService.Deprecation;

/// <summary>
/// Describes deprecation metadata for an API endpoint.
/// </summary>
/// <param name="DeprecatedAt">UTC date when the endpoint was deprecated.</param>
/// <param name="SunsetAt">UTC date when the endpoint will be removed.</param>
/// <param name="SuccessorPath">Path to the replacement endpoint (e.g., "/v1/exports").</param>
/// <param name="DocumentationUrl">URL to deprecation documentation or migration guide.</param>
/// <param name="Reason">Human-readable reason for deprecation.</param>
public sealed record DeprecationInfo(
    DateTimeOffset DeprecatedAt,
    DateTimeOffset SunsetAt,
    string SuccessorPath,
    string? DocumentationUrl = null,
    string? Reason = null)
{
    /// <summary>Returns true if the sunset date has passed (evaluated against the current UTC time).</summary>
    public bool IsPastSunset => IsPastSunsetAt(DateTimeOffset.UtcNow);

    /// <summary>Whole days remaining until sunset (never negative), evaluated against the current UTC time.</summary>
    public int DaysUntilSunset => DaysUntilSunsetAt(DateTimeOffset.UtcNow);

    /// <summary>
    /// Returns true if the sunset date has passed at the supplied instant.
    /// Prefer this overload in tests so the clock can be controlled.
    /// </summary>
    public bool IsPastSunsetAt(DateTimeOffset now) => now >= SunsetAt;

    /// <summary>
    /// Whole days remaining until sunset at the supplied instant (never negative).
    /// Note: truncates toward zero, so less than 24h remaining reports 0 days.
    /// </summary>
    public int DaysUntilSunsetAt(DateTimeOffset now) =>
        Math.Max(0, (int)(SunsetAt - now).TotalDays);
}

// ---- File: StellaOps.ExportCenter.WebService/Deprecation/DeprecationNotificationService.cs ----

using Microsoft.Extensions.Logging;

namespace StellaOps.ExportCenter.WebService.Deprecation;

/// <summary>
/// Service for emitting notifications when deprecated endpoints are accessed.
/// </summary>
public interface IDeprecationNotificationService
{
    /// <summary>
    /// Records access to a deprecated endpoint.
    /// </summary>
    /// <param name="method">HTTP method.</param>
    /// <param name="path">Request path.</param>
    /// <param name="info">Deprecation metadata.</param>
    /// <param name="clientInfo">Client identification (IP, user agent, etc.).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task RecordDeprecatedAccessAsync(
        string method,
        string path,
        DeprecationInfo info,
        DeprecationClientInfo clientInfo,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Information about the client accessing a deprecated endpoint.
/// </summary>
/// <param name="ClientIp">Client IP address.</param>
/// <param name="UserAgent">Client user agent string.</param>
/// <param name="TenantId">Tenant ID if available.</param>
/// <param name="UserId">User ID if authenticated.</param>
/// <param name="TraceId">Distributed trace ID.</param>
public sealed record DeprecationClientInfo(
    string? ClientIp,
    string? UserAgent,
    string? TenantId,
    string? UserId,
    string? TraceId);

/// <summary>
/// Default implementation that logs deprecation events and bumps the access counter.
/// </summary>
public sealed class DeprecationNotificationService : IDeprecationNotificationService
{
    private readonly ILogger<DeprecationNotificationService> _logger;

    public DeprecationNotificationService(ILogger<DeprecationNotificationService> logger)
    {
        // Fail fast on misconfigured DI rather than NRE-ing on first use.
        ArgumentNullException.ThrowIfNull(logger);
        _logger = logger;
    }

    public Task RecordDeprecatedAccessAsync(
        string method,
        string path,
        DeprecationInfo info,
        DeprecationClientInfo clientInfo,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(info);
        ArgumentNullException.ThrowIfNull(clientInfo);

        // Structured event for telemetry/audit.
        _logger.LogWarning(
            "Deprecated endpoint access: Method={Method}, Path={Path}, " +
            "DeprecatedAt={DeprecatedAt}, SunsetAt={SunsetAt}, DaysUntilSunset={DaysUntilSunset}, " +
            "Successor={Successor}, ClientIp={ClientIp}, UserAgent={UserAgent}, " +
            "TenantId={TenantId}, UserId={UserId}, TraceId={TraceId}",
            method,
            path,
            info.DeprecatedAt,
            info.SunsetAt,
            info.DaysUntilSunset,
            info.SuccessorPath,
            clientInfo.ClientIp,
            clientInfo.UserAgent,
            clientInfo.TenantId,
            clientInfo.UserId,
            clientInfo.TraceId);

        // Metric counter; path is a bounded set here (registry patterns), so
        // cardinality stays manageable.
        DeprecationMetrics.DeprecatedEndpointAccessCounter.Add(
            1,
            new KeyValuePair<string, object?>("method", method),
            new KeyValuePair<string, object?>("path", path),
            new KeyValuePair<string, object?>("successor", info.SuccessorPath),
            new KeyValuePair<string, object?>("days_until_sunset", info.DaysUntilSunset));

        return Task.CompletedTask;
    }
}

/// <summary>
/// Metrics for deprecation tracking.
/// </summary>
public static class DeprecationMetrics
{
    private static readonly System.Diagnostics.Metrics.Meter Meter =
        new("StellaOps.ExportCenter.Deprecation", "1.0.0");

    /// <summary>Counter for deprecated endpoint accesses.</summary>
    public static readonly System.Diagnostics.Metrics.Counter<long> DeprecatedEndpointAccessCounter =
        Meter.CreateCounter<long>(
            "export_center_deprecated_endpoint_access_total",
            "requests",
            "Total number of requests to deprecated endpoints");
}
// ---- File: StellaOps.ExportCenter.WebService/Deprecation/DeprecationRouteBuilderExtensions.cs ----

using Microsoft.AspNetCore.Builder;

namespace StellaOps.ExportCenter.WebService.Deprecation;

/// <summary>
/// Extension methods for applying deprecation metadata to routes.
/// </summary>
public static class DeprecationRouteBuilderExtensions
{
    /// <summary>
    /// Marks the endpoint as deprecated: attaches the deprecation-header filter,
    /// exposes the metadata for OpenAPI, and tags the route "Deprecated".
    /// </summary>
    /// <param name="builder">The route handler builder.</param>
    /// <param name="info">Deprecation metadata.</param>
    /// <returns>The route handler builder for chaining.</returns>
    public static RouteHandlerBuilder WithDeprecation(this RouteHandlerBuilder builder, DeprecationInfo info)
    {
        ArgumentNullException.ThrowIfNull(builder);
        ArgumentNullException.ThrowIfNull(info);

        return builder
            .AddEndpointFilter(DeprecationHeaderExtensions.CreateDeprecationFilter(info))
            .WithMetadata(info)
            .WithMetadata(new DeprecatedAttribute())
            .WithTags("Deprecated");
    }

    /// <summary>
    /// Convenience overload that builds the <see cref="DeprecationInfo"/> inline.
    /// </summary>
    /// <param name="builder">The route handler builder.</param>
    /// <param name="successorPath">Path to the replacement endpoint.</param>
    /// <param name="deprecatedAt">When the endpoint was deprecated.</param>
    /// <param name="sunsetAt">When the endpoint will be removed.</param>
    /// <param name="documentationUrl">Optional documentation URL.</param>
    /// <param name="reason">Optional deprecation reason.</param>
    /// <returns>The route handler builder for chaining.</returns>
    public static RouteHandlerBuilder WithDeprecation(
        this RouteHandlerBuilder builder,
        string successorPath,
        DateTimeOffset deprecatedAt,
        DateTimeOffset sunsetAt,
        string? documentationUrl = null,
        string? reason = null)
    {
        return builder.WithDeprecation(new DeprecationInfo(
            deprecatedAt,
            sunsetAt,
            successorPath,
            documentationUrl,
            reason));
    }
}

/// <summary>
/// Marker attribute indicating an endpoint is deprecated.
/// Used for OpenAPI documentation generation.
/// </summary>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)]
public sealed class DeprecatedAttribute : Attribute
{
}

// ---- File: StellaOps.ExportCenter.WebService/EvidenceLocker/EvidenceLockerServiceCollectionExtensions.cs ----

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;

namespace StellaOps.ExportCenter.WebService.EvidenceLocker;

/// <summary>
/// Extension methods for registering evidence locker integration services.
/// </summary>
public static class EvidenceLockerServiceCollectionExtensions
{
    /// <summary>
    /// Adds evidence locker integration services (Merkle calculator plus HTTP client).
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Optional configuration for the evidence locker client.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddExportEvidenceLocker(
        this IServiceCollection services,
        Action<ExportEvidenceLockerOptions>? configureOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }

        // NOTE(review): generic arguments below were lost in extraction and
        // reconstructed from the surrounding types — confirm the concrete
        // Merkle calculator implementation name.
        services.TryAddSingleton<IExportMerkleTreeCalculator, ExportMerkleTreeCalculator>();

        // Typed HTTP client bound to the evidence locker service.
        services.AddHttpClient<IExportEvidenceLockerClient, ExportEvidenceLockerClient>((serviceProvider, client) =>
        {
            var options = serviceProvider.GetService<IOptions<ExportEvidenceLockerOptions>>()?.Value
                ?? ExportEvidenceLockerOptions.Default;

            client.BaseAddress = new Uri(options.BaseUrl);
            client.Timeout = options.Timeout;
            client.DefaultRequestHeaders.Accept.Add(
                new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));
        });

        return services;
    }

    /// <summary>
    /// Adds evidence locker integration with an in-memory implementation for testing.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddExportEvidenceLockerInMemory(
        this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.TryAddSingleton<IExportMerkleTreeCalculator, ExportMerkleTreeCalculator>();
        services.TryAddSingleton<IExportEvidenceLockerClient, InMemoryExportEvidenceLockerClient>();

        return services;
    }
}

/// <summary>
/// In-memory implementation of the evidence locker client for testing.
/// Thread-safe via a single private gate; enforces tenant isolation on reads
/// so tests exercise the same contract as the real client.
/// </summary>
public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerClient
{
    private readonly IExportMerkleTreeCalculator _merkleCalculator;
    private readonly Dictionary<string, ExportBundleManifest> _bundles = new(StringComparer.OrdinalIgnoreCase);
    private readonly object _lock = new();

    public InMemoryExportEvidenceLockerClient(IExportMerkleTreeCalculator merkleCalculator)
    {
        _merkleCalculator = merkleCalculator ?? throw new ArgumentNullException(nameof(merkleCalculator));
    }

    public Task<ExportEvidenceSnapshotResult> PushSnapshotAsync(
        ExportEvidenceSnapshotRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var bundleId = Guid.NewGuid().ToString();

        // Mirror the real client: canonical path is "<section>/<path>", digests
        // are normalized to lowercase before Merkle computation.
        var entries = request.Materials.Select(m => new ExportManifestEntry
        {
            Section = m.Section,
            CanonicalPath = $"{m.Section}/{m.Path}",
            Sha256 = m.Sha256.ToLowerInvariant(),
            SizeBytes = m.SizeBytes,
            MediaType = m.MediaType ?? "application/octet-stream",
            Attributes = m.Attributes
        }).ToList();

        var rootHash = _merkleCalculator.CalculateRootHash(entries);

        var manifest = new ExportBundleManifest
        {
            BundleId = bundleId,
            TenantId = request.TenantId,
            ProfileId = request.ProfileId,
            ExportRunId = request.ExportRunId,
            Kind = request.Kind,
            CreatedAt = DateTimeOffset.UtcNow,
            RootHash = rootHash,
            Metadata = request.Metadata ?? new Dictionary<string, string>(),
            Entries = entries,
            Distribution = request.Distribution
        };

        lock (_lock)
        {
            _bundles[bundleId] = manifest;
        }

        return Task.FromResult(ExportEvidenceSnapshotResult.Succeeded(bundleId, rootHash));
    }

    public Task<bool> UpdateDistributionTranscriptAsync(
        string bundleId,
        string tenantId,
        ExportDistributionInfo distribution,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundleId);
        ArgumentNullException.ThrowIfNull(tenantId);
        ArgumentNullException.ThrowIfNull(distribution);

        lock (_lock)
        {
            if (!TryGetForTenant(bundleId, tenantId, out var existing) || existing is null)
            {
                return Task.FromResult(false);
            }

            _bundles[bundleId] = existing with { Distribution = distribution };
        }

        return Task.FromResult(true);
    }

    public Task<ExportBundleManifest?> GetBundleAsync(
        string bundleId,
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundleId);
        ArgumentNullException.ThrowIfNull(tenantId);

        lock (_lock)
        {
            TryGetForTenant(bundleId, tenantId, out var manifest);
            return Task.FromResult(manifest);
        }
    }

    public Task<bool> VerifyRootHashAsync(
        string bundleId,
        string tenantId,
        string expectedRootHash,
        CancellationToken cancellationToken = default)
    {
        lock (_lock)
        {
            if (!TryGetForTenant(bundleId, tenantId, out var manifest) || manifest is null)
            {
                return Task.FromResult(false);
            }

            return Task.FromResult(
                string.Equals(manifest.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase));
        }
    }

    // Fix: the tenantId parameter was previously ignored, so any tenant could
    // read any bundle. Lookups now require the stored manifest's tenant to match.
    // Caller must hold _lock.
    private bool TryGetForTenant(string bundleId, string tenantId, out ExportBundleManifest? manifest)
    {
        if (_bundles.TryGetValue(bundleId, out var found)
            && string.Equals(found.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
        {
            manifest = found;
            return true;
        }

        manifest = null;
        return false;
    }

    /// <summary>Gets all stored bundles (for testing).</summary>
    public IReadOnlyList<ExportBundleManifest> GetAllBundles()
    {
        lock (_lock)
        {
            return _bundles.Values.ToList();
        }
    }

    /// <summary>Clears all stored bundles (for testing).</summary>
    public void Clear()
    {
        lock (_lock)
        {
            _bundles.Clear();
        }
    }

    /// <summary>Gets the count of stored bundles (for testing).</summary>
    public int Count
    {
        get
        {
            lock (_lock) { return _bundles.Count; }
        }
    }
}
+/// +public sealed class ExportEvidenceLockerClient : IExportEvidenceLockerClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false + }; + + private readonly HttpClient _httpClient; + private readonly IExportMerkleTreeCalculator _merkleCalculator; + private readonly ILogger _logger; + private readonly ExportEvidenceLockerOptions _options; + + public ExportEvidenceLockerClient( + HttpClient httpClient, + IExportMerkleTreeCalculator merkleCalculator, + ILogger logger, + IOptions options) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _merkleCalculator = merkleCalculator ?? throw new ArgumentNullException(nameof(merkleCalculator)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options?.Value ?? ExportEvidenceLockerOptions.Default; + } + + public async Task PushSnapshotAsync( + ExportEvidenceSnapshotRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + if (!_options.Enabled) + { + _logger.LogDebug("Evidence locker integration disabled; skipping snapshot push"); + return ExportEvidenceSnapshotResult.Failed("Evidence locker integration disabled"); + } + + using var activity = ExportTelemetry.ActivitySource.StartActivity("evidence.push_snapshot"); + activity?.SetTag("tenant_id", request.TenantId); + activity?.SetTag("export_run_id", request.ExportRunId); + activity?.SetTag("kind", request.Kind.ToString()); + + try + { + // Build manifest entries for Merkle calculation + var entries = request.Materials.Select(m => new ExportManifestEntry + { + Section = m.Section, + CanonicalPath = $"{m.Section}/{m.Path}", + Sha256 = m.Sha256.ToLowerInvariant(), + SizeBytes = m.SizeBytes, + MediaType = m.MediaType ?? 
"application/octet-stream", + Attributes = m.Attributes + }).ToList(); + + // Pre-calculate Merkle root for verification + var expectedRootHash = _merkleCalculator.CalculateRootHash(entries); + + // Build request payload + var apiRequest = new EvidenceSnapshotApiRequest + { + Kind = MapKindToApi(request.Kind), + Description = request.Description, + Metadata = BuildMetadata(request), + Materials = request.Materials.Select(m => new EvidenceSnapshotMaterialApiDto + { + Section = m.Section, + Path = m.Path, + Sha256 = m.Sha256.ToLowerInvariant(), + SizeBytes = m.SizeBytes, + MediaType = m.MediaType ?? "application/octet-stream", + Attributes = m.Attributes?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value) + }).ToList() + }; + + var response = await _httpClient.PostAsJsonAsync( + $"{_options.BaseUrl}/evidence/snapshot", + apiRequest, + SerializerOptions, + cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var errorBody = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + _logger.LogError( + "Evidence locker snapshot push failed with status {StatusCode}: {Error}", + response.StatusCode, errorBody); + + return ExportEvidenceSnapshotResult.Failed( + $"HTTP {(int)response.StatusCode}: {errorBody}"); + } + + var apiResponse = await response.Content.ReadFromJsonAsync( + SerializerOptions, cancellationToken).ConfigureAwait(false); + + if (apiResponse is null) + { + return ExportEvidenceSnapshotResult.Failed("Empty response from evidence locker"); + } + + // Verify Merkle root matches + if (!string.Equals(apiResponse.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase)) + { + _logger.LogWarning( + "Merkle root mismatch for export {ExportRunId}: expected {Expected}, got {Actual}", + request.ExportRunId, expectedRootHash, apiResponse.RootHash); + } + + _logger.LogInformation( + "Pushed export manifest to evidence locker: bundle={BundleId}, root={RootHash}", + apiResponse.BundleId, 
apiResponse.RootHash); + + ExportTelemetry.ExportArtifactsTotal.Add(1, + new KeyValuePair("artifact_type", "evidence_bundle"), + new KeyValuePair("tenant_id", request.TenantId)); + + return ExportEvidenceSnapshotResult.Succeeded( + apiResponse.BundleId.ToString(), + apiResponse.RootHash, + MapSignatureFromApi(apiResponse.Signature)); + } + catch (HttpRequestException ex) + { + _logger.LogError(ex, "HTTP error pushing export manifest to evidence locker"); + return ExportEvidenceSnapshotResult.Failed($"HTTP error: {ex.Message}"); + } + catch (TaskCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unexpected error pushing export manifest to evidence locker"); + return ExportEvidenceSnapshotResult.Failed($"Unexpected error: {ex.Message}"); + } + } + + public async Task UpdateDistributionTranscriptAsync( + string bundleId, + string tenantId, + ExportDistributionInfo distribution, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundleId); + ArgumentNullException.ThrowIfNull(tenantId); + ArgumentNullException.ThrowIfNull(distribution); + + if (!_options.Enabled) + { + return false; + } + + try + { + var request = new { distribution }; + var response = await _httpClient.PatchAsJsonAsync( + $"{_options.BaseUrl}/evidence/{bundleId}/distribution", + request, + SerializerOptions, + cancellationToken).ConfigureAwait(false); + + return response.IsSuccessStatusCode; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to update distribution transcript for bundle {BundleId}", bundleId); + return false; + } + } + + public async Task GetBundleAsync( + string bundleId, + string tenantId, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundleId); + ArgumentNullException.ThrowIfNull(tenantId); + + if (!_options.Enabled) + { + return null; + } + + try + { + var response = await _httpClient.GetAsync( + 
$"{_options.BaseUrl}/evidence/{bundleId}", + cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + return null; + } + + return await response.Content.ReadFromJsonAsync( + SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to get evidence bundle {BundleId}", bundleId); + return null; + } + } + + public async Task VerifyRootHashAsync( + string bundleId, + string tenantId, + string expectedRootHash, + CancellationToken cancellationToken = default) + { + var bundle = await GetBundleAsync(bundleId, tenantId, cancellationToken).ConfigureAwait(false); + if (bundle?.RootHash is null) + { + return false; + } + + return string.Equals(bundle.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase); + } + + private static int MapKindToApi(ExportBundleKind kind) + { + return kind switch + { + ExportBundleKind.Evidence => 1, + ExportBundleKind.Attestation => 2, + ExportBundleKind.Mirror => 3, + ExportBundleKind.Risk => 3, // Maps to Export=3 in evidence locker + ExportBundleKind.OfflineKit => 3, + _ => 3 + }; + } + + private static Dictionary BuildMetadata(ExportEvidenceSnapshotRequest request) + { + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["export_run_id"] = request.ExportRunId, + ["export_kind"] = request.Kind.ToString().ToLowerInvariant() + }; + + if (!string.IsNullOrWhiteSpace(request.ProfileId)) + { + metadata["profile_id"] = request.ProfileId; + } + + if (request.Metadata is not null) + { + foreach (var (key, value) in request.Metadata) + { + metadata[key] = value; + } + } + + return metadata; + } + + private static ExportDsseSignatureInfo? MapSignatureFromApi(EvidenceSignatureApiDto? 
apiSignature) + { + if (apiSignature is null) + { + return null; + } + + return new ExportDsseSignatureInfo + { + PayloadType = apiSignature.PayloadType, + Payload = apiSignature.Payload, + Signature = apiSignature.Signature, + KeyId = apiSignature.KeyId, + Algorithm = apiSignature.Algorithm, + Provider = apiSignature.Provider, + SignedAt = apiSignature.SignedAt, + TimestampedAt = apiSignature.TimestampedAt, + TimestampAuthority = apiSignature.TimestampAuthority + }; + } + + #region API DTOs + + private sealed record EvidenceSnapshotApiRequest + { + [JsonPropertyName("kind")] + public int Kind { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("metadata")] + public Dictionary? Metadata { get; init; } + + [JsonPropertyName("materials")] + public List? Materials { get; init; } + } + + private sealed record EvidenceSnapshotMaterialApiDto + { + [JsonPropertyName("section")] + public string? Section { get; init; } + + [JsonPropertyName("path")] + public string? Path { get; init; } + + [JsonPropertyName("sha256")] + public required string Sha256 { get; init; } + + [JsonPropertyName("size_bytes")] + public long SizeBytes { get; init; } + + [JsonPropertyName("media_type")] + public string? MediaType { get; init; } + + [JsonPropertyName("attributes")] + public Dictionary? Attributes { get; init; } + } + + private sealed record EvidenceSnapshotApiResponse + { + [JsonPropertyName("bundle_id")] + public Guid BundleId { get; init; } + + [JsonPropertyName("root_hash")] + public required string RootHash { get; init; } + + [JsonPropertyName("signature")] + public EvidenceSignatureApiDto? 
Signature { get; init; } + } + + private sealed record EvidenceSignatureApiDto + { + [JsonPropertyName("payload_type")] + public required string PayloadType { get; init; } + + [JsonPropertyName("payload")] + public required string Payload { get; init; } + + [JsonPropertyName("signature")] + public required string Signature { get; init; } + + [JsonPropertyName("key_id")] + public string? KeyId { get; init; } + + [JsonPropertyName("algorithm")] + public required string Algorithm { get; init; } + + [JsonPropertyName("provider")] + public required string Provider { get; init; } + + [JsonPropertyName("signed_at")] + public DateTimeOffset SignedAt { get; init; } + + [JsonPropertyName("timestamped_at")] + public DateTimeOffset? TimestampedAt { get; init; } + + [JsonPropertyName("timestamp_authority")] + public string? TimestampAuthority { get; init; } + } + + #endregion +} + +/// +/// Configuration options for evidence locker integration. +/// +public sealed class ExportEvidenceLockerOptions +{ + public bool Enabled { get; set; } = true; + public string BaseUrl { get; set; } = "http://evidence-locker:8080"; + public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30); + public int MaxRetries { get; set; } = 3; + + public static ExportEvidenceLockerOptions Default => new(); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportEvidenceModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportEvidenceModels.cs new file mode 100644 index 000000000..55da61939 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportEvidenceModels.cs @@ -0,0 +1,186 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.EvidenceLocker; + +/// +/// Export bundle manifest for evidence locker submission. +/// Aligns with EvidenceLocker bundle-packaging.schema.json. 
+/// +public sealed record ExportBundleManifest +{ + [JsonPropertyName("bundle_id")] + public required string BundleId { get; init; } + + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + [JsonPropertyName("profile_id")] + public string? ProfileId { get; init; } + + [JsonPropertyName("export_run_id")] + public required string ExportRunId { get; init; } + + [JsonPropertyName("kind")] + public required ExportBundleKind Kind { get; init; } + + [JsonPropertyName("created_at")] + public required DateTimeOffset CreatedAt { get; init; } + + [JsonPropertyName("root_hash")] + public string? RootHash { get; init; } + + [JsonPropertyName("metadata")] + public IReadOnlyDictionary Metadata { get; init; } = new Dictionary(); + + [JsonPropertyName("entries")] + public IReadOnlyList Entries { get; init; } = []; + + [JsonPropertyName("distribution")] + public ExportDistributionInfo? Distribution { get; init; } +} + +/// +/// Export bundle kind for evidence locker categorization. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum ExportBundleKind +{ + Evidence = 1, + Attestation = 2, + Mirror = 3, + Risk = 4, + OfflineKit = 5 +} + +/// +/// Entry in export manifest representing a single artifact. +/// +public sealed record ExportManifestEntry +{ + [JsonPropertyName("section")] + public required string Section { get; init; } + + [JsonPropertyName("canonical_path")] + public required string CanonicalPath { get; init; } + + [JsonPropertyName("sha256")] + public required string Sha256 { get; init; } + + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } + + [JsonPropertyName("media_type")] + public required string MediaType { get; init; } + + [JsonPropertyName("attributes")] + public IReadOnlyDictionary? Attributes { get; init; } +} + +/// +/// Distribution information for export transcript. 
+/// +public sealed record ExportDistributionInfo +{ + [JsonPropertyName("type")] + public required string Type { get; init; } + + [JsonPropertyName("target_uri")] + public string? TargetUri { get; init; } + + [JsonPropertyName("distributed_at")] + public DateTimeOffset? DistributedAt { get; init; } + + [JsonPropertyName("checksum")] + public string? Checksum { get; init; } + + [JsonPropertyName("size_bytes")] + public long? SizeBytes { get; init; } +} + +/// +/// Request to push export manifest to evidence locker. +/// +public sealed record ExportEvidenceSnapshotRequest +{ + public required string TenantId { get; init; } + public required string ExportRunId { get; init; } + public string? ProfileId { get; init; } + public required ExportBundleKind Kind { get; init; } + public string? Description { get; init; } + public IReadOnlyDictionary? Metadata { get; init; } + public required IReadOnlyList Materials { get; init; } + public ExportDistributionInfo? Distribution { get; init; } +} + +/// +/// Material input for evidence snapshot. +/// +public sealed record ExportMaterialInput +{ + public required string Section { get; init; } + public required string Path { get; init; } + public required string Sha256 { get; init; } + public required long SizeBytes { get; init; } + public string? MediaType { get; init; } + public IReadOnlyDictionary? Attributes { get; init; } +} + +/// +/// Response from evidence locker after snapshot creation. +/// +public sealed record ExportEvidenceSnapshotResult +{ + public bool Success { get; init; } + public string? BundleId { get; init; } + public string? RootHash { get; init; } + public ExportDsseSignatureInfo? Signature { get; init; } + public string? ErrorMessage { get; init; } + + public static ExportEvidenceSnapshotResult Succeeded( + string bundleId, + string rootHash, + ExportDsseSignatureInfo? 
signature = null) => + new() + { + Success = true, + BundleId = bundleId, + RootHash = rootHash, + Signature = signature + }; + + public static ExportEvidenceSnapshotResult Failed(string errorMessage) => + new() { Success = false, ErrorMessage = errorMessage }; +} + +/// +/// DSSE signature information from evidence locker. +/// +public sealed record ExportDsseSignatureInfo +{ + [JsonPropertyName("payload_type")] + public required string PayloadType { get; init; } + + [JsonPropertyName("payload")] + public required string Payload { get; init; } + + [JsonPropertyName("signature")] + public required string Signature { get; init; } + + [JsonPropertyName("key_id")] + public string? KeyId { get; init; } + + [JsonPropertyName("algorithm")] + public required string Algorithm { get; init; } + + [JsonPropertyName("provider")] + public required string Provider { get; init; } + + [JsonPropertyName("signed_at")] + public required DateTimeOffset SignedAt { get; init; } + + [JsonPropertyName("timestamped_at")] + public DateTimeOffset? TimestampedAt { get; init; } + + [JsonPropertyName("timestamp_authority")] + public string? TimestampAuthority { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportMerkleTreeCalculator.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportMerkleTreeCalculator.cs new file mode 100644 index 000000000..91c892674 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/ExportMerkleTreeCalculator.cs @@ -0,0 +1,93 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.ExportCenter.WebService.EvidenceLocker; + +/// +/// Calculates Merkle root hash for export manifest entries. +/// Aligns with EvidenceLocker's MerkleTreeCalculator implementation. 
+/// +public interface IExportMerkleTreeCalculator +{ + /// + /// Calculates the Merkle root hash from manifest entries. + /// + /// The manifest entries with canonical paths and hashes. + /// The hex-encoded Merkle root hash. + string CalculateRootHash(IEnumerable entries); + + /// + /// Calculates the Merkle root hash from canonical leaf values. + /// + /// Leaf values in format "canonicalPath|sha256". + /// The hex-encoded Merkle root hash. + string CalculateRootHash(IEnumerable canonicalLeafValues); +} + +/// +/// Default implementation of Merkle tree calculator for export manifests. +/// Uses SHA-256 and follows EvidenceLocker's deterministic tree construction. +/// +public sealed class ExportMerkleTreeCalculator : IExportMerkleTreeCalculator +{ + private const string EmptyTreeMarker = "stellaops:evidence:empty"; + + public string CalculateRootHash(IEnumerable entries) + { + ArgumentNullException.ThrowIfNull(entries); + + var canonicalLeaves = entries + .OrderBy(e => e.CanonicalPath, StringComparer.Ordinal) + .Select(e => $"{e.CanonicalPath}|{e.Sha256.ToLowerInvariant()}"); + + return CalculateRootHash(canonicalLeaves); + } + + public string CalculateRootHash(IEnumerable canonicalLeafValues) + { + ArgumentNullException.ThrowIfNull(canonicalLeafValues); + + var leaves = canonicalLeafValues + .Select(HashString) + .ToArray(); + + // Special case: empty tree + if (leaves.Length == 0) + { + return HashString(EmptyTreeMarker); + } + + return BuildTree(leaves); + } + + private static string BuildTree(IReadOnlyList currentLevel) + { + if (currentLevel.Count == 1) + { + return currentLevel[0]; // Root node + } + + var nextLevel = new List((currentLevel.Count + 1) / 2); + + for (var i = 0; i < currentLevel.Count; i += 2) + { + var left = currentLevel[i]; + var right = i + 1 < currentLevel.Count ? currentLevel[i + 1] : left; + + // Sort siblings canonically before combining (deterministic ordering) + var combined = string.CompareOrdinal(left, right) <= 0 + ? 
$"{left}|{right}" + : $"{right}|{left}"; + + nextLevel.Add(HashString(combined)); + } + + return BuildTree(nextLevel); + } + + private static string HashString(string input) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input)); + return Convert.ToHexStringLower(bytes); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/IExportEvidenceLockerClient.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/IExportEvidenceLockerClient.cs new file mode 100644 index 000000000..8dbd4011a --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/EvidenceLocker/IExportEvidenceLockerClient.cs @@ -0,0 +1,58 @@ +namespace StellaOps.ExportCenter.WebService.EvidenceLocker; + +/// +/// Client interface for pushing export manifests and transcripts to the evidence locker. +/// +public interface IExportEvidenceLockerClient +{ + /// + /// Pushes an export manifest snapshot to the evidence locker. + /// Creates a new evidence bundle with the specified materials. + /// + /// The snapshot request containing materials and metadata. + /// Cancellation token. + /// Result containing bundle ID, root hash, and optional DSSE signature. + Task PushSnapshotAsync( + ExportEvidenceSnapshotRequest request, + CancellationToken cancellationToken = default); + + /// + /// Updates an existing evidence bundle with distribution transcript information. + /// + /// The evidence bundle ID. + /// The tenant ID. + /// Distribution information to record. + /// Cancellation token. + /// True if update succeeded. + Task UpdateDistributionTranscriptAsync( + string bundleId, + string tenantId, + ExportDistributionInfo distribution, + CancellationToken cancellationToken = default); + + /// + /// Gets the evidence bundle details including signature. + /// + /// The evidence bundle ID. + /// The tenant ID. + /// Cancellation token. 
+ /// The bundle manifest if found, null otherwise. + Task GetBundleAsync( + string bundleId, + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Verifies that a bundle's Merkle root matches expected value. + /// + /// The evidence bundle ID. + /// The tenant ID. + /// The expected Merkle root hash. + /// Cancellation token. + /// True if root hash matches. + Task VerifyRootHashAsync( + string bundleId, + string tenantId, + string expectedRootHash, + CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentEvents.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentEvents.cs new file mode 100644 index 000000000..7541efca4 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentEvents.cs @@ -0,0 +1,167 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Event types for export incidents. +/// +public static class ExportIncidentEventTypes +{ + /// + /// Incident activated event. + /// + public const string IncidentActivated = "export.incident.activated"; + + /// + /// Incident updated event. + /// + public const string IncidentUpdated = "export.incident.updated"; + + /// + /// Incident escalated event. + /// + public const string IncidentEscalated = "export.incident.escalated"; + + /// + /// Incident de-escalated event. + /// + public const string IncidentDeescalated = "export.incident.deescalated"; + + /// + /// Incident resolved event. + /// + public const string IncidentResolved = "export.incident.resolved"; +} + +/// +/// Base class for incident events. 
+/// +public abstract record ExportIncidentEventBase +{ + [JsonPropertyName("event_type")] + public abstract string EventType { get; } + + [JsonPropertyName("incident_id")] + public required string IncidentId { get; init; } + + [JsonPropertyName("type")] + public required ExportIncidentType Type { get; init; } + + [JsonPropertyName("severity")] + public required ExportIncidentSeverity Severity { get; init; } + + [JsonPropertyName("status")] + public required ExportIncidentStatus Status { get; init; } + + [JsonPropertyName("summary")] + public required string Summary { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("affected_tenants")] + public IReadOnlyList? AffectedTenants { get; init; } + + [JsonPropertyName("affected_profiles")] + public IReadOnlyList? AffectedProfiles { get; init; } + + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } +} + +/// +/// Event emitted when an incident is activated. +/// +public sealed record ExportIncidentActivatedEvent : ExportIncidentEventBase +{ + public override string EventType => ExportIncidentEventTypes.IncidentActivated; + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("activated_by")] + public string? ActivatedBy { get; init; } + + [JsonPropertyName("metadata")] + public IReadOnlyDictionary? Metadata { get; init; } +} + +/// +/// Event emitted when an incident is updated. +/// +public sealed record ExportIncidentUpdatedEvent : ExportIncidentEventBase +{ + public override string EventType => ExportIncidentEventTypes.IncidentUpdated; + + [JsonPropertyName("previous_status")] + public ExportIncidentStatus? PreviousStatus { get; init; } + + [JsonPropertyName("previous_severity")] + public ExportIncidentSeverity? 
PreviousSeverity { get; init; } + + [JsonPropertyName("update_message")] + public required string UpdateMessage { get; init; } + + [JsonPropertyName("updated_by")] + public string? UpdatedBy { get; init; } +} + +/// +/// Event emitted when an incident is escalated. +/// +public sealed record ExportIncidentEscalatedEvent : ExportIncidentEventBase +{ + public override string EventType => ExportIncidentEventTypes.IncidentEscalated; + + [JsonPropertyName("previous_severity")] + public required ExportIncidentSeverity PreviousSeverity { get; init; } + + [JsonPropertyName("escalation_reason")] + public required string EscalationReason { get; init; } + + [JsonPropertyName("escalated_by")] + public string? EscalatedBy { get; init; } +} + +/// +/// Event emitted when an incident is de-escalated. +/// +public sealed record ExportIncidentDeescalatedEvent : ExportIncidentEventBase +{ + public override string EventType => ExportIncidentEventTypes.IncidentDeescalated; + + [JsonPropertyName("previous_severity")] + public required ExportIncidentSeverity PreviousSeverity { get; init; } + + [JsonPropertyName("deescalation_reason")] + public required string DeescalationReason { get; init; } + + [JsonPropertyName("deescalated_by")] + public string? DeescalatedBy { get; init; } +} + +/// +/// Event emitted when an incident is resolved. +/// +public sealed record ExportIncidentResolvedEvent : ExportIncidentEventBase +{ + public override string EventType => ExportIncidentEventTypes.IncidentResolved; + + [JsonPropertyName("resolution_message")] + public required string ResolutionMessage { get; init; } + + [JsonPropertyName("is_false_positive")] + public bool IsFalsePositive { get; init; } + + [JsonPropertyName("resolved_by")] + public string? 
ResolvedBy { get; init; } + + [JsonPropertyName("activated_at")] + public required DateTimeOffset ActivatedAt { get; init; } + + [JsonPropertyName("duration_seconds")] + public double DurationSeconds { get; init; } + + [JsonPropertyName("post_incident_notes")] + public string? PostIncidentNotes { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentManager.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentManager.cs new file mode 100644 index 000000000..34379b1ec --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentManager.cs @@ -0,0 +1,535 @@ +using System.Collections.Concurrent; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using StellaOps.ExportCenter.WebService.Telemetry; +using StellaOps.ExportCenter.WebService.Timeline; + +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Manages export incidents and emits events to timeline and notifier. 
+/// +public sealed class ExportIncidentManager : IExportIncidentManager +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) } + }; + + private readonly ILogger _logger; + private readonly IExportTimelinePublisher _timelinePublisher; + private readonly IExportNotificationEmitter _notificationEmitter; + private readonly TimeProvider _timeProvider; + + // In-memory store for incidents (production would use persistent storage) + private readonly ConcurrentDictionary _incidents = new(); + + public ExportIncidentManager( + ILogger logger, + IExportTimelinePublisher timelinePublisher, + IExportNotificationEmitter notificationEmitter, + TimeProvider? timeProvider = null) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timelinePublisher = timelinePublisher ?? throw new ArgumentNullException(nameof(timelinePublisher)); + _notificationEmitter = notificationEmitter ?? throw new ArgumentNullException(nameof(notificationEmitter)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + } + + public async Task ActivateIncidentAsync( + ExportIncidentActivationRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + try + { + var now = _timeProvider.GetUtcNow(); + var incidentId = GenerateIncidentId(); + + var incident = new ExportIncident + { + IncidentId = incidentId, + Type = request.Type, + Severity = request.Severity, + Status = ExportIncidentStatus.Active, + Summary = request.Summary, + Description = request.Description, + AffectedTenants = request.AffectedTenants, + AffectedProfiles = request.AffectedProfiles, + ActivatedAt = now, + LastUpdatedAt = now, + ActivatedBy = request.ActivatedBy, + CorrelationId = request.CorrelationId, + Metadata = request.Metadata, + Updates = new List + { + new() + { + UpdateId = GenerateUpdateId(), + Timestamp = now, + NewStatus = ExportIncidentStatus.Active, + Message = $"Incident activated: {request.Summary}" + } + } + }; + + if (!_incidents.TryAdd(incidentId, incident)) + { + return ExportIncidentResult.Failed("Failed to store incident"); + } + + // Emit timeline event + var timelineEvent = new ExportIncidentActivatedEvent + { + IncidentId = incidentId, + Type = request.Type, + Severity = request.Severity, + Status = ExportIncidentStatus.Active, + Summary = request.Summary, + Description = request.Description, + Timestamp = now, + AffectedTenants = request.AffectedTenants, + AffectedProfiles = request.AffectedProfiles, + ActivatedBy = request.ActivatedBy, + CorrelationId = request.CorrelationId, + Metadata = request.Metadata + }; + + await PublishTimelineEventAsync(timelineEvent, cancellationToken); + + // Emit notification + await _notificationEmitter.EmitIncidentActivatedAsync(incident, cancellationToken); + + // Record metric + ExportTelemetry.IncidentsActivatedTotal.Add(1, + new("severity", request.Severity.ToString().ToLowerInvariant()), + new("type", request.Type.ToString().ToLowerInvariant())); + + _logger.LogWarning( 
+ "Export incident activated: {IncidentId} [{Type}] [{Severity}] - {Summary}", + incidentId, request.Type, request.Severity, request.Summary); + + return ExportIncidentResult.Succeeded(incident); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to activate incident"); + return ExportIncidentResult.Failed($"Activation failed: {ex.Message}"); + } + } + + public async Task UpdateIncidentAsync( + string incidentId, + ExportIncidentUpdateRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + if (!_incidents.TryGetValue(incidentId, out var existingIncident)) + { + return ExportIncidentResult.Failed("Incident not found"); + } + + if (existingIncident.Status is ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive) + { + return ExportIncidentResult.Failed("Cannot update resolved incident"); + } + + try + { + var now = _timeProvider.GetUtcNow(); + var previousStatus = existingIncident.Status; + var previousSeverity = existingIncident.Severity; + + var newStatus = request.Status ?? existingIncident.Status; + var newSeverity = request.Severity ?? existingIncident.Severity; + + var update = new ExportIncidentUpdate + { + UpdateId = GenerateUpdateId(), + Timestamp = now, + PreviousStatus = previousStatus != newStatus ? previousStatus : null, + NewStatus = newStatus, + PreviousSeverity = previousSeverity != newSeverity ? previousSeverity : null, + NewSeverity = previousSeverity != newSeverity ? newSeverity : null, + Message = request.Message, + UpdatedBy = request.UpdatedBy + }; + + var updatedIncident = existingIncident with + { + Status = newStatus, + Severity = newSeverity, + LastUpdatedAt = now, + Updates = [.. 
existingIncident.Updates, update] + }; + + if (!_incidents.TryUpdate(incidentId, updatedIncident, existingIncident)) + { + return ExportIncidentResult.Failed("Concurrent update conflict"); + } + + // Determine event type based on severity change + ExportIncidentEventBase timelineEvent; + if (newSeverity > previousSeverity) + { + timelineEvent = new ExportIncidentEscalatedEvent + { + IncidentId = incidentId, + Type = updatedIncident.Type, + Severity = newSeverity, + Status = newStatus, + Summary = updatedIncident.Summary, + Timestamp = now, + AffectedTenants = updatedIncident.AffectedTenants, + AffectedProfiles = updatedIncident.AffectedProfiles, + CorrelationId = updatedIncident.CorrelationId, + PreviousSeverity = previousSeverity, + EscalationReason = request.Message, + EscalatedBy = request.UpdatedBy + }; + + ExportTelemetry.IncidentsEscalatedTotal.Add(1, + new("from_severity", previousSeverity.ToString().ToLowerInvariant()), + new("to_severity", newSeverity.ToString().ToLowerInvariant())); + } + else if (newSeverity < previousSeverity) + { + timelineEvent = new ExportIncidentDeescalatedEvent + { + IncidentId = incidentId, + Type = updatedIncident.Type, + Severity = newSeverity, + Status = newStatus, + Summary = updatedIncident.Summary, + Timestamp = now, + AffectedTenants = updatedIncident.AffectedTenants, + AffectedProfiles = updatedIncident.AffectedProfiles, + CorrelationId = updatedIncident.CorrelationId, + PreviousSeverity = previousSeverity, + DeescalationReason = request.Message, + DeescalatedBy = request.UpdatedBy + }; + + ExportTelemetry.IncidentsDeescalatedTotal.Add(1, + new("from_severity", previousSeverity.ToString().ToLowerInvariant()), + new("to_severity", newSeverity.ToString().ToLowerInvariant())); + } + else + { + timelineEvent = new ExportIncidentUpdatedEvent + { + IncidentId = incidentId, + Type = updatedIncident.Type, + Severity = newSeverity, + Status = newStatus, + Summary = updatedIncident.Summary, + Timestamp = now, + AffectedTenants = 
updatedIncident.AffectedTenants, + AffectedProfiles = updatedIncident.AffectedProfiles, + CorrelationId = updatedIncident.CorrelationId, + PreviousStatus = previousStatus != newStatus ? previousStatus : null, + PreviousSeverity = previousSeverity != newSeverity ? previousSeverity : null, + UpdateMessage = request.Message, + UpdatedBy = request.UpdatedBy + }; + } + + await PublishTimelineEventAsync(timelineEvent, cancellationToken); + await _notificationEmitter.EmitIncidentUpdatedAsync(updatedIncident, request.Message, cancellationToken); + + _logger.LogInformation( + "Export incident updated: {IncidentId} [{Status}] [{Severity}] - {Message}", + incidentId, newStatus, newSeverity, request.Message); + + return ExportIncidentResult.Succeeded(updatedIncident); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to update incident {IncidentId}", incidentId); + return ExportIncidentResult.Failed($"Update failed: {ex.Message}"); + } + } + + public async Task ResolveIncidentAsync( + string incidentId, + ExportIncidentResolutionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + if (!_incidents.TryGetValue(incidentId, out var existingIncident)) + { + return ExportIncidentResult.Failed("Incident not found"); + } + + if (existingIncident.Status is ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive) + { + return ExportIncidentResult.Failed("Incident already resolved"); + } + + try + { + var now = _timeProvider.GetUtcNow(); + var newStatus = request.IsFalsePositive + ? 
ExportIncidentStatus.FalsePositive + : ExportIncidentStatus.Resolved; + + var update = new ExportIncidentUpdate + { + UpdateId = GenerateUpdateId(), + Timestamp = now, + PreviousStatus = existingIncident.Status, + NewStatus = newStatus, + Message = request.ResolutionMessage, + UpdatedBy = request.ResolvedBy + }; + + var resolvedIncident = existingIncident with + { + Status = newStatus, + LastUpdatedAt = now, + ResolvedAt = now, + ResolvedBy = request.ResolvedBy, + Updates = [.. existingIncident.Updates, update] + }; + + if (!_incidents.TryUpdate(incidentId, resolvedIncident, existingIncident)) + { + return ExportIncidentResult.Failed("Concurrent update conflict"); + } + + var duration = now - existingIncident.ActivatedAt; + + var timelineEvent = new ExportIncidentResolvedEvent + { + IncidentId = incidentId, + Type = resolvedIncident.Type, + Severity = resolvedIncident.Severity, + Status = newStatus, + Summary = resolvedIncident.Summary, + Timestamp = now, + AffectedTenants = resolvedIncident.AffectedTenants, + AffectedProfiles = resolvedIncident.AffectedProfiles, + CorrelationId = resolvedIncident.CorrelationId, + ResolutionMessage = request.ResolutionMessage, + IsFalsePositive = request.IsFalsePositive, + ResolvedBy = request.ResolvedBy, + ActivatedAt = existingIncident.ActivatedAt, + DurationSeconds = duration.TotalSeconds, + PostIncidentNotes = request.PostIncidentNotes + }; + + await PublishTimelineEventAsync(timelineEvent, cancellationToken); + await _notificationEmitter.EmitIncidentResolvedAsync( + resolvedIncident, request.ResolutionMessage, request.IsFalsePositive, cancellationToken); + + // Record metrics + ExportTelemetry.IncidentsResolvedTotal.Add(1, + new("severity", resolvedIncident.Severity.ToString().ToLowerInvariant()), + new("type", resolvedIncident.Type.ToString().ToLowerInvariant()), + new("is_false_positive", request.IsFalsePositive.ToString().ToLowerInvariant())); + + ExportTelemetry.IncidentDurationSeconds.Record(duration.TotalSeconds, + 
new("severity", resolvedIncident.Severity.ToString().ToLowerInvariant()), + new("type", resolvedIncident.Type.ToString().ToLowerInvariant())); + + _logger.LogInformation( + "Export incident resolved: {IncidentId} after {Duration:F1}s - {Message}", + incidentId, duration.TotalSeconds, request.ResolutionMessage); + + return ExportIncidentResult.Succeeded(resolvedIncident); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to resolve incident {IncidentId}", incidentId); + return ExportIncidentResult.Failed($"Resolution failed: {ex.Message}"); + } + } + + public Task GetIncidentModeStatusAsync( + CancellationToken cancellationToken = default) + { + var activeIncidents = _incidents.Values + .Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)) + .OrderByDescending(i => i.Severity) + .ThenByDescending(i => i.ActivatedAt) + .ToList(); + + var status = new ExportIncidentModeStatus + { + IncidentModeActive = activeIncidents.Count > 0, + ActiveIncidents = activeIncidents, + HighestSeverity = activeIncidents.Count > 0 + ? 
activeIncidents.Max(i => i.Severity) + : null, + AsOf = _timeProvider.GetUtcNow() + }; + + return Task.FromResult(status); + } + + public Task> GetActiveIncidentsAsync( + CancellationToken cancellationToken = default) + { + var activeIncidents = _incidents.Values + .Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)) + .OrderByDescending(i => i.Severity) + .ThenByDescending(i => i.ActivatedAt) + .ToList(); + + return Task.FromResult>(activeIncidents); + } + + public Task GetIncidentAsync( + string incidentId, + CancellationToken cancellationToken = default) + { + _incidents.TryGetValue(incidentId, out var incident); + return Task.FromResult(incident); + } + + public Task> GetRecentIncidentsAsync( + int limit = 50, + bool includeResolved = true, + CancellationToken cancellationToken = default) + { + var query = _incidents.Values.AsEnumerable(); + + if (!includeResolved) + { + query = query.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)); + } + + var incidents = query + .OrderByDescending(i => i.LastUpdatedAt) + .Take(limit) + .ToList(); + + return Task.FromResult>(incidents); + } + + public Task IsIncidentModeActiveAsync( + CancellationToken cancellationToken = default) + { + var isActive = _incidents.Values + .Any(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)); + + return Task.FromResult(isActive); + } + + public Task GetHighestActiveSeverityAsync( + CancellationToken cancellationToken = default) + { + var activeIncidents = _incidents.Values + .Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)) + .ToList(); + + var highestSeverity = activeIncidents.Count > 0 + ? 
activeIncidents.Max(i => i.Severity) + : (ExportIncidentSeverity?)null; + + return Task.FromResult(highestSeverity); + } + + private async Task PublishTimelineEventAsync( + ExportIncidentEventBase incidentEvent, + CancellationToken cancellationToken) + { + try + { + var eventJson = JsonSerializer.Serialize(incidentEvent, incidentEvent.GetType(), SerializerOptions); + + // Publish to timeline using the timeline publisher + // Note: This creates a synthetic export started event to leverage existing publisher + await _timelinePublisher.PublishIncidentEventAsync( + incidentEvent.EventType, + incidentEvent.IncidentId, + eventJson, + incidentEvent.CorrelationId, + cancellationToken); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to publish incident timeline event {EventType}", incidentEvent.EventType); + } + } + + private static string GenerateIncidentId() + { + return $"inc-{Guid.NewGuid():N}"[..20]; + } + + private static string GenerateUpdateId() + { + return $"upd-{Guid.NewGuid():N}"[..16]; + } +} + +/// +/// Interface for emitting incident notifications. +/// +public interface IExportNotificationEmitter +{ + Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default); + Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default); + Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default); +} + +/// +/// Default implementation of notification emitter that logs notifications. +/// Production would integrate with actual notification service (Email, Slack, Teams, PagerDuty). +/// +public sealed class LoggingNotificationEmitter : IExportNotificationEmitter +{ + private readonly ILogger _logger; + + public LoggingNotificationEmitter(ILogger logger) + { + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default) + { + _logger.LogWarning( + "NOTIFICATION: Incident Activated [{Severity}] - {Summary}. ID: {IncidentId}", + incident.Severity, incident.Summary, incident.IncidentId); + + ExportTelemetry.NotificationsEmittedTotal.Add(1, + new("type", "incident_activated"), + new("severity", incident.Severity.ToString().ToLowerInvariant())); + + return Task.CompletedTask; + } + + public Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default) + { + _logger.LogInformation( + "NOTIFICATION: Incident Updated [{Severity}] - {Message}. ID: {IncidentId}", + incident.Severity, updateMessage, incident.IncidentId); + + ExportTelemetry.NotificationsEmittedTotal.Add(1, + new("type", "incident_updated"), + new("severity", incident.Severity.ToString().ToLowerInvariant())); + + return Task.CompletedTask; + } + + public Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default) + { + _logger.LogInformation( + "NOTIFICATION: Incident Resolved [{Status}] - {Message}. 
ID: {IncidentId}, FalsePositive: {IsFalsePositive}", + incident.Status, resolutionMessage, incident.IncidentId, isFalsePositive); + + ExportTelemetry.NotificationsEmittedTotal.Add(1, + new("type", "incident_resolved"), + new("is_false_positive", isFalsePositive.ToString().ToLowerInvariant())); + + return Task.CompletedTask; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentModels.cs new file mode 100644 index 000000000..ebf1d233c --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/ExportIncidentModels.cs @@ -0,0 +1,332 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Export incident severity levels. +/// +public enum ExportIncidentSeverity +{ + /// + /// Informational - system operating normally. + /// + Info = 0, + + /// + /// Warning - potential issues detected, exports may be delayed. + /// + Warning = 1, + + /// + /// Error - export failures occurring, some exports unavailable. + /// + Error = 2, + + /// + /// Critical - widespread export failures, service degraded. + /// + Critical = 3, + + /// + /// Emergency - complete export service outage. + /// + Emergency = 4 +} + +/// +/// Export incident status. +/// +public enum ExportIncidentStatus +{ + /// + /// Incident is active. + /// + Active = 1, + + /// + /// Incident is being investigated. + /// + Investigating = 2, + + /// + /// Incident is being mitigated. + /// + Mitigating = 3, + + /// + /// Incident has been resolved. + /// + Resolved = 4, + + /// + /// Incident was a false positive. + /// + FalsePositive = 5 +} + +/// +/// Export incident type. +/// +public enum ExportIncidentType +{ + /// + /// Export job failures. + /// + ExportFailure = 1, + + /// + /// Export latency degradation. 
+ /// + LatencyDegradation = 2, + + /// + /// Storage capacity issues. + /// + StorageCapacity = 3, + + /// + /// External dependency failure (e.g., EvidenceLocker, signing service). + /// + DependencyFailure = 4, + + /// + /// Data integrity issue detected. + /// + IntegrityIssue = 5, + + /// + /// Security incident. + /// + SecurityIncident = 6, + + /// + /// Configuration error. + /// + ConfigurationError = 7, + + /// + /// Rate limiting or throttling activated. + /// + RateLimiting = 8 +} + +/// +/// Request to activate incident mode. +/// +public sealed record ExportIncidentActivationRequest +{ + /// + /// Incident type. + /// + public required ExportIncidentType Type { get; init; } + + /// + /// Incident severity. + /// + public required ExportIncidentSeverity Severity { get; init; } + + /// + /// Human-readable summary of the incident. + /// + public required string Summary { get; init; } + + /// + /// Detailed description of the incident. + /// + public string? Description { get; init; } + + /// + /// Affected tenant IDs (null/empty means all tenants). + /// + public IReadOnlyList? AffectedTenants { get; init; } + + /// + /// Affected export profile IDs. + /// + public IReadOnlyList? AffectedProfiles { get; init; } + + /// + /// Optional correlation ID for tracing. + /// + public string? CorrelationId { get; init; } + + /// + /// Operator or system that activated the incident. + /// + public string? ActivatedBy { get; init; } + + /// + /// Additional context/metadata. + /// + public IReadOnlyDictionary? Metadata { get; init; } +} + +/// +/// Active export incident. 
+/// +public sealed record ExportIncident +{ + [JsonPropertyName("incident_id")] + public required string IncidentId { get; init; } + + [JsonPropertyName("type")] + public required ExportIncidentType Type { get; init; } + + [JsonPropertyName("severity")] + public required ExportIncidentSeverity Severity { get; init; } + + [JsonPropertyName("status")] + public required ExportIncidentStatus Status { get; init; } + + [JsonPropertyName("summary")] + public required string Summary { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("affected_tenants")] + public IReadOnlyList? AffectedTenants { get; init; } + + [JsonPropertyName("affected_profiles")] + public IReadOnlyList? AffectedProfiles { get; init; } + + [JsonPropertyName("activated_at")] + public required DateTimeOffset ActivatedAt { get; init; } + + [JsonPropertyName("last_updated_at")] + public required DateTimeOffset LastUpdatedAt { get; init; } + + [JsonPropertyName("resolved_at")] + public DateTimeOffset? ResolvedAt { get; init; } + + [JsonPropertyName("activated_by")] + public string? ActivatedBy { get; init; } + + [JsonPropertyName("resolved_by")] + public string? ResolvedBy { get; init; } + + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } + + [JsonPropertyName("metadata")] + public IReadOnlyDictionary? Metadata { get; init; } + + [JsonPropertyName("updates")] + public IReadOnlyList Updates { get; init; } = []; +} + +/// +/// Update to an incident. +/// +public sealed record ExportIncidentUpdate +{ + [JsonPropertyName("update_id")] + public required string UpdateId { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("previous_status")] + public ExportIncidentStatus? 
PreviousStatus { get; init; } + + [JsonPropertyName("new_status")] + public required ExportIncidentStatus NewStatus { get; init; } + + [JsonPropertyName("previous_severity")] + public ExportIncidentSeverity? PreviousSeverity { get; init; } + + [JsonPropertyName("new_severity")] + public ExportIncidentSeverity? NewSeverity { get; init; } + + [JsonPropertyName("message")] + public required string Message { get; init; } + + [JsonPropertyName("updated_by")] + public string? UpdatedBy { get; init; } +} + +/// +/// Request to resolve an incident. +/// +public sealed record ExportIncidentResolutionRequest +{ + /// + /// Resolution message/notes. + /// + public required string ResolutionMessage { get; init; } + + /// + /// Whether this was a false positive. + /// + public bool IsFalsePositive { get; init; } + + /// + /// Operator or system resolving the incident. + /// + public string? ResolvedBy { get; init; } + + /// + /// Post-incident review notes. + /// + public string? PostIncidentNotes { get; init; } +} + +/// +/// Request to update an incident. +/// +public sealed record ExportIncidentUpdateRequest +{ + /// + /// New status (optional). + /// + public ExportIncidentStatus? Status { get; init; } + + /// + /// New severity (optional, for escalation/de-escalation). + /// + public ExportIncidentSeverity? Severity { get; init; } + + /// + /// Update message. + /// + public required string Message { get; init; } + + /// + /// Operator or system making the update. + /// + public string? UpdatedBy { get; init; } +} + +/// +/// Result of incident operations. +/// +public sealed record ExportIncidentResult +{ + public bool Success { get; init; } + public string? ErrorMessage { get; init; } + public ExportIncident? 
Incident { get; init; } + + public static ExportIncidentResult Succeeded(ExportIncident incident) => + new() { Success = true, Incident = incident }; + + public static ExportIncidentResult Failed(string errorMessage) => + new() { Success = false, ErrorMessage = errorMessage }; +} + +/// +/// Incident mode status response. +/// +public sealed record ExportIncidentModeStatus +{ + [JsonPropertyName("incident_mode_active")] + public bool IncidentModeActive { get; init; } + + [JsonPropertyName("active_incidents")] + public IReadOnlyList ActiveIncidents { get; init; } = []; + + [JsonPropertyName("highest_severity")] + public ExportIncidentSeverity? HighestSeverity { get; init; } + + [JsonPropertyName("as_of")] + public required DateTimeOffset AsOf { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IExportIncidentManager.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IExportIncidentManager.cs new file mode 100644 index 000000000..91ab72744 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IExportIncidentManager.cs @@ -0,0 +1,98 @@ +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Interface for managing export incidents and emitting events to timeline + notifier. +/// +public interface IExportIncidentManager +{ + /// + /// Activates incident mode with the specified parameters. + /// Emits incident activation events to timeline and notifier. + /// + /// The activation request. + /// Cancellation token. + /// The result of the activation. + Task ActivateIncidentAsync( + ExportIncidentActivationRequest request, + CancellationToken cancellationToken = default); + + /// + /// Updates an existing incident. + /// Emits incident update events to timeline and notifier. + /// + /// The incident identifier. + /// The update request. + /// Cancellation token. + /// The result of the update. 
+ Task UpdateIncidentAsync( + string incidentId, + ExportIncidentUpdateRequest request, + CancellationToken cancellationToken = default); + + /// + /// Resolves an incident. + /// Emits incident resolution events to timeline and notifier. + /// + /// The incident identifier. + /// The resolution request. + /// Cancellation token. + /// The result of the resolution. + Task ResolveIncidentAsync( + string incidentId, + ExportIncidentResolutionRequest request, + CancellationToken cancellationToken = default); + + /// + /// Gets the current incident mode status. + /// + /// Cancellation token. + /// The current incident mode status. + Task GetIncidentModeStatusAsync( + CancellationToken cancellationToken = default); + + /// + /// Gets all active incidents. + /// + /// Cancellation token. + /// List of active incidents. + Task> GetActiveIncidentsAsync( + CancellationToken cancellationToken = default); + + /// + /// Gets an incident by ID. + /// + /// The incident identifier. + /// Cancellation token. + /// The incident if found, null otherwise. + Task GetIncidentAsync( + string incidentId, + CancellationToken cancellationToken = default); + + /// + /// Gets recent incidents (active and recently resolved). + /// + /// Maximum number of incidents to return. + /// Whether to include resolved incidents. + /// Cancellation token. + /// List of incidents. + Task> GetRecentIncidentsAsync( + int limit = 50, + bool includeResolved = true, + CancellationToken cancellationToken = default); + + /// + /// Checks if incident mode is currently active. + /// + /// Cancellation token. + /// True if incident mode is active. + Task IsIncidentModeActiveAsync( + CancellationToken cancellationToken = default); + + /// + /// Gets the highest active incident severity. + /// + /// Cancellation token. + /// The highest severity, or null if no active incidents. 
+ Task GetHighestActiveSeverityAsync( + CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentEndpoints.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentEndpoints.cs new file mode 100644 index 000000000..71450af5c --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentEndpoints.cs @@ -0,0 +1,215 @@ +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Auth.ServerIntegration; + +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Extension methods for mapping incident management endpoints. +/// +public static class IncidentEndpoints +{ + /// + /// Maps incident management endpoints to the application. + /// + public static WebApplication MapIncidentEndpoints(this WebApplication app) + { + var group = app.MapGroup("/v1/incidents") + .WithTags("Incident Management") + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator); + + // GET /v1/incidents/status - Get incident mode status + group.MapGet("/status", GetIncidentModeStatusAsync) + .WithName("GetIncidentModeStatus") + .WithSummary("Get incident mode status") + .WithDescription("Returns the current incident mode status including all active incidents.") + .Produces(StatusCodes.Status200OK); + + // GET /v1/incidents - Get all active incidents + group.MapGet("", GetActiveIncidentsAsync) + .WithName("GetActiveIncidents") + .WithSummary("Get active incidents") + .WithDescription("Returns all currently active incidents.") + .Produces>(StatusCodes.Status200OK); + + // GET /v1/incidents/recent - Get recent incidents + group.MapGet("/recent", GetRecentIncidentsAsync) + .WithName("GetRecentIncidents") + .WithSummary("Get recent incidents") + .WithDescription("Returns recent incidents including resolved ones.") + .Produces>(StatusCodes.Status200OK); + + // 
GET /v1/incidents/{id} - Get incident by ID + group.MapGet("/{id}", GetIncidentAsync) + .WithName("GetIncident") + .WithSummary("Get incident by ID") + .WithDescription("Returns the specified incident.") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound); + + // POST /v1/incidents - Activate a new incident + group.MapPost("", ActivateIncidentAsync) + .WithName("ActivateIncident") + .WithSummary("Activate a new incident") + .WithDescription("Activates a new incident and emits events to timeline and notifier.") + .Produces(StatusCodes.Status201Created) + .Produces(StatusCodes.Status400BadRequest); + + // PATCH /v1/incidents/{id} - Update an incident + group.MapPatch("/{id}", UpdateIncidentAsync) + .WithName("UpdateIncident") + .WithSummary("Update an incident") + .WithDescription("Updates an existing incident status or severity.") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status400BadRequest); + + // POST /v1/incidents/{id}/resolve - Resolve an incident + group.MapPost("/{id}/resolve", ResolveIncidentAsync) + .WithName("ResolveIncident") + .WithSummary("Resolve an incident") + .WithDescription("Resolves an incident and emits resolution event.") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status400BadRequest); + + return app; + } + + private static async Task> GetIncidentModeStatusAsync( + [FromServices] IExportIncidentManager incidentManager, + CancellationToken cancellationToken) + { + var status = await incidentManager.GetIncidentModeStatusAsync(cancellationToken); + return TypedResults.Ok(status); + } + + private static async Task>> GetActiveIncidentsAsync( + [FromServices] IExportIncidentManager incidentManager, + CancellationToken cancellationToken) + { + var incidents = await incidentManager.GetActiveIncidentsAsync(cancellationToken); + return TypedResults.Ok(incidents); + } + + private static async Task>> 
GetRecentIncidentsAsync( + [FromQuery] int? limit, + [FromQuery] bool? includeResolved, + [FromServices] IExportIncidentManager incidentManager, + CancellationToken cancellationToken) + { + var incidents = await incidentManager.GetRecentIncidentsAsync( + limit ?? 50, + includeResolved ?? true, + cancellationToken); + return TypedResults.Ok(incidents); + } + + private static async Task, NotFound>> GetIncidentAsync( + string id, + [FromServices] IExportIncidentManager incidentManager, + CancellationToken cancellationToken) + { + var incident = await incidentManager.GetIncidentAsync(id, cancellationToken); + if (incident is null) + { + return TypedResults.NotFound(); + } + return TypedResults.Ok(incident); + } + + private static async Task, BadRequest>> ActivateIncidentAsync( + [FromBody] ExportIncidentActivationRequest request, + [FromServices] IExportIncidentManager incidentManager, + HttpContext httpContext, + CancellationToken cancellationToken) + { + // Add operator info from claims if not specified + var requestWithOperator = request; + if (string.IsNullOrWhiteSpace(request.ActivatedBy)) + { + var operatorClaim = httpContext.User.FindFirst("sub") + ?? httpContext.User.FindFirst("preferred_username"); + if (operatorClaim is not null) + { + requestWithOperator = request with { ActivatedBy = operatorClaim.Value }; + } + } + + var result = await incidentManager.ActivateIncidentAsync(requestWithOperator, cancellationToken); + if (!result.Success) + { + return TypedResults.BadRequest(result.ErrorMessage ?? 
"Activation failed"); + } + + return TypedResults.Created($"/v1/incidents/{result.Incident?.IncidentId}", result); + } + + private static async Task, NotFound, BadRequest>> UpdateIncidentAsync( + string id, + [FromBody] ExportIncidentUpdateRequest request, + [FromServices] IExportIncidentManager incidentManager, + HttpContext httpContext, + CancellationToken cancellationToken) + { + var existingIncident = await incidentManager.GetIncidentAsync(id, cancellationToken); + if (existingIncident is null) + { + return TypedResults.NotFound(); + } + + // Add operator info from claims if not specified + var requestWithOperator = request; + if (string.IsNullOrWhiteSpace(request.UpdatedBy)) + { + var operatorClaim = httpContext.User.FindFirst("sub") + ?? httpContext.User.FindFirst("preferred_username"); + if (operatorClaim is not null) + { + requestWithOperator = request with { UpdatedBy = operatorClaim.Value }; + } + } + + var result = await incidentManager.UpdateIncidentAsync(id, requestWithOperator, cancellationToken); + if (!result.Success) + { + return TypedResults.BadRequest(result.ErrorMessage ?? "Update failed"); + } + + return TypedResults.Ok(result); + } + + private static async Task, NotFound, BadRequest>> ResolveIncidentAsync( + string id, + [FromBody] ExportIncidentResolutionRequest request, + [FromServices] IExportIncidentManager incidentManager, + HttpContext httpContext, + CancellationToken cancellationToken) + { + var existingIncident = await incidentManager.GetIncidentAsync(id, cancellationToken); + if (existingIncident is null) + { + return TypedResults.NotFound(); + } + + // Add operator info from claims if not specified + var requestWithOperator = request; + if (string.IsNullOrWhiteSpace(request.ResolvedBy)) + { + var operatorClaim = httpContext.User.FindFirst("sub") + ?? 
httpContext.User.FindFirst("preferred_username"); + if (operatorClaim is not null) + { + requestWithOperator = request with { ResolvedBy = operatorClaim.Value }; + } + } + + var result = await incidentManager.ResolveIncidentAsync(id, requestWithOperator, cancellationToken); + if (!result.Success) + { + return TypedResults.BadRequest(result.ErrorMessage ?? "Resolution failed"); + } + + return TypedResults.Ok(result); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentServiceCollectionExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentServiceCollectionExtensions.cs new file mode 100644 index 000000000..7475b2610 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Incident/IncidentServiceCollectionExtensions.cs @@ -0,0 +1,31 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace StellaOps.ExportCenter.WebService.Incident; + +/// +/// Extension methods for registering incident management services. +/// +public static class IncidentServiceCollectionExtensions +{ + /// + /// Adds export incident management services to the service collection. + /// + /// The service collection. + /// The service collection for chaining. 
+ public static IServiceCollection AddExportIncidentManagement(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + // Register TimeProvider if not already registered + services.TryAddSingleton(TimeProvider.System); + + // Register notification emitter + services.TryAddSingleton(); + + // Register incident manager + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/OpenApiDiscoveryEndpoints.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/OpenApiDiscoveryEndpoints.cs new file mode 100644 index 000000000..ea08f8f34 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/OpenApiDiscoveryEndpoints.cs @@ -0,0 +1,261 @@ +using System.Reflection; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.AspNetCore.Mvc; + +namespace StellaOps.ExportCenter.WebService; + +/// +/// OpenAPI discovery endpoints for ExportCenter. +/// Per EXPORT-OAS-61-002. +/// +public static class OpenApiDiscoveryEndpoints +{ + private const string OasVersion = "v1"; + private const string ServiceName = "export-center"; + private const string SpecFileName = "export-center.v1.yaml"; + private const string SpecVersion = "3.0.3"; + + private static readonly DateTimeOffset FixedGeneratedAt = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + /// + /// Maps the OpenAPI discovery endpoints. 
+ /// + public static IEndpointRouteBuilder MapOpenApiDiscovery(this IEndpointRouteBuilder app) + { + var group = app.MapGroup("") + .AllowAnonymous() + .WithTags("discovery"); + + group.MapGet("/.well-known/openapi", (Delegate)GetDiscoveryMetadata) + .WithName("GetOpenApiDiscovery") + .WithSummary("OpenAPI discovery metadata") + .WithDescription("Returns service metadata and link to the OpenAPI specification."); + + group.MapGet("/.well-known/openapi.json", (Delegate)GetDiscoveryMetadata) + .WithName("GetOpenApiDiscoveryJson") + .ExcludeFromDescription(); + + group.MapGet("/openapi/export-center.yaml", (Delegate)GetOpenApiSpec) + .WithName("GetOpenApiSpec") + .WithSummary("OpenAPI specification") + .WithDescription("Returns the OpenAPI v3.0.3 specification for ExportCenter."); + + group.MapGet("/openapi/export-center.json", (Delegate)GetOpenApiSpecJson) + .WithName("GetOpenApiSpecJson") + .WithSummary("OpenAPI specification (JSON)") + .WithDescription("Returns the OpenAPI specification as JSON."); + + return app; + } + + private static Task GetDiscoveryMetadata(HttpContext context) + { + var metadata = new OpenApiDiscoveryResponse + { + Service = ServiceName, + Version = GetServiceVersion(), + SpecVersion = SpecVersion, + Format = "application/yaml", + Url = "/openapi/export-center.yaml", + JsonUrl = "/openapi/export-center.json", + ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope", + GeneratedAt = FixedGeneratedAt, + ProfilesSupported = new[] { "attestation", "mirror", "bootstrap", "airgap-evidence" } + }; + + var json = JsonSerializer.Serialize(metadata, JsonOptions); + var etag = ComputeEtag(json); + + // Check If-None-Match + if (context.Request.Headers.TryGetValue("If-None-Match", out var ifNoneMatch) && + ifNoneMatch == etag) + { + context.Response.Headers.ETag = etag; + context.Response.Headers.CacheControl = "public, max-age=300"; + return Task.FromResult(Results.StatusCode(304)); + } + + context.Response.Headers.ETag = etag; + 
context.Response.Headers.CacheControl = "public, max-age=300"; + context.Response.Headers["X-Export-Oas-Version"] = OasVersion; + context.Response.Headers["Last-Modified"] = FixedGeneratedAt.ToString("R"); + + return Task.FromResult(Results.Json(metadata, JsonOptions, contentType: "application/json")); + } + + private static async Task GetOpenApiSpec(HttpContext context) + { + var spec = await GetEmbeddedOpenApiYaml(); + if (spec is null) + { + return Results.NotFound(new { error = new { code = "SPEC_NOT_FOUND", message = "OpenAPI specification not available" } }); + } + + var etag = ComputeEtag(spec); + + // Check If-None-Match + if (context.Request.Headers.TryGetValue("If-None-Match", out var ifNoneMatch) && + ifNoneMatch == etag) + { + context.Response.Headers.ETag = etag; + context.Response.Headers.CacheControl = "public, max-age=300"; + return Results.StatusCode(304); + } + + context.Response.Headers.ETag = etag; + context.Response.Headers.CacheControl = "public, max-age=300"; + context.Response.Headers["X-Export-Oas-Version"] = OasVersion; + context.Response.Headers["Last-Modified"] = FixedGeneratedAt.ToString("R"); + + return Results.Content(spec, "application/yaml", Encoding.UTF8); + } + + private static async Task GetOpenApiSpecJson(HttpContext context) + { + var yamlSpec = await GetEmbeddedOpenApiYaml(); + if (yamlSpec is null) + { + return Results.NotFound(new { error = new { code = "SPEC_NOT_FOUND", message = "OpenAPI specification not available" } }); + } + + // For now, return a redirect to the YAML endpoint with Accept header hint + // Full YAML-to-JSON conversion would require a YAML parser + context.Response.Headers["X-Export-Oas-Version"] = OasVersion; + return Results.Redirect("/openapi/export-center.yaml", permanent: false); + } + + private static string ComputeEtag(string content) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); + return $"\"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}\""; + } + + private static 
string GetServiceVersion() + { + var assembly = Assembly.GetExecutingAssembly(); + var version = assembly.GetCustomAttribute()?.InformationalVersion + ?? assembly.GetName().Version?.ToString() + ?? "1.0.0"; + return version; + } + + private static async Task GetEmbeddedOpenApiYaml() + { + // Try to read from embedded resource or file system + // For now, return a placeholder that references the spec location + var assembly = Assembly.GetExecutingAssembly(); + var resourceName = $"{assembly.GetName().Name}.OpenApi.export-center.v1.yaml"; + + using var stream = assembly.GetManifestResourceStream(resourceName); + if (stream is not null) + { + using var reader = new StreamReader(stream); + return await reader.ReadToEndAsync(); + } + + // Fall back to file system for development + var basePath = AppContext.BaseDirectory; + var possiblePaths = new[] + { + Path.Combine(basePath, "OpenApi", SpecFileName), + Path.Combine(basePath, "..", "..", "..", "OpenApi", SpecFileName), + Path.Combine(basePath, "..", "..", "..", "..", "..", "..", "docs", "modules", "export-center", "openapi", SpecFileName) + }; + + foreach (var path in possiblePaths) + { + if (File.Exists(path)) + { + return await File.ReadAllTextAsync(path); + } + } + + // Return a minimal inline spec if file not found + return GetMinimalOpenApiSpec(); + } + + private static string GetMinimalOpenApiSpec() + { + return """ + openapi: 3.0.3 + info: + title: StellaOps ExportCenter API + version: 1.0.0 + description: Export profiles, runs, and deterministic bundle downloads for air-gap deployments. 
+ servers: + - url: / + paths: + /.well-known/openapi: + get: + summary: OpenAPI discovery + responses: + '200': + description: Discovery metadata + /v1/exports/profiles: + get: + summary: List export profiles + responses: + '200': + description: List of profiles + components: + schemas: + ErrorEnvelope: + type: object + properties: + error: + type: object + properties: + code: + type: string + message: + type: string + """; + } +} + +/// +/// Response model for OpenAPI discovery endpoint. +/// +public sealed record OpenApiDiscoveryResponse +{ + [JsonPropertyName("service")] + public required string Service { get; init; } + + [JsonPropertyName("version")] + public required string Version { get; init; } + + [JsonPropertyName("specVersion")] + public required string SpecVersion { get; init; } + + [JsonPropertyName("format")] + public required string Format { get; init; } + + [JsonPropertyName("url")] + public required string Url { get; init; } + + [JsonPropertyName("jsonUrl")] + public string? JsonUrl { get; init; } + + [JsonPropertyName("errorEnvelopeSchema")] + public required string ErrorEnvelopeSchema { get; init; } + + [JsonPropertyName("generatedAt")] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("profilesSupported")] + public IReadOnlyList? ProfilesSupported { get; init; } + + [JsonPropertyName("checksumSha256")] + public string? 
ChecksumSha256 { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs index dda1972ce..4c7f2a19b 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs @@ -2,6 +2,14 @@ using Microsoft.AspNetCore.Authorization; using StellaOps.Auth.Abstractions; using StellaOps.Auth.ServerIntegration; using StellaOps.AirGap.Policy; +using StellaOps.ExportCenter.WebService; +using StellaOps.ExportCenter.WebService.Deprecation; +using StellaOps.ExportCenter.WebService.Telemetry; +using StellaOps.ExportCenter.WebService.Timeline; +using StellaOps.ExportCenter.WebService.EvidenceLocker; +using StellaOps.ExportCenter.WebService.Attestation; +using StellaOps.ExportCenter.WebService.Incident; +using StellaOps.ExportCenter.WebService.RiskBundle; var builder = WebApplication.CreateBuilder(args); @@ -24,6 +32,41 @@ builder.Services.AddAuthorization(options => builder.Services.AddAirGapEgressPolicy(builder.Configuration, sectionName: "AirGap"); +// Deprecation notification service +builder.Services.AddSingleton(); + +// Telemetry services +builder.Services.AddExportCenterTelemetry(); + +// Timeline event publisher +builder.Services.AddExportTimelinePublisher(); + +// Evidence locker integration (use in-memory for development) +if (builder.Environment.IsDevelopment()) +{ + builder.Services.AddExportEvidenceLockerInMemory(); +} +else +{ + builder.Services.AddExportEvidenceLocker(options => + { + var evidenceLockerUrl = builder.Configuration.GetValue("EvidenceLocker:BaseUrl"); + if (!string.IsNullOrWhiteSpace(evidenceLockerUrl)) + { + options.BaseUrl = evidenceLockerUrl; + } + }); +} + +// Attestation services (DSSE signing) +builder.Services.AddExportAttestation(); + +// Incident management services 
+builder.Services.AddExportIncidentManagement(); + +// Risk bundle job handler +builder.Services.AddRiskBundleJobHandler(); + builder.Services.AddOpenApi(); var app = builder.Build(); @@ -37,13 +80,38 @@ app.UseHttpsRedirection(); app.UseAuthentication(); app.UseAuthorization(); +// OpenAPI discovery endpoints (anonymous) +app.MapOpenApiDiscovery(); + +// Attestation endpoints +app.MapAttestationEndpoints(); + +// Promotion attestation endpoints +app.MapPromotionAttestationEndpoints(); + +// Incident management endpoints +app.MapIncidentEndpoints(); + +// Risk bundle endpoints +app.MapRiskBundleEndpoints(); + +// Legacy exports endpoints (deprecated, use /v1/exports/* instead) app.MapGet("/exports", () => Results.Ok(Array.Empty())) - .RequireAuthorization(StellaOpsResourceServerPolicies.ExportViewer); + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportViewer) + .WithDeprecation(DeprecatedEndpointsRegistry.ListExports) + .WithSummary("List exports (DEPRECATED)") + .WithDescription("This endpoint is deprecated. Use GET /v1/exports/profiles instead."); app.MapPost("/exports", () => Results.Accepted("/exports", new { status = "scheduled" })) - .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator); + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator) + .WithDeprecation(DeprecatedEndpointsRegistry.CreateExport) + .WithSummary("Create export (DEPRECATED)") + .WithDescription("This endpoint is deprecated. Use POST /v1/exports/evidence or /v1/exports/attestations instead."); app.MapDelete("/exports/{id}", (string id) => Results.NoContent()) - .RequireAuthorization(StellaOpsResourceServerPolicies.ExportAdmin); + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportAdmin) + .WithDeprecation(DeprecatedEndpointsRegistry.DeleteExport) + .WithSummary("Delete export (DEPRECATED)") + .WithDescription("This endpoint is deprecated. 
Use POST /v1/exports/runs/{id}/cancel instead."); app.Run(); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/IRiskBundleJobHandler.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/IRiskBundleJobHandler.cs new file mode 100644 index 000000000..4af86f970 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/IRiskBundleJobHandler.cs @@ -0,0 +1,55 @@ +namespace StellaOps.ExportCenter.WebService.RiskBundle; + +/// +/// Interface for handling risk bundle job operations. +/// +public interface IRiskBundleJobHandler +{ + /// + /// Gets available providers for risk bundle generation. + /// + /// Cancellation token. + /// Response containing available providers. + Task GetAvailableProvidersAsync(CancellationToken cancellationToken = default); + + /// + /// Submits a new risk bundle job. + /// + /// The job submission request. + /// The actor submitting the job (from auth claims). + /// Cancellation token. + /// Result of the job submission. + Task SubmitJobAsync( + RiskBundleJobSubmitRequest request, + string? actor, + CancellationToken cancellationToken = default); + + /// + /// Gets the status of a specific job. + /// + /// The job identifier. + /// Cancellation token. + /// Job status details, or null if not found. + Task GetJobStatusAsync(string jobId, CancellationToken cancellationToken = default); + + /// + /// Gets recent jobs, optionally filtered by tenant. + /// + /// Optional tenant ID filter. + /// Maximum number of jobs to return. + /// Cancellation token. + /// List of recent job status details. + Task> GetRecentJobsAsync( + string? tenantId, + int limit = 50, + CancellationToken cancellationToken = default); + + /// + /// Cancels a pending or running job. + /// + /// The job identifier. + /// The actor cancelling the job. + /// Cancellation token. + /// True if cancellation was successful. 
+ Task CancelJobAsync(string jobId, string? actor, CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleEndpoints.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleEndpoints.cs new file mode 100644 index 000000000..1df96778c --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleEndpoints.cs @@ -0,0 +1,142 @@ +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Auth.ServerIntegration; + +namespace StellaOps.ExportCenter.WebService.RiskBundle; + +/// +/// Extension methods for mapping risk bundle endpoints. +/// +public static class RiskBundleEndpoints +{ + /// + /// Maps risk bundle job endpoints to the application. + /// + public static WebApplication MapRiskBundleEndpoints(this WebApplication app) + { + var group = app.MapGroup("/v1/risk-bundles") + .WithTags("Risk Bundles") + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator); + + // GET /v1/risk-bundles/providers - Get available providers + group.MapGet("/providers", GetAvailableProvidersAsync) + .WithName("GetRiskBundleProviders") + .WithSummary("Get available risk bundle providers") + .WithDescription("Returns available providers for risk bundle generation, including mandatory and optional providers.") + .Produces(StatusCodes.Status200OK); + + // POST /v1/risk-bundles/jobs - Submit a new job + group.MapPost("/jobs", SubmitJobAsync) + .WithName("SubmitRiskBundleJob") + .WithSummary("Submit a risk bundle job") + .WithDescription("Submits a new risk bundle generation job with selected providers.") + .Produces(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest); + + // GET /v1/risk-bundles/jobs - Get recent jobs + group.MapGet("/jobs", GetRecentJobsAsync) + .WithName("GetRecentRiskBundleJobs") + .WithSummary("Get 
recent risk bundle jobs") + .WithDescription("Returns recent risk bundle jobs, optionally filtered by tenant.") + .Produces>(StatusCodes.Status200OK); + + // GET /v1/risk-bundles/jobs/{jobId} - Get job status + group.MapGet("/jobs/{jobId}", GetJobStatusAsync) + .WithName("GetRiskBundleJobStatus") + .WithSummary("Get risk bundle job status") + .WithDescription("Returns the status of a specific risk bundle job.") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound); + + // POST /v1/risk-bundles/jobs/{jobId}/cancel - Cancel a job + group.MapPost("/jobs/{jobId}/cancel", CancelJobAsync) + .WithName("CancelRiskBundleJob") + .WithSummary("Cancel a risk bundle job") + .WithDescription("Cancels a pending or running risk bundle job.") + .Produces(StatusCodes.Status204NoContent) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status409Conflict); + + return app; + } + + private static async Task> GetAvailableProvidersAsync( + [FromServices] IRiskBundleJobHandler handler, + CancellationToken cancellationToken) + { + var providers = await handler.GetAvailableProvidersAsync(cancellationToken); + return TypedResults.Ok(providers); + } + + private static async Task, BadRequest>> SubmitJobAsync( + [FromBody] RiskBundleJobSubmitRequest request, + [FromServices] IRiskBundleJobHandler handler, + HttpContext httpContext, + CancellationToken cancellationToken) + { + // Get actor from claims + var actor = httpContext.User.FindFirst("sub")?.Value + ?? httpContext.User.FindFirst("preferred_username")?.Value; + + var result = await handler.SubmitJobAsync(request, actor, cancellationToken); + + if (!result.Success) + { + return TypedResults.BadRequest(result); + } + + return TypedResults.Accepted($"/v1/risk-bundles/jobs/{result.JobId}", result); + } + + private static async Task>> GetRecentJobsAsync( + [FromQuery] string? tenantId, + [FromQuery] int? 
limit, + [FromServices] IRiskBundleJobHandler handler, + CancellationToken cancellationToken) + { + var jobs = await handler.GetRecentJobsAsync(tenantId, limit ?? 50, cancellationToken); + return TypedResults.Ok(jobs); + } + + private static async Task, NotFound>> GetJobStatusAsync( + string jobId, + [FromServices] IRiskBundleJobHandler handler, + CancellationToken cancellationToken) + { + var status = await handler.GetJobStatusAsync(jobId, cancellationToken); + if (status is null) + { + return TypedResults.NotFound(); + } + return TypedResults.Ok(status); + } + + private static async Task>> CancelJobAsync( + string jobId, + [FromServices] IRiskBundleJobHandler handler, + HttpContext httpContext, + CancellationToken cancellationToken) + { + var status = await handler.GetJobStatusAsync(jobId, cancellationToken); + if (status is null) + { + return TypedResults.NotFound(); + } + + if (status.Status is not (RiskBundleJobStatus.Pending or RiskBundleJobStatus.Running)) + { + return TypedResults.Conflict($"Job cannot be cancelled in status '{status.Status}'"); + } + + var actor = httpContext.User.FindFirst("sub")?.Value + ?? 
httpContext.User.FindFirst("preferred_username")?.Value; + + var cancelled = await handler.CancelJobAsync(jobId, actor, cancellationToken); + if (!cancelled) + { + return TypedResults.Conflict("Failed to cancel job"); + } + + return TypedResults.NoContent(); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobHandler.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobHandler.cs new file mode 100644 index 000000000..8cf6dc0e8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobHandler.cs @@ -0,0 +1,537 @@ +using System.Collections.Concurrent; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.ExportCenter.WebService.Telemetry; +using StellaOps.ExportCenter.WebService.Timeline; + +namespace StellaOps.ExportCenter.WebService.RiskBundle; + +/// +/// Implementation of risk bundle job handler with provider selection and audit logging. 
+/// +public sealed class RiskBundleJobHandler : IRiskBundleJobHandler +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false + }; + + private static readonly string[] MandatoryProviderIds = ["cisa-kev"]; + private static readonly string[] OptionalProviderIds = ["nvd", "osv", "ghsa", "epss"]; + + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly IExportTimelinePublisher _timelinePublisher; + private readonly RiskBundleJobHandlerOptions _options; + + // In-memory job store (would be replaced with persistent storage in production) + private readonly ConcurrentDictionary _jobs = new(); + + public RiskBundleJobHandler( + TimeProvider timeProvider, + ILogger logger, + IExportTimelinePublisher timelinePublisher, + IOptions? options = null) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timelinePublisher = timelinePublisher ?? throw new ArgumentNullException(nameof(timelinePublisher)); + _options = options?.Value ?? 
RiskBundleJobHandlerOptions.Default; + } + + public Task GetAvailableProvidersAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var providers = new List(); + + // Add mandatory providers + foreach (var providerId in MandatoryProviderIds) + { + providers.Add(CreateProviderInfo(providerId, mandatory: true)); + } + + // Add optional providers + foreach (var providerId in OptionalProviderIds) + { + providers.Add(CreateProviderInfo(providerId, mandatory: false)); + } + + var response = new RiskBundleProvidersResponse + { + Providers = providers, + MandatoryProviderIds = MandatoryProviderIds, + OptionalProviderIds = OptionalProviderIds + }; + + return Task.FromResult(response); + } + + public async Task SubmitJobAsync( + RiskBundleJobSubmitRequest request, + string? actor, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + cancellationToken.ThrowIfCancellationRequested(); + + var now = _timeProvider.GetUtcNow(); + var jobId = request.JobId?.ToString("N") ?? 
Guid.NewGuid().ToString("N"); + + // Validate provider selection + var selectedProviders = ResolveSelectedProviders(request.SelectedProviders); + var validationError = ValidateProviderSelection(selectedProviders); + if (validationError is not null) + { + _logger.LogWarning( + "Risk bundle job {JobId} submission rejected: {Error}", + jobId, validationError); + + return new RiskBundleJobSubmitResult + { + Success = false, + JobId = jobId, + Status = RiskBundleJobStatus.Failed, + ErrorMessage = validationError, + SubmittedAt = now, + SelectedProviders = selectedProviders + }; + } + + // Create job state + var jobState = new RiskBundleJobState + { + JobId = jobId, + Status = RiskBundleJobStatus.Pending, + TenantId = request.TenantId, + CorrelationId = request.CorrelationId, + Actor = actor, + SubmittedAt = now, + SelectedProviders = selectedProviders, + Request = request + }; + + if (!_jobs.TryAdd(jobId, jobState)) + { + return new RiskBundleJobSubmitResult + { + Success = false, + JobId = jobId, + Status = RiskBundleJobStatus.Failed, + ErrorMessage = "Job with this ID already exists", + SubmittedAt = now, + SelectedProviders = selectedProviders + }; + } + + // Emit audit event + await EmitAuditEventAsync( + "risk_bundle.job.submitted", + jobId, + request.TenantId, + actor, + request.CorrelationId, + new Dictionary + { + ["provider_count"] = selectedProviders.Count.ToString(), + ["providers"] = string.Join(",", selectedProviders), + ["include_osv"] = request.IncludeOsv.ToString().ToLowerInvariant() + }, + cancellationToken).ConfigureAwait(false); + + // Record metrics + ExportTelemetry.RiskBundleJobsSubmitted.Add(1, + new KeyValuePair("tenant_id", request.TenantId ?? "unknown")); + + _logger.LogInformation( + "Risk bundle job {JobId} submitted with {ProviderCount} providers by {Actor}", + jobId, selectedProviders.Count, actor ?? 
"anonymous"); + + // Start background execution (in production, this would queue to a job processor) + _ = ExecuteJobAsync(jobState, cancellationToken); + + return new RiskBundleJobSubmitResult + { + Success = true, + JobId = jobId, + Status = RiskBundleJobStatus.Pending, + SubmittedAt = now, + SelectedProviders = selectedProviders + }; + } + + public Task GetJobStatusAsync(string jobId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(jobId); + cancellationToken.ThrowIfCancellationRequested(); + + if (!_jobs.TryGetValue(jobId, out var state)) + { + return Task.FromResult(null); + } + + return Task.FromResult(CreateStatusDetail(state)); + } + + public Task> GetRecentJobsAsync( + string? tenantId, + int limit = 50, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var query = _jobs.Values.AsEnumerable(); + + if (!string.IsNullOrWhiteSpace(tenantId)) + { + query = query.Where(j => string.Equals(j.TenantId, tenantId, StringComparison.OrdinalIgnoreCase)); + } + + var results = query + .OrderByDescending(j => j.SubmittedAt) + .Take(Math.Min(limit, 100)) + .Select(CreateStatusDetail) + .ToList(); + + return Task.FromResult>(results); + } + + public async Task CancelJobAsync(string jobId, string? 
actor, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(jobId); + cancellationToken.ThrowIfCancellationRequested(); + + if (!_jobs.TryGetValue(jobId, out var state)) + { + return false; + } + + // Can only cancel pending or running jobs + if (state.Status is not (RiskBundleJobStatus.Pending or RiskBundleJobStatus.Running)) + { + return false; + } + + state.Status = RiskBundleJobStatus.Cancelled; + state.CompletedAt = _timeProvider.GetUtcNow(); + state.CancellationSource?.Cancel(); + + // Emit audit event + await EmitAuditEventAsync( + "risk_bundle.job.cancelled", + jobId, + state.TenantId, + actor, + state.CorrelationId, + new Dictionary + { + ["original_status"] = state.Status.ToString() + }, + cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Risk bundle job {JobId} cancelled by {Actor}", jobId, actor ?? "anonymous"); + + return true; + } + + private async Task ExecuteJobAsync(RiskBundleJobState state, CancellationToken cancellationToken) + { + state.CancellationSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + var linkedToken = state.CancellationSource.Token; + + try + { + state.Status = RiskBundleJobStatus.Running; + state.StartedAt = _timeProvider.GetUtcNow(); + + await EmitAuditEventAsync( + "risk_bundle.job.started", + state.JobId, + state.TenantId, + state.Actor, + state.CorrelationId, + null, + linkedToken).ConfigureAwait(false); + + // Simulate job execution (in production, this would call the actual RiskBundleJob) + await Task.Delay(TimeSpan.FromMilliseconds(100), linkedToken).ConfigureAwait(false); + + linkedToken.ThrowIfCancellationRequested(); + + // Create simulated outcome + var bundleId = Guid.NewGuid(); + state.Outcome = new RiskBundleOutcomeSummary + { + BundleId = bundleId, + RootHash = $"sha256:{Guid.NewGuid():N}", + BundleStorageKey = $"risk-bundles/{bundleId:N}/risk-bundle.tar.gz", + ManifestStorageKey = 
$"risk-bundles/{bundleId:N}/provider-manifest.json", + ManifestSignatureStorageKey = $"risk-bundles/{bundleId:N}/signatures/provider-manifest.dsse", + ProviderCount = state.SelectedProviders.Count, + TotalSizeBytes = state.SelectedProviders.Count * 1024 * 1024 // Simulated + }; + + state.IncludedProviders = state.SelectedProviders + .Select(p => new RiskBundleProviderResult + { + ProviderId = p, + Sha256 = $"sha256:{Guid.NewGuid():N}", + SizeBytes = 1024 * 1024, + Source = $"mirror://{p}/current", + SnapshotDate = DateOnly.FromDateTime(_timeProvider.GetUtcNow().DateTime), + Optional = !MandatoryProviderIds.Contains(p) + }) + .ToList(); + + state.Status = RiskBundleJobStatus.Completed; + state.CompletedAt = _timeProvider.GetUtcNow(); + + await EmitAuditEventAsync( + "risk_bundle.job.completed", + state.JobId, + state.TenantId, + state.Actor, + state.CorrelationId, + new Dictionary + { + ["bundle_id"] = bundleId.ToString("N"), + ["root_hash"] = state.Outcome.RootHash, + ["provider_count"] = state.Outcome.ProviderCount.ToString(), + ["total_size_bytes"] = state.Outcome.TotalSizeBytes.ToString() + }, + CancellationToken.None).ConfigureAwait(false); + + // Record metrics + ExportTelemetry.RiskBundleJobsCompleted.Add(1, + new KeyValuePair("tenant_id", state.TenantId ?? "unknown"), + new KeyValuePair("status", "success")); + + var durationSeconds = (state.CompletedAt.Value - state.StartedAt!.Value).TotalSeconds; + ExportTelemetry.RiskBundleJobDurationSeconds.Record(durationSeconds, + new KeyValuePair("tenant_id", state.TenantId ?? 
"unknown")); + + _logger.LogInformation( + "Risk bundle job {JobId} completed with {ProviderCount} providers in {DurationMs:F0}ms", + state.JobId, state.Outcome.ProviderCount, durationSeconds * 1000); + } + catch (OperationCanceledException) + { + if (state.Status != RiskBundleJobStatus.Cancelled) + { + state.Status = RiskBundleJobStatus.Cancelled; + state.CompletedAt = _timeProvider.GetUtcNow(); + } + } + catch (Exception ex) + { + state.Status = RiskBundleJobStatus.Failed; + state.CompletedAt = _timeProvider.GetUtcNow(); + state.ErrorMessage = ex.Message; + + await EmitAuditEventAsync( + "risk_bundle.job.failed", + state.JobId, + state.TenantId, + state.Actor, + state.CorrelationId, + new Dictionary + { + ["error"] = ex.Message, + ["error_type"] = ex.GetType().Name + }, + CancellationToken.None).ConfigureAwait(false); + + ExportTelemetry.RiskBundleJobsCompleted.Add(1, + new KeyValuePair("tenant_id", state.TenantId ?? "unknown"), + new KeyValuePair("status", "failed")); + + _logger.LogError(ex, "Risk bundle job {JobId} failed", state.JobId); + } + finally + { + state.CancellationSource?.Dispose(); + state.CancellationSource = null; + } + } + + private async Task EmitAuditEventAsync( + string eventType, + string jobId, + string? tenantId, + string? actor, + string? correlationId, + Dictionary? 
attributes, + CancellationToken cancellationToken) + { + var auditEvent = new RiskBundleAuditEvent + { + EventType = eventType, + JobId = jobId, + TenantId = tenantId, + OccurredAt = _timeProvider.GetUtcNow(), + Actor = actor, + CorrelationId = correlationId, + Attributes = attributes + }; + + var eventJson = JsonSerializer.Serialize(auditEvent, SerializerOptions); + + try + { + await _timelinePublisher.PublishIncidentEventAsync( + eventType, + jobId, + eventJson, + correlationId, + cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to publish audit event {EventType} for job {JobId}", eventType, jobId); + } + } + + private static List ResolveSelectedProviders(IReadOnlyList? requestedProviders) + { + if (requestedProviders is null or { Count: 0 }) + { + // Default: mandatory providers only + return [.. MandatoryProviderIds]; + } + + // Normalize and deduplicate + var selected = new HashSet(StringComparer.OrdinalIgnoreCase); + + // Always include mandatory providers + foreach (var provider in MandatoryProviderIds) + { + selected.Add(provider); + } + + // Add requested providers + foreach (var provider in requestedProviders) + { + if (!string.IsNullOrWhiteSpace(provider)) + { + selected.Add(provider.Trim().ToLowerInvariant()); + } + } + + return [.. selected.OrderBy(p => p, StringComparer.Ordinal)]; + } + + private static string? 
ValidateProviderSelection(List selectedProviders) + { + // Ensure all mandatory providers are included + foreach (var mandatory in MandatoryProviderIds) + { + if (!selectedProviders.Contains(mandatory, StringComparer.OrdinalIgnoreCase)) + { + return $"Mandatory provider '{mandatory}' must be included"; + } + } + + // Validate that selected providers are known + var allKnown = MandatoryProviderIds.Concat(OptionalProviderIds).ToHashSet(StringComparer.OrdinalIgnoreCase); + foreach (var provider in selectedProviders) + { + if (!allKnown.Contains(provider)) + { + return $"Unknown provider '{provider}'"; + } + } + + return null; + } + + private static RiskBundleAvailableProvider CreateProviderInfo(string providerId, bool mandatory) + { + var (displayName, description) = providerId switch + { + "cisa-kev" => ("CISA KEV", "CISA Known Exploited Vulnerabilities catalog"), + "nvd" => ("NVD", "NIST National Vulnerability Database"), + "osv" => ("OSV", "Open Source Vulnerabilities database"), + "ghsa" => ("GitHub Security Advisories", "GitHub Security Advisory database"), + "epss" => ("EPSS", "Exploit Prediction Scoring System"), + _ => (providerId.ToUpperInvariant(), null) + }; + + return new RiskBundleAvailableProvider + { + ProviderId = providerId, + DisplayName = displayName, + Description = description, + Mandatory = mandatory, + Available = true, // Would check actual availability in production + LastSnapshotDate = DateOnly.FromDateTime(DateTime.UtcNow.AddDays(-1)), + DefaultSourcePath = $"/data/providers/{providerId}/current" + }; + } + + private static RiskBundleJobStatusDetail CreateStatusDetail(RiskBundleJobState state) + { + return new RiskBundleJobStatusDetail + { + JobId = state.JobId, + Status = state.Status, + TenantId = state.TenantId, + SubmittedAt = state.SubmittedAt, + StartedAt = state.StartedAt, + CompletedAt = state.CompletedAt, + SelectedProviders = state.SelectedProviders, + IncludedProviders = state.IncludedProviders, + ErrorMessage = 
state.ErrorMessage, + Outcome = state.Outcome + }; + } + + private sealed class RiskBundleJobState + { + public required string JobId { get; init; } + public RiskBundleJobStatus Status { get; set; } + public string? TenantId { get; init; } + public string? CorrelationId { get; init; } + public string? Actor { get; init; } + public required DateTimeOffset SubmittedAt { get; init; } + public DateTimeOffset? StartedAt { get; set; } + public DateTimeOffset? CompletedAt { get; set; } + public required IReadOnlyList SelectedProviders { get; init; } + public IReadOnlyList? IncludedProviders { get; set; } + public string? ErrorMessage { get; set; } + public RiskBundleOutcomeSummary? Outcome { get; set; } + public RiskBundleJobSubmitRequest? Request { get; init; } + public CancellationTokenSource? CancellationSource { get; set; } + } +} + +/// +/// Options for the risk bundle job handler. +/// +public sealed record RiskBundleJobHandlerOptions +{ + /// + /// Maximum number of concurrent jobs. + /// + public int MaxConcurrentJobs { get; init; } = 5; + + /// + /// Job timeout duration. + /// + public TimeSpan JobTimeout { get; init; } = TimeSpan.FromMinutes(30); + + /// + /// How long to retain completed jobs in memory. + /// + public TimeSpan JobRetentionPeriod { get; init; } = TimeSpan.FromHours(24); + + /// + /// Default storage prefix for bundles. 
+ /// + public string DefaultStoragePrefix { get; init; } = "risk-bundles"; + + public static RiskBundleJobHandlerOptions Default => new(); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobModels.cs new file mode 100644 index 000000000..6ea571b5d --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleJobModels.cs @@ -0,0 +1,395 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.RiskBundle; + +/// +/// Request to submit a risk bundle job. +/// +public sealed record RiskBundleJobSubmitRequest +{ + /// + /// Unique identifier for the job. Generated if not provided. + /// + public Guid? JobId { get; init; } + + /// + /// Tenant identifier for audit logging. + /// + public string? TenantId { get; init; } + + /// + /// Optional correlation ID for tracing. + /// + public string? CorrelationId { get; init; } + + /// + /// Selected provider IDs to include in the bundle. + /// If empty, uses default providers. + /// + public IReadOnlyList SelectedProviders { get; init; } = []; + + /// + /// Provider-specific overrides for source paths and options. + /// + public IReadOnlyList? ProviderOverrides { get; init; } + + /// + /// Whether to include OSV data in the bundle. + /// + public bool IncludeOsv { get; init; } + + /// + /// Storage prefix for the generated bundle. + /// + public string? StoragePrefix { get; init; } + + /// + /// Custom bundle filename. Defaults to risk-bundle.tar.gz. + /// + public string? BundleFileName { get; init; } + + /// + /// Allow missing optional providers without failing. + /// + public bool AllowMissingOptional { get; init; } = true; + + /// + /// Allow stale optional provider data without failing. 
+ /// + public bool AllowStaleOptional { get; init; } = true; + + /// + /// Additional metadata to include in audit logs. + /// + public IReadOnlyDictionary? Metadata { get; init; } +} + +/// +/// Provider-specific override for a risk bundle job. +/// +public sealed record RiskBundleProviderOverride +{ + /// + /// Provider identifier (e.g., "cisa-kev", "nvd", "osv"). + /// + public required string ProviderId { get; init; } + + /// + /// Optional override for the source path. + /// + public string? SourcePath { get; init; } + + /// + /// Source descriptor (e.g., "mirror://kev/current"). + /// + public string? Source { get; init; } + + /// + /// Optional signature file path. + /// + public string? SignaturePath { get; init; } + + /// + /// Override for the snapshot date. + /// + public DateOnly? SnapshotDate { get; init; } + + /// + /// Whether this provider is optional. + /// + public bool? Optional { get; init; } +} + +/// +/// Result of submitting a risk bundle job. +/// +public sealed record RiskBundleJobSubmitResult +{ + /// + /// Whether the job was successfully submitted. + /// + public required bool Success { get; init; } + + /// + /// The job identifier. + /// + public required string JobId { get; init; } + + /// + /// Current job status. + /// + public required RiskBundleJobStatus Status { get; init; } + + /// + /// Error message if submission failed. + /// + public string? ErrorMessage { get; init; } + + /// + /// Timestamp when the job was submitted. + /// + public required DateTimeOffset SubmittedAt { get; init; } + + /// + /// Selected providers for this job. + /// + public IReadOnlyList SelectedProviders { get; init; } = []; +} + +/// +/// Status of a risk bundle job. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum RiskBundleJobStatus +{ + /// Job is pending execution. + Pending = 0, + + /// Job is currently running. + Running = 1, + + /// Job completed successfully. + Completed = 2, + + /// Job failed. 
Failed = 3,

    /// <summary>Job was cancelled.</summary>
    Cancelled = 4
}

/// <summary>
/// Detailed status of a risk bundle job, as returned by the status endpoint.
/// </summary>
public sealed record RiskBundleJobStatusDetail
{
    /// <summary>The job identifier.</summary>
    public required string JobId { get; init; }

    /// <summary>Current job status.</summary>
    public required RiskBundleJobStatus Status { get; init; }

    /// <summary>Tenant identifier; null when the job was submitted without one.</summary>
    public string? TenantId { get; init; }

    /// <summary>Timestamp when the job was submitted.</summary>
    public required DateTimeOffset SubmittedAt { get; init; }

    /// <summary>Timestamp when the job started executing; null while pending.</summary>
    public DateTimeOffset? StartedAt { get; init; }

    /// <summary>Timestamp when the job completed or failed; null until terminal.</summary>
    public DateTimeOffset? CompletedAt { get; init; }

    /// <summary>Selected providers for this job (empty when defaults were used).</summary>
    public IReadOnlyList<string> SelectedProviders { get; init; } = [];

    /// <summary>Providers that were successfully included; null until results exist.</summary>
    public IReadOnlyList<string>? IncludedProviders { get; init; }

    /// <summary>Error message if the job failed.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Bundle outcome details if completed.</summary>
    public RiskBundleOutcomeSummary? Outcome { get; init; }
}

/// <summary>
/// Result for an individual provider included in a risk bundle.
/// </summary>
public sealed record RiskBundleProviderResult
{
    /// <summary>Provider identifier.</summary>
    public required string ProviderId { get; init; }

    /// <summary>SHA-256 hash of the provider data.</summary>
    public required string Sha256 { get; init; }

    /// <summary>Size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>Source descriptor.</summary>
    public required string Source { get; init; }

    /// <summary>Snapshot date if available.</summary>
    public DateOnly? SnapshotDate { get; init; }

    /// <summary>Whether this provider is optional.</summary>
    public required bool Optional { get; init; }
}

/// <summary>
/// Summary of a completed risk bundle job outcome.
+/// +public sealed record RiskBundleOutcomeSummary +{ + /// + /// Bundle identifier. + /// + public required Guid BundleId { get; init; } + + /// + /// Root hash (SHA-256 of manifest). + /// + public required string RootHash { get; init; } + + /// + /// Storage key for the bundle. + /// + public required string BundleStorageKey { get; init; } + + /// + /// Storage key for the manifest. + /// + public required string ManifestStorageKey { get; init; } + + /// + /// Storage key for the manifest signature. + /// + public required string ManifestSignatureStorageKey { get; init; } + + /// + /// Number of providers included. + /// + public required int ProviderCount { get; init; } + + /// + /// Total bundle size in bytes. + /// + public required long TotalSizeBytes { get; init; } +} + +/// +/// Audit event for risk bundle job lifecycle. +/// +public sealed record RiskBundleAuditEvent +{ + /// + /// Event type (e.g., "risk_bundle.job.submitted", "risk_bundle.job.completed"). + /// + public required string EventType { get; init; } + + /// + /// Job identifier. + /// + public required string JobId { get; init; } + + /// + /// Tenant identifier. + /// + public string? TenantId { get; init; } + + /// + /// Event timestamp. + /// + public required DateTimeOffset OccurredAt { get; init; } + + /// + /// Actor who triggered the event. + /// + public string? Actor { get; init; } + + /// + /// Correlation ID for tracing. + /// + public string? CorrelationId { get; init; } + + /// + /// Additional event attributes. + /// + public IReadOnlyDictionary? Attributes { get; init; } +} + +/// +/// Available provider information. +/// +public sealed record RiskBundleAvailableProvider +{ + /// + /// Provider identifier. + /// + public required string ProviderId { get; init; } + + /// + /// Human-readable display name. + /// + public required string DisplayName { get; init; } + + /// + /// Provider description. + /// + public string? 
Description { get; init; } + + /// + /// Whether this provider is mandatory. + /// + public required bool Mandatory { get; init; } + + /// + /// Whether this provider is currently available. + /// + public required bool Available { get; init; } + + /// + /// Last known snapshot date. + /// + public DateOnly? LastSnapshotDate { get; init; } + + /// + /// Default source path. + /// + public string? DefaultSourcePath { get; init; } +} + +/// +/// Response containing available providers. +/// +public sealed record RiskBundleProvidersResponse +{ + /// + /// List of available providers. + /// + public required IReadOnlyList Providers { get; init; } + + /// + /// Mandatory provider IDs. + /// + public required IReadOnlyList MandatoryProviderIds { get; init; } + + /// + /// Optional provider IDs. + /// + public required IReadOnlyList OptionalProviderIds { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleServiceCollectionExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleServiceCollectionExtensions.cs new file mode 100644 index 000000000..6cf8628cd --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/RiskBundle/RiskBundleServiceCollectionExtensions.cs @@ -0,0 +1,37 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace StellaOps.ExportCenter.WebService.RiskBundle; + +/// +/// Extension methods for registering risk bundle services. +/// +public static class RiskBundleServiceCollectionExtensions +{ + /// + /// Adds risk bundle job handler services to the service collection. + /// + /// The service collection. + /// Optional configuration action. + /// The service collection for chaining. + public static IServiceCollection AddRiskBundleJobHandler( + this IServiceCollection services, + Action? 
configure = null) + { + ArgumentNullException.ThrowIfNull(services); + + // Register TimeProvider if not already registered + services.TryAddSingleton(TimeProvider.System); + + // Configure options if provided + if (configure is not null) + { + services.Configure(configure); + } + + // Register the job handler + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj index 5ceae99df..cf8db4dc2 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj @@ -10,6 +10,8 @@ + + diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportActivityExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportActivityExtensions.cs new file mode 100644 index 000000000..92e724b0c --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportActivityExtensions.cs @@ -0,0 +1,155 @@ +using System.Diagnostics; + +namespace StellaOps.ExportCenter.WebService.Telemetry; + +/// +/// Extension methods for creating export-related activities (spans). +/// +public static class ExportActivityExtensions +{ + /// + /// Starts an activity for an export run. + /// + public static Activity? 
StartExportRunActivity( + string runId, + string profileId, + string tenantId, + string exportType) + { + var activity = ExportTelemetry.ActivitySource.StartActivity( + "export.run", + ActivityKind.Internal); + + if (activity is not null) + { + activity.SetTag(ExportTelemetryTags.RunId, runId); + activity.SetTag(ExportTelemetryTags.ProfileId, profileId); + activity.SetTag(ExportTelemetryTags.TenantId, tenantId); + activity.SetTag(ExportTelemetryTags.ExportType, exportType); + } + + return activity; + } + + /// + /// Starts an activity for the planning phase. + /// + public static Activity? StartExportPlanActivity( + string runId, + string profileId) + { + var activity = ExportTelemetry.ActivitySource.StartActivity( + "export.plan", + ActivityKind.Internal); + + if (activity is not null) + { + activity.SetTag(ExportTelemetryTags.RunId, runId); + activity.SetTag(ExportTelemetryTags.ProfileId, profileId); + } + + return activity; + } + + /// + /// Starts an activity for the write/build phase. + /// + public static Activity? StartExportWriteActivity( + string runId, + string artifactType) + { + var activity = ExportTelemetry.ActivitySource.StartActivity( + "export.write", + ActivityKind.Internal); + + if (activity is not null) + { + activity.SetTag(ExportTelemetryTags.RunId, runId); + activity.SetTag(ExportTelemetryTags.ArtifactType, artifactType); + } + + return activity; + } + + /// + /// Starts an activity for bundle distribution. + /// + public static Activity? StartExportDistributeActivity( + string runId, + string distributionType) + { + var activity = ExportTelemetry.ActivitySource.StartActivity( + "export.distribute", + ActivityKind.Internal); + + if (activity is not null) + { + activity.SetTag(ExportTelemetryTags.RunId, runId); + activity.SetTag(ExportTelemetryTags.DistributionType, distributionType); + } + + return activity; + } + + /// + /// Adds artifact count to an activity. + /// + public static Activity? SetArtifactCount(this Activity? 
activity, int count)
    {
        activity?.SetTag("artifact_count", count);
        return activity;
    }

    /// <summary>Adds the final bundle size (bytes) to an activity.</summary>
    public static Activity? SetBundleSize(this Activity? activity, long sizeBytes)
    {
        activity?.SetTag("bundle_size_bytes", sizeBytes);
        return activity;
    }

    /// <summary>Adds an export status tag to an activity.</summary>
    public static Activity? SetExportStatus(this Activity? activity, string status)
    {
        activity?.SetTag(ExportTelemetryTags.Status, status);
        return activity;
    }

    /// <summary>
    /// Marks an activity as failed: sets error status plus exception and
    /// optional error-code tags. Safe on a null activity.
    /// </summary>
    public static Activity? SetError(this Activity? activity, Exception exception, string? errorCode = null)
    {
        if (activity is not null)
        {
            activity.SetStatus(ActivityStatusCode.Error, exception.Message);
            activity.SetTag(ExportTelemetryTags.Status, ExportStatuses.Failed);
            // OTel exception semantic conventions expect the fully-qualified type.
            activity.SetTag("exception.type", exception.GetType().FullName ?? exception.GetType().Name);
            activity.SetTag("exception.message", exception.Message);

            if (!string.IsNullOrEmpty(errorCode))
            {
                activity.SetTag(ExportTelemetryTags.ErrorCode, errorCode);
            }
        }

        return activity;
    }

    /// <summary>
    /// Marks an activity as successful.
    /// </summary>
    public static Activity? SetSuccess(this Activity?
activity) + { + if (activity is not null) + { + activity.SetStatus(ActivityStatusCode.Ok); + activity.SetTag(ExportTelemetryTags.Status, ExportStatuses.Success); + } + + return activity; + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportLoggerExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportLoggerExtensions.cs new file mode 100644 index 000000000..ec298db1f --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportLoggerExtensions.cs @@ -0,0 +1,138 @@ +using Microsoft.Extensions.Logging; + +namespace StellaOps.ExportCenter.WebService.Telemetry; + +/// +/// High-performance structured logging extensions for export operations. +/// Uses LoggerMessage source generators for optimal performance. +/// +public static partial class ExportLoggerExtensions +{ + [LoggerMessage( + EventId = 1000, + Level = LogLevel.Information, + Message = "Export run started: RunId={RunId}, ProfileId={ProfileId}, TenantId={TenantId}, ExportType={ExportType}")] + public static partial void LogExportRunStarted( + this ILogger logger, + string runId, + string profileId, + string tenantId, + string exportType); + + [LoggerMessage( + EventId = 1001, + Level = LogLevel.Information, + Message = "Export run completed: RunId={RunId}, ProfileId={ProfileId}, TenantId={TenantId}, DurationMs={DurationMs}, ArtifactCount={ArtifactCount}, BundleSizeBytes={BundleSizeBytes}")] + public static partial void LogExportRunCompleted( + this ILogger logger, + string runId, + string profileId, + string tenantId, + long durationMs, + int artifactCount, + long bundleSizeBytes); + + [LoggerMessage( + EventId = 1002, + Level = LogLevel.Error, + Message = "Export run failed: RunId={RunId}, ProfileId={ProfileId}, TenantId={TenantId}, DurationMs={DurationMs}, ErrorCode={ErrorCode}")] + public static partial void LogExportRunFailed( + this ILogger logger, + 
Exception? exception, + string runId, + string profileId, + string tenantId, + long durationMs, + string? errorCode); + + [LoggerMessage( + EventId = 1003, + Level = LogLevel.Warning, + Message = "Export run cancelled: RunId={RunId}, ProfileId={ProfileId}, TenantId={TenantId}, DurationMs={DurationMs}")] + public static partial void LogExportRunCancelled( + this ILogger logger, + string runId, + string profileId, + string tenantId, + long durationMs); + + [LoggerMessage( + EventId = 1010, + Level = LogLevel.Debug, + Message = "Export planning started: RunId={RunId}, ProfileId={ProfileId}")] + public static partial void LogExportPlanningStarted( + this ILogger logger, + string runId, + string profileId); + + [LoggerMessage( + EventId = 1011, + Level = LogLevel.Debug, + Message = "Export planning completed: RunId={RunId}, ProfileId={ProfileId}, DurationMs={DurationMs}, ItemCount={ItemCount}")] + public static partial void LogExportPlanningCompleted( + this ILogger logger, + string runId, + string profileId, + long durationMs, + int itemCount); + + [LoggerMessage( + EventId = 1020, + Level = LogLevel.Debug, + Message = "Export artifact written: RunId={RunId}, ArtifactType={ArtifactType}, SizeBytes={SizeBytes}")] + public static partial void LogExportArtifactWritten( + this ILogger logger, + string runId, + string artifactType, + long sizeBytes); + + [LoggerMessage( + EventId = 1030, + Level = LogLevel.Information, + Message = "Export bundle created: RunId={RunId}, BundleHash={BundleHash}, SizeBytes={SizeBytes}")] + public static partial void LogExportBundleCreated( + this ILogger logger, + string runId, + string bundleHash, + long sizeBytes); + + [LoggerMessage( + EventId = 1040, + Level = LogLevel.Information, + Message = "Export distribution started: RunId={RunId}, DistributionType={DistributionType}")] + public static partial void LogExportDistributionStarted( + this ILogger logger, + string runId, + string distributionType); + + [LoggerMessage( + EventId = 1041, + 
Level = LogLevel.Information, + Message = "Export distribution completed: RunId={RunId}, DistributionType={DistributionType}, DurationMs={DurationMs}")] + public static partial void LogExportDistributionCompleted( + this ILogger logger, + string runId, + string distributionType, + long durationMs); + + [LoggerMessage( + EventId = 1050, + Level = LogLevel.Debug, + Message = "Export profile loaded: ProfileId={ProfileId}, TenantId={TenantId}, Adapter={Adapter}")] + public static partial void LogExportProfileLoaded( + this ILogger logger, + string profileId, + string tenantId, + string adapter); + + [LoggerMessage( + EventId = 1060, + Level = LogLevel.Warning, + Message = "Export retry scheduled: RunId={RunId}, Attempt={Attempt}, MaxAttempts={MaxAttempts}, RetryAfterMs={RetryAfterMs}")] + public static partial void LogExportRetryScheduled( + this ILogger logger, + string runId, + int attempt, + int maxAttempts, + long retryAfterMs); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportRunTelemetryContext.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportRunTelemetryContext.cs new file mode 100644 index 000000000..06adb5132 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportRunTelemetryContext.cs @@ -0,0 +1,221 @@ +using System.Diagnostics; + +namespace StellaOps.ExportCenter.WebService.Telemetry; + +/// +/// Telemetry context for tracking an export run lifecycle. +/// Encapsulates metrics recording, activity tracking, and structured logging. +/// +public sealed class ExportRunTelemetryContext : IDisposable +{ + private readonly Stopwatch _stopwatch; + private readonly Activity? 
_activity; + private readonly string _runId; + private readonly string _profileId; + private readonly string _tenantId; + private readonly string _exportType; + private readonly KeyValuePair[] _baseTags; + + private bool _completed; + private int _artifactCount; + private long _bundleSizeBytes; + + /// + /// Creates a new telemetry context for an export run. + /// + public ExportRunTelemetryContext( + string runId, + string profileId, + string tenantId, + string exportType) + { + _runId = runId; + _profileId = profileId; + _tenantId = tenantId; + _exportType = exportType; + + _baseTags = + [ + new(ExportTelemetryTags.Profile, profileId), + new(ExportTelemetryTags.Tenant, tenantId), + new(ExportTelemetryTags.ExportType, exportType) + ]; + + // Record run start + ExportTelemetry.ExportRunsTotal.Add(1, _baseTags); + ExportTelemetry.ExportRunsInProgress.Add(1, new KeyValuePair(ExportTelemetryTags.Tenant, tenantId)); + + // Start activity and stopwatch + _activity = ExportActivityExtensions.StartExportRunActivity(runId, profileId, tenantId, exportType); + _stopwatch = Stopwatch.StartNew(); + } + + /// + /// The run ID. + /// + public string RunId => _runId; + + /// + /// The profile ID. + /// + public string ProfileId => _profileId; + + /// + /// The tenant ID. + /// + public string TenantId => _tenantId; + + /// + /// The export type. + /// + public string ExportType => _exportType; + + /// + /// The current activity (span). + /// + public Activity? Activity => _activity; + + /// + /// Records an artifact being exported. 
/// </summary>
    public void RecordArtifact(string artifactType, long sizeBytes = 0)
    {
        _artifactCount++;
        _bundleSizeBytes += sizeBytes;

        // Per-artifact counter carries artifact_type; byte totals use the base tags.
        var tags = new KeyValuePair<string, object?>[]
        {
            new(ExportTelemetryTags.Profile, _profileId),
            new(ExportTelemetryTags.Tenant, _tenantId),
            new(ExportTelemetryTags.ArtifactType, artifactType)
        };

        ExportTelemetry.ExportArtifactsTotal.Add(1, tags);

        if (sizeBytes > 0)
        {
            ExportTelemetry.ExportBytesTotal.Add(sizeBytes, _baseTags);
        }
    }

    /// <summary>
    /// Sets the final bundle size (replaces any per-artifact accumulation)
    /// and mirrors it onto the current activity.
    /// </summary>
    public void SetBundleSize(long sizeBytes)
    {
        _bundleSizeBytes = sizeBytes;
        _activity?.SetBundleSize(sizeBytes);
    }

    /// <summary>
    /// Marks the export run as successful. Idempotent: only the first
    /// Complete/Fail/Cancel call records terminal metrics.
    /// </summary>
    public void Complete()
    {
        if (_completed) return;
        _completed = true;

        _stopwatch.Stop();
        var duration = _stopwatch.Elapsed.TotalSeconds;

        ExportTelemetry.ExportRunsSuccessTotal.Add(1, _baseTags);

        var durationTags = new KeyValuePair<string, object?>[]
        {
            new(ExportTelemetryTags.Profile, _profileId),
            new(ExportTelemetryTags.Tenant, _tenantId),
            new(ExportTelemetryTags.ExportType, _exportType),
            new(ExportTelemetryTags.Status, ExportStatuses.Success)
        };
        ExportTelemetry.ExportRunDurationSeconds.Record(duration, durationTags);

        if (_bundleSizeBytes > 0)
        {
            ExportTelemetry.ExportBundleSizeBytes.Record(_bundleSizeBytes, _baseTags);
        }

        _activity?.SetArtifactCount(_artifactCount);
        _activity?.SetSuccess();
    }

    /// <summary>
    /// Marks the export run as failed. Idempotent (see <see cref="Complete"/>).
    /// </summary>
    /// <param name="exception">Optional exception recorded on the activity.</param>
    /// <param name="errorCode">Stable error code tag; "unknown" when omitted.</param>
    public void Fail(Exception? exception = null, string? errorCode = null)
    {
        if (_completed) return;
        _completed = true;

        _stopwatch.Stop();
        var duration = _stopwatch.Elapsed.TotalSeconds;

        // Record failure metrics
        var failureTags = new KeyValuePair<string, object?>[]
        {
            new(ExportTelemetryTags.Profile, _profileId),
            new(ExportTelemetryTags.Tenant, _tenantId),
            new(ExportTelemetryTags.ExportType, _exportType),
            new(ExportTelemetryTags.ErrorCode, errorCode ??
"unknown") + }; + ExportTelemetry.ExportRunsFailedTotal.Add(1, failureTags); + + var durationTags = new KeyValuePair[] + { + new(ExportTelemetryTags.Profile, _profileId), + new(ExportTelemetryTags.Tenant, _tenantId), + new(ExportTelemetryTags.ExportType, _exportType), + new(ExportTelemetryTags.Status, ExportStatuses.Failed) + }; + ExportTelemetry.ExportRunDurationSeconds.Record(duration, durationTags); + + if (exception is not null) + { + _activity?.SetError(exception, errorCode); + } + else + { + _activity?.SetExportStatus(ExportStatuses.Failed); + } + } + + /// + /// Marks the export run as cancelled. + /// + public void Cancel() + { + if (_completed) return; + _completed = true; + + _stopwatch.Stop(); + var duration = _stopwatch.Elapsed.TotalSeconds; + + var durationTags = new KeyValuePair[] + { + new(ExportTelemetryTags.Profile, _profileId), + new(ExportTelemetryTags.Tenant, _tenantId), + new(ExportTelemetryTags.ExportType, _exportType), + new(ExportTelemetryTags.Status, ExportStatuses.Cancelled) + }; + ExportTelemetry.ExportRunDurationSeconds.Record(duration, durationTags); + + _activity?.SetExportStatus(ExportStatuses.Cancelled); + } + + /// + /// Disposes the telemetry context, ensuring metrics are recorded. 
+ /// + public void Dispose() + { + // Decrement in-progress counter + ExportTelemetry.ExportRunsInProgress.Add(-1, new KeyValuePair(ExportTelemetryTags.Tenant, _tenantId)); + + // If not explicitly completed, mark as failed + if (!_completed) + { + Fail(errorCode: "incomplete"); + } + + _activity?.Dispose(); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportTelemetry.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportTelemetry.cs new file mode 100644 index 000000000..595e785f7 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/ExportTelemetry.cs @@ -0,0 +1,286 @@ +using System.Diagnostics; +using System.Diagnostics.Metrics; + +namespace StellaOps.ExportCenter.WebService.Telemetry; + +/// +/// Telemetry instrumentation for ExportCenter service. +/// Provides metrics, activity sources, and structured logging support. +/// +public static class ExportTelemetry +{ + /// + /// Service name used for telemetry identification. + /// + public const string ServiceName = "StellaOps.ExportCenter"; + + /// + /// Service version. + /// + public const string ServiceVersion = "1.0.0"; + + /// + /// Meter for export center metrics. + /// + public static readonly Meter Meter = new(ServiceName, ServiceVersion); + + /// + /// Activity source for distributed tracing. + /// + public static readonly ActivitySource ActivitySource = new(ServiceName, ServiceVersion); + + #region Counters + + /// + /// Total number of export runs initiated. + /// Tags: profile, tenant, type (evidence|attestation|mirror|risk) + /// + public static readonly Counter ExportRunsTotal = Meter.CreateCounter( + "export_runs_total", + "runs", + "Total number of export runs initiated"); + + /// + /// Total number of successful export runs. 
/// Tags: profile, tenant, type
    /// </summary>
    public static readonly Counter<long> ExportRunsSuccessTotal = Meter.CreateCounter<long>(
        "export_runs_success_total",
        "runs",
        "Total number of successful export runs");

    /// <summary>
    /// Total number of failed export runs.
    /// Tags: profile, tenant, type, error_code
    /// </summary>
    public static readonly Counter<long> ExportRunsFailedTotal = Meter.CreateCounter<long>(
        "export_runs_failed_total",
        "runs",
        "Total number of failed export runs");

    /// <summary>
    /// Total number of artifacts exported.
    /// Tags: profile, tenant, artifact_type (sbom|vex|attestation|policy|evidence)
    /// </summary>
    public static readonly Counter<long> ExportArtifactsTotal = Meter.CreateCounter<long>(
        "export_artifacts_total",
        "artifacts",
        "Total number of artifacts exported");

    /// <summary>
    /// Total bytes exported.
    /// Tags: profile, tenant, type
    /// </summary>
    public static readonly Counter<long> ExportBytesTotal = Meter.CreateCounter<long>(
        "export_bytes_total",
        "bytes",
        "Total bytes exported");

    /// <summary>
    /// Total number of timeline events published.
    /// Tags: event_type, tenant_id
    /// </summary>
    public static readonly Counter<long> TimelineEventsPublished = Meter.CreateCounter<long>(
        "export_timeline_events_published_total",
        "events",
        "Total number of timeline events published");

    /// <summary>
    /// Total number of timeline event publish failures.
    /// Tags: event_type, tenant_id, error_code
    /// </summary>
    public static readonly Counter<long> TimelineEventsFailedTotal = Meter.CreateCounter<long>(
        "export_timeline_events_failed_total",
        "events",
        "Total number of timeline event publish failures");

    /// <summary>
    /// Total number of deduplicated timeline events.
    /// Tags: event_type, tenant_id
    /// </summary>
    public static readonly Counter<long> TimelineEventsDeduplicated = Meter.CreateCounter<long>(
        "export_timeline_events_deduplicated_total",
        "events",
        "Total number of deduplicated timeline events");

    /// <summary>
    /// Total number of incidents activated.
+ /// Tags: severity, type + /// + public static readonly Counter IncidentsActivatedTotal = Meter.CreateCounter( + "export_incidents_activated_total", + "incidents", + "Total number of incidents activated"); + + /// + /// Total number of incidents resolved. + /// Tags: severity, type, is_false_positive + /// + public static readonly Counter IncidentsResolvedTotal = Meter.CreateCounter( + "export_incidents_resolved_total", + "incidents", + "Total number of incidents resolved"); + + /// + /// Total number of incidents escalated. + /// Tags: from_severity, to_severity + /// + public static readonly Counter IncidentsEscalatedTotal = Meter.CreateCounter( + "export_incidents_escalated_total", + "incidents", + "Total number of incidents escalated"); + + /// + /// Total number of incidents de-escalated. + /// Tags: from_severity, to_severity + /// + public static readonly Counter IncidentsDeescalatedTotal = Meter.CreateCounter( + "export_incidents_deescalated_total", + "incidents", + "Total number of incidents de-escalated"); + + /// + /// Total number of notifications emitted. + /// Tags: type (incident_activated|incident_updated|incident_resolved), severity + /// + public static readonly Counter NotificationsEmittedTotal = Meter.CreateCounter( + "export_notifications_emitted_total", + "notifications", + "Total number of notifications emitted"); + + /// + /// Total number of risk bundle jobs submitted. + /// Tags: tenant_id + /// + public static readonly Counter RiskBundleJobsSubmitted = Meter.CreateCounter( + "export_risk_bundle_jobs_submitted_total", + "jobs", + "Total number of risk bundle jobs submitted"); + + /// + /// Total number of risk bundle jobs completed. 
+ /// Tags: tenant_id, status (success|failed|cancelled) + /// + public static readonly Counter RiskBundleJobsCompleted = Meter.CreateCounter( + "export_risk_bundle_jobs_completed_total", + "jobs", + "Total number of risk bundle jobs completed"); + + #endregion + + #region Histograms + + /// + /// Export run duration in seconds. + /// Tags: profile, tenant, type, status (success|failed|cancelled) + /// + public static readonly Histogram ExportRunDurationSeconds = Meter.CreateHistogram( + "export_run_duration_seconds", + "seconds", + "Export run duration in seconds"); + + /// + /// Export planning phase duration in seconds. + /// Tags: profile, tenant + /// + public static readonly Histogram ExportPlanDurationSeconds = Meter.CreateHistogram( + "export_plan_duration_seconds", + "seconds", + "Export planning phase duration in seconds"); + + /// + /// Export bundle size in bytes. + /// Tags: profile, tenant, type + /// + public static readonly Histogram ExportBundleSizeBytes = Meter.CreateHistogram( + "export_bundle_size_bytes", + "bytes", + "Export bundle size in bytes"); + + /// + /// Incident duration in seconds. + /// Tags: severity, type + /// + public static readonly Histogram IncidentDurationSeconds = Meter.CreateHistogram( + "export_incident_duration_seconds", + "seconds", + "Incident duration in seconds"); + + /// + /// Risk bundle job duration in seconds. + /// Tags: tenant_id + /// + public static readonly Histogram RiskBundleJobDurationSeconds = Meter.CreateHistogram( + "export_risk_bundle_job_duration_seconds", + "seconds", + "Risk bundle job duration in seconds"); + + #endregion + + #region Gauges + + /// + /// Number of export runs currently in progress. + /// Tags: tenant + /// + public static readonly UpDownCounter ExportRunsInProgress = Meter.CreateUpDownCounter( + "export_runs_in_progress", + "runs", + "Number of export runs currently in progress"); + + #endregion +} + +/// +/// Tag names for export telemetry. 
/// </summary>
public static class ExportTelemetryTags
{
    /// <summary>Export run identifier (activity tag).</summary>
    public const string RunId = "run_id";

    /// <summary>Short profile tag used on metrics.</summary>
    public const string Profile = "profile";

    /// <summary>Full profile identifier used on activities.</summary>
    public const string ProfileId = "profile_id";

    /// <summary>Short tenant tag used on metrics.</summary>
    public const string Tenant = "tenant";

    /// <summary>Full tenant identifier used on activities.</summary>
    public const string TenantId = "tenant_id";

    /// <summary>Kind of export being performed.</summary>
    public const string ExportType = "export_type";

    /// <summary>Kind of artifact being written.</summary>
    public const string ArtifactType = "artifact_type";

    /// <summary>Terminal status of a run.</summary>
    public const string Status = "status";

    /// <summary>Stable error code recorded on failures.</summary>
    public const string ErrorCode = "error_code";

    /// <summary>Distribution channel for a bundle.</summary>
    public const string DistributionType = "distribution_type";
}

/// <summary>
/// Export type tag values.
/// </summary>
public static class ExportTypes
{
    public const string Evidence = "evidence";
    public const string Attestation = "attestation";
    public const string Mirror = "mirror";
    public const string Risk = "risk";
    public const string DevPortal = "devportal";
    public const string OfflineKit = "offline_kit";
}

/// <summary>
/// Artifact type tag values.
/// </summary>
public static class ArtifactTypes
{
    public const string Sbom = "sbom";
    public const string Vex = "vex";
    public const string Attestation = "attestation";
    public const string Policy = "policy";
    public const string Evidence = "evidence";
    public const string Manifest = "manifest";
}

/// <summary>
/// Export status values.
+/// +public static class ExportStatuses +{ + public const string Success = "success"; + public const string Failed = "failed"; + public const string Cancelled = "cancelled"; + public const string Timeout = "timeout"; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/TelemetryServiceCollectionExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/TelemetryServiceCollectionExtensions.cs new file mode 100644 index 000000000..6a073ab0a --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Telemetry/TelemetryServiceCollectionExtensions.cs @@ -0,0 +1,68 @@ +using Microsoft.Extensions.DependencyInjection; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; + +namespace StellaOps.ExportCenter.WebService.Telemetry; + +/// +/// Extension methods for configuring export center telemetry. +/// +public static class TelemetryServiceCollectionExtensions +{ + /// + /// Adds export center metrics instrumentation to the OpenTelemetry meter provider. + /// + public static MeterProviderBuilder AddExportCenterInstrumentation(this MeterProviderBuilder builder) + { + return builder.AddMeter(ExportTelemetry.ServiceName); + } + + /// + /// Adds export center tracing instrumentation to the OpenTelemetry tracer provider. + /// + public static TracerProviderBuilder AddExportCenterInstrumentation(this TracerProviderBuilder builder) + { + return builder.AddSource(ExportTelemetry.ServiceName); + } + + /// + /// Configures export center telemetry for the service collection. + /// + public static IServiceCollection AddExportCenterTelemetry(this IServiceCollection services) + { + // Register telemetry context factory if needed + services.AddSingleton(); + + return services; + } +} + +/// +/// Factory for creating export telemetry contexts. +/// +public interface IExportTelemetryFactory +{ + /// + /// Creates a new telemetry context for an export run. 
+ /// + ExportRunTelemetryContext CreateRunContext( + string runId, + string profileId, + string tenantId, + string exportType); +} + +/// +/// Default implementation of the export telemetry factory. +/// +public sealed class ExportTelemetryFactory : IExportTelemetryFactory +{ + public ExportRunTelemetryContext CreateRunContext( + string runId, + string profileId, + string tenantId, + string exportType) + { + return new ExportRunTelemetryContext(runId, profileId, tenantId, exportType); + } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEventTypes.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEventTypes.cs new file mode 100644 index 000000000..1b132b61d --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEventTypes.cs @@ -0,0 +1,14 @@ +namespace StellaOps.ExportCenter.WebService.Timeline; + +/// +/// Timeline event types for export lifecycle. 
+/// +public static class ExportTimelineEventTypes +{ + public const string ExportStarted = "export.started"; + public const string ExportCompleted = "export.completed"; + public const string ExportFailed = "export.failed"; + public const string ExportCancelled = "export.cancelled"; + public const string ArtifactCreated = "export.artifact.created"; + public const string ManifestSigned = "export.manifest.signed"; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEvents.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEvents.cs new file mode 100644 index 000000000..23ff692ca --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelineEvents.cs @@ -0,0 +1,186 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.ExportCenter.WebService.Timeline; + +/// +/// Base timeline event for export lifecycle events. +/// +public abstract record ExportTimelineEventBase +{ + [JsonPropertyName("run_id")] + public required string RunId { get; init; } + + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + [JsonPropertyName("profile_id")] + public string? ProfileId { get; init; } + + [JsonPropertyName("export_type")] + public required string ExportType { get; init; } + + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } + + [JsonPropertyName("trace_id")] + public string? TraceId { get; init; } + + [JsonPropertyName("occurred_at")] + public required DateTimeOffset OccurredAt { get; init; } + + /// + /// Gets the event type identifier for routing. + /// + public abstract string EventType { get; } +} + +/// +/// Timeline event emitted when an export run begins. 
+/// +public sealed record ExportStartedEvent : ExportTimelineEventBase +{ + public override string EventType => ExportTimelineEventTypes.ExportStarted; + + [JsonPropertyName("requested_at")] + public required DateTimeOffset RequestedAt { get; init; } + + [JsonPropertyName("requested_by")] + public string? RequestedBy { get; init; } + + [JsonPropertyName("scope")] + public ExportScopeInfo? Scope { get; init; } +} + +/// +/// Timeline event emitted when an export run completes successfully. +/// +public sealed record ExportCompletedEvent : ExportTimelineEventBase +{ + public override string EventType => ExportTimelineEventTypes.ExportCompleted; + + [JsonPropertyName("bundle_id")] + public required string BundleId { get; init; } + + [JsonPropertyName("manifest_uri")] + public string? ManifestUri { get; init; } + + [JsonPropertyName("manifest_digest")] + public string? ManifestDigest { get; init; } + + [JsonPropertyName("bundle_digest")] + public string? BundleDigest { get; init; } + + [JsonPropertyName("artifact_count")] + public int ArtifactCount { get; init; } + + [JsonPropertyName("total_size_bytes")] + public long? TotalSizeBytes { get; init; } + + [JsonPropertyName("duration_seconds")] + public double DurationSeconds { get; init; } + + [JsonPropertyName("evidence_refs")] + public IReadOnlyList? EvidenceRefs { get; init; } +} + +/// +/// Timeline event emitted when an export run fails. +/// +public sealed record ExportFailedEvent : ExportTimelineEventBase +{ + public override string EventType => ExportTimelineEventTypes.ExportFailed; + + [JsonPropertyName("error_code")] + public required string ErrorCode { get; init; } + + [JsonPropertyName("error_message")] + public string? ErrorMessage { get; init; } + + [JsonPropertyName("failed_at_stage")] + public string? 
FailedAtStage { get; init; } + + [JsonPropertyName("duration_seconds")] + public double DurationSeconds { get; init; } + + [JsonPropertyName("is_retriable")] + public bool IsRetriable { get; init; } +} + +/// +/// Timeline event emitted when an export run is cancelled. +/// +public sealed record ExportCancelledEvent : ExportTimelineEventBase +{ + public override string EventType => ExportTimelineEventTypes.ExportCancelled; + + [JsonPropertyName("cancelled_by")] + public string? CancelledBy { get; init; } + + [JsonPropertyName("reason")] + public string? Reason { get; init; } + + [JsonPropertyName("duration_seconds")] + public double DurationSeconds { get; init; } +} + +/// +/// Timeline event emitted when an artifact is created during export. +/// +public sealed record ExportArtifactCreatedEvent : ExportTimelineEventBase +{ + public override string EventType => ExportTimelineEventTypes.ArtifactCreated; + + [JsonPropertyName("artifact_id")] + public required string ArtifactId { get; init; } + + [JsonPropertyName("artifact_type")] + public required string ArtifactType { get; init; } + + [JsonPropertyName("artifact_digest")] + public required string ArtifactDigest { get; init; } + + [JsonPropertyName("artifact_size_bytes")] + public long ArtifactSizeBytes { get; init; } + + [JsonPropertyName("artifact_uri")] + public string? ArtifactUri { get; init; } +} + +/// +/// Scope information for an export run. +/// +public sealed record ExportScopeInfo +{ + [JsonPropertyName("namespace")] + public string? Namespace { get; init; } + + [JsonPropertyName("repository")] + public string? Repository { get; init; } + + [JsonPropertyName("digest")] + public string? Digest { get; init; } + + [JsonPropertyName("policy_id")] + public string? PolicyId { get; init; } + + [JsonPropertyName("filters")] + public IReadOnlyDictionary? Filters { get; init; } +} + +/// +/// Reference to evidence produced by an export. 
+/// +public sealed record ExportEvidenceRef +{ + [JsonPropertyName("evidence_type")] + public required string EvidenceType { get; init; } + + [JsonPropertyName("evidence_uri")] + public string? EvidenceUri { get; init; } + + [JsonPropertyName("evidence_digest")] + public string? EvidenceDigest { get; init; } + + [JsonPropertyName("subject")] + public string? Subject { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelinePublisher.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelinePublisher.cs new file mode 100644 index 000000000..cf36cb12f --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/ExportTimelinePublisher.cs @@ -0,0 +1,469 @@ +using System.Collections.Concurrent; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.ExportCenter.Core.Notifications; +using StellaOps.ExportCenter.WebService.Telemetry; + +namespace StellaOps.ExportCenter.WebService.Timeline; + +/// +/// Publishes export lifecycle events to the timeline service. +/// Implements idempotency through hash-based deduplication and exponential backoff retry. 
+/// +public sealed class ExportTimelinePublisher : IExportTimelinePublisher +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false + }; + + private static readonly TimeSpan[] RetryDelays = + [ + TimeSpan.FromMilliseconds(100), + TimeSpan.FromMilliseconds(250), + TimeSpan.FromMilliseconds(500), + TimeSpan.FromSeconds(1), + TimeSpan.FromSeconds(2) + ]; + + private readonly IExportNotificationSink _sink; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly ExportTimelinePublisherOptions _options; + + // In-memory dedupe cache with sliding expiration + private readonly ConcurrentDictionary _dedupeCache = new(); + private readonly object _cleanupLock = new(); + private DateTimeOffset _lastCleanup; + + public ExportTimelinePublisher( + IExportNotificationSink sink, + TimeProvider timeProvider, + ILogger logger, + IOptions? options = null) + { + _sink = sink ?? throw new ArgumentNullException(nameof(sink)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options?.Value ?? 
ExportTimelinePublisherOptions.Default; + _lastCleanup = _timeProvider.GetUtcNow(); + } + + public Task PublishStartedAsync( + ExportStartedEvent @event, + CancellationToken cancellationToken = default) => + PublishEventAsync(@event, cancellationToken); + + public Task PublishCompletedAsync( + ExportCompletedEvent @event, + CancellationToken cancellationToken = default) => + PublishEventAsync(@event, cancellationToken); + + public Task PublishFailedAsync( + ExportFailedEvent @event, + CancellationToken cancellationToken = default) => + PublishEventAsync(@event, cancellationToken); + + public Task PublishCancelledAsync( + ExportCancelledEvent @event, + CancellationToken cancellationToken = default) => + PublishEventAsync(@event, cancellationToken); + + public Task PublishArtifactCreatedAsync( + ExportArtifactCreatedEvent @event, + CancellationToken cancellationToken = default) => + PublishEventAsync(@event, cancellationToken); + + public async Task PublishEventAsync( + ExportTimelineEventBase @event, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(@event); + + PeriodicCleanup(); + + var eventId = GenerateEventId(@event); + var idempotencyKey = ComputeIdempotencyKey(@event); + + // Check dedupe cache + if (_options.EnableDeduplication && IsDuplicate(idempotencyKey)) + { + _logger.LogDebug( + "Deduplicated timeline event {EventType} for run {RunId}", + @event.EventType, @event.RunId); + return TimelinePublishResult.Deduplicated(eventId); + } + + var envelope = BuildEnvelope(@event, eventId); + var channel = GetChannel(@event.EventType); + var payload = JsonSerializer.Serialize(envelope, SerializerOptions); + + var result = await PublishWithRetryAsync( + channel, + payload, + @event.RunId, + @event.TenantId, + cancellationToken).ConfigureAwait(false); + + if (result.Success && _options.EnableDeduplication) + { + RecordDelivery(idempotencyKey); + } + + if (result.Success) + { + ExportTelemetry.TimelineEventsPublished.Add(1, + new 
KeyValuePair("event_type", @event.EventType), + new KeyValuePair("tenant_id", @event.TenantId)); + + _logger.LogInformation( + "Published timeline event {EventType} for run {RunId}", + @event.EventType, @event.RunId); + } + + return result.Success + ? TimelinePublishResult.Succeeded(eventId, result.AttemptCount) + : TimelinePublishResult.Failed(result.ErrorMessage ?? "Unknown error", result.AttemptCount); + } + + public async Task PublishIncidentEventAsync( + string eventType, + string incidentId, + string eventJson, + string? correlationId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(eventType); + ArgumentException.ThrowIfNullOrWhiteSpace(incidentId); + ArgumentException.ThrowIfNullOrWhiteSpace(eventJson); + + PeriodicCleanup(); + + var now = _timeProvider.GetUtcNow(); + var eventId = $"inc-{ComputeHash($"{incidentId}:{eventType}:{now:O}")[..16]}"; + var payloadHash = ComputeHash(eventJson); + + var envelope = new TimelineEventEnvelope + { + EventId = eventId, + TenantId = "system", // Incident events are system-level + EventType = eventType, + Source = "stellaops.export-center.incident", + OccurredAt = now, + CorrelationId = correlationId, + Severity = GetIncidentSeverity(eventType), + PayloadHash = payloadHash, + RawPayloadJson = eventJson, + Attributes = new Dictionary + { + ["incident_id"] = incidentId + } + }; + + var channel = GetChannel(eventType); + var payload = JsonSerializer.Serialize(envelope, SerializerOptions); + + var result = await PublishWithRetryAsync( + channel, + payload, + incidentId, + "system", + cancellationToken).ConfigureAwait(false); + + if (result.Success) + { + ExportTelemetry.TimelineEventsPublished.Add(1, + new KeyValuePair("event_type", eventType), + new KeyValuePair("tenant_id", "system")); + + _logger.LogInformation( + "Published incident timeline event {EventType} for incident {IncidentId}", + eventType, incidentId); + } + else + { + 
ExportTelemetry.TimelineEventsFailedTotal.Add(1, + new KeyValuePair("event_type", eventType), + new KeyValuePair("error_code", "publish_failed")); + } + + return result.Success + ? TimelinePublishResult.Succeeded(eventId, result.AttemptCount) + : TimelinePublishResult.Failed(result.ErrorMessage ?? "Unknown error", result.AttemptCount); + } + + private static string GetIncidentSeverity(string eventType) + { + return eventType switch + { + "export.incident.activated" => "warning", + "export.incident.escalated" => "error", + "export.incident.deescalated" => "info", + "export.incident.resolved" => "info", + _ => "warning" + }; + } + + private TimelineEventEnvelope BuildEnvelope(ExportTimelineEventBase @event, string eventId) + { + var rawPayload = JsonSerializer.Serialize(@event, @event.GetType(), SerializerOptions); + var payloadHash = ComputeHash(rawPayload); + + return new TimelineEventEnvelope + { + EventId = eventId, + TenantId = @event.TenantId, + EventType = @event.EventType, + Source = "stellaops.export-center", + OccurredAt = @event.OccurredAt, + CorrelationId = @event.CorrelationId, + TraceId = @event.TraceId, + Severity = GetSeverity(@event), + PayloadHash = payloadHash, + RawPayloadJson = rawPayload, + Attributes = BuildAttributes(@event) + }; + } + + private static Dictionary BuildAttributes(ExportTimelineEventBase @event) + { + var attributes = new Dictionary(StringComparer.Ordinal) + { + ["run_id"] = @event.RunId, + ["export_type"] = @event.ExportType + }; + + if (!string.IsNullOrWhiteSpace(@event.ProfileId)) + { + attributes["profile_id"] = @event.ProfileId; + } + + // Add type-specific attributes + switch (@event) + { + case ExportCompletedEvent completed: + attributes["bundle_id"] = completed.BundleId; + if (!string.IsNullOrWhiteSpace(completed.BundleDigest)) + { + attributes["bundle_digest"] = completed.BundleDigest; + } + attributes["artifact_count"] = completed.ArtifactCount.ToString(); + break; + + case ExportFailedEvent failed: + 
attributes["error_code"] = failed.ErrorCode; + attributes["is_retriable"] = failed.IsRetriable.ToString().ToLowerInvariant(); + break; + + case ExportArtifactCreatedEvent artifact: + attributes["artifact_id"] = artifact.ArtifactId; + attributes["artifact_type"] = artifact.ArtifactType; + attributes["artifact_digest"] = artifact.ArtifactDigest; + break; + } + + return attributes; + } + + private static string GetSeverity(ExportTimelineEventBase @event) + { + return @event switch + { + ExportFailedEvent => "error", + ExportCancelledEvent => "warning", + _ => "info" + }; + } + + private string GetChannel(string eventType) + { + return $"{_options.ChannelPrefix}.{eventType}"; + } + + private async Task PublishWithRetryAsync( + string channel, + string payload, + string runId, + string tenantId, + CancellationToken cancellationToken) + { + var attempt = 0; + string? lastError = null; + + while (attempt < _options.MaxRetries) + { + try + { + await _sink.PublishAsync(channel, payload, cancellationToken).ConfigureAwait(false); + return new PublishAttemptResult(Success: true, AttemptCount: attempt + 1); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) when (IsTransient(ex) && attempt < _options.MaxRetries - 1) + { + lastError = ex.Message; + attempt++; + + var delay = attempt <= RetryDelays.Length + ? 
RetryDelays[attempt - 1] + : RetryDelays[^1]; + + _logger.LogWarning(ex, + "Transient failure publishing timeline event for run {RunId}, attempt {Attempt}/{MaxRetries}", + runId, attempt, _options.MaxRetries); + + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, + "Non-transient failure publishing timeline event for run {RunId}", + runId); + + return new PublishAttemptResult( + Success: false, + ErrorMessage: ex.Message, + AttemptCount: attempt + 1); + } + } + + return new PublishAttemptResult( + Success: false, + ErrorMessage: lastError ?? "Max retries exceeded", + AttemptCount: attempt); + } + + private static string GenerateEventId(ExportTimelineEventBase @event) + { + // Event ID combines run ID, event type, and timestamp for uniqueness + var components = $"{@event.RunId}:{@event.EventType}:{@event.OccurredAt:O}"; + var hash = ComputeHash(components); + return $"exp-{hash[..16]}"; + } + + private static string ComputeIdempotencyKey(ExportTimelineEventBase @event) + { + // Idempotency key based on run ID + event type + time window + var timeWindow = @event.OccurredAt.ToUnixTimeSeconds() / 60; // 1-minute windows + var components = $"{@event.RunId}:{@event.EventType}:{timeWindow}"; + return ComputeHash(components); + } + + private static string ComputeHash(string input) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input)); + return Convert.ToHexStringLower(bytes); + } + + private bool IsDuplicate(string idempotencyKey) + { + var now = _timeProvider.GetUtcNow(); + if (_dedupeCache.TryGetValue(idempotencyKey, out var expiresAt)) + { + return now < expiresAt; + } + return false; + } + + private void RecordDelivery(string idempotencyKey) + { + var expiresAt = _timeProvider.GetUtcNow().Add(_options.DedupeWindow); + _dedupeCache[idempotencyKey] = expiresAt; + } + + private void PeriodicCleanup() + { + var now = _timeProvider.GetUtcNow(); + if (now - _lastCleanup < 
_options.CleanupInterval) + { + return; + } + + lock (_cleanupLock) + { + if (now - _lastCleanup < _options.CleanupInterval) + { + return; + } + + var keysToRemove = _dedupeCache + .Where(kvp => now >= kvp.Value) + .Select(kvp => kvp.Key) + .ToList(); + + foreach (var key in keysToRemove) + { + _dedupeCache.TryRemove(key, out _); + } + + _lastCleanup = now; + + if (keysToRemove.Count > 0) + { + _logger.LogDebug("Cleaned up {Count} expired dedupe entries", keysToRemove.Count); + } + } + } + + private static bool IsTransient(Exception ex) + { + return ex is TimeoutException or + TaskCanceledException or + IOException; + } + + private sealed record PublishAttemptResult( + bool Success, + string? ErrorMessage = null, + int AttemptCount = 1); +} + +/// +/// Timeline event envelope for publishing to timeline indexer. +/// Matches TimelineIndexer.Core.Models.TimelineEventEnvelope structure. +/// +public sealed class TimelineEventEnvelope +{ + public required string EventId { get; init; } + public required string TenantId { get; init; } + public required string EventType { get; init; } + public required string Source { get; init; } + public required DateTimeOffset OccurredAt { get; init; } + + public string? CorrelationId { get; init; } + public string? TraceId { get; init; } + public string? Actor { get; init; } + public string Severity { get; init; } = "info"; + public string? PayloadHash { get; set; } + public string RawPayloadJson { get; init; } = "{}"; + public string? NormalizedPayloadJson { get; init; } + public IDictionary? Attributes { get; init; } + + public string? BundleDigest { get; init; } + public Guid? BundleId { get; init; } + public string? AttestationSubject { get; init; } + public string? AttestationDigest { get; init; } + public string? ManifestUri { get; init; } +} + +/// +/// Options for the export timeline publisher. 
+/// +public sealed record ExportTimelinePublisherOptions +{ + public int MaxRetries { get; init; } = 5; + public bool EnableDeduplication { get; init; } = true; + public TimeSpan DedupeWindow { get; init; } = TimeSpan.FromMinutes(5); + public TimeSpan CleanupInterval { get; init; } = TimeSpan.FromMinutes(1); + public string ChannelPrefix { get; init; } = "timeline.export"; + + public static ExportTimelinePublisherOptions Default => new(); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/IExportTimelinePublisher.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/IExportTimelinePublisher.cs new file mode 100644 index 000000000..b00fb66c1 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/IExportTimelinePublisher.cs @@ -0,0 +1,86 @@ +namespace StellaOps.ExportCenter.WebService.Timeline; + +/// +/// Interface for publishing export lifecycle events to the timeline. +/// +public interface IExportTimelinePublisher +{ + /// + /// Publishes an export started event. + /// + Task PublishStartedAsync( + ExportStartedEvent @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes an export completed event. + /// + Task PublishCompletedAsync( + ExportCompletedEvent @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes an export failed event. + /// + Task PublishFailedAsync( + ExportFailedEvent @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes an export cancelled event. + /// + Task PublishCancelledAsync( + ExportCancelledEvent @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes an artifact created event. + /// + Task PublishArtifactCreatedAsync( + ExportArtifactCreatedEvent @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes a generic timeline event. 
+ /// + Task PublishEventAsync( + ExportTimelineEventBase @event, + CancellationToken cancellationToken = default); + + /// + /// Publishes an incident event to the timeline. + /// + /// The incident event type. + /// The incident identifier. + /// The serialized event JSON. + /// Optional correlation ID for tracing. + /// Cancellation token. + /// The publish result. + Task PublishIncidentEventAsync( + string eventType, + string incidentId, + string eventJson, + string? correlationId, + CancellationToken cancellationToken = default); +} + +/// +/// Result of a timeline publish operation. +/// +public sealed record TimelinePublishResult +{ + public bool Success { get; init; } + public string? EventId { get; init; } + public string? ErrorMessage { get; init; } + public int AttemptCount { get; init; } = 1; + public bool WasDeduplicated { get; init; } + + public static TimelinePublishResult Succeeded(string eventId, int attempts = 1) => + new() { Success = true, EventId = eventId, AttemptCount = attempts }; + + public static TimelinePublishResult Failed(string errorMessage, int attempts = 1) => + new() { Success = false, ErrorMessage = errorMessage, AttemptCount = attempts }; + + public static TimelinePublishResult Deduplicated(string eventId) => + new() { Success = true, EventId = eventId, WasDeduplicated = true }; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/TimelineServiceCollectionExtensions.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/TimelineServiceCollectionExtensions.cs new file mode 100644 index 000000000..8d2601bb8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Timeline/TimelineServiceCollectionExtensions.cs @@ -0,0 +1,73 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.ExportCenter.Core.Notifications; + +namespace 
StellaOps.ExportCenter.WebService.Timeline; + +/// +/// Extension methods for registering export timeline services. +/// +public static class TimelineServiceCollectionExtensions +{ + /// + /// Adds export timeline publisher services to the service collection. + /// + /// The service collection. + /// Optional configuration for the timeline publisher. + /// The service collection for chaining. + public static IServiceCollection AddExportTimelinePublisher( + this IServiceCollection services, + Action? configureOptions = null) + { + ArgumentNullException.ThrowIfNull(services); + + // Configure options + if (configureOptions is not null) + { + services.Configure(configureOptions); + } + + // Register TimeProvider if not already registered + services.TryAddSingleton(TimeProvider.System); + + // Register notification sink if not already registered (use in-memory for development) + services.TryAddSingleton(); + + // Register timeline publisher + services.TryAddSingleton(); + + return services; + } + + /// + /// Adds export timeline publisher with a custom notification sink. + /// + /// The notification sink implementation type. + /// The service collection. + /// Optional configuration for the timeline publisher. + /// The service collection for chaining. + public static IServiceCollection AddExportTimelinePublisher( + this IServiceCollection services, + Action? 
configureOptions = null) + where TSink : class, IExportNotificationSink + { + ArgumentNullException.ThrowIfNull(services); + + // Configure options + if (configureOptions is not null) + { + services.Configure(configureOptions); + } + + // Register TimeProvider if not already registered + services.TryAddSingleton(TimeProvider.System); + + // Register the specified sink type + services.TryAddSingleton(); + + // Register timeline publisher + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/Mirror/StellaOps.Mirror.Creator/TASKS.md b/src/Mirror/StellaOps.Mirror.Creator/TASKS.md index 51eacb3d2..a108a625d 100644 --- a/src/Mirror/StellaOps.Mirror.Creator/TASKS.md +++ b/src/Mirror/StellaOps.Mirror.Creator/TASKS.md @@ -7,3 +7,4 @@ | MIRROR-GAPS-125-013 | DONE | Mirror strategy gaps (MS1–MS10) encoded in mirror-policy and bundle meta. | | MIRROR-CRT-57-002 | DONE | Time-anchor DSSE emitted when SIGN_KEY is set; bundle meta + verifier check anchor integrity. | | MIRROR-CRT-58-001 | DONE | CLI wrappers (`mirror-create.sh`, `mirror-verify.sh`) for deterministic build/verify flows; uses existing assembler + verifier. | +| MIRROR-CRT-58-002 | DOING (dev) | Export Center scheduling helper (`src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh`) added; production signing still pending MIRROR-CRT-56-002 key. | diff --git a/src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh b/src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh new file mode 100644 index 000000000..e2ed1ea09 --- /dev/null +++ b/src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Schedule an Export Center run for mirror bundles and emit an audit log entry. +# Requires curl. Uses bearer token auth for simplicity; swap to DPoP if/when gateway enforces it. 
+ +BASE_URL="${EXPORT_CENTER_BASE_URL:-http://localhost:8080}" +TENANT="${EXPORT_CENTER_TENANT:-tenant-default}" +PROJECT="${EXPORT_CENTER_PROJECT:-}" +TOKEN="${EXPORT_CENTER_TOKEN:-}" +PROFILE_ID="${1:-}" +AUDIT_LOG="${AUDIT_LOG_PATH:-$(pwd)/logs/export-center-schedule.log}" + +if [[ -z "$PROFILE_ID" ]]; then + echo "usage: $(basename "$0") [targets-json] [formats-json]" >&2 + echo "env: EXPORT_CENTER_BASE_URL, EXPORT_CENTER_TENANT, EXPORT_CENTER_PROJECT, EXPORT_CENTER_TOKEN, AUDIT_LOG_PATH" >&2 + exit 1 +fi + +TARGETS_JSON="${2:-[\"vex\",\"advisory\",\"policy\"]}" +FORMATS_JSON="${3:-[\"json\",\"ndjson\"]}" + +mkdir -p "$(dirname "$AUDIT_LOG")" + +AUTH_HEADER=() +if [[ -n "$TOKEN" ]]; then + AUTH_HEADER=(-H "Authorization: Bearer ${TOKEN}") +fi + +payload="$(cat <> "${AUDIT_LOG}" + +echo "${response}" diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs index 8b971d64f..b2e8f331b 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs @@ -57,6 +57,7 @@ internal static class NodePackageCollector TraverseTarballs(context, projectInput.Tarballs, packages, visited, yarnPnpPresent, cancellationToken); TraverseYarnPnpCache(context, projectInput.YarnCacheRoots, packages, visited, yarnPnpPresent, cancellationToken); + MergePnpPackages(context, packages, visited, cancellationToken); AttachImports(context, packages, cancellationToken); @@ -149,6 +150,37 @@ internal static class NodePackageCollector } } + private static void MergePnpPackages( + LanguageAnalyzerContext context, + List packages, + HashSet visited, + CancellationToken cancellationToken) + { + var pnpPackages = NodePnpDataLoader.Load(context, cancellationToken); + if (pnpPackages.Count == 0) + { + return; + } + 
+ foreach (var package in pnpPackages) + { + cancellationToken.ThrowIfCancellationRequested(); + + var key = package.RelativePathNormalized; + if (!string.IsNullOrEmpty(key) && !visited.Add(key)) + { + continue; + } + + if (packages.Any(p => string.Equals(p.ComponentKey, package.ComponentKey, StringComparison.Ordinal))) + { + continue; + } + + packages.Add(package); + } + } + private static IEnumerable EnumerateSourceFiles(string root) { foreach (var extension in new[] { ".js", ".jsx", ".mjs", ".cjs", ".ts", ".tsx" }) diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePnpDataLoader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePnpDataLoader.cs index 3556e96cd..145a56a3e 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePnpDataLoader.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePnpDataLoader.cs @@ -84,7 +84,7 @@ internal static class NodePnpDataLoader return null; } - var relativePath = context.GetRelativePath(packageLocation).Replace(Path.DirectorySeparatorChar, '/'); + var relativePath = NormalizeRelativePath(context, packageLocation); var absolutePackagePath = Path.GetFullPath(Path.Combine(context.RootPath, packageLocation)); var usedByEntrypoint = context.UsageHints.IsPathUsed(absolutePackagePath); var packageJsonLocator = BuildLocator(packageLocation); @@ -252,6 +252,20 @@ internal static class NodePnpDataLoader return (name ?? string.Empty, version ?? 
string.Empty); } + private static string NormalizeRelativePath(LanguageAnalyzerContext context, string packageLocation) + { + var relative = context.GetRelativePath(packageLocation); + if (string.IsNullOrWhiteSpace(relative) || relative == ".") + { + return "."; + } + + return relative + .TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar) + .Replace(Path.DirectorySeparatorChar, '/') + .Replace(Path.AltDirectorySeparatorChar, '/'); + } + private static string BuildLocator(string packageLocation) { if (string.IsNullOrWhiteSpace(packageLocation)) @@ -276,4 +290,3 @@ internal static class NodePnpDataLoader return relative.Replace(Path.DirectorySeparatorChar, '/'); } } - diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/.pnp.data.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/.pnp.data.json new file mode 100644 index 000000000..f31e4eb42 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/.pnp.data.json @@ -0,0 +1,18 @@ +{ + "packages": { + "yarn-pnp-demo@workspace:.": { + "packageLocation": ".", + "packageJson": { + "name": "yarn-pnp-demo", + "version": "1.0.0" + } + }, + "cached-lib@npm:1.0.0": { + "packageLocation": ".yarn/cache/cached-lib-1.0.0.zip/node_modules/cached-lib/", + "packageJson": { + "name": "cached-lib", + "version": "1.0.0" + } + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/expected.json index 33e32febd..a06e61df8 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/yarn-pnp/expected.json @@ -8,7 +8,9 @@ "type": "npm", "usedByEntrypoint": false, 
"metadata": { - "path": ".yarn/cache", + "lockLocator": "cached-lib@npm:1.0.0", + "lockSource": "pnp.data", + "path": ".yarn/cache/cached-lib-1.0.0.zip/node_modules/cached-lib", "yarnPnp": "true" }, "evidence": [ @@ -29,6 +31,8 @@ "type": "npm", "usedByEntrypoint": false, "metadata": { + "lockLocator": "yarn-pnp-demo@workspace:.", + "lockSource": "pnp.data", "path": ".", "yarnPnp": "true" }, @@ -40,4 +44,4 @@ } ] } -] \ No newline at end of file +] diff --git a/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs index 806691ae0..e3d573145 100644 --- a/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs @@ -1,24 +1,26 @@ -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Signer.Core; -using StellaOps.Signer.Infrastructure.Auditing; -using StellaOps.Signer.Infrastructure.ProofOfEntitlement; -using StellaOps.Signer.Infrastructure.Quotas; -using StellaOps.Signer.Infrastructure.ReleaseVerification; -using StellaOps.Signer.Infrastructure.Signing; - -namespace StellaOps.Signer.Infrastructure; - -public static class ServiceCollectionExtensions -{ - public static IServiceCollection AddSignerPipeline(this IServiceCollection services) - { - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(TimeProvider.System); - return services; - } -} +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography; +using StellaOps.Signer.Core; +using StellaOps.Signer.Infrastructure.Auditing; +using StellaOps.Signer.Infrastructure.ProofOfEntitlement; +using StellaOps.Signer.Infrastructure.Quotas; +using StellaOps.Signer.Infrastructure.ReleaseVerification; +using 
StellaOps.Signer.Infrastructure.Signing; + +namespace StellaOps.Signer.Infrastructure; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSignerPipeline(this IServiceCollection services) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(); + return services; + } +} diff --git a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Fixtures/TestCryptoFactory.Sm.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Fixtures/TestCryptoFactory.Sm.cs index d44dafd49..15e2ea007 100644 --- a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Fixtures/TestCryptoFactory.Sm.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Fixtures/TestCryptoFactory.Sm.cs @@ -2,7 +2,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Options; -using Org.BouncyCastle.Asn1.Pkcs; +using Org.BouncyCastle.Pkcs; using Org.BouncyCastle.Crypto.Generators; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Security; @@ -31,9 +31,18 @@ public static partial class TestCryptoFactory var registry = provider.GetRequiredService(); // Seed a test key - var smProvider = (SmSoftCryptoProvider)provider.GetRequiredService(); - var key = Sm2TestKeyFactory.Create("sm2-key"); - smProvider.UpsertSigningKey(key); + var previousGate = Environment.GetEnvironmentVariable("SM_SOFT_ALLOWED"); + Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", "1"); + try + { + var smProvider = (SmSoftCryptoProvider)provider.GetRequiredService(); + var key = Sm2TestKeyFactory.Create("sm2-key"); + smProvider.UpsertSigningKey(key); + } + finally + { + Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", previousGate); + } return registry; } @@ -48,7 +57,7 @@ internal static class Sm2TestKeyFactory var generator = 
new ECKeyPairGenerator("EC"); generator.Init(new ECKeyGenerationParameters(domain, new SecureRandom())); var pair = generator.GenerateKeyPair(); - var privateDer = Org.BouncyCastle.Asn1.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(pair.Private).GetDerEncoded(); + var privateDer = PrivateKeyInfoFactory.CreatePrivateKeyInfo(pair.Private).GetDerEncoded(); var reference = new CryptoKeyReference(keyId, "cn.sm.soft"); return new CryptoSigningKey(reference, SignatureAlgorithms.Sm2, privateDer, DateTimeOffset.UtcNow); } diff --git a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/DualSignTests.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/DualSignTests.cs index 303c4eebd..9739cb969 100644 --- a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/DualSignTests.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/DualSignTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; +using System.Text.Json; using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; @@ -33,13 +34,29 @@ public class DualSignTests var signer = new CryptoDsseSigner(registry, resolver, options, NullLogger.Instance); var request = new SigningRequest( - Options: new SigningOptions(SigningMode.Keyless), - Payload: Array.Empty(), Subjects: Array.Empty(), - PredicateType: "demo"); + PredicateType: "demo", + Predicate: JsonDocument.Parse("{}"), + ScannerImageDigest: "sha256:dummydigest", + ProofOfEntitlement: new ProofOfEntitlement(SignerPoEFormat.Jwt, "ok"), + Options: new SigningOptions(SigningMode.Keyless, ExpirySeconds: null, ReturnBundle: "full")); - var entitlement = new ProofOfEntitlementResult(true, "entitled", Array.Empty()); - var caller = new CallerContext("tenant", "subject", "plan", "scanner-digest"); + var entitlement = new ProofOfEntitlementResult( + LicenseId: "lic", + CustomerId: "cust", + Plan: "plan", + 
MaxArtifactBytes: 1024 * 1024, + QpsLimit: 10, + QpsRemaining: 10, + ExpiresAtUtc: DateTimeOffset.UtcNow.AddMinutes(5)); + + var caller = new CallerContext( + Subject: "subject", + Tenant: "tenant", + Scopes: Array.Empty(), + Audiences: Array.Empty(), + SenderBinding: null, + ClientCertificateThumbprint: null); var bundle = await signer.SignAsync(request, entitlement, caller, CancellationToken.None); @@ -104,15 +121,15 @@ public class DualSignTests this.provider = provider; } - public Task ResolveKeyAsync(SigningMode mode, string tenant, CancellationToken cancellationToken) + public ValueTask ResolveKeyAsync(SigningMode mode, string tenant, CancellationToken cancellationToken) { - return Task.FromResult(new SigningKeyResolution( + return ValueTask.FromResult(new SigningKeyResolution( keyId, provider, - issuer: null, - subject: null, - expiresAtUtc: null, - certificateChain: Array.Empty())); + null, + null, + null, + Array.Empty())); } } } diff --git a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/Sm2SigningTests.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/Sm2SigningTests.cs index 7e97341ff..d41c1727d 100644 --- a/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/Sm2SigningTests.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/Signing/Sm2SigningTests.cs @@ -108,7 +108,7 @@ public class Sm2SigningTests : IDisposable JsonDocument.Parse("{}"), "sha256:00", new ProofOfEntitlement(SignerPoEFormat.Jwt, "stub"), - new SigningOptions(SigningMode.Keyless, null, null)); + new SigningOptions(SigningMode.Keyless, null, "dsse")); } private static CallerContext BuildCaller() => new( @@ -116,6 +116,6 @@ public class Sm2SigningTests : IDisposable Tenant: "tenant-1", Scopes: Array.Empty(), Audiences: Array.Empty(), - SenderBinding: null, - ClientCertificateThumbprint: null); + SenderBinding: string.Empty, + ClientCertificateThumbprint: string.Empty); }