diff --git a/devops/compose/.env b/devops/compose/.env new file mode 100644 index 000000000..80bccc144 --- /dev/null +++ b/devops/compose/.env @@ -0,0 +1,171 @@ +# ============================================================================= +# STELLA OPS ENVIRONMENT CONFIGURATION +# ============================================================================= +# Main environment template for docker-compose.stella-ops.yml +# Copy to .env and customize for your deployment. +# +# Usage: +# cp env/stellaops.env.example .env +# docker compose -f docker-compose.stella-ops.yml up -d +# +# ============================================================================= + +# ============================================================================= +# INFRASTRUCTURE +# ============================================================================= + +# PostgreSQL Database +POSTGRES_USER=stellaops +POSTGRES_PASSWORD=stellaops +POSTGRES_DB=stellaops_platform +POSTGRES_PORT=5432 + +# Valkey (Redis-compatible cache and messaging) +VALKEY_PORT=6379 + +# RustFS Object Storage +RUSTFS_HTTP_PORT=8080 + +# ============================================================================= +# CORE SERVICES +# ============================================================================= + +# Authority (OAuth2/OIDC) +AUTHORITY_ISSUER=https://authority.stella-ops.local +AUTHORITY_PORT=8440 +AUTHORITY_OFFLINE_CACHE_TOLERANCE=00:30:00 + +# Signer +SIGNER_POE_INTROSPECT_URL=http://authority.stella-ops.local/.well-known/openid-configuration +SIGNER_PORT=8441 + +# Attestor +ATTESTOR_PORT=8442 + +# Issuer Directory +ISSUER_DIRECTORY_PORT=8447 +ISSUER_DIRECTORY_SEED_CSAF=true + +# Concelier +CONCELIER_PORT=8445 + +# Notify +NOTIFY_WEB_PORT=8446 + +# Web UI +UI_PORT=8443 + +# ============================================================================= +# SCANNER CONFIGURATION +# ============================================================================= + +SCANNER_WEB_PORT=8444 + +# Queue configuration (Valkey only - NATS removed) +SCANNER__QUEUE__BROKER=valkey://cache.stella-ops.local:6379 + +# Event streaming +SCANNER_EVENTS_ENABLED=false +SCANNER_EVENTS_DRIVER=valkey +SCANNER_EVENTS_DSN=cache.stella-ops.local:6379 +SCANNER_EVENTS_STREAM=stella.events +SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5 +SCANNER_EVENTS_MAX_STREAM_LENGTH=10000 + +# Surface cache configuration +SCANNER_SURFACE_FS_ENDPOINT=http://s3.stella-ops.local +SCANNER_SURFACE_FS_BUCKET=surface-cache +SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface +SCANNER_SURFACE_CACHE_QUOTA_MB=4096 +SCANNER_SURFACE_PREFETCH_ENABLED=false +SCANNER_SURFACE_TENANT=default +SCANNER_SURFACE_FEATURES= +SCANNER_SURFACE_SECRETS_PROVIDER=file +SCANNER_SURFACE_SECRETS_NAMESPACE= +SCANNER_SURFACE_SECRETS_ROOT=/etc/stellaops/secrets +SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER= +SCANNER_SURFACE_SECRETS_ALLOW_INLINE=false +SURFACE_SECRETS_HOST_PATH=./offline/surface-secrets + +# Offline Kit configuration +SCANNER_OFFLINEKIT_ENABLED=false +SCANNER_OFFLINEKIT_REQUIREDSSE=true +SCANNER_OFFLINEKIT_REKOROFFLINEMODE=true +SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY=/etc/stellaops/trust-roots +SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY=/var/lib/stellaops/rekor-snapshot +SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH=./offline/trust-roots +SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH=./offline/rekor-snapshot + +# ============================================================================= +# SCHEDULER CONFIGURATION +# ============================================================================= + +# Queue 
configuration (Valkey only - NATS removed) +SCHEDULER__QUEUE__KIND=Valkey +SCHEDULER__QUEUE__VALKEY__URL=cache.stella-ops.local:6379 +SCHEDULER_SCANNER_BASEADDRESS=http://scanner.stella-ops.local + +# ============================================================================= +# REKOR / SIGSTORE CONFIGURATION +# ============================================================================= + +# Rekor server URL (default: public Sigstore, use http://rekor-v2:3000 for local) +REKOR_SERVER_URL=https://rekor.sigstore.dev +REKOR_VERSION=V2 +REKOR_TILE_BASE_URL= +REKOR_LOG_ID=c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d +REKOR_TILES_IMAGE=ghcr.io/sigstore/rekor-tiles:latest + +# ============================================================================= +# ADVISORY AI CONFIGURATION +# ============================================================================= + +ADVISORY_AI_WEB_PORT=8448 +ADVISORY_AI_SBOM_BASEADDRESS=http://scanner.stella-ops.local +ADVISORY_AI_INFERENCE_MODE=Local +ADVISORY_AI_REMOTE_BASEADDRESS= +ADVISORY_AI_REMOTE_APIKEY= + +# ============================================================================= +# CRYPTO CONFIGURATION +# ============================================================================= + +# Crypto profile: default, china, russia, eu +STELLAOPS_CRYPTO_PROFILE=default + +# Enable crypto simulation (for testing) +STELLAOPS_CRYPTO_ENABLE_SIM=0 +STELLAOPS_CRYPTO_SIM_URL=http://sim-crypto:8080 + +# CryptoPro (Russia only) - requires EULA acceptance +CRYPTOPRO_PORT=18080 +CRYPTOPRO_ACCEPT_EULA=0 +CRYPTOPRO_CONTAINER_NAME=stellaops-signing +CRYPTOPRO_USE_MACHINE_STORE=true +CRYPTOPRO_PROVIDER_TYPE=80 + +# SM Remote (China only) +SM_REMOTE_PORT=56080 +SM_SOFT_ALLOWED=1 +SM_REMOTE_HSM_URL= +SM_REMOTE_HSM_API_KEY= +SM_REMOTE_HSM_TIMEOUT=30000 + +# ============================================================================= +# NETWORKING +# ============================================================================= + +# External reverse proxy network (Traefik, Envoy, etc.) 
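+# A minimal sketch for pre-creating this network on the host (assuming the
+# compose file declares it as external under this default name):
+#   docker network create stellaops_frontdoor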
+FRONTDOOR_NETWORK=stellaops_frontdoor + +# ============================================================================= +# TELEMETRY (optional) +# ============================================================================= + +OTEL_GRPC_PORT=4317 +OTEL_HTTP_PORT=4318 +OTEL_PROMETHEUS_PORT=9464 +PROMETHEUS_PORT=9090 +TEMPO_PORT=3200 +LOKI_PORT=3100 +PROMETHEUS_RETENTION=15d diff --git a/devops/compose/docker-compose.dev.yml b/devops/compose/docker-compose.dev.yml index ada7997ac..1fb608ae7 100644 --- a/devops/compose/docker-compose.dev.yml +++ b/devops/compose/docker-compose.dev.yml @@ -10,9 +10,11 @@ # docker compose -f docker-compose.dev.yml up -d # # This provides: -# - PostgreSQL 18.1 on port 5432 -# - Valkey 9.0.1 on port 6379 -# - RustFS on port 8080 +# - PostgreSQL 18.1 on 127.1.1.1:5432 (db.stella-ops.local) +# - Valkey 9.0.1 on 127.1.1.2:6379 (cache.stella-ops.local) +# - SeaweedFS (S3) on 127.1.1.3:8080 (s3.stella-ops.local) +# - Rekor v2 (tiles) on 127.1.1.4:3322 (rekor.stella-ops.local) +# - Zot (OCI registry) on 127.1.1.5:80 (registry.stella-ops.local) # ============================================================================= services: @@ -27,7 +29,7 @@ services: volumes: - postgres-data:/var/lib/postgresql/data ports: - - "${POSTGRES_PORT:-5432}:5432" + - "127.1.1.1:${POSTGRES_PORT:-5432}:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-stellaops}"] interval: 10s @@ -42,7 +44,7 @@ services: volumes: - valkey-data:/data ports: - - "${VALKEY_PORT:-6379}:6379" + - "127.1.1.2:${VALKEY_PORT:-6379}:6379" healthcheck: test: ["CMD", "valkey-cli", "ping"] interval: 10s @@ -50,24 +52,52 @@ services: retries: 5 rustfs: - image: registry.stella-ops.org/stellaops/rustfs:2025.09.2 + image: chrislusf/seaweedfs:latest container_name: stellaops-dev-rustfs restart: unless-stopped - command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] - environment: - RUSTFS__LOG__LEVEL: info - RUSTFS__STORAGE__PATH: /data + command: ["server", "-s3", "-s3.port=8080", "-dir=/data"] volumes: - rustfs-data:/data ports: - - "${RUSTFS_PORT:-8080}:8080" + - "127.1.1.3:${RUSTFS_PORT:-8080}:8080" healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8080/health"] + test: ["CMD", "wget", "-qO-", "http://localhost:8080/status"] interval: 30s timeout: 10s retries: 3 + rekor-v2: + image: ${REKOR_TILES_IMAGE:-ghcr.io/sigstore/rekor-tiles:latest} + container_name: stellaops-dev-rekor + restart: unless-stopped + volumes: + - rekor-tiles-data:/var/lib/rekor-tiles + ports: + - "127.1.1.4:${REKOR_PORT:-3322}:3322" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:3322/healthz"] + interval: 30s + timeout: 10s + retries: 3 + + registry: + image: ghcr.io/project-zot/zot-linux-amd64:v2.1.3 + container_name: stellaops-dev-registry + restart: unless-stopped + volumes: + - registry-data:/var/lib/registry + - ./zot-config.json:/etc/zot/config.json:ro + ports: + - "127.1.1.5:80:5000" + healthcheck: + test: ["CMD", "wget", "-qO-", "http://localhost:5000/v2/"] + interval: 30s + timeout: 5s + retries: 3 + volumes: postgres-data: valkey-data: rustfs-data: + rekor-tiles-data: + registry-data: diff --git a/devops/compose/docker-compose.stella-ops.yml b/devops/compose/docker-compose.stella-ops.yml index cc29bd50e..7b13b63e9 100644 --- a/devops/compose/docker-compose.stella-ops.yml +++ b/devops/compose/docker-compose.stella-ops.yml @@ -2,7 +2,7 @@ # STELLA OPS - MAIN STACK # ============================================================================= # Consolidated Docker Compose 
for the complete StellaOps platform. -# Infrastructure: PostgreSQL 18.1, Valkey 9.0.1, RustFS, Rekor v2 +# Infrastructure: PostgreSQL 18.1, Valkey 9.0.1, SeaweedFS (S3), Rekor v2, Zot (OCI) # # Usage: # docker compose -f devops/compose/docker-compose.stella-ops.yml up -d @@ -26,7 +26,40 @@ x-release-labels: &release-labels com.stellaops.profile: "default" x-postgres-connection: &postgres-connection - "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}" + "Host=db.stella-ops.local;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}" + +x-kestrel-cert: &kestrel-cert + Kestrel__Certificates__Default__Path: "/app/etc/certs/kestrel-dev.pfx" + Kestrel__Certificates__Default__Password: "devpass" + +x-cert-volume: &cert-volume + "../../etc/authority/keys:/app/etc/certs:ro" + +x-plugin-tmpfs: &plugin-tmpfs + /app/plugins: + mode: "1777" + +# --------------------------------------------------------------------------- +# Common anchors for the 60-service stack +# --------------------------------------------------------------------------- +x-depends-infra: &depends-infra + postgres: + condition: service_healthy + valkey: + condition: service_healthy + +x-healthcheck-tcp: &healthcheck-tcp + interval: 30s + timeout: 5s + retries: 3 + start_period: 15s + +x-healthcheck-worker: &healthcheck-worker + test: ["CMD", "/usr/local/bin/healthcheck.sh"] + interval: 30s + timeout: 5s + start_period: 30s + retries: 3 networks: stellaops: @@ -41,20 +74,19 @@ volumes: valkey-data: rustfs-data: rekor-tiles-data: + registry-data: concelier-jobs: scanner-surface-cache: advisory-ai-queue: advisory-ai-plans: advisory-ai-outputs: + evidence-data: services: # =========================================================================== # INFRASTRUCTURE SERVICES # =========================================================================== - # --------------------------------------------------------------------------- - # PostgreSQL 18.1 - Primary database - # --------------------------------------------------------------------------- postgres: image: docker.io/library/postgres:18.1 container_name: stellaops-postgres @@ -68,9 +100,11 @@ services: - postgres-data:/var/lib/postgresql/data - ./postgres-init:/docker-entrypoint-initdb.d:ro ports: - - "${POSTGRES_PORT:-5432}:5432" + - "127.1.1.1:${POSTGRES_PORT:-5432}:5432" networks: - - stellaops + stellaops: + aliases: + - db.stella-ops.local healthcheck: test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-stellaops} -d ${POSTGRES_DB:-stellaops_platform}"] interval: 10s @@ -79,9 +113,6 @@ services: start_period: 10s labels: *release-labels - # --------------------------------------------------------------------------- - # Valkey 9.0.1 - Cache and message queue (Redis-compatible) - # --------------------------------------------------------------------------- valkey: image: docker.io/valkey/valkey:9.0.1 container_name: stellaops-valkey @@ -90,9 +121,11 @@ services: volumes: - valkey-data:/data ports: - - "${VALKEY_PORT:-6379}:6379" + - "127.1.1.2:${VALKEY_PORT:-6379}:6379" networks: - - stellaops + stellaops: + aliases: + - cache.stella-ops.local healthcheck: test: ["CMD", "valkey-cli", "ping"] interval: 10s @@ -100,56 +133,66 @@ services: retries: 5 labels: *release-labels - # --------------------------------------------------------------------------- - # RustFS - S3-compatible object storage - # 
--------------------------------------------------------------------------- rustfs: - image: registry.stella-ops.org/stellaops/rustfs:2025.09.2 + image: chrislusf/seaweedfs:latest container_name: stellaops-rustfs - command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] + command: ["server", "-s3", "-s3.port=8333", "-volume.port=8080", "-dir=/data"] restart: unless-stopped - environment: - RUSTFS__LOG__LEVEL: info - RUSTFS__STORAGE__PATH: /data volumes: - rustfs-data:/data ports: - - "${RUSTFS_HTTP_PORT:-8080}:8080" + - "127.1.1.3:${RUSTFS_HTTP_PORT:-8333}:8333" networks: - - stellaops + stellaops: + aliases: + - s3.stella-ops.local healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8080/health"] + test: ["CMD", "wget", "-qO-", "http://127.0.0.1:8333/"] interval: 30s timeout: 10s retries: 3 labels: *release-labels - # --------------------------------------------------------------------------- - # Rekor v2 (tiles) - Sigstore transparency log - # --------------------------------------------------------------------------- + registry: + image: ghcr.io/project-zot/zot-linux-amd64:v2.1.3 + container_name: stellaops-registry + restart: unless-stopped + volumes: + - registry-data:/var/lib/registry + - ./zot-config.json:/etc/zot/config.json:ro + ports: + - "127.1.1.5:80:5000" + networks: + stellaops: + aliases: + - registry.stella-ops.local + healthcheck: + disable: true + labels: *release-labels + rekor-v2: image: ${REKOR_TILES_IMAGE:-ghcr.io/sigstore/rekor-tiles:latest} container_name: stellaops-rekor restart: unless-stopped volumes: - rekor-tiles-data:/var/lib/rekor-tiles + ports: + - "127.1.1.4:${REKOR_PORT:-3322}:3322" networks: - - stellaops + stellaops: + aliases: + - rekor.stella-ops.local profiles: ["sigstore"] labels: <<: *release-labels com.stellaops.component: "rekor-v2" - # --------------------------------------------------------------------------- - # Sigstore CLI tools (on-demand) - # --------------------------------------------------------------------------- rekor-cli: image: ghcr.io/sigstore/rekor-cli:v1.4.3 entrypoint: ["rekor-cli"] command: ["version"] profiles: ["sigstore"] - networks: - - stellaops + networks: [stellaops] labels: *release-labels cosign: @@ -157,19 +200,110 @@ services: entrypoint: ["cosign"] command: ["version"] profiles: ["sigstore"] - networks: - - stellaops + networks: [stellaops] labels: *release-labels # =========================================================================== - # APPLICATION SERVICES + # APPLICATION SERVICES (ordered by port-registry slot) # =========================================================================== - # --------------------------------------------------------------------------- - # Authority - OAuth2/OIDC identity provider - # --------------------------------------------------------------------------- + # --- Slot 0: Router Gateway ------------------------------------------------ + router-gateway: + image: stellaops/router-gateway:dev + container_name: stellaops-router-gateway + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.2:80:80" + networks: + stellaops: + aliases: + - router.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 1: Platform 
------------------------------------------------------ + platform: + image: stellaops/platform:dev + container_name: stellaops-platform + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + STELLAOPS_ROUTER_URL: "http://router.stella-ops.local" + STELLAOPS_PLATFORM_URL: "http://platform.stella-ops.local" + STELLAOPS_AUTHORITY_URL: "http://authority.stella-ops.local" + STELLAOPS_GATEWAY_URL: "http://gateway.stella-ops.local" + STELLAOPS_ATTESTOR_URL: "http://attestor.stella-ops.local" + STELLAOPS_EVIDENCELOCKER_URL: "http://evidencelocker.stella-ops.local" + STELLAOPS_SCANNER_URL: "http://scanner.stella-ops.local" + STELLAOPS_CONCELIER_URL: "http://concelier.stella-ops.local" + STELLAOPS_EXCITITOR_URL: "http://excititor.stella-ops.local" + STELLAOPS_VEXHUB_URL: "http://vexhub.stella-ops.local" + STELLAOPS_VEXLENS_URL: "http://vexlens.stella-ops.local" + STELLAOPS_VULNEXPLORER_URL: "http://vulnexplorer.stella-ops.local" + STELLAOPS_POLICY_ENGINE_URL: "http://policy-engine.stella-ops.local" + STELLAOPS_POLICY_GATEWAY_URL: "http://policy-gateway.stella-ops.local" + STELLAOPS_RISKENGINE_URL: "http://riskengine.stella-ops.local" + STELLAOPS_ORCHESTRATOR_URL: "http://orchestrator.stella-ops.local" + STELLAOPS_TASKRUNNER_URL: "http://taskrunner.stella-ops.local" + STELLAOPS_SCHEDULER_URL: "http://scheduler.stella-ops.local" + STELLAOPS_GRAPH_URL: "http://graph.stella-ops.local" + STELLAOPS_CARTOGRAPHER_URL: "http://cartographer.stella-ops.local" + STELLAOPS_REACHGRAPH_URL: "http://reachgraph.stella-ops.local" + STELLAOPS_TIMELINEINDEXER_URL: "http://timelineindexer.stella-ops.local" + STELLAOPS_TIMELINE_URL: "http://timeline.stella-ops.local" + STELLAOPS_FINDINGS_LEDGER_URL: "http://findings.stella-ops.local" + STELLAOPS_DOCTOR_URL: "http://doctor.stella-ops.local" + STELLAOPS_OPSMEMORY_URL: "http://opsmemory.stella-ops.local" + STELLAOPS_NOTIFIER_URL: "http://notifier.stella-ops.local" + STELLAOPS_NOTIFY_URL: "http://notify.stella-ops.local" + STELLAOPS_SIGNER_URL: "http://signer.stella-ops.local" + STELLAOPS_SMREMOTE_URL: "http://smremote.stella-ops.local" + STELLAOPS_AIRGAP_CONTROLLER_URL: "http://airgap-controller.stella-ops.local" + STELLAOPS_AIRGAP_TIME_URL: "http://airgap-time.stella-ops.local" + STELLAOPS_PACKSREGISTRY_URL: "http://packsregistry.stella-ops.local" + STELLAOPS_REGISTRY_TOKENSERVICE_URL: "http://registry-token.stella-ops.local" + STELLAOPS_BINARYINDEX_URL: "http://binaryindex.stella-ops.local" + STELLAOPS_ISSUERDIRECTORY_URL: "http://issuerdirectory.stella-ops.local" + STELLAOPS_SYMBOLS_URL: "http://symbols.stella-ops.local" + STELLAOPS_SBOMSERVICE_URL: "http://sbomservice.stella-ops.local" + STELLAOPS_EXPORTCENTER_URL: "http://exportcenter.stella-ops.local" + STELLAOPS_REPLAY_URL: "http://replay.stella-ops.local" + STELLAOPS_INTEGRATIONS_URL: "http://integrations.stella-ops.local" + STELLAOPS_SIGNALS_URL: "http://signals.stella-ops.local" + STELLAOPS_ADVISORYAI_URL: "http://advisoryai.stella-ops.local" + STELLAOPS_UNKNOWNS_URL: "http://unknowns.stella-ops.local" + volumes: + - *cert-volume + ports: + - "127.1.0.3:80:80" + networks: + stellaops: + aliases: + - platform.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 2: Authority 
----------------------------------------------------- authority: - image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 + image: stellaops/authority:dev container_name: stellaops-authority restart: unless-stopped depends_on: @@ -178,156 +312,214 @@ services: valkey: condition: service_healthy environment: - STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" - STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres" - STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - STELLAOPS_AUTHORITY__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" - STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" - STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority/plugins" + ASPNETCORE_URLS: "http://+:8440" + Kestrel__Certificates__Default__Path: "/app/etc/authority/keys/kestrel-dev.pfx" + Kestrel__Certificates__Default__Password: "devpass" + STELLAOPS_AUTHORITY_AUTHORITY__SCHEMAVERSION: "1" + STELLAOPS_AUTHORITY_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER:-http://authority.stella-ops.local}" + STELLAOPS_AUTHORITY_AUTHORITY__STORAGE__CONNECTIONSTRING: *postgres-connection + STELLAOPS_AUTHORITY_AUTHORITY__CACHE__REDIS__CONNECTIONSTRING: "cache.stella-ops.local:6379" + STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ACTIVEKEYID: "dev-signing-key-1" + STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__KEYPATH: "/app/etc/authority/keys/signing-dev.pem" + STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ACTIVEKEYID: "dev-ack-key-1" + STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__KEYPATH: "/app/etc/authority/keys/ack-token-dev.pem" + STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ALLOWEDHOSTS__0: "notify.stella-ops.local" + STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ESCALATION__SCOPE: "notify.escalate" + STELLAOPS_AUTHORITY_AUTHORITY__BOOTSTRAP__ENABLED: "false" + STELLAOPS_AUTHORITY_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" + STELLAOPS_AUTHORITY_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority/plugins" volumes: - ../../etc/authority:/app/etc/authority:ro - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro + tmpfs: + - /app/plugins:mode=1777 ports: - - "${AUTHORITY_PORT:-8440}:8440" + - "127.1.0.4:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - authority.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Signer - Cryptographic signing service - # --------------------------------------------------------------------------- - signer: - image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e - container_name: stellaops-signer + # --- Slot 3: Gateway ------------------------------------------------------- + gateway: + image: stellaops/gateway:dev + container_name: stellaops-gateway restart: unless-stopped - depends_on: - - authority - - valkey + depends_on: *depends-infra environment: - SIGNER__AUTHORITY__BASEURL: "https://authority:8440" - SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}" - SIGNER__STORAGE__DRIVER: "postgres" - SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - SIGNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: 
"cache.stella-ops.local:6379" + volumes: + - *cert-volume ports: - - "${SIGNER_PORT:-8441}:8441" + - "127.1.0.5:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - gateway.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Attestor - SLSA attestation service - # --------------------------------------------------------------------------- + # --- Slot 4: Attestor ------------------------------------------------------ attestor: - image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f + image: stellaops/attestor:dev container_name: stellaops-attestor restart: unless-stopped depends_on: - signer environment: - ATTESTOR__SIGNER__BASEURL: "https://signer:8441" - ATTESTOR__STORAGE__DRIVER: "postgres" - ATTESTOR__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - ATTESTOR__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" + ASPNETCORE_URLS: "http://+:8442" + <<: *kestrel-cert + ATTESTOR_ATTESTOR__SIGNER__BASEURL: "http://signer.stella-ops.local" + ConnectionStrings__Default: *postgres-connection + volumes: + - *cert-volume ports: - - "${ATTESTOR_PORT:-8442}:8442" + - "127.1.0.6:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - attestor.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Issuer Directory - CSAF publisher registry - # --------------------------------------------------------------------------- - issuer-directory: - image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0 - container_name: stellaops-issuer-directory + # --- Slot 5: Attestor TileProxy -------------------------------------------- + attestor-tileproxy: + image: stellaops/attestor-tileproxy:dev + container_name: stellaops-attestor-tileproxy restart: unless-stopped depends_on: - - postgres - - authority + - attestor environment: - ISSUERDIRECTORY__CONFIG: "/app/etc/issuer-directory/issuer-directory.yaml" - ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" - ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440" - ISSUERDIRECTORY__STORAGE__DRIVER: "postgres" - ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}" + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + TILE_PROXY__tile_proxy__UpstreamUrl: "http://rekor.stella-ops.local:3322" + TILE_PROXY__tile_proxy__Origin: "stellaops-tileproxy" + TILE_PROXY__tile_proxy__Cache__BasePath: "/var/cache/stellaops/tiles" + TILE_PROXY__tile_proxy__Cache__MaxSizeGb: "1" volumes: - - ../../etc/issuer-directory:/app/etc/issuer-directory:ro - ports: - - "${ISSUER_DIRECTORY_PORT:-8447}:8080" + - *cert-volume + tmpfs: + - /var/cache/stellaops/tiles:mode=1777 networks: - - stellaops + stellaops: + aliases: + - attestor-tileproxy.stella-ops.local + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Concelier - Advisory aggregation service - # 
--------------------------------------------------------------------------- - concelier: - image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5 - container_name: stellaops-concelier + # --- Slot 6: Evidence Locker ------------------------------------------------ + evidence-locker-web: + image: stellaops/evidence-locker-web:dev + container_name: stellaops-evidence-locker-web restart: unless-stopped - depends_on: - - postgres - - valkey - - rustfs + depends_on: *depends-infra environment: - CONCELIER__STORAGE__DRIVER: "postgres" - CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - CONCELIER__STORAGE__S3__ENDPOINT: "http://rustfs:8080" - CONCELIER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" - CONCELIER__AUTHORITY__BASEURL: "https://authority:8440" - CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true" - CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}" + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + EvidenceLocker__Database__ConnectionString: *postgres-connection + EvidenceLocker__Database__ApplyMigrationsAtStartup: "true" + EvidenceLocker__ObjectStore__Kind: "FileSystem" + EvidenceLocker__ObjectStore__FileSystem__RootPath: "/data/evidence" + EvidenceLocker__ObjectStore__EnforceWriteOnce: "false" + EvidenceLocker__Signing__Enabled: "false" + EvidenceLocker__Signing__Algorithm: "ES256" + EvidenceLocker__Signing__KeyId: "dev-evidence-key" + EvidenceLocker__Quotas__MaxMaterialCount: "128" + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + EvidenceLocker__Authority__BaseUrl: "http://authority.stella-ops.local" volumes: - - concelier-jobs:/var/lib/concelier/jobs + - *cert-volume + - evidence-data:/data/evidence ports: - - "${CONCELIER_PORT:-8445}:8445" + - "127.1.0.7:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - evidencelocker.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Scanner Web - SBOM/vulnerability scanning API - # --------------------------------------------------------------------------- + evidence-locker-worker: + image: stellaops/evidence-locker-worker:dev + container_name: stellaops-evidence-locker-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + EvidenceLocker__Database__ConnectionString: *postgres-connection + EvidenceLocker__Database__ApplyMigrationsAtStartup: "true" + EvidenceLocker__ObjectStore__Kind: "FileSystem" + EvidenceLocker__ObjectStore__FileSystem__RootPath: "/data/evidence" + EvidenceLocker__ObjectStore__EnforceWriteOnce: "false" + EvidenceLocker__Signing__Enabled: "false" + EvidenceLocker__Signing__Algorithm: "ES256" + EvidenceLocker__Signing__KeyId: "dev-evidence-key" + EvidenceLocker__Quotas__MaxMaterialCount: "128" + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + - evidence-data:/data/evidence + networks: + stellaops: + aliases: + - evidence-locker-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 8: Scanner ------------------------------------------------------- scanner-web: - image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7 + image: 
stellaops/scanner-web:dev container_name: stellaops-scanner-web restart: unless-stopped depends_on: - - postgres - - valkey - - concelier - - rustfs + postgres: + condition: service_healthy + valkey: + condition: service_healthy + rustfs: + condition: service_started environment: - SCANNER__STORAGE__DRIVER: "postgres" - SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - # Queue configuration - Valkey only - SCANNER__QUEUE__BROKER: "valkey://valkey:6379" - # Event streaming - SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}" - SCANNER__EVENTS__DRIVER: "valkey" - SCANNER__EVENTS__DSN: "valkey:6379" - SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" - SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" - SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" - # Offline kit - SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}" - SCANNER__OFFLINEKIT__REQUIREDSSE: "${SCANNER_OFFLINEKIT_REQUIREDSSE:-true}" - SCANNER__OFFLINEKIT__REKOROFFLINEMODE: "${SCANNER_OFFLINEKIT_REKOROFFLINEMODE:-true}" - SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}" - SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}" - # Surface cache - SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}" + ASPNETCORE_URLS: "http://+:8444" + <<: *kestrel-cert + SCANNER_SCANNER__PLUGINS__BASEDIRECTORY: "/app" + SCANNER_SCANNER__STORAGE__DRIVER: "postgres" + SCANNER_SCANNER__STORAGE__DSN: *postgres-connection + SCANNER_SCANNER__STORAGE__COMMANDTIMEOUTSECONDS: "30" + SCANNER_SCANNER__STORAGE__HEALTHCHECKTIMEOUTSECONDS: "5" + SCANNER_SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER_SCANNER__ARTIFACTSTORE__ENDPOINT: "http://s3.stella-ops.local:8333" + SCANNER_SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER_SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER_SCANNER__QUEUE__DRIVER: "redis" + SCANNER_SCANNER__QUEUE__DSN: "cache.stella-ops.local:6379" + SCANNER_SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}" + SCANNER_SCANNER__EVENTS__DRIVER: "redis" + SCANNER_SCANNER__EVENTS__DSN: "cache.stella-ops.local:6379" + SCANNER_SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" + SCANNER_SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" + SCANNER_SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" + SCANNER_SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}" + SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://s3.stella-ops.local:8333}" SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}" SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}" SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}" @@ -342,139 +534,1283 @@ services: volumes: - ../../etc/scanner:/app/etc/scanner:ro - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro - - scanner-surface-cache:/var/lib/stellaops/surface - 
${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro - ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + - /var/lib/stellaops/surface:mode=1777 ports: - - "${SCANNER_WEB_PORT:-8444}:8444" + - "127.1.0.8:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - scanner.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Scanner Worker - Background scanning jobs - # --------------------------------------------------------------------------- scanner-worker: - image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab + image: stellaops/scanner-worker:dev container_name: stellaops-scanner-worker restart: unless-stopped depends_on: - - scanner-web - - valkey - - rustfs + postgres: + condition: service_healthy + valkey: + condition: service_healthy + rustfs: + condition: service_started environment: - SCANNER__STORAGE__DRIVER: "postgres" - SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379" - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - # Queue configuration - Valkey only - SCANNER__QUEUE__BROKER: "valkey://valkey:6379" - # Surface cache - SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}" - SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}" - SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}" - SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}" - SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}" - SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}" - SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}" - SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}" - SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}" - SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}" - SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}" - SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}" + <<: *kestrel-cert + # Scanner worker options + Scanner__Worker__Authority__Enabled: "false" + BinaryIndex__Enabled: "false" + # Scanner storage (Postgres + S3/RustFS object store) + ScannerStorage__Postgres__ConnectionString: *postgres-connection + ScannerStorage__Postgres__SchemaName: "scanner" + ScannerStorage__ObjectStore__Driver: "rustfs" + ScannerStorage__ObjectStore__BucketName: "scanner-artifacts" + ScannerStorage__ObjectStore__RustFs__BaseUrl: "http://s3.stella-ops.local:8333" + # Surface environment (read via Environment.GetEnvironmentVariable) + SCANNER_SURFACE_FS_ENDPOINT: "http://s3.stella-ops.local:8333" + SURFACE_FS_ENDPOINT: 
"http://s3.stella-ops.local:8333" + SCANNER_SURFACE_SECRETS_NAMESPACE: "stellaops" + SCANNER_SURFACE_SECRETS_PROVIDER: "file" + SCANNER_SURFACE_SECRETS_ROOT: "/var/lib/stellaops/surface" + SCANNER_SURFACE_VALIDATION_DISABLED: "true" + # EPSS bundle source path (EpssBundleSource constructor) + EPSS_BUNDLE_PATH: "/app/epss" volumes: - - scanner-surface-cache:/var/lib/stellaops/surface - - ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro + - *cert-volume + tmpfs: + - /var/lib/stellaops/surface:mode=1777 + - /app/epss:mode=1777 networks: - - stellaops + stellaops: + aliases: + - scanner-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 9: Concelier ----------------------------------------------------- + concelier: + image: stellaops/concelier:dev + container_name: stellaops-concelier + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + valkey: + condition: service_healthy + rustfs: + condition: service_started + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + CONCELIER_PLUGINS__BASEDIRECTORY: "/app" + CONCELIER_POSTGRESSTORAGE__CONNECTIONSTRING: *postgres-connection + CONCELIER_POSTGRESSTORAGE__ENABLED: "true" + CONCELIER_S3__ENDPOINT: "http://s3.stella-ops.local:8333" + CONCELIER_AUTHORITY__BASEURL: "http://authority.stella-ops.local" + CONCELIER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true" + CONCELIER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}" + volumes: + - concelier-jobs:/var/lib/concelier/jobs + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + ports: + - "127.1.0.9:80:80" + networks: + stellaops: + aliases: + - concelier.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 10: Excititor ---------------------------------------------------- + excititor: + image: stellaops/excititor:dev + container_name: stellaops-excititor + restart: unless-stopped + profiles: ["code-fix-pending"] # Docker build error from prior session + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + # Postgres options (section: Postgres:Excititor) + Postgres__Excititor__ConnectionString: *postgres-connection + Postgres__Excititor__SchemaName: "vex" + Excititor__Concelier__BaseUrl: "http://concelier.stella-ops.local" + Excititor__Storage__Driver: "postgres" + volumes: + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + ports: + - "127.1.0.10:80:80" + networks: + stellaops: + aliases: + - excititor.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + excititor-worker: + image: stellaops/excititor-worker:dev + container_name: stellaops-excititor-worker + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + valkey: + condition: service_healthy + environment: + <<: *kestrel-cert + # Postgres options (section: Postgres:Excititor) + Postgres__Excititor__ConnectionString: *postgres-connection + Postgres__Excititor__SchemaName: "vex" + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + Excititor__Concelier__BaseUrl: "http://concelier.stella-ops.local" + Excititor__Storage__Driver: "postgres" + Excititor__Worker__DisableConsensus: "true" + # 
TenantAuthorityOptionsValidator requires BaseUrls dict with at least one entry + Excititor__Authority__BaseUrls__default: "http://authority.stella-ops.local" + # IssuerDirectoryClientOptions.Validate() requires BaseAddress + IssuerDirectory__Client__BaseAddress: "http://issuerdirectory.stella-ops.local" + volumes: + - *cert-volume + networks: + stellaops: + aliases: + - excititor-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 11: VexHub ------------------------------------------------------- + vexhub-web: + image: stellaops/vexhub-web:dev + container_name: stellaops-vexhub-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.11:80:80" + networks: + stellaops: + aliases: + - vexhub.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 12: VexLens ------------------------------------------------------ + vexlens-web: + image: stellaops/vexlens-web:dev + container_name: stellaops-vexlens-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.12:80:80" + networks: + stellaops: + aliases: + - vexlens.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 13: VulnExplorer (api) ------------------------------------------- + api: + image: stellaops/api:dev + container_name: stellaops-api + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.13:80:80" + networks: + stellaops: + aliases: + - vulnexplorer.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 14: Policy Engine ------------------------------------------------ + policy-engine: + image: stellaops/policy-engine:dev + container_name: stellaops-policy-engine + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + STELLAOPS_POLICY_ENGINE_Postgres__Policy__ConnectionString: *postgres-connection + STELLAOPS_POLICY_ENGINE_ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.14:80:80" + networks: + stellaops: + aliases: + - policy-engine.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 15: Policy Gateway ----------------------------------------------- + policy: + image: stellaops/policy:dev + container_name: stellaops-policy + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8084" + <<: *kestrel-cert + ConnectionStrings__Default: 
*postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.15:80:80" + networks: + stellaops: + aliases: + - policy-gateway.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 16: RiskEngine --------------------------------------------------- + riskengine-web: + image: stellaops/riskengine-web:dev + container_name: stellaops-riskengine-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.16:80:80" + networks: + stellaops: + aliases: + - riskengine.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + riskengine-worker: + image: stellaops/riskengine-worker:dev + container_name: stellaops-riskengine-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - riskengine-worker.stella-ops.local + labels: *release-labels + + # --- Slot 17: Orchestrator ------------------------------------------------- + orchestrator: + image: stellaops/orchestrator:dev + container_name: stellaops-orchestrator + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.17:80:80" + networks: + stellaops: + aliases: + - orchestrator.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + orchestrator-worker: + image: stellaops/orchestrator-worker:dev + container_name: stellaops-orchestrator-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - orchestrator-worker.stella-ops.local + labels: *release-labels + + # --- Slot 18: TaskRunner --------------------------------------------------- + taskrunner-web: + image: stellaops/taskrunner-web:dev + container_name: stellaops-taskrunner-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.18:80:80" + networks: + stellaops: + aliases: + - taskrunner.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + taskrunner-worker: + image: stellaops/taskrunner-worker:dev + container_name: stellaops-taskrunner-worker + restart: unless-stopped + depends_on: *depends-infra + 
environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + # AirGap egress policy (disable for dev) + AirGap__Egress__Enabled: "false" + volumes: + - *cert-volume + tmpfs: + - /app/queue:mode=1777 + - /app/state:mode=1777 + - /app/artifacts:mode=1777 + - /app/approvals:mode=1777 + - /app/logs:mode=1777 + networks: + stellaops: + aliases: + - taskrunner-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 19: Scheduler ---------------------------------------------------- + scheduler-web: + image: stellaops/scheduler-web:dev + container_name: stellaops-scheduler-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + - /plugins:mode=1777 + ports: + - "127.1.0.19:80:80" + networks: + stellaops: + aliases: + - scheduler.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Scheduler Worker - Background job scheduling - # --------------------------------------------------------------------------- scheduler-worker: - image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0 + image: stellaops/scheduler-worker:dev container_name: stellaops-scheduler-worker restart: unless-stopped depends_on: - - postgres - - valkey - - scanner-web - command: - - "dotnet" - - "StellaOps.Scheduler.Worker.Host.dll" + postgres: + condition: service_healthy + valkey: + condition: service_healthy environment: - SCHEDULER__STORAGE__DRIVER: "postgres" - SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - # Queue configuration - Valkey only - SCHEDULER__QUEUE__KIND: "Valkey" - SCHEDULER__QUEUE__VALKEY__URL: "valkey:6379" - SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}" + <<: *kestrel-cert + # Queue config (Redis transport) + scheduler__queue__Kind: "Redis" + scheduler__queue__Redis__ConnectionString: "cache.stella-ops.local:6379" + # Persistence config (section: Scheduler:Storage, subsection: Postgres:Scheduler) + Scheduler__Storage__Postgres__Scheduler__ConnectionString: *postgres-connection + Scheduler__Storage__Postgres__Scheduler__SchemaName: "scheduler" + # Worker config + Scheduler__Worker__Runner__Scanner__BaseAddress: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner.stella-ops.local}" + Scheduler__Worker__Graph__Cartographer__BaseAddress: "http://cartographer.stella-ops.local" + Scheduler__Worker__Graph__SchedulerApi__BaseAddress: "http://scheduler.stella-ops.local" + Scheduler__Worker__Policy__Api__BaseAddress: "http://policy.stella-ops.local" + # Surface environment + SURFACE_FS_ENDPOINT: "http://s3.stella-ops.local:8333" + volumes: + - *cert-volume + tmpfs: + - /var/lib/stellaops/surface:mode=1777 networks: - - stellaops + stellaops: + aliases: + - scheduler-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker labels: *release-labels - # --------------------------------------------------------------------------- - # Notify Web - Notification service - # --------------------------------------------------------------------------- + # --- Slot 20: 
Graph API ---------------------------------------------------- + graph-api: + image: stellaops/graph-api:dev + container_name: stellaops-graph-api + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.20:80:80" + networks: + stellaops: + aliases: + - graph.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 21: Cartographer ------------------------------------------------- + cartographer: + image: stellaops/cartographer:dev + container_name: stellaops-cartographer + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.21:80:80" + networks: + stellaops: + aliases: + - cartographer.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 22: ReachGraph --------------------------------------------------- + reachgraph-web: + image: stellaops/reachgraph-web:dev + container_name: stellaops-reachgraph-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.22:80:80" + networks: + stellaops: + aliases: + - reachgraph.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 23: Timeline Indexer --------------------------------------------- + timeline-indexer-web: + image: stellaops/timeline-indexer-web:dev + container_name: stellaops-timeline-indexer-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.23:80:80" + networks: + stellaops: + aliases: + - timelineindexer.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + timeline-indexer-worker: + image: stellaops/timeline-indexer-worker:dev + container_name: stellaops-timeline-indexer-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - timeline-indexer-worker.stella-ops.local + labels: *release-labels + + # --- Slot 24: Timeline ---------------------------------------------------- + timeline-web: + image: stellaops/timeline-web:dev + container_name: stellaops-timeline-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + 
ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.24:80:80" + networks: + stellaops: + aliases: + - timeline.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 25: Findings Ledger ---------------------------------------------- + findings-ledger-web: + image: stellaops/findings-ledger-web:dev + container_name: stellaops-findings-ledger-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__FindingsLedger: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + findings__ledger__Database__ConnectionString: *postgres-connection + findings__ledger__Authority__Issuer: "http://authority.stella-ops.local" + findings__ledger__Authority__RequireHttpsMetadata: "false" + findings__ledger__Attachments__EncryptionKey: "IiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiI=" + findings__ledger__Attachments__SignedUrlBase: "http://findings.stella-ops.local/attachments" + findings__ledger__Attachments__SignedUrlSecret: "dev-signed-url-secret" + findings__ledger__Attachments__SignedUrlLifetime: "00:15:00" + findings__ledger__Attachments__RequireConsoleCsrf: "false" + volumes: + - *cert-volume + ports: + - "127.1.0.25:80:80" + networks: + stellaops: + aliases: + - findings.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 26: Doctor ------------------------------------------------------- + doctor-web: + image: stellaops/doctor-web:dev + container_name: stellaops-doctor-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.26:80:80" + networks: + stellaops: + aliases: + - doctor.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + doctor-scheduler: + image: stellaops/doctor-scheduler:dev + container_name: stellaops-doctor-scheduler + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - doctor-scheduler.stella-ops.local + labels: *release-labels + + # --- Slot 27: OpsMemory --------------------------------------------------- + opsmemory-web: + image: stellaops/opsmemory-web:dev + container_name: stellaops-opsmemory-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.27:80:80" + networks: + stellaops: + aliases: + - opsmemory.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: 
*release-labels + + # --- Slot 28: Notifier ---------------------------------------------------- + notifier-web: + image: stellaops/notifier-web:dev + container_name: stellaops-notifier-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.28:80:80" + networks: + stellaops: + aliases: + - notifier.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + notifier-worker: + image: stellaops/notifier-worker:dev + container_name: stellaops-notifier-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + notifier__queue__Transport: "redis" + notifier__queue__Redis__ConnectionString: "cache.stella-ops.local:6379" + notifier__storage__postgres__ConnectionString: *postgres-connection + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - notifier-worker.stella-ops.local + labels: *release-labels + + # --- Slot 29: Notify ------------------------------------------------------ notify-web: - image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.10.0} + image: stellaops/notify-web:dev container_name: stellaops-notify-web restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + DOTNET_ENVIRONMENT: Production + NOTIFY_NOTIFY__STORAGE__DRIVER: "postgres" + NOTIFY_NOTIFY__STORAGE__CONNECTIONSTRING: *postgres-connection + NOTIFY_NOTIFY__STORAGE__DATABASE: "notify" + NOTIFY_NOTIFY__PLUGINS__BASEDIRECTORY: "/app" + Postgres__Notify__ConnectionString: *postgres-connection + volumes: + - ../../etc/notify:/app/etc/notify:ro + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + ports: + - "127.1.0.29:80:80" + networks: + stellaops: + aliases: + - notify.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 30: Signer ------------------------------------------------------ + signer: + image: stellaops/signer:dev + container_name: stellaops-signer + restart: unless-stopped + depends_on: + - authority + - valkey + environment: + ASPNETCORE_URLS: "http://+:8441" + <<: *kestrel-cert + ConnectionStrings__KeyManagement: *postgres-connection + ConnectionStrings__Default: *postgres-connection + volumes: + - *cert-volume + ports: + - "127.1.0.30:80:80" + networks: + stellaops: + aliases: + - signer.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 31: SmRemote ---------------------------------------------------- + smremote: + image: stellaops/smremote:dev + container_name: stellaops-smremote + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.31:80:80" + networks: + stellaops: + aliases: + - smremote.stella-ops.local + 
frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 32: AirGap Controller -------------------------------------------- + airgap-controller: + image: stellaops/airgap-controller:dev + container_name: stellaops-airgap-controller + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.32:80:80" + networks: + stellaops: + aliases: + - airgap-controller.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 33: AirGap Time ------------------------------------------------- + airgap-time: + image: stellaops/airgap-time:dev + container_name: stellaops-airgap-time + restart: unless-stopped + profiles: ["airgap"] # Requires time anchor file - air-gap specific + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + volumes: + - *cert-volume + ports: + - "127.1.0.33:80:80" + networks: + stellaops: + aliases: + - airgap-time.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 34: PacksRegistry ----------------------------------------------- + packsregistry-web: + image: stellaops/packsregistry-web:dev + container_name: stellaops-packsregistry-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.34:80:80" + networks: + stellaops: + aliases: + - packsregistry.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + packsregistry-worker: + image: stellaops/packsregistry-worker:dev + container_name: stellaops-packsregistry-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + healthcheck: + <<: *healthcheck-worker + networks: + stellaops: + aliases: + - packsregistry-worker.stella-ops.local + labels: *release-labels + + # --- Slot 35: Registry Token ----------------------------------------------- + registry-token: + image: stellaops/registry-token:dev + container_name: stellaops-registry-token + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + RegistryTokenService__Signing__Issuer: "http://registry-token.stella-ops.local" + RegistryTokenService__Signing__KeyPath: "/app/etc/certs/kestrel-dev.pfx" + RegistryTokenService__Signing__Lifetime: "00:05:00" + RegistryTokenService__Registry__Realm: "http://registry.stella-ops.local" + RegistryTokenService__Authority__Issuer: "http://authority.stella-ops.local" + RegistryTokenService__Authority__Audience: "api://registry" + 
RegistryTokenService__Authority__RequireHttpsMetadata: "false" + RegistryTokenService__Plans__0__Name: "default" + RegistryTokenService__Plans__0__Repositories__0__Pattern: "*" + RegistryTokenService__Plans__0__Repositories__0__Actions__0: "pull" + RegistryTokenService__Plans__0__Repositories__0__Actions__1: "push" + volumes: + - *cert-volume + ports: + - "127.1.0.35:80:80" + networks: + stellaops: + aliases: + - registry-token.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 36: BinaryIndex -------------------------------------------------- + binaryindex-web: + image: stellaops/binaryindex-web:dev + container_name: stellaops-binaryindex-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.36:80:80" + networks: + stellaops: + aliases: + - binaryindex.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 37: Issuer Directory --------------------------------------------- + issuer-directory: + image: stellaops/issuer-directory-web:dev + container_name: stellaops-issuer-directory + restart: unless-stopped depends_on: - postgres - authority - - valkey environment: - DOTNET_ENVIRONMENT: Production - NOTIFY__STORAGE__DRIVER: "postgres" - NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection - # Queue configuration - Valkey only - NOTIFY__QUEUE__DRIVER: "valkey" - NOTIFY__QUEUE__VALKEY__URL: "valkey:6379" + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ISSUERDIRECTORY__AUTHORITY__ENABLED: "true" + ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER:-http://authority.stella-ops.local}" + ISSUERDIRECTORY__AUTHORITY__AUDIENCES__0: "api://issuer-directory" + ISSUERDIRECTORY__AUTHORITY__REQUIREHTTPSMETADATA: "false" + ISSUERDIRECTORY__PERSISTENCE__PROVIDER: "Postgres" + ISSUERDIRECTORY__PERSISTENCE__POSTGRESCONNECTIONSTRING: *postgres-connection + ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "false" volumes: - - ../../etc/notify:/app/etc/notify:ro + - ../../etc/issuer-directory:/app/etc/issuer-directory:ro + - *cert-volume ports: - - "${NOTIFY_WEB_PORT:-8446}:8446" + - "127.1.0.37:80:80" networks: - - stellaops - - frontdoor + stellaops: + aliases: + - issuerdirectory.stella-ops.local + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Excititor - VEX generation service - # --------------------------------------------------------------------------- - excititor: - image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa - container_name: stellaops-excititor + # --- Slot 38: Symbols ------------------------------------------------------ + symbols: + image: stellaops/symbols:dev + container_name: stellaops-symbols + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - 
"127.1.0.38:80:80" + networks: + stellaops: + aliases: + - symbols.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 39: SbomService -------------------------------------------------- + sbomservice: + image: stellaops/sbomservice:dev + container_name: stellaops-sbomservice + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.39:80:80" + networks: + stellaops: + aliases: + - sbomservice.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 40: ExportCenter ------------------------------------------------- + export: + image: stellaops/export:dev + container_name: stellaops-export + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + Export__AllowInMemoryRepositories: "true" + volumes: + - *cert-volume + ports: + - "127.1.0.40:80:80" + networks: + stellaops: + aliases: + - exportcenter.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + export-worker: + image: stellaops/export-worker:dev + container_name: stellaops-export-worker + restart: unless-stopped + depends_on: *depends-infra + environment: + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + Export__AllowInMemoryRepositories: "true" + volumes: + - *cert-volume + networks: + stellaops: + aliases: + - export-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 41: Replay ------------------------------------------------------- + replay-web: + image: stellaops/replay-web:dev + container_name: stellaops-replay-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.41:80:80" + networks: + stellaops: + aliases: + - replay.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 42: Integrations ------------------------------------------------ + integrations-web: + image: stellaops/integrations-web:dev + container_name: stellaops-integrations-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + tmpfs: + - /app/plugins:mode=1777 + ports: + - "127.1.0.42:80:80" + networks: + stellaops: + aliases: + - integrations.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 43: 
Zastava Webhook ---------------------------------------------- + zastava-webhook: + image: stellaops/zastava-webhook:dev + container_name: stellaops-zastava-webhook restart: unless-stopped depends_on: - - postgres - - concelier + authority: + condition: service_healthy environment: - EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445" - EXCITITOR__STORAGE__DRIVER: "postgres" - EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: *postgres-connection + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + # Runtime authority (used by token provider for OIDC discovery) + zastava__runtime__authority__Issuer: "http://authority.stella-ops.local" + zastava__runtime__authority__allowStaticTokenFallback: "true" + zastava__runtime__authority__staticTokenValue: "dev-bypass-token" + zastava__runtime__tenant: "default" + zastava__runtime__environment: "local" + # Webhook authority + zastava__webhook__authority__Issuer: "http://authority.stella-ops.local" + zastava__webhook__authority__staticTokenValue: "dev-bypass-token" + # TLS (PFX from cert volume) + zastava__webhook__tls__mode: "Secret" + zastava__webhook__tls__pfxPath: "/app/etc/certs/kestrel-dev.pfx" + zastava__webhook__tls__pfxPassword: "devpass" + # Backend (scanner service) + zastava__webhook__backend__baseAddress: "http://scanner.stella-ops.local" + zastava__webhook__backend__allowInsecureHttp: "true" + volumes: + - *cert-volume networks: - - stellaops + stellaops: + aliases: + - zastava-webhook.stella-ops.local labels: *release-labels - # --------------------------------------------------------------------------- - # Advisory AI Web - AI-powered advisory analysis API - # --------------------------------------------------------------------------- + # --- Slot 44: Signals ------------------------------------------------------ + signals: + image: stellaops/signals:dev + container_name: stellaops-signals + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.43:80:80" + networks: + stellaops: + aliases: + - signals.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Slot 45: Advisory AI -------------------------------------------------- advisory-ai-web: - image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.10.0 + image: stellaops/advisory-ai-web:dev container_name: stellaops-advisory-ai-web restart: unless-stopped depends_on: - scanner-web environment: - ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner.stella-ops.local}" ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" @@ -482,57 +1818,93 @@ services: ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" ports: - - "${ADVISORY_AI_WEB_PORT:-8448}:8448" + - "127.1.0.44:80:80" volumes: + - *cert-volume - 
../../etc/llm-providers:/app/etc/llm-providers:ro - advisory-ai-queue:/var/lib/advisory-ai/queue - advisory-ai-plans:/var/lib/advisory-ai/plans - advisory-ai-outputs:/var/lib/advisory-ai/outputs + tmpfs: + - /app/plugins:mode=1777 networks: - - stellaops - - frontdoor + stellaops: + aliases: + - advisoryai.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp labels: *release-labels - # --------------------------------------------------------------------------- - # Advisory AI Worker - Background AI processing - # --------------------------------------------------------------------------- advisory-ai-worker: - image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.10.0 + image: stellaops/advisory-ai-worker:dev container_name: stellaops-advisory-ai-worker restart: unless-stopped - depends_on: - - advisory-ai-web - environment: - ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" - ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" - ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" - ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" - ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" - ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" - ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" - volumes: - - ../../etc/llm-providers:/app/etc/llm-providers:ro - - advisory-ai-queue:/var/lib/advisory-ai/queue - - advisory-ai-plans:/var/lib/advisory-ai/plans - - advisory-ai-outputs:/var/lib/advisory-ai/outputs - networks: - - stellaops - labels: *release-labels - - # --------------------------------------------------------------------------- - # Web UI - Angular frontend - # --------------------------------------------------------------------------- - web-ui: - image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23 - container_name: stellaops-web-ui - restart: unless-stopped depends_on: - scanner-web environment: - STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444" - ports: - - "${UI_PORT:-8443}:8443" + <<: *kestrel-cert + ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner.stella-ops.local}" + ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/tmp/advisory-ai/queue" + ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/tmp/advisory-ai/plans" + ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/tmp/advisory-ai/outputs" + ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" + ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" + ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" + volumes: + - *cert-volume networks: - - stellaops - - frontdoor + stellaops: + aliases: + - advisory-ai-worker.stella-ops.local + healthcheck: + <<: *healthcheck-worker + labels: *release-labels + + # --- Slot 46: Unknowns ---------------------------------------------------- + unknowns-web: + image: stellaops/unknowns-web:dev + container_name: stellaops-unknowns-web + restart: unless-stopped + depends_on: *depends-infra + environment: + ASPNETCORE_URLS: "http://+:8080" + <<: *kestrel-cert + ConnectionStrings__Default: *postgres-connection + ConnectionStrings__UnknownsDb: 
*postgres-connection + ConnectionStrings__Redis: "cache.stella-ops.local:6379" + volumes: + - *cert-volume + ports: + - "127.1.0.45:80:80" + networks: + stellaops: + aliases: + - unknowns.stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD-SHELL", "bash -c 'echo > /dev/tcp/$(hostname)/80'"] + <<: *healthcheck-tcp + labels: *release-labels + + # --- Console (Angular frontend) ------------------------------------------- + web-ui: + image: stellaops/console:dev + container_name: stellaops-web-ui + restart: unless-stopped + depends_on: + - platform + environment: + STELLAOPS_UI__BACKEND__BASEURL: "http://platform.stella-ops.local" + ports: + - "127.1.0.1:80:8080" + networks: + stellaops: + aliases: + - stella-ops.local + frontdoor: {} + healthcheck: + test: ["CMD", "wget", "-qO-", "http://localhost:8080/"] + <<: *healthcheck-tcp labels: *release-labels diff --git a/devops/compose/postgres-init/01-create-schemas.sql b/devops/compose/postgres-init/01-create-schemas.sql new file mode 100644 index 000000000..7898390a3 --- /dev/null +++ b/devops/compose/postgres-init/01-create-schemas.sql @@ -0,0 +1,14 @@ +-- Pre-create schemas referenced by Stella Ops services. +-- Runs once on first PostgreSQL container start via docker-entrypoint-initdb.d. + +CREATE SCHEMA IF NOT EXISTS scanner; +CREATE SCHEMA IF NOT EXISTS vex; +CREATE SCHEMA IF NOT EXISTS scheduler; +CREATE SCHEMA IF NOT EXISTS policy; +CREATE SCHEMA IF NOT EXISTS notify; +CREATE SCHEMA IF NOT EXISTS notifier; +CREATE SCHEMA IF NOT EXISTS evidence; +CREATE SCHEMA IF NOT EXISTS findings; +CREATE SCHEMA IF NOT EXISTS timeline; +CREATE SCHEMA IF NOT EXISTS doctor; +CREATE SCHEMA IF NOT EXISTS issuer_directory; diff --git a/devops/compose/zot-config.json b/devops/compose/zot-config.json new file mode 100644 index 000000000..cd003adeb --- /dev/null +++ b/devops/compose/zot-config.json @@ -0,0 +1,16 @@ +{ + "distSpecVersion": "1.1.0", + "storage": { + "rootDirectory": "/var/lib/registry", + "gc": true, + "gcDelay": "1h", + "gcInterval": "24h" + }, + "http": { + "address": "0.0.0.0", + "port": "5000" + }, + "log": { + "level": "info" + } +} diff --git a/devops/docker/Dockerfile.console b/devops/docker/Dockerfile.console index ebe47db1d..3e5b8ab2d 100644 --- a/devops/docker/Dockerfile.console +++ b/devops/docker/Dockerfile.console @@ -1,26 +1,29 @@ # syntax=docker/dockerfile:1.7 # Multi-stage Angular console image with non-root runtime (DOCKER-44-001) -ARG NODE_IMAGE=node:20-bullseye-slim +ARG NODE_IMAGE=node:20-bookworm-slim ARG NGINX_IMAGE=nginxinc/nginx-unprivileged:1.27-alpine ARG APP_DIR=src/Web/StellaOps.Web ARG DIST_DIR=dist ARG APP_PORT=8080 FROM ${NODE_IMAGE} AS build +ARG APP_DIR +ARG DIST_DIR ENV npm_config_fund=false npm_config_audit=false SOURCE_DATE_EPOCH=1704067200 WORKDIR /app COPY ${APP_DIR}/package*.json ./ -RUN npm ci --prefer-offline --no-progress --cache .npm +RUN npm install --no-progress COPY ${APP_DIR}/ ./ RUN npm run build -- --configuration=production --output-path=${DIST_DIR} FROM ${NGINX_IMAGE} AS runtime -ARG APP_PORT +ARG APP_PORT=8080 +ARG DIST_DIR=dist ENV APP_PORT=${APP_PORT} USER 101 WORKDIR / COPY --from=build /app/${DIST_DIR}/ /usr/share/nginx/html/ -COPY ops/devops/docker/healthcheck-frontend.sh /usr/local/bin/healthcheck-frontend.sh +COPY devops/docker/healthcheck-frontend.sh /usr/local/bin/healthcheck-frontend.sh RUN rm -f /etc/nginx/conf.d/default.conf && \ cat > /etc/nginx/conf.d/default.conf </dev/null || true + USER ${APP_UID}:${APP_GID} EXPOSE ${APP_PORT} HEALTHCHECK --interval=30s --timeout=5s 
--start-period=15s --retries=3 \ CMD /usr/local/bin/healthcheck.sh -# Harden filesystem; deploys should also set readOnlyRootFilesystem true -RUN chmod 500 /app && \ - find /app -maxdepth 1 -type f -exec chmod 400 {} \; && \ - find /app -maxdepth 1 -type d -exec chmod 500 {} \; - # Use shell form so APP_BINARY env can be expanded without duplicating the template per service ENTRYPOINT ["sh","-c","exec ./\"$APP_BINARY\""] diff --git a/devops/docker/build-all.ps1 b/devops/docker/build-all.ps1 new file mode 100644 index 000000000..fc00b4f72 --- /dev/null +++ b/devops/docker/build-all.ps1 @@ -0,0 +1,106 @@ +#!/usr/bin/env pwsh +<# +.SYNOPSIS + Build hardened Docker images for all Stella Ops services using the shared template/matrix. +.DESCRIPTION + PowerShell port of build-all.sh. Reads services-matrix.env (pipe-delimited) and builds + each service image using Dockerfile.hardened.template (or Dockerfile.console for Angular). +.PARAMETER Registry + Docker image registry prefix. Default: stellaops +.PARAMETER TagSuffix + Tag suffix for built images. Default: dev +.PARAMETER SdkImage + .NET SDK base image. Default: mcr.microsoft.com/dotnet/sdk:10.0-noble +.PARAMETER RuntimeImage + .NET runtime base image. Default: mcr.microsoft.com/dotnet/aspnet:10.0-noble +#> +[CmdletBinding()] +param( + [string]$Registry = $env:REGISTRY ?? 'stellaops', + [string]$TagSuffix = $env:TAG_SUFFIX ?? 'dev', + [string]$SdkImage = $env:SDK_IMAGE ?? 'mcr.microsoft.com/dotnet/sdk:10.0-noble', + [string]$RuntimeImage = $env:RUNTIME_IMAGE ?? 'mcr.microsoft.com/dotnet/aspnet:10.0-noble' +) + +$ErrorActionPreference = 'Continue' + +$Root = git rev-parse --show-toplevel 2>$null +if (-not $Root) { + Write-Error 'Not inside a git repository.' + exit 1 +} +$Root = $Root.Trim() + +$MatrixPath = Join-Path $Root 'devops/docker/services-matrix.env' +if (-not (Test-Path $MatrixPath)) { + Write-Error "Matrix file not found: $MatrixPath" + exit 1 +} + +Write-Host "Building services from $MatrixPath -> ${Registry}/:${TagSuffix}" -ForegroundColor Cyan + +$succeeded = @() +$failed = @() + +foreach ($line in Get-Content $MatrixPath) { + $line = $line.Trim() + if (-not $line -or $line.StartsWith('#')) { continue } + + $parts = $line -split '\|' + if ($parts.Count -lt 5) { continue } + + $service = $parts[0] + $dockerfile = $parts[1] + $project = $parts[2] + $binary = $parts[3] + $port = $parts[4] + + $image = "${Registry}/${service}:${TagSuffix}" + $dfPath = Join-Path $Root $dockerfile + + if (-not (Test-Path $dfPath)) { + Write-Warning "Skipping ${service}: dockerfile missing ($dfPath)" + continue + } + + if ($dockerfile -like '*Dockerfile.console*') { + Write-Host "[console] $service -> $image" -ForegroundColor Yellow + docker build ` + -f $dfPath $Root ` + --build-arg "APP_DIR=$project" ` + --build-arg "APP_PORT=$port" ` + -t $image + } + else { + Write-Host "[service] $service -> $image" -ForegroundColor Green + docker build ` + -f $dfPath $Root ` + --build-arg "SDK_IMAGE=$SdkImage" ` + --build-arg "RUNTIME_IMAGE=$RuntimeImage" ` + --build-arg "APP_PROJECT=$project" ` + --build-arg "APP_BINARY=$binary" ` + --build-arg "APP_PORT=$port" ` + -t $image + } + + if ($LASTEXITCODE -eq 0) { + $succeeded += $service + } + else { + $failed += $service + Write-Host "FAILED: $service" -ForegroundColor Red + } +} + +Write-Host '' +Write-Host '=== BUILD RESULTS ===' -ForegroundColor Cyan +Write-Host "Succeeded ($($succeeded.Count)): $($succeeded -join ', ')" -ForegroundColor Green +Write-Host "Failed ($($failed.Count)): $($failed -join ', ')" 
-ForegroundColor $(if ($failed.Count -gt 0) { 'Red' } else { 'Green' }) +Write-Host '' + +if ($failed.Count -gt 0) { + Write-Error 'Some builds failed. Fix the issues and re-run.' + exit 1 +} + +Write-Host 'Build complete. Remember to enforce readOnlyRootFilesystem at deploy time and run sbom_attest.sh (DOCKER-44-002).' -ForegroundColor Cyan diff --git a/devops/docker/build-all.sh b/devops/docker/build-all.sh index 95a1e9896..ff0221a37 100644 --- a/devops/docker/build-all.sh +++ b/devops/docker/build-all.sh @@ -1,13 +1,15 @@ #!/usr/bin/env bash # Build hardened images for the core services using the shared template/matrix (DOCKER-44-001) -set -euo pipefail +set -uo pipefail +FAILED=() +SUCCEEDED=() ROOT=${ROOT:-"$(git rev-parse --show-toplevel)"} -MATRIX=${MATRIX:-"${ROOT}/ops/devops/docker/services-matrix.env"} +MATRIX=${MATRIX:-"${ROOT}/devops/docker/services-matrix.env"} REGISTRY=${REGISTRY:-"stellaops"} TAG_SUFFIX=${TAG_SUFFIX:-"dev"} -SDK_IMAGE=${SDK_IMAGE:-"mcr.microsoft.com/dotnet/sdk:10.0-bookworm-slim"} -RUNTIME_IMAGE=${RUNTIME_IMAGE:-"mcr.microsoft.com/dotnet/aspnet:10.0-bookworm-slim"} +SDK_IMAGE=${SDK_IMAGE:-"mcr.microsoft.com/dotnet/sdk:10.0-noble"} +RUNTIME_IMAGE=${RUNTIME_IMAGE:-"mcr.microsoft.com/dotnet/aspnet:10.0-noble"} if [[ ! -f "${MATRIX}" ]]; then echo "matrix file not found: ${MATRIX}" >&2 @@ -45,6 +47,22 @@ while IFS='|' read -r service dockerfile project binary port; do -t "${image}" fi + if [[ $? -eq 0 ]]; then + SUCCEEDED+=("${service}") + else + FAILED+=("${service}") + echo "FAILED: ${service}" >&2 + fi + done < "${MATRIX}" +echo "" >&2 +echo "=== BUILD RESULTS ===" >&2 +echo "Succeeded (${#SUCCEEDED[@]}): ${SUCCEEDED[*]:-none}" >&2 +echo "Failed (${#FAILED[@]}): ${FAILED[*]:-none}" >&2 +echo "" >&2 +if [[ ${#FAILED[@]} -gt 0 ]]; then + echo "Some builds failed. Fix the issues and re-run." >&2 + exit 1 +fi echo "Build complete. Remember to enforce readOnlyRootFilesystem at deploy time and run sbom_attest.sh (DOCKER-44-002)." >&2 diff --git a/devops/docker/services-matrix.env b/devops/docker/services-matrix.env index 4a3a35f73..c882bf745 100644 --- a/devops/docker/services-matrix.env +++ b/devops/docker/services-matrix.env @@ -1,12 +1,112 @@ # service|dockerfile|project|binary|port # Paths are relative to repo root; dockerfile is usually the shared hardened template. 
-api|ops/devops/docker/Dockerfile.hardened.template|src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj|StellaOps.VulnExplorer.Api|8080
-orchestrator|ops/devops/docker/Dockerfile.hardened.template|src/Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj|StellaOps.Orchestrator.WebService|8080
-task-runner|ops/devops/docker/Dockerfile.hardened.template|src/Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj|StellaOps.Orchestrator.Worker|8081
-concelier|ops/devops/docker/Dockerfile.hardened.template|src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj|StellaOps.Concelier.WebService|8080
-excititor|ops/devops/docker/Dockerfile.hardened.template|src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj|StellaOps.Excititor.WebService|8080
-policy|ops/devops/docker/Dockerfile.hardened.template|src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj|StellaOps.Policy.Gateway|8084
-notify|ops/devops/docker/Dockerfile.hardened.template|src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj|StellaOps.Notify.WebService|8080
-export|ops/devops/docker/Dockerfile.hardened.template|src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj|StellaOps.ExportCenter.WebService|8080
-advisoryai|ops/devops/docker/Dockerfile.hardened.template|src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj|StellaOps.AdvisoryAI.WebService|8080
-console|ops/devops/docker/Dockerfile.console|src/Web/StellaOps.Web|StellaOps.Web|8080
+# Ordered by port-registry slot number. All services use port 8080 internally
+# unless they have a legacy port assignment (authority=8440, signer=8441, etc.).
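Each row expands into a `docker build` call in `build-all.sh` / `build-all.ps1` above. A minimal sketch of that mapping, assuming the repo root is the working directory and reusing the concelier row purely as an illustration (the real scripts additionally pass `SDK_IMAGE`/`RUNTIME_IMAGE` build args and route console rows to `Dockerfile.console`):

```bash
#!/usr/bin/env bash
# Sketch only: how one services-matrix.env row is consumed by the build scripts.
row='concelier|devops/docker/Dockerfile.hardened.template|src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj|StellaOps.Concelier.WebService|8080'

# Split the pipe-delimited fields: service|dockerfile|project|binary|port
IFS='|' read -r service dockerfile project binary port <<< "${row}"

# Build the hardened image; build-all.sh also supplies SDK_IMAGE / RUNTIME_IMAGE here.
docker build \
  -f "${dockerfile}" . \
  --build-arg "APP_PROJECT=${project}" \
  --build-arg "APP_BINARY=${binary}" \
  --build-arg "APP_PORT=${port}" \
  -t "stellaops/${service}:dev"
```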
+#
+# ── Slot 0: Router Gateway ──────────────────────────────────────────────
+router-gateway|devops/docker/Dockerfile.hardened.template|src/Router/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj|StellaOps.Gateway.WebService|8080
+# ── Slot 1: Platform ────────────────────────────────────────────────────
+platform|devops/docker/Dockerfile.hardened.template|src/Platform/StellaOps.Platform.WebService/StellaOps.Platform.WebService.csproj|StellaOps.Platform.WebService|8080
+# ── Slot 2: Authority ───────────────────────────────────────────────────
+authority|devops/docker/Dockerfile.hardened.template|src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj|StellaOps.Authority|8440
+# ── Slot 3: Gateway ─────────────────────────────────────────────────────
+gateway|devops/docker/Dockerfile.hardened.template|src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj|StellaOps.Gateway.WebService|8080
+# ── Slot 4: Attestor ────────────────────────────────────────────────────
+attestor|devops/docker/Dockerfile.hardened.template|src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj|StellaOps.Attestor.WebService|8442
+# ── Slot 5: Attestor TileProxy ──────────────────────────────────────────
+attestor-tileproxy|devops/docker/Dockerfile.hardened.template|src/Attestor/StellaOps.Attestor.TileProxy/StellaOps.Attestor.TileProxy.csproj|StellaOps.Attestor.TileProxy|8080
+# ── Slot 6: Evidence Locker ─────────────────────────────────────────────
+evidence-locker-web|devops/docker/Dockerfile.hardened.template|src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj|StellaOps.EvidenceLocker.WebService|8080
+evidence-locker-worker|devops/docker/Dockerfile.hardened.template|src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj|StellaOps.EvidenceLocker.Worker|8080
+# ── Slot 8: Scanner ─────────────────────────────────────────────────────
+scanner-web|devops/docker/Dockerfile.hardened.template|src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj|StellaOps.Scanner.WebService|8444
+scanner-worker|devops/docker/Dockerfile.hardened.template|src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj|StellaOps.Scanner.Worker|8080
+# ── Slot 9: Concelier ───────────────────────────────────────────────────
+concelier|devops/docker/Dockerfile.hardened.template|src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj|StellaOps.Concelier.WebService|8080
+# ── Slot 10: Excititor ──────────────────────────────────────────────────
+excititor|devops/docker/Dockerfile.hardened.template|src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj|StellaOps.Excititor.WebService|8080
+excititor-worker|devops/docker/Dockerfile.hardened.template|src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj|StellaOps.Excititor.Worker|8080
+# ── Slot 11: VexHub ─────────────────────────────────────────────────────
+vexhub-web|devops/docker/Dockerfile.hardened.template|src/VexHub/StellaOps.VexHub.WebService/StellaOps.VexHub.WebService.csproj|StellaOps.VexHub.WebService|8080
+# ── Slot 12: VexLens ────────────────────────────────────────────────────
+vexlens-web|devops/docker/Dockerfile.hardened.template|src/VexLens/StellaOps.VexLens.WebService/StellaOps.VexLens.WebService.csproj|StellaOps.VexLens.WebService|8080
+# ── Slot 13: VulnExplorer (api) ─────────────────────────────────────────
+api|devops/docker/Dockerfile.hardened.template|src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj|StellaOps.VulnExplorer.Api|8080
+# ── Slot 14: Policy Engine ──────────────────────────────────────────────
+policy-engine|devops/docker/Dockerfile.hardened.template|src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj|StellaOps.Policy.Engine|8080
+# ── Slot 15: Policy Gateway ─────────────────────────────────────────────
+policy|devops/docker/Dockerfile.hardened.template|src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj|StellaOps.Policy.Gateway|8084
+# ── Slot 16: RiskEngine ─────────────────────────────────────────────────
+riskengine-web|devops/docker/Dockerfile.hardened.template|src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj|StellaOps.RiskEngine.WebService|8080
+riskengine-worker|devops/docker/Dockerfile.hardened.template|src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj|StellaOps.RiskEngine.Worker|8080
+# ── Slot 17: Orchestrator ───────────────────────────────────────────────
+orchestrator|devops/docker/Dockerfile.hardened.template|src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj|StellaOps.Orchestrator.WebService|8080
+orchestrator-worker|devops/docker/Dockerfile.hardened.template|src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj|StellaOps.Orchestrator.Worker|8080
+# ── Slot 18: TaskRunner ─────────────────────────────────────────────────
+taskrunner-web|devops/docker/Dockerfile.hardened.template|src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj|StellaOps.TaskRunner.WebService|8080
+taskrunner-worker|devops/docker/Dockerfile.hardened.template|src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj|StellaOps.TaskRunner.Worker|8080
+# ── Slot 19: Scheduler ──────────────────────────────────────────────────
+scheduler-web|devops/docker/Dockerfile.hardened.template|src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj|StellaOps.Scheduler.WebService|8080
+scheduler-worker|devops/docker/Dockerfile.hardened.template|src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj|StellaOps.Scheduler.Worker.Host|8080
+# ── Slot 20: Graph ──────────────────────────────────────────────────────
+graph-api|devops/docker/Dockerfile.hardened.template|src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj|StellaOps.Graph.Api|8080
+# ── Slot 21: Cartographer ───────────────────────────────────────────────
+cartographer|devops/docker/Dockerfile.hardened.template|src/Cartographer/StellaOps.Cartographer/StellaOps.Cartographer.csproj|StellaOps.Cartographer|8080
+# ── Slot 22: ReachGraph ─────────────────────────────────────────────────
+reachgraph-web|devops/docker/Dockerfile.hardened.template|src/ReachGraph/StellaOps.ReachGraph.WebService/StellaOps.ReachGraph.WebService.csproj|StellaOps.ReachGraph.WebService|8080
+# ── Slot 23: Timeline Indexer ───────────────────────────────────────────
+timeline-indexer-web|devops/docker/Dockerfile.hardened.template|src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj|StellaOps.TimelineIndexer.WebService|8080
+timeline-indexer-worker|devops/docker/Dockerfile.hardened.template|src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj|StellaOps.TimelineIndexer.Worker|8080
+# ── Slot 24: Timeline ───────────────────────────────────────────────────
+timeline-web|devops/docker/Dockerfile.hardened.template|src/Timeline/StellaOps.Timeline.WebService/StellaOps.Timeline.WebService.csproj|StellaOps.Timeline.WebService|8080
+# ── Slot 25: Findings Ledger ────────────────────────────────────────────
+findings-ledger-web|devops/docker/Dockerfile.hardened.template|src/Findings/StellaOps.Findings.Ledger.WebService/StellaOps.Findings.Ledger.WebService.csproj|StellaOps.Findings.Ledger.WebService|8080
+# ── Slot 26: Doctor ─────────────────────────────────────────────────────
+doctor-web|devops/docker/Dockerfile.hardened.template|src/Doctor/StellaOps.Doctor.WebService/StellaOps.Doctor.WebService.csproj|StellaOps.Doctor.WebService|8080
+doctor-scheduler|devops/docker/Dockerfile.hardened.template|src/Doctor/StellaOps.Doctor.Scheduler/StellaOps.Doctor.Scheduler.csproj|StellaOps.Doctor.Scheduler|8080
+# ── Slot 27: OpsMemory ──────────────────────────────────────────────────
+opsmemory-web|devops/docker/Dockerfile.hardened.template|src/OpsMemory/StellaOps.OpsMemory.WebService/StellaOps.OpsMemory.WebService.csproj|StellaOps.OpsMemory.WebService|8080
+# ── Slot 28: Notifier ───────────────────────────────────────────────────
+notifier-web|devops/docker/Dockerfile.hardened.template|src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj|StellaOps.Notifier.WebService|8080
+notifier-worker|devops/docker/Dockerfile.hardened.template|src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj|StellaOps.Notifier.Worker|8080
+# ── Slot 29: Notify ─────────────────────────────────────────────────────
+notify-web|devops/docker/Dockerfile.hardened.template|src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj|StellaOps.Notify.WebService|8080
+# ── Slot 30: Signer ─────────────────────────────────────────────────────
+signer|devops/docker/Dockerfile.hardened.template|src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj|StellaOps.Signer.WebService|8441
+# ── Slot 31: SmRemote ───────────────────────────────────────────────────
+smremote|devops/docker/Dockerfile.hardened.template|src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj|StellaOps.SmRemote.Service|8080
+# ── Slot 32: AirGap Controller ──────────────────────────────────────────
+airgap-controller|devops/docker/Dockerfile.hardened.template|src/AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj|StellaOps.AirGap.Controller|8080
+# ── Slot 33: AirGap Time ────────────────────────────────────────────────
+airgap-time|devops/docker/Dockerfile.hardened.template|src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj|StellaOps.AirGap.Time|8080
+# ── Slot 34: PacksRegistry ──────────────────────────────────────────────
+packsregistry-web|devops/docker/Dockerfile.hardened.template|src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj|StellaOps.PacksRegistry.WebService|8080
+packsregistry-worker|devops/docker/Dockerfile.hardened.template|src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj|StellaOps.PacksRegistry.Worker|8080
+# ── Slot 35: Registry Token ─────────────────────────────────────────────
+registry-token|devops/docker/Dockerfile.hardened.template|src/Registry/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj|StellaOps.Registry.TokenService|8080
+# ── Slot 36: BinaryIndex ────────────────────────────────────────────────
+binaryindex-web|devops/docker/Dockerfile.hardened.template|src/BinaryIndex/StellaOps.BinaryIndex.WebService/StellaOps.BinaryIndex.WebService.csproj|StellaOps.BinaryIndex.WebService|8080
+# ── Slot 37: IssuerDirectory ────────────────────────────────────────────
+issuer-directory-web|devops/docker/Dockerfile.hardened.template|src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/StellaOps.IssuerDirectory.WebService.csproj|StellaOps.IssuerDirectory.WebService|8080
+# ── Slot 38: Symbols ────────────────────────────────────────────────────
+symbols|devops/docker/Dockerfile.hardened.template|src/Symbols/StellaOps.Symbols.Server/StellaOps.Symbols.Server.csproj|StellaOps.Symbols.Server|8080
+# ── Slot 39: SbomService ────────────────────────────────────────────────
+sbomservice|devops/docker/Dockerfile.hardened.template|src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj|StellaOps.SbomService|8080
+# ── Slot 40: ExportCenter ───────────────────────────────────────────────
+export|devops/docker/Dockerfile.hardened.template|src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj|StellaOps.ExportCenter.WebService|8080
+export-worker|devops/docker/Dockerfile.hardened.template|src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj|StellaOps.ExportCenter.Worker|8080
+# ── Slot 41: Replay ─────────────────────────────────────────────────────
+replay-web|devops/docker/Dockerfile.hardened.template|src/Replay/StellaOps.Replay.WebService/StellaOps.Replay.WebService.csproj|StellaOps.Replay.WebService|8080
+# ── Slot 42: Integrations ───────────────────────────────────────────────
+integrations-web|devops/docker/Dockerfile.hardened.template|src/Integrations/StellaOps.Integrations.WebService/StellaOps.Integrations.WebService.csproj|StellaOps.Integrations.WebService|8080
+# ── Slot 43: Zastava ────────────────────────────────────────────────────
+zastava-webhook|devops/docker/Dockerfile.hardened.template|src/Zastava/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj|StellaOps.Zastava.Webhook|8080
+# ── Slot 44: Signals ────────────────────────────────────────────────────
+signals|devops/docker/Dockerfile.hardened.template|src/Signals/StellaOps.Signals/StellaOps.Signals.csproj|StellaOps.Signals|8080
+# ── Slot 45: AdvisoryAI ─────────────────────────────────────────────────
+advisory-ai-web|devops/docker/Dockerfile.hardened.template|src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj|StellaOps.AdvisoryAI.WebService|8080
+advisory-ai-worker|devops/docker/Dockerfile.hardened.template|src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj|StellaOps.AdvisoryAI.Worker|8080 +# ── Slot 46: Unknowns ─────────────────────────────────────────────────────────── +unknowns-web|devops/docker/Dockerfile.hardened.template|src/Unknowns/StellaOps.Unknowns.WebService/StellaOps.Unknowns.WebService.csproj|StellaOps.Unknowns.WebService|8080 +# ── Console (Angular frontend) ────────────────────────────────────────────────── +console|devops/docker/Dockerfile.console|src/Web/StellaOps.Web|StellaOps.Web|8080 diff --git a/docs/DEVELOPER_ONBOARDING.md b/docs/DEVELOPER_ONBOARDING.md index 08b00949a..f7712f357 100644 --- a/docs/DEVELOPER_ONBOARDING.md +++ b/docs/DEVELOPER_ONBOARDING.md @@ -57,6 +57,8 @@ StellaOps is a deterministic, offline-first SBOM + VEX platform built as a micro --- ## Prerequisites +> **Looking for a quick setup checklist?** See [`docs/dev/DEV_ENVIRONMENT_SETUP.md`](dev/DEV_ENVIRONMENT_SETUP.md) for a streamlined, copy-paste-friendly guide covering prerequisites, hosts file, infrastructure, builds, and Docker images. + ### Required Software 1. **Docker Desktop** (Windows/Mac) or **Docker Engine + Docker Compose** (Linux) @@ -67,12 +69,17 @@ StellaOps is a deterministic, offline-first SBOM + VEX platform built as a micro - Download: https://dotnet.microsoft.com/download/dotnet/10.0 - Verify: `dotnet --version` (should show 10.0.x) -3. **Visual Studio 2022** (v17.12+) or **Visual Studio Code** +3. **Node.js** (for Angular frontend) + - Version: ^20.19.0 || ^22.12.0 || ^24.0.0 (see `src/Web/StellaOps.Web/package.json` engines) + - npm: >=10.2.0 + - Verify: `node --version` / `npm --version` + +4. **Visual Studio 2022** (v17.12+) or **Visual Studio Code** - Workload: ASP.NET and web development - Workload: .NET desktop development - Extension (VS Code): C# Dev Kit -4. **Git** +5. 
**Git** - Version: 2.30+ recommended ### Optional Tools @@ -104,8 +111,8 @@ cd git.stella-ops.org ```bash # Copy the development environment template -cd deploy\compose -copy env\dev.env.example .env +cd devops\compose +copy env\stellaops.env.example .env # Edit .env with your preferred text editor notepad .env @@ -119,13 +126,13 @@ notepad .env ### Step 3: Start the Full Platform ```bash -# From deploy/compose directory -docker compose -f docker-compose.dev.yaml up -d +# From devops/compose directory +docker compose -f docker-compose.dev.yml up -d ``` **This will start all infrastructure and services:** -- PostgreSQL v16+ (port 5432) - Primary database for all services -- Valkey 8.0 (port 6379) - Cache, DPoP nonces, event streams, rate limiting +- PostgreSQL 18.1 (port 5432) - Primary database for all services +- Valkey 9.0.1 (port 6379) - Cache, DPoP nonces, event streams, rate limiting - RustFS (port 8080) - S3-compatible object storage for artifacts/SBOMs - Authority (port 8440) - OAuth2/OIDC authentication - Signer (port 8441) - Cryptographic signing @@ -138,15 +145,15 @@ docker compose -f docker-compose.dev.yaml up -d ```bash # Check all services are up -docker compose -f docker-compose.dev.yaml ps +docker compose -f docker-compose.dev.yml ps # Check logs for a specific service -docker compose -f docker-compose.dev.yaml logs -f scanner-web +docker compose -f docker-compose.dev.yml logs -f scanner-web # Check infrastructure health -docker compose -f docker-compose.dev.yaml logs postgres -docker compose -f docker-compose.dev.yaml logs valkey -docker compose -f docker-compose.dev.yaml logs rustfs +docker compose -f docker-compose.dev.yml logs postgres +docker compose -f docker-compose.dev.yml logs valkey +docker compose -f docker-compose.dev.yml logs rustfs ``` ### Step 5: Access the Platform @@ -176,7 +183,7 @@ Related references: Service-specific debugging guidance lives with each module to avoid stale, copy-pasted configuration examples. Generic workflow: -1. Stop the service container in `deploy/compose` (for example: `docker compose -f docker-compose.dev.yaml stop `). +1. Stop the service container in `devops/compose` (for example: `docker compose -f docker-compose.dev.yml stop `). 2. Run the service locally under a debugger. 3. Update dependent services to call `host.docker.internal:` (or your host IP) and restart them. 4. Use the module operations docs for required env vars, auth scopes, and health checks. @@ -315,11 +322,11 @@ STELLAOPS_SCANNER__QUEUE__BROKER=nats://localhost:4222 ```bash # 1. Start full platform -cd deploy\compose -docker compose -f docker-compose.dev.yaml up -d +cd devops\compose +docker compose -f docker-compose.dev.yml up -d # 2. Stop the service you want to debug -docker compose -f docker-compose.dev.yaml stop scanner-web +docker compose -f docker-compose.dev.yml stop scanner-web # 3. Open Visual Studio cd C:\dev\New folder\git.stella-ops.org @@ -331,7 +338,7 @@ start src\Scanner\StellaOps.Scanner.sln curl -X POST http://localhost:5210/api/scans -H "Content-Type: application/json" -d '{"imageRef":"alpine:latest"}' # 6. When done, stop VS debugger and restart Docker container -docker compose -f docker-compose.dev.yaml start scanner-web +docker compose -f docker-compose.dev.yml start scanner-web ``` ### Workflow 2: Debug Multiple Services Together @@ -340,7 +347,7 @@ docker compose -f docker-compose.dev.yaml start scanner-web ```bash # 1. 
Stop both containers -docker compose -f docker-compose.dev.yaml stop scanner-web scanner-worker +docker compose -f docker-compose.dev.yml stop scanner-web scanner-worker # 2. In Visual Studio, configure multiple startup projects: # - Right-click solution > Properties @@ -361,8 +368,8 @@ cd src\Concelier\StellaOps.Concelier.WebService dotnet build # 2. Stop Docker Concelier -cd ..\..\..\deploy\compose -docker compose -f docker-compose.dev.yaml stop concelier +cd ..\..\..\devops\compose +docker compose -f docker-compose.dev.yml stop concelier # 3. Run Concelier in Visual Studio (F5) @@ -371,7 +378,7 @@ docker compose -f docker-compose.dev.yaml stop concelier CONCELIER_BASEURL=http://host.docker.internal:5000 # 5. Restart Scanner to pick up new config -docker compose -f docker-compose.dev.yaml restart scanner-web +docker compose -f docker-compose.dev.yml restart scanner-web ``` ### Workflow 4: Reset Database State @@ -380,17 +387,17 @@ docker compose -f docker-compose.dev.yaml restart scanner-web ```bash # 1. Stop all services -docker compose -f docker-compose.dev.yaml down +docker compose -f docker-compose.dev.yml down # 2. Remove database volumes docker volume rm compose_postgres-data docker volume rm compose_valkey-data # 3. Restart platform (will recreate volumes and databases) -docker compose -f docker-compose.dev.yaml up -d +docker compose -f docker-compose.dev.yml up -d # 4. Wait for migrations to run -docker compose -f docker-compose.dev.yaml logs -f postgres +docker compose -f docker-compose.dev.yml logs -f postgres # Look for migration completion messages ``` @@ -400,7 +407,7 @@ docker compose -f docker-compose.dev.yaml logs -f postgres ```bash # 1. Use the air-gap compose profile -cd deploy\compose +cd devops\compose docker compose -f docker-compose.airgap.yaml up -d # 2. Verify no external network calls @@ -519,18 +526,18 @@ Note: StackExchange.Redis reports "redis server(s)" even when Valkey is the back 1. **Check Valkey is running:** ```bash -docker compose -f docker-compose.dev.yaml ps valkey +docker compose -f docker-compose.dev.yml ps valkey # Should show: State = "Up" # Check logs -docker compose -f docker-compose.dev.yaml logs valkey +docker compose -f docker-compose.dev.yml logs valkey ``` 2. **Reset Valkey:** ```bash -docker compose -f docker-compose.dev.yaml stop valkey +docker compose -f docker-compose.dev.yml stop valkey docker volume rm compose_valkey-data -docker compose -f docker-compose.dev.yaml up -d valkey +docker compose -f docker-compose.dev.yml up -d valkey ``` #### 5. Service Cannot Reach host.docker.internal @@ -546,7 +553,7 @@ Should work automatically with Docker Desktop. 
**Solution (Linux):** -Add to docker-compose.dev.yaml: +Add to docker-compose.dev.yml: ```yaml services: scanner-web: @@ -644,7 +651,7 @@ Permission denied writing to /data/db sudo chown -R $USER:$USER ./volumes # Or run Docker as root (not recommended for production) -sudo docker compose -f docker-compose.dev.yaml up -d +sudo docker compose -f docker-compose.dev.yml up -d ``` --- @@ -699,19 +706,19 @@ cd devops\compose docker compose -f docker-compose.stella-ops.yml up -d # Stop a specific service for debugging -docker compose -f docker-compose.dev.yaml stop +docker compose -f docker-compose.dev.yml stop # View logs -docker compose -f docker-compose.dev.yaml logs -f +docker compose -f docker-compose.dev.yml logs -f # Restart a service -docker compose -f docker-compose.dev.yaml restart +docker compose -f docker-compose.dev.yml restart # Stop all services -docker compose -f docker-compose.dev.yaml down +docker compose -f docker-compose.dev.yml down # Stop all services and remove volumes (DESTRUCTIVE) -docker compose -f docker-compose.dev.yaml down -v +docker compose -f docker-compose.dev.yml down -v # Build the module solution (see docs/dev/SOLUTION_BUILD_GUIDE.md) cd C:\dev\New folder\git.stella-ops.org diff --git a/docs/dev/DEV_ENVIRONMENT_SETUP.md b/docs/dev/DEV_ENVIRONMENT_SETUP.md new file mode 100644 index 000000000..416d142b6 --- /dev/null +++ b/docs/dev/DEV_ENVIRONMENT_SETUP.md @@ -0,0 +1,336 @@ +# Dev Environment Setup + +Actionable checklist for getting a local Stella Ops development environment running. +For hybrid debugging workflows and service-specific guides, see [`docs/DEVELOPER_ONBOARDING.md`](../DEVELOPER_ONBOARDING.md). + +--- + +## Quick Start (automated) + +Setup scripts validate prerequisites, start infrastructure, build solutions and Docker images, and launch the full platform. + +**Windows (PowerShell 7):** + +```powershell +.\scripts\setup.ps1 # full setup +.\scripts\setup.ps1 -InfraOnly # infrastructure only (PostgreSQL, Valkey, SeaweedFS, Rekor, Zot) +.\scripts\setup.ps1 -SkipBuild # skip .NET builds, build images and start platform +.\scripts\setup.ps1 -SkipImages # build .NET but skip Docker images +.\scripts\setup.ps1 -ImagesOnly # only build Docker images +``` + +**Linux / macOS:** + +```bash +./scripts/setup.sh # full setup +./scripts/setup.sh --infra-only # infrastructure only +./scripts/setup.sh --skip-build # skip .NET builds +./scripts/setup.sh --skip-images # skip Docker image builds +./scripts/setup.sh --images-only # only build Docker images +``` + +The scripts will check for required tools (dotnet 10.x, node 20+, npm 10+, docker, git), warn about missing hosts file entries, and copy `.env` from the example if needed. See the manual steps below for details on each stage. + +--- + +## 1. 
Prerequisites + +| Tool | Version | Verify | +|------|---------|--------| +| .NET 10 SDK | 10.0.100 (pinned in `global.json`) | `dotnet --version` | +| Node.js | ^20.19.0 \|\| ^22.12.0 \|\| ^24.0.0 | `node --version` | +| npm | >=10.2.0 | `npm --version` | +| Docker Desktop / Engine + Compose | 20.10+ | `docker --version` | +| Git | 2.30+ | `git --version` | +| PowerShell 7+ (Windows) or Bash | -- | `pwsh --version` / `bash --version` | + +### Optional + +- Visual Studio 2022 v17.12+ (ASP.NET and web development workload) +- VS Code + C# Dev Kit +- PostgreSQL client (`psql`, DBeaver, pgAdmin) +- `valkey-cli` or Redis Insight (Valkey is Redis-compatible) +- AWS CLI or `s3cmd` for RustFS inspection + +### System requirements + +- **RAM:** 16 GB minimum, 32 GB recommended +- **Disk:** 50 GB free (Docker images, volumes, build artifacts) +- **CPU:** 4 cores minimum, 8 cores recommended + +--- + +## 2. Hosts file setup + +Each service binds to a unique loopback IP so all can use ports 443/80 without collisions. +Full details: [`docs/technical/architecture/port-registry.md`](../technical/architecture/port-registry.md). + +Add the block below to your hosts file: + +- **Windows:** `C:\Windows\System32\drivers\etc\hosts` (run editor as Administrator) +- **Linux / macOS:** `/etc/hosts` (use `sudo`) + +``` +# Stella Ops local development hostnames +# Each service gets a unique loopback IP so all can bind :443/:80 simultaneously. +127.1.0.1 stella-ops.local +127.1.0.2 router.stella-ops.local +127.1.0.3 platform.stella-ops.local +127.1.0.4 authority.stella-ops.local +127.1.0.5 gateway.stella-ops.local +127.1.0.6 attestor.stella-ops.local +127.1.0.7 evidencelocker.stella-ops.local +127.1.0.8 scanner.stella-ops.local +127.1.0.9 concelier.stella-ops.local +127.1.0.10 excititor.stella-ops.local +127.1.0.11 vexhub.stella-ops.local +127.1.0.12 vexlens.stella-ops.local +127.1.0.13 vulnexplorer.stella-ops.local +127.1.0.14 policy-engine.stella-ops.local +127.1.0.15 policy-gateway.stella-ops.local +127.1.0.16 riskengine.stella-ops.local +127.1.0.17 orchestrator.stella-ops.local +127.1.0.18 taskrunner.stella-ops.local +127.1.0.19 scheduler.stella-ops.local +127.1.0.20 graph.stella-ops.local +127.1.0.21 cartographer.stella-ops.local +127.1.0.22 reachgraph.stella-ops.local +127.1.0.23 timelineindexer.stella-ops.local +127.1.0.24 timeline.stella-ops.local +127.1.0.25 findings.stella-ops.local +127.1.0.26 doctor.stella-ops.local +127.1.0.27 opsmemory.stella-ops.local +127.1.0.28 notifier.stella-ops.local +127.1.0.29 notify.stella-ops.local +127.1.0.30 signer.stella-ops.local +127.1.0.31 smremote.stella-ops.local +127.1.0.32 airgap-controller.stella-ops.local +127.1.0.33 airgap-time.stella-ops.local +127.1.0.34 packsregistry.stella-ops.local +127.1.0.35 registry-token.stella-ops.local +127.1.0.36 binaryindex.stella-ops.local +127.1.0.37 issuerdirectory.stella-ops.local +127.1.0.38 symbols.stella-ops.local +127.1.0.39 sbomservice.stella-ops.local +127.1.0.40 exportcenter.stella-ops.local +127.1.0.41 replay.stella-ops.local +127.1.0.42 integrations.stella-ops.local +127.1.0.43 signals.stella-ops.local +127.1.0.44 advisoryai.stella-ops.local +127.1.0.45 unknowns.stella-ops.local + +# Stella Ops infrastructure (local dev containers) +127.1.1.1 db.stella-ops.local +127.1.1.2 cache.stella-ops.local +127.1.1.3 s3.stella-ops.local +127.1.1.4 rekor.stella-ops.local +127.1.1.5 registry.stella-ops.local +``` + +--- + +## 3. 
Start infrastructure (Docker)
+
+```bash
+cd devops/compose
+cp env/stellaops.env.example .env   # edit POSTGRES_PASSWORD at minimum
+docker compose -f docker-compose.dev.yml up -d
+docker compose -f docker-compose.dev.yml ps
+```
+
+### Verify infrastructure
+
+```bash
+# PostgreSQL
+psql -h db.stella-ops.local -U stellaops -d stellaops_dev -c "SELECT 1"
+
+# Valkey
+valkey-cli -h cache.stella-ops.local ping
+```
+
+Infrastructure versions (from `docker-compose.dev.yml`):
+
+| Service | Version | Hostname | Port |
+|---------|---------|----------|------|
+| PostgreSQL | 18.1 | `db.stella-ops.local` | 5432 |
+| Valkey | 9.0.1 | `cache.stella-ops.local` | 6379 |
+| SeaweedFS (S3) | -- | `s3.stella-ops.local` | 8080 |
+| Rekor v2 | -- | `rekor.stella-ops.local` | 3322 |
+| Zot (OCI registry) | v2.1.3 | `registry.stella-ops.local` | 80 |
+
+---
+
+## 4. Build .NET modules
+
+The codebase uses a **module-first** approach -- there is no root solution file used for builds. Each module has its own `.sln` under `src/<Module>/`.
+
+### Single module
+
+```powershell
+dotnet build src\Scanner\StellaOps.Scanner.sln
+dotnet test src\Scanner\StellaOps.Scanner.sln
+```
+
+### All modules
+
+```powershell
+# Windows (PowerShell 7)
+.\scripts\build-all-solutions.ps1
+
+# With tests
+.\scripts\build-all-solutions.ps1 -Test
+
+# Linux / macOS
+./scripts/build-all-solutions.sh
+
+# With tests
+./scripts/build-all-solutions.sh --test
+```
+
+### Module solution index
+
+See [`docs/dev/SOLUTION_BUILD_GUIDE.md`](SOLUTION_BUILD_GUIDE.md) for the authoritative list. Current modules (42):
+
+| Module | Solution path |
+|--------|---------------|
+| AdvisoryAI | `src/AdvisoryAI/StellaOps.AdvisoryAI.sln` |
+| AirGap | `src/AirGap/StellaOps.AirGap.sln` |
+| Aoc | `src/Aoc/StellaOps.Aoc.sln` |
+| Attestor | `src/Attestor/StellaOps.Attestor.sln` |
+| Authority | `src/Authority/StellaOps.Authority.sln` |
+| Bench | `src/Bench/StellaOps.Bench.sln` |
+| BinaryIndex | `src/BinaryIndex/StellaOps.BinaryIndex.sln` |
+| Cartographer | `src/Cartographer/StellaOps.Cartographer.sln` |
+| Cli | `src/Cli/StellaOps.Cli.sln` |
+| Concelier | `src/Concelier/StellaOps.Concelier.sln` |
+| EvidenceLocker | `src/EvidenceLocker/StellaOps.EvidenceLocker.sln` |
+| Excititor | `src/Excititor/StellaOps.Excititor.sln` |
+| ExportCenter | `src/ExportCenter/StellaOps.ExportCenter.sln` |
+| Feedser | `src/Feedser/StellaOps.Feedser.sln` |
+| Findings | `src/Findings/StellaOps.Findings.sln` |
+| Gateway | `src/Gateway/StellaOps.Gateway.sln` |
+| Graph | `src/Graph/StellaOps.Graph.sln` |
+| IssuerDirectory | `src/IssuerDirectory/StellaOps.IssuerDirectory.sln` |
+| Notifier | `src/Notifier/StellaOps.Notifier.sln` |
+| Notify | `src/Notify/StellaOps.Notify.sln` |
+| Orchestrator | `src/Orchestrator/StellaOps.Orchestrator.sln` |
+| PacksRegistry | `src/PacksRegistry/StellaOps.PacksRegistry.sln` |
+| Policy | `src/Policy/StellaOps.Policy.sln` |
+| ReachGraph | `src/ReachGraph/StellaOps.ReachGraph.sln` |
+| Registry | `src/Registry/StellaOps.Registry.sln` |
+| Replay | `src/Replay/StellaOps.Replay.sln` |
+| RiskEngine | `src/RiskEngine/StellaOps.RiskEngine.sln` |
+| Router | `src/Router/StellaOps.Router.sln` |
+| SbomService | `src/SbomService/StellaOps.SbomService.sln` |
+| Scanner | `src/Scanner/StellaOps.Scanner.sln` |
+| Scheduler | `src/Scheduler/StellaOps.Scheduler.sln` |
+| Signer | `src/Signer/StellaOps.Signer.sln` |
+| Signals | `src/Signals/StellaOps.Signals.sln` |
+| SmRemote | `src/SmRemote/StellaOps.SmRemote.sln` |
+| TaskRunner | `src/TaskRunner/StellaOps.TaskRunner.sln` |
+| Telemetry | `src/Telemetry/StellaOps.Telemetry.sln` |
+| TimelineIndexer | `src/TimelineIndexer/StellaOps.TimelineIndexer.sln` |
+| Tools | `src/Tools/StellaOps.Tools.sln` |
+| VexHub | `src/VexHub/StellaOps.VexHub.sln` |
+| VexLens | `src/VexLens/StellaOps.VexLens.sln` |
+| VulnExplorer | `src/VulnExplorer/StellaOps.VulnExplorer.sln` |
+| Zastava | `src/Zastava/StellaOps.Zastava.sln` |
+
+---
+
+## 5. Build Angular frontend
+
+```bash
+cd src/Web/StellaOps.Web
+npm ci --prefer-offline --no-audit --no-fund
+npm run start      # dev server -> https://stella-ops.local
+npm run build      # production build
+npm run test       # unit tests (Vitest)
+npm run test:e2e   # Playwright E2E
+```
+
+Additional scripts:
+
+| Command | Purpose |
+|---------|---------|
+| `npm run storybook` | Launch Storybook component explorer |
+| `npm run analyze` | Bundle size visualization (esbuild-visualizer) |
+| `npm run test:a11y` | Accessibility smoke tests |
+
+---
+
+## 6. Build Docker images
+
+### Option A: Build all services (matrix-driven)
+
+```bash
+cd devops/docker
+./build-all.sh
+```
+
+Uses `services-matrix.env` and `Dockerfile.hardened.template` for .NET services, `Dockerfile.console` for Angular.
+
+### Option B: Build a single .NET service
+
+```bash
+docker build -f devops/docker/Dockerfile.hardened.template . \
+  --build-arg SDK_IMAGE=mcr.microsoft.com/dotnet/sdk:10.0-bookworm-slim \
+  --build-arg RUNTIME_IMAGE=mcr.microsoft.com/dotnet/aspnet:10.0-bookworm-slim \
+  --build-arg APP_PROJECT=src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj \
+  --build-arg APP_BINARY=StellaOps.Scanner.WebService \
+  --build-arg APP_PORT=8080 \
+  -t stellaops/scanner-web:dev
+```
+
+### Option C: Build the Angular console image
+
+```bash
+docker build -f devops/docker/Dockerfile.console . \
+  --build-arg APP_DIR=src/Web/StellaOps.Web \
+  -t stellaops/console:dev
+```
+
+### Release-quality builds (distroless)
+
+Release Dockerfiles live under `devops/release/docker/`:
+
+- `Dockerfile.dotnet-service` -- .NET services
+- `Dockerfile.angular-ui` -- Angular console
+
+Component manifest: `devops/release/components.json`.
+
+---
+
+## 7. Run the full platform
+
+```bash
+# Core services
+docker compose -f devops/compose/docker-compose.stella-ops.yml up -d
+
+# With Sigstore transparency log
+docker compose -f devops/compose/docker-compose.stella-ops.yml --profile sigstore up -d
+
+# With telemetry stack
+docker compose -f devops/compose/docker-compose.stella-ops.yml \
+  -f devops/compose/docker-compose.telemetry.yml up -d
+```
+
+Verify:
+
+```bash
+docker compose -f devops/compose/docker-compose.stella-ops.yml ps
+```
+
+---
+
+## 8. Hybrid debugging (quick reference)
+
+1. Start the full platform in Docker (section 7).
+2. Stop the container for the service you want to debug:
+   ```bash
+   docker compose -f devops/compose/docker-compose.stella-ops.yml stop <service-name>
+   ```
+3. Run that service locally from your IDE (F5 in Visual Studio, or `dotnet run`).
+4. The local service uses `localhost` / `*.stella-ops.local` hostnames to reach Docker-hosted infrastructure.
+
+For detailed walkthroughs, configuration overrides, and multi-service debugging see [`docs/DEVELOPER_ONBOARDING.md`](../DEVELOPER_ONBOARDING.md).
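+
+As a concrete sketch of steps 2-3 above, assuming the compose service is named `scanner-web` (the name used in the Scanner examples elsewhere in this repository); substitute the service and project path you actually want to debug:
+
+```bash
+# Stop only the containerized Scanner web service; the rest of the platform keeps running
+docker compose -f devops/compose/docker-compose.stella-ops.yml stop scanner-web
+
+# Run the same service locally against the Docker-hosted infrastructure
+cd src/Scanner/StellaOps.Scanner.WebService
+dotnet run
+```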
diff --git a/docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md b/docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md index 352b31b22..1d68114ba 100644 --- a/docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md +++ b/docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md @@ -88,7 +88,7 @@ Completion criteria: - [ ] Applied changes logged with before/after counts. ### REMED-05 - Tier 2 manual remediation backlog -Status: TODO +Status: DOING Dependency: REMED-03 Owners: Developer, QA Task description: @@ -198,6 +198,7 @@ Completion criteria: | 2026-01-31 | BLOCKED: Scheduler __Libraries missing `docs/modules/scheduler/implementation_plan.md`; SOLID review deferred. | Developer | | 2026-01-31 | BLOCKED: Policy __Libraries missing `docs/product/advisories/14-Dec-2025 - Smart-Diff Technical Reference.md`; SOLID review deferred. | Developer | | 2026-01-31 | BLOCKED: Signals __Libraries missing unknowns registry doc and archived sprint paths referenced by AGENTS; SOLID review deferred. | Developer | +| 2026-02-04 | Aoc libraries remediated (private field naming, blocking async removed, IAocGuard split, AocWriteGuard and filter partials, service locator removal); Aoc tests passed (11 + 8). | Developer | | 2026-01-31 | BLOCKED: SbomService __Libraries missing required architecture/sprint docs; SOLID review deferred. | Developer | | 2026-01-31 | BLOCKED: Signer __Libraries required reading includes external Fulcio doc; blocked pending explicit user approval for web fetch. | Developer | | 2026-01-31 | BLOCKED: Zastava __Libraries missing `docs/modules/devops/runbooks/zastava-deployment.md`; SOLID review deferred. | Developer | @@ -298,6 +299,40 @@ Completion criteria: | 2026-02-03 | Remediated StellaOps.ReachGraph.Cache (ReachGraphValkeyCache split into <=100-line partials, ConfigureAwait(false) + cancellation checks, multi-endpoint invalidation); added ReachGraph.Cache unit tests for get/set/slice/invalidation/cancellation; `dotnet test src/__Libraries/__Tests/StellaOps.ReachGraph.Cache.Tests/StellaOps.ReachGraph.Cache.Tests.csproj` passed (9 tests). | Developer/QA | | 2026-02-03 | Remediated StellaOps.ReachGraph.Persistence (tenant filters added for list/get/delete, Intent traits added for tests); `dotnet test src/__Libraries/__Tests/StellaOps.ReachGraph.Persistence.Tests/StellaOps.ReachGraph.Persistence.Tests.csproj` passed (10 tests). | Developer/QA | | 2026-02-03 | Remediated StellaOps.ReachGraph core (dedup/hash/serialization/signing files split <=100 lines, ConfigureAwait(false) added in signing, new dedup/semantic key tests); `dotnet test src/__Libraries/__Tests/StellaOps.ReachGraph.Tests/StellaOps.ReachGraph.Tests.csproj` passed (MTP0001 warning). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Replay.Core.Tests (FeedSnapshot + Determinism validator tests split, ConfigureAwait(false) removed for xUnit); `dotnet test src/__Libraries/StellaOps.Replay.Core.Tests/StellaOps.Replay.Core.Tests.csproj` passed (64 tests) and `dotnet test src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/StellaOps.Replay.Core.Tests.csproj` passed (1 test). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Evidence.Bundle (evidence models split into single-purpose files, enum serialization test added); `dotnet test src/__Tests/StellaOps.Evidence.Bundle.Tests/StellaOps.Evidence.Bundle.Tests.csproj` passed (29 tests). SOLID review notes + status tables updated. 
| Developer/QA | +| 2026-02-04 | Remediated StellaOps.Evidence.Core (adapters/store split into <=100-line partials, EvidenceBundleAdapter test added); `dotnet test src/__Libraries/StellaOps.Evidence.Core.Tests/StellaOps.Evidence.Core.Tests.csproj` passed (113 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Evidence (budget/retention/model/service/validation splits, ConfigureAwait(false) added, private field naming fixed; retention tier boundary test added); `dotnet test src/__Libraries/__Tests/StellaOps.Evidence.Tests/StellaOps.Evidence.Tests.csproj` passed (24 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | BLOCKED: StellaOps.Evidence.Pack remediation waiting on module AGENTS.md in src/__Libraries/StellaOps.Evidence.Pack. | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Interop.Tests (async naming, harness/model splits <= 100 lines, FindingsComparer tests added; ConfigureAwait(false) skipped in tests per xUnit1030); `dotnet test src/__Tests/interop/StellaOps.Interop.Tests/StellaOps.Interop.Tests.csproj` passed (11 tests, 38 skipped). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.IssuerDirectory.Client/Core.Tests (client partial split, options/models split, service locator removed, cache/tests split; ConfigureAwait(false) skipped in tests per xUnit1030); `dotnet test src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj` passed (17 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated StellaOps.IssuerDirectory.Core (domain/service/validation partial splits, metrics field naming, domain/validator tests + missing issuer add test); `dotnet test src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj` passed (23 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-05 | Remediated StellaOps.Replay (ReplayEngine split into partials/interfaces, loader digest guard + exceptions separated, failure timestamps use TimeProvider, loader tests added); `dotnet test src/__Libraries/__Tests/StellaOps.Replay.Tests/StellaOps.Replay.Tests.csproj` passed (11 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated StellaOps.IssuerDirectory.Persistence (repository partial splits, service locator removed, added unit/integration tests, IssuerAuditSinkTests split); `dotnet test src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/StellaOps.IssuerDirectory.Persistence.Tests.csproj` passed (15 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated AirGap.Bundle file-length outliers (SnapshotBundleReader.MerkleEntries and PolicySnapshotExtractor.Policy), split AirGap.Bundle test suite into <= 100-line partials with helpers; fixed missing usings; `dotnet test src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/StellaOps.AirGap.Bundle.Tests.csproj` passed (150 tests). | Developer/QA | +| 2026-02-05 | Remediated AirGap.Persistence (service locator removal, <=100-line splits, bundle version store coverage, unit DI registration tests, deterministic fixtures/Intent tags); `dotnet test src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/StellaOps.AirGap.Persistence.Tests.csproj` passed (23 tests). 
| Developer/QA | +| 2026-02-04 | Remediated AirGap.Sync (service/transport/store splits <=100 lines, TimeProvider/path validation, metrics refactor, expanded unit coverage including FileBasedJobSyncTransport); `dotnet test src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/StellaOps.AirGap.Sync.Tests.csproj` passed (40 tests, MTP0001 warning). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated AirGap.Policy (EgressPolicy/EgressRule/EgressPolicyServiceCollectionExtensions splits <=100 lines, removed service locator registration, tests split and options binding verified); `dotnet test src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/StellaOps.AirGap.Policy.Tests.csproj` passed (12 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated AirGap.Policy.Analyzers (HttpClientUsageAnalyzer split into diagnostics/analysis partials, private field naming fixed) and AirGap.Policy.Analyzers.Tests (tests split into partials with shared helpers, added HttpClientHandler construction + test-assembly name coverage; ConfigureAwait(false) omitted in test methods per xUnit1030); `dotnet test src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/StellaOps.AirGap.Policy.Analyzers.Tests.csproj` passed (19 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated AirGap.Time (Rfc3161/Roughtime/TimeAnchorPolicyService splits, hosted startup validation replacing service locator, ConfigureAwait(false) applied, controller/health checks renamed Async) and AirGap.Time.Tests (test files split, ConfigureAwait(false) skipped per xUnit1030); `dotnet test src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/StellaOps.AirGap.Time.Tests.csproj` passed (48 tests). SOLID review notes + status tables updated. | Developer/QA | +| 2026-02-04 | Remediated BinaryIndex.Decompiler library/tests (interfaces/models split into <=100-line partials, parser/tokenizer refactor, keyword-only variable extraction filter, tests split + hex stack-offset coverage); `dotnet test src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/StellaOps.BinaryIndex.Decompiler.Tests.csproj` passed (35 tests). | Developer/QA | +| 2026-02-04 | Remediated BinaryIndex Disassembly.Abstractions + Disassembly.Tests (split >100-line files, removed service locator usage, renamed private fields); dotnet test rerun with `-p:BuildInParallel=false -p:UseSharedCompilation=false` after an MSBuild OOM on default run; 41 tests passed. | Developer | +| 2026-02-04 | Remediated BinaryIndex Disassembly (split service/hybrid/DI files, extracted helpers, removed hybrid service locator) and added hybrid DI registration test; `dotnet test src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/StellaOps.BinaryIndex.Disassembly.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (42 tests). | Developer | +| 2026-02-04 | Remediated BinaryIndex.Disassembly.B2R2 (plugin/pool/low-UIR split into <=100-line partials, private field rename, binary handle extracted); added B2R2 lifter pool + LowUIR support tests; `dotnet test src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/StellaOps.BinaryIndex.Disassembly.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (45 tests). 
| Developer | +| 2026-02-04 | Remediated Authority.Timestamping.Abstractions (split request/response/token/verification/options into <=100-line files, fixed includeNonce null handling); added Timestamping.Abstractions unit test project; `dotnet test src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/StellaOps.Authority.Timestamping.Abstractions.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (16 tests). | Developer/QA | +| 2026-02-04 | Remediated Authority.Timestamping (split HttpTsaClient/registry/verifier/cache/ASN.1 files into <=100-line partials, ConfigureAwait(false) added in library awaits); added Timestamping unit test project; `dotnet test src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/StellaOps.Authority.Timestamping.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (10 tests). | Developer/QA | +| 2026-02-04 | Remediated Authority.Core verdicts (split manifest/builder/replay verifier/store into <=100-line files, fixed private field naming); expanded unit coverage for serializer empty JSON, replay diffs, and asset pagination; `dotnet test src/Authority/__Tests/StellaOps.Authority.Core.Tests/StellaOps.Authority.Core.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (46 tests). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.AdvisoryAI.Attestation (service/registry/models/store split into <=100-line partials, IAiAttestationStore split, module AGENTS added); `dotnet test src/__Libraries/__Tests/StellaOps.AdvisoryAI.Attestation.Tests/StellaOps.AdvisoryAI.Attestation.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (58 tests). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Cryptography.Plugin.EIDAS.Tests (tests split into partials, service locator removed, DI assertions updated, missing-key test added; ConfigureAwait(false) omitted due to xUnit1030); `dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/StellaOps.Cryptography.Plugin.EIDAS.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (25 tests). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Cryptography.Plugin.EIDAS (provider/options/client split into <=100-line partials, ConfigureAwait(false) added in library awaits); `dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/StellaOps.Cryptography.Plugin.EIDAS.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (25 tests). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.Cryptography.DependencyInjection (removed service locator usage, split DI/validator/options files, added option configurators for SM/Sim HttpClients); added DI ordering + plugin-loading tests; `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (326 tests). | Developer/QA | +| 2026-02-04 | Remediated StellaOps.AuditPack (System-first using order in builder/importer/replay helpers; ArchiveUtilities extraction tests added); `dotnet test src/__Libraries/__Tests/StellaOps.AuditPack.Tests/StellaOps.AuditPack.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (52 tests). 
| Developer/QA | +| 2026-02-04 | Remediated StellaOps.Auth.Security (DpopValidationOptions unit coverage added); `dotnet test src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/StellaOps.Auth.Security.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (20 tests). | Developer/QA | +| 2026-02-04 | BLOCKED: StellaOps.Cryptography.CertificateStatus.Abstractions and StellaOps.Cryptography.CertificateStatus missing module-local AGENTS.md; remediation deferred. | Developer | +| 2026-02-04 | Remediated StellaOps.Cryptography.Plugin.BouncyCastle (private field naming fixed, provider split into <=100-line partials, key normalization tests added); `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (330 tests). | Developer/QA | +| 2026-02-04 | Started StellaOps.Cryptography.Kms remediation review (AGENTS read; audit checklist loaded); work in progress. | Developer | +| 2026-02-04 | Remediated StellaOps.Cryptography.Kms (async naming + file splits <= 100 lines, service locator removal, blocking async removal, public key handling updates); `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/StellaOps.Cryptography.Kms.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (9 tests, MTP0001 warning) and `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (326 tests). | Developer/QA | +| 2026-02-04 | BLOCKED: StellaOps.Cryptography.CertificateStatus.Abstractions and StellaOps.Cryptography.CertificateStatus missing module-local AGENTS.md; remediation deferred. | Developer | ## Decisions & Risks - Decision: Remediation proceeds in tiers (safe automation, reviewed automation, manual fixes). - Decision: All automation must be deterministic, offline, and logged to `docs/implplan/audits/csproj-standards/remediation/`. @@ -310,7 +345,7 @@ Completion criteria: - Risk: Tier 1 symbol-aware changes require module expertise; schedule review windows per module. - Risk: File-by-file ramp increases timeline; adjust staffing to maintain momentum. - Risk: `src/__Libraries/__Tests/StellaOps.Orchestrator.Schemas.Tests` remediation blocked until module-local `AGENTS.md` exists (PM task required). -- Risk: `src/__Libraries/StellaOps.AdvisoryAI.Attestation` remediation blocked until module-local `AGENTS.md` exists. +- Resolved: Added module AGENTS for `src/__Libraries/StellaOps.AdvisoryAI.Attestation`; remediation unblocked. - Risk: Tier 0 left UsingInsideNamespace findings in 7 Scanner library files due to safe automation constraints; requires Tier 1/2 follow-up. - Risk: Tier 0 tool (`csproj-remediate-tier0.ps1`) has 3 known bugs discovered during repo-wide application: (1) **GlobalUsings.cs files are emptied** ? tool sorts `global using` directives but does not write them back, resulting in empty files. Workaround: revert GlobalUsings.cs. (2) **Top-level statement files break** ? `using var x = ...` disposal declarations are treated as using directives and moved into the sorted block. Workaround: revert affected Program.cs files. (3) **Duplicate usings not deduplicated** ? sorting can produce duplicate lines when usings appeared in multiple regions. Manual fix required. These bugs should be fixed before Tier 0 is used for future sprints. 
- Decision: Remaining 36 UsingNotSorted files are in GlobalUsings.cs or preprocessor-guarded files; these are Tier 1/2 scope and safe to defer. @@ -318,6 +353,7 @@ Completion criteria: - Resolved: Added module AGENTS for StellaOps.Artifact.Core.Tests; REMED-07 closed. - Decision: When file-audit.csv lacks entries for a project, generate SOLID notes by enumerating project .cs files (excluding bin/obj and auto-generated files). - Decision: Do not add ConfigureAwait(false) in xUnit tests when xUnit1030 flags it; treat ConfigureAwaitMissing as not applicable and record the exception in remediation notes. +- Decision: ReplayEngine failure timestamps now use the injected TimeProvider; documented in `docs/modules/replay/guides/DETERMINISTIC_REPLAY.md`. - Decision: CSProj audit detail outputs are now canonical under `docs/implplan/audits/csproj-standards/src/**` after the IncludeTests rerun; legacy module-based folders are archival. - Decision: Per-project remediation checklists live under `docs/implplan/audits/csproj-standards/remediation/checklists/src/**` and serve as REMED-05/Tier 0-2 action sources. - Decision: Cross-module TASKS boards created in `src/**` to track remediation and SOLID status per project. @@ -330,6 +366,9 @@ Completion criteria: - Risk: Signer AGENTS reference external Fulcio documentation; SOLID review should be revalidated if external policy requirements change. - Risk: solid-review generator matches `( + .RequireAocGuard( request => new object?[] { request.Payload }, serializerOptions: null, - guardOptions: null)) + guardOptions: null) .ProducesProblem(StatusCodes.Status400BadRequest) .WithTags("AOC"); diff --git a/docs/modules/replay/guides/DETERMINISTIC_REPLAY.md b/docs/modules/replay/guides/DETERMINISTIC_REPLAY.md index 4546afc9e..8f9c2e168 100644 --- a/docs/modules/replay/guides/DETERMINISTIC_REPLAY.md +++ b/docs/modules/replay/guides/DETERMINISTIC_REPLAY.md @@ -155,7 +155,8 @@ Producer note: default clock values in `StellaOps.Replay.Core` are `UnixEpoch` t ### 4.1 Environment Normalization -* **Clock:** frozen to `scan.time` unless a rule explicitly requires “now”. +* **Clock:** frozen to `scan.time` unless a rule explicitly requires “now”. +* **Replay engine timestamps:** success and failure outputs must use the injected `TimeProvider` to keep replay timestamps deterministic. * **Random seed:** derived as `H(scan.id || MerkleRootAllLayers)`. * **Locale/TZ:** enforced per manifest; deviations cause validation error. * **Filesystem normalization:** diff --git a/docs/technical/architecture/port-registry.md b/docs/technical/architecture/port-registry.md index fe7cb6930..11c1caa09 100644 --- a/docs/technical/architecture/port-registry.md +++ b/docs/technical/architecture/port-registry.md @@ -152,4 +152,23 @@ Add the following to your hosts file (`C:\Windows\System32\drivers\etc\hosts` on 127.1.0.43 signals.stella-ops.local 127.1.0.44 advisoryai.stella-ops.local 127.1.0.45 unknowns.stella-ops.local + +# Stella Ops infrastructure (local dev containers) +127.1.1.1 db.stella-ops.local +127.1.1.2 cache.stella-ops.local +127.1.1.3 s3.stella-ops.local +127.1.1.4 rekor.stella-ops.local +127.1.1.5 registry.stella-ops.local ``` + +### Infrastructure services + +Infrastructure containers (databases, caches, object storage, transparency logs) use a separate loopback range (`127.1.1.x`) to avoid collisions with application services. 
+ +| IP | Hostname | Service | Port | +|----|----------|---------|------| +| `127.1.1.1` | `db.stella-ops.local` | PostgreSQL 18.1 | 5432 | +| `127.1.1.2` | `cache.stella-ops.local` | Valkey 9.0.1 | 6379 | +| `127.1.1.3` | `s3.stella-ops.local` | SeaweedFS (S3-compatible) | 8080 | +| `127.1.1.4` | `rekor.stella-ops.local` | Rekor v2 (tiles) | 3322 | +| `127.1.1.5` | `registry.stella-ops.local` | Zot (OCI registry) | 80 (→5000) | diff --git a/etc/authority/keys/ack-token-dev.pem b/etc/authority/keys/ack-token-dev.pem new file mode 100644 index 000000000..dfbf975ce --- /dev/null +++ b/etc/authority/keys/ack-token-dev.pem @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgMBVhnaKPP51XonFF +s6a3c38QknKe7QE/2uG/Me87/1WhRANCAAT9pvHVdj0b4ipmeG5hO+6vIkKef3iz +YCsDck4n0plEreGU6RQqjbNvonaz4RBfZgfRRijO9uwYd+6TRRba5Ud2 +-----END PRIVATE KEY----- diff --git a/etc/authority/keys/kestrel-dev.crt b/etc/authority/keys/kestrel-dev.crt new file mode 100644 index 000000000..81a587989 --- /dev/null +++ b/etc/authority/keys/kestrel-dev.crt @@ -0,0 +1,11 @@ +-----BEGIN CERTIFICATE----- +MIIBjzCCATWgAwIBAgIUQo2xRh6gHa/pIP5/McOMYZGWzNkwCgYIKoZIzj0EAwIw +HTEbMBkGA1UEAwwSKi5zdGVsbGEtb3BzLmxvY2FsMB4XDTI2MDIwNDAxMDgxMVoX +DTI3MDIwNDAxMDgxMVowHTEbMBkGA1UEAwwSKi5zdGVsbGEtb3BzLmxvY2FsMFkw +EwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEx9LZrM5VptvkH0OMO+08BVSDbJR7I4JQ +UOkT9SWw41iQ9N44LTfgkAwFNOwDBjedyfSubChlVFeMzG5zTfeknaNTMFEwHQYD +VR0OBBYEFK4aSrWW+fiMAh3KmDDqQAhUgQq9MB8GA1UdIwQYMBaAFK4aSrWW+fiM +Ah3KmDDqQAhUgQq9MA8GA1UdEwEB/wQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIg +e9gjH4e/4ZN8B0gAisYRnTOYjNFYhZ0i1r8hixgYvLgCIQDXSRkEkdPU2wh1CSDi +i6AyDm/GS12iLQthGRITJdYejg== +-----END CERTIFICATE----- diff --git a/etc/authority/keys/kestrel-dev.key b/etc/authority/keys/kestrel-dev.key new file mode 100644 index 000000000..64ab6bef5 --- /dev/null +++ b/etc/authority/keys/kestrel-dev.key @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgSrZgb+IjZY+zqdUF +9R3gUmL0xtruaXM7vEfvlo/8JP6hRANCAATH0tmszlWm2+QfQ4w77TwFVINslHsj +glBQ6RP1JbDjWJD03jgtN+CQDAU07AMGN53J9K5sKGVUV4zMbnNN96Sd +-----END PRIVATE KEY----- diff --git a/etc/authority/keys/kestrel-dev.pfx b/etc/authority/keys/kestrel-dev.pfx new file mode 100644 index 000000000..687a138b4 Binary files /dev/null and b/etc/authority/keys/kestrel-dev.pfx differ diff --git a/etc/authority/keys/signing-dev.pem b/etc/authority/keys/signing-dev.pem new file mode 100644 index 000000000..33a4bd08d --- /dev/null +++ b/etc/authority/keys/signing-dev.pem @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg9CXTA+ckwlbRXIPx +jH2M2A8qIv0edRVA9zDM2GL1i7ahRANCAAQsUvdTeXbrxwoZ079ZY67F4292WsQ4 +/XDHJtursur+I0bTow9ARTiJXLDeWwRiaVo5uujewBLutxhK45xwYLFJ +-----END PRIVATE KEY----- diff --git a/etc/issuer-directory/issuer-directory.yaml b/etc/issuer-directory/issuer-directory.yaml new file mode 100644 index 000000000..618d056b1 --- /dev/null +++ b/etc/issuer-directory/issuer-directory.yaml @@ -0,0 +1,21 @@ +IssuerDirectory: + # Override connection secrets via environment variables (ISSUERDIRECTORY__MONGO__*) + # rather than editing this file for production. 
+ telemetry: + minimumLogLevel: Information + authority: + enabled: true + issuer: http://authority.stella-ops.local + requireHttpsMetadata: false + audiences: + - stellaops-platform + readScope: issuer-directory:read + writeScope: issuer-directory:write + adminScope: issuer-directory:admin + tenantHeader: X-StellaOps-Tenant + seedCsafPublishers: true + csafSeedPath: data/csaf-publishers.json + Postgres: + connectionString: "Host=db.stella-ops.local;Port=5432;Database=stellaops_platform;Username=stellaops;Password=stellaops" + schema: issuer + commandTimeoutSeconds: 30 diff --git a/scripts/build-all-solutions.ps1 b/scripts/build-all-solutions.ps1 new file mode 100644 index 000000000..b0d1b0cd0 --- /dev/null +++ b/scripts/build-all-solutions.ps1 @@ -0,0 +1,94 @@ +#!/usr/bin/env pwsh +<# +.SYNOPSIS + Builds (and optionally tests) all module solutions under src/. + +.DESCRIPTION + Discovers all *.sln files under src/ (excluding the root StellaOps.sln) + and runs dotnet build on each. Pass -Test to also run dotnet test. + +.PARAMETER Test + Also run dotnet test on each solution after building. + +.PARAMETER Configuration + Build configuration. Defaults to Debug. + +.EXAMPLE + .\scripts\build-all-solutions.ps1 + .\scripts\build-all-solutions.ps1 -Test + .\scripts\build-all-solutions.ps1 -Test -Configuration Release +#> +[CmdletBinding()] +param( + [switch]$Test, + [ValidateSet('Debug', 'Release')] + [string]$Configuration = 'Debug' +) + +Set-StrictMode -Version Latest +$ErrorActionPreference = 'Continue' + +$repoRoot = Split-Path -Parent $PSScriptRoot +$srcDir = Join-Path $repoRoot 'src' + +$solutions = Get-ChildItem -Path $srcDir -Filter '*.sln' -Recurse | + Where-Object { $_.Name -ne 'StellaOps.sln' } | + Sort-Object FullName + +if ($solutions.Count -eq 0) { + Write-Error 'No solution files found under src/.' + exit 1 +} + +Write-Host "Found $($solutions.Count) solution(s) to build." 
-ForegroundColor Cyan +Write-Host '' + +$buildPass = @() +$buildFail = @() +$testPass = @() +$testFail = @() +$testSkipped = @() + +foreach ($sln in $solutions) { + $rel = [System.IO.Path]::GetRelativePath($repoRoot, $sln.FullName) + Write-Host "--- BUILD: $rel ---" -ForegroundColor Yellow + + dotnet build $sln.FullName --configuration $Configuration --nologo -v quiet + if ($LASTEXITCODE -eq 0) { + $buildPass += $rel + } else { + $buildFail += $rel + Write-Host " FAILED" -ForegroundColor Red + continue # skip test if build failed + } + + if ($Test) { + Write-Host "--- TEST: $rel ---" -ForegroundColor Yellow + dotnet test $sln.FullName --configuration $Configuration --nologo --no-build -v quiet + if ($LASTEXITCODE -eq 0) { + $testPass += $rel + } else { + $testFail += $rel + Write-Host " TEST FAILED" -ForegroundColor Red + } + } +} + +Write-Host '' +Write-Host '========== Summary ==========' -ForegroundColor Cyan +Write-Host "Build passed : $($buildPass.Count)" -ForegroundColor Green +if ($buildFail.Count -gt 0) { + Write-Host "Build failed : $($buildFail.Count)" -ForegroundColor Red + $buildFail | ForEach-Object { Write-Host " - $_" -ForegroundColor Red } +} +if ($Test) { + Write-Host "Test passed : $($testPass.Count)" -ForegroundColor Green + if ($testFail.Count -gt 0) { + Write-Host "Test failed : $($testFail.Count)" -ForegroundColor Red + $testFail | ForEach-Object { Write-Host " - $_" -ForegroundColor Red } + } +} + +if ($buildFail.Count -gt 0 -or $testFail.Count -gt 0) { + exit 1 +} diff --git a/scripts/build-all-solutions.sh b/scripts/build-all-solutions.sh new file mode 100644 index 000000000..2fb26aab1 --- /dev/null +++ b/scripts/build-all-solutions.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +# +# Build (and optionally test) all module solutions under src/. +# +# Usage: +# ./scripts/build-all-solutions.sh # build only +# ./scripts/build-all-solutions.sh --test # build + test +# ./scripts/build-all-solutions.sh --test --configuration Release + +set -euo pipefail + +REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" +SRC_DIR="$REPO_ROOT/src" + +RUN_TESTS=false +CONFIGURATION="Debug" + +while [[ $# -gt 0 ]]; do + case "$1" in + --test|-t) RUN_TESTS=true; shift ;; + --configuration|-c) CONFIGURATION="$2"; shift 2 ;; + *) echo "Unknown option: $1" >&2; exit 1 ;; + esac +done + +# Discover solutions (exclude root StellaOps.sln) +mapfile -t SOLUTIONS < <(find "$SRC_DIR" -name '*.sln' ! -name 'StellaOps.sln' | sort) + +if [[ ${#SOLUTIONS[@]} -eq 0 ]]; then + echo "ERROR: No solution files found under src/." >&2 + exit 1 +fi + +echo "Found ${#SOLUTIONS[@]} solution(s) to build." 
+echo "" + +build_pass=() +build_fail=() +test_pass=() +test_fail=() + +for sln in "${SOLUTIONS[@]}"; do + rel="${sln#"$REPO_ROOT/"}" + echo "--- BUILD: $rel ---" + + if dotnet build "$sln" --configuration "$CONFIGURATION" --nologo -v quiet; then + build_pass+=("$rel") + else + build_fail+=("$rel") + echo " FAILED" + continue + fi + + if $RUN_TESTS; then + echo "--- TEST: $rel ---" + if dotnet test "$sln" --configuration "$CONFIGURATION" --nologo --no-build -v quiet; then + test_pass+=("$rel") + else + test_fail+=("$rel") + echo " TEST FAILED" + fi + fi +done + +echo "" +echo "========== Summary ==========" +echo "Build passed : ${#build_pass[@]}" +if [[ ${#build_fail[@]} -gt 0 ]]; then + echo "Build failed : ${#build_fail[@]}" + for f in "${build_fail[@]}"; do echo " - $f"; done +fi +if $RUN_TESTS; then + echo "Test passed : ${#test_pass[@]}" + if [[ ${#test_fail[@]} -gt 0 ]]; then + echo "Test failed : ${#test_fail[@]}" + for f in "${test_fail[@]}"; do echo " - $f"; done + fi +fi + +if [[ ${#build_fail[@]} -gt 0 ]] || [[ ${#test_fail[@]} -gt 0 ]]; then + exit 1 +fi diff --git a/scripts/setup.ps1 b/scripts/setup.ps1 new file mode 100644 index 000000000..b9bcda152 --- /dev/null +++ b/scripts/setup.ps1 @@ -0,0 +1,337 @@ +#!/usr/bin/env pwsh +<# +.SYNOPSIS + Automated developer environment setup for Stella Ops (Windows). +.DESCRIPTION + Validates prerequisites, starts infrastructure, builds solutions and Docker images, + and launches the full platform. +.PARAMETER SkipBuild + Skip .NET solution builds. +.PARAMETER InfraOnly + Only start infrastructure containers (PostgreSQL, Valkey, SeaweedFS, Rekor, Zot). +.PARAMETER ImagesOnly + Only build Docker images (skip infra start and .NET build). +.PARAMETER SkipImages + Skip Docker image builds. +#> +[CmdletBinding()] +param( + [switch]$SkipBuild, + [switch]$InfraOnly, + [switch]$ImagesOnly, + [switch]$SkipImages +) + +$ErrorActionPreference = 'Stop' + +$Root = git rev-parse --show-toplevel 2>$null +if (-not $Root) { + Write-Error 'Not inside a git repository. Run this script from within the Stella Ops repo.' + exit 1 +} +$Root = $Root.Trim() + +$ComposeDir = Join-Path $Root 'devops/compose' + +# ─── Helpers ──────────────────────────────────────────────────────────────── + +function Write-Step([string]$msg) { + Write-Host "`n>> $msg" -ForegroundColor Cyan +} + +function Write-Ok([string]$msg) { + Write-Host " [OK] $msg" -ForegroundColor Green +} + +function Write-Warn([string]$msg) { + Write-Host " [WARN] $msg" -ForegroundColor Yellow +} + +function Write-Fail([string]$msg) { + Write-Host " [FAIL] $msg" -ForegroundColor Red +} + +function Test-Command([string]$cmd) { + return [bool](Get-Command $cmd -ErrorAction SilentlyContinue) +} + +# ─── 1. Check prerequisites ──────────────────────────────────────────────── + +function Test-Prerequisites { + Write-Step 'Checking prerequisites' + $allGood = $true + + # dotnet + if (Test-Command 'dotnet') { + $v = (dotnet --version 2>$null) + if ($v -match '^10\.') { + Write-Ok "dotnet $v" + } else { + Write-Fail "dotnet $v found, but 10.x is required" + $allGood = $false + } + } else { + Write-Fail 'dotnet SDK not found. Install .NET 10 SDK.' + $allGood = $false + } + + # node + if (Test-Command 'node') { + $v = (node --version 2>$null).TrimStart('v') + $major = [int]($v -split '\.')[0] + if ($major -ge 20) { + Write-Ok "node $v" + } else { + Write-Fail "node $v found, but 20+ is required" + $allGood = $false + } + } else { + Write-Fail 'node not found. Install Node.js 20+.' 
+ $allGood = $false + } + + # npm + if (Test-Command 'npm') { + $v = (npm --version 2>$null) + $major = [int]($v -split '\.')[0] + if ($major -ge 10) { + Write-Ok "npm $v" + } else { + Write-Fail "npm $v found, but 10+ is required" + $allGood = $false + } + } else { + Write-Fail 'npm not found.' + $allGood = $false + } + + # docker + if (Test-Command 'docker') { + $v = (docker --version 2>$null) + Write-Ok "docker: $v" + } else { + Write-Fail 'docker not found. Install Docker Desktop.' + $allGood = $false + } + + # docker compose + $composeOk = $false + try { + $null = docker compose version 2>$null + if ($LASTEXITCODE -eq 0) { $composeOk = $true } + } catch {} + if ($composeOk) { + Write-Ok 'docker compose available' + } else { + Write-Fail 'docker compose not available. Ensure Docker Desktop includes Compose V2.' + $allGood = $false + } + + # git + if (Test-Command 'git') { + Write-Ok "git $(git --version 2>$null)" + } else { + Write-Fail 'git not found.' + $allGood = $false + } + + if (-not $allGood) { + Write-Error 'Prerequisites not met. Install missing tools and re-run.' + exit 1 + } +} + +# ─── 2. Check hosts file ─────────────────────────────────────────────────── + +function Test-HostsFile { + Write-Step 'Checking hosts file for stella-ops.local entries' + $hostsPath = 'C:\Windows\System32\drivers\etc\hosts' + if (Test-Path $hostsPath) { + $content = Get-Content $hostsPath -Raw + if ($content -match 'stella-ops\.local') { + Write-Ok 'stella-ops.local entries found in hosts file' + } else { + Write-Warn 'stella-ops.local entries NOT found in hosts file.' + Write-Host ' Add the hosts block from docs/dev/DEV_ENVIRONMENT_SETUP.md section 2' -ForegroundColor Yellow + Write-Host ' to C:\Windows\System32\drivers\etc\hosts (run editor as Administrator)' -ForegroundColor Yellow + } + } else { + Write-Warn "Cannot read hosts file at $hostsPath" + } +} + +# ─── 3. Ensure .env ──────────────────────────────────────────────────────── + +function Initialize-EnvFile { + Write-Step 'Ensuring .env file exists' + $envFile = Join-Path $ComposeDir '.env' + $envExample = Join-Path $ComposeDir 'env/stellaops.env.example' + + if (Test-Path $envFile) { + Write-Ok ".env already exists at $envFile" + } elseif (Test-Path $envExample) { + Copy-Item $envExample $envFile + Write-Ok "Copied $envExample -> $envFile" + Write-Warn 'Review .env and change POSTGRES_PASSWORD at minimum.' + } else { + Write-Fail "Neither .env nor env/stellaops.env.example found in $ComposeDir" + exit 1 + } +} + +# ─── 4. Start infrastructure ─────────────────────────────────────────────── + +function Start-Infrastructure { + Write-Step 'Starting infrastructure containers (docker-compose.dev.yml)' + Push-Location $ComposeDir + try { + docker compose -f docker-compose.dev.yml up -d + if ($LASTEXITCODE -ne 0) { + Write-Fail 'Failed to start infrastructure containers.' + exit 1 + } + + Write-Host ' Waiting for containers to become healthy...' 
-ForegroundColor Gray + $maxWait = 120 + $elapsed = 0 + while ($elapsed -lt $maxWait) { + $ps = docker compose -f docker-compose.dev.yml ps --format json 2>$null + if ($ps) { + $allHealthy = $true + # docker compose ps --format json outputs one JSON object per line + foreach ($line in $ps -split "`n") { + $line = $line.Trim() + if (-not $line) { continue } + try { + $svc = $line | ConvertFrom-Json + if ($svc.Health -and $svc.Health -ne 'healthy') { + $allHealthy = $false + } + } catch {} + } + if ($allHealthy -and $elapsed -gt 5) { + Write-Ok 'All infrastructure containers healthy' + return + } + } + Start-Sleep -Seconds 5 + $elapsed += 5 + } + Write-Warn "Timed out waiting for healthy status after ${maxWait}s. Check with: docker compose -f docker-compose.dev.yml ps" + } + finally { + Pop-Location + } +} + +# ─── 5. Build .NET solutions ─────────────────────────────────────────────── + +function Build-Solutions { + Write-Step 'Building all .NET solutions' + $buildScript = Join-Path $Root 'scripts/build-all-solutions.ps1' + if (Test-Path $buildScript) { + & $buildScript + if ($LASTEXITCODE -ne 0) { + Write-Fail '.NET solution build failed.' + exit 1 + } + Write-Ok '.NET solutions built successfully' + } else { + Write-Warn "Build script not found at $buildScript. Skipping .NET build." + } +} + +# ─── 6. Build Docker images ──────────────────────────────────────────────── + +function Build-Images { + Write-Step 'Building Docker images' + $buildScript = Join-Path $Root 'devops/docker/build-all.ps1' + if (Test-Path $buildScript) { + & $buildScript + if ($LASTEXITCODE -ne 0) { + Write-Fail 'Docker image build failed.' + exit 1 + } + Write-Ok 'Docker images built successfully' + } else { + Write-Warn "Build script not found at $buildScript. Skipping image build." + } +} + +# ─── 7. Start full platform ──────────────────────────────────────────────── + +function Start-Platform { + Write-Step 'Starting full Stella Ops platform' + Push-Location $ComposeDir + try { + docker compose -f docker-compose.stella-ops.yml up -d + if ($LASTEXITCODE -ne 0) { + Write-Fail 'Failed to start platform services.' + exit 1 + } + Write-Ok 'Platform services started' + } + finally { + Pop-Location + } +} + +# ─── 8. Smoke test ───────────────────────────────────────────────────────── + +function Test-Smoke { + Write-Step 'Running smoke tests' + $endpoints = @( + @{ Name = 'PostgreSQL'; Cmd = { docker exec stellaops-dev-postgres pg_isready -U stellaops 2>$null; $LASTEXITCODE -eq 0 } }, + @{ Name = 'Valkey'; Cmd = { $r = docker exec stellaops-dev-valkey valkey-cli ping 2>$null; $r -eq 'PONG' } } + ) + + foreach ($ep in $endpoints) { + try { + $ok = & $ep.Cmd + if ($ok) { Write-Ok $ep.Name } else { Write-Warn "$($ep.Name) not responding" } + } catch { + Write-Warn "$($ep.Name) check failed: $_" + } + } +} + +# ─── Main ─────────────────────────────────────────────────────────────────── + +Write-Host '=============================================' -ForegroundColor Cyan +Write-Host ' Stella Ops Developer Environment Setup' -ForegroundColor Cyan +Write-Host '=============================================' -ForegroundColor Cyan + +Test-Prerequisites +Test-HostsFile + +if ($ImagesOnly) { + Build-Images + Write-Host "`nDone (images only)." -ForegroundColor Green + exit 0 +} + +Initialize-EnvFile +Start-Infrastructure + +if ($InfraOnly) { + Test-Smoke + Write-Host "`nDone (infra only). Infrastructure is running." 
-ForegroundColor Green + exit 0 +} + +if (-not $SkipBuild) { + Build-Solutions +} + +if (-not $SkipImages) { + Build-Images +} + +Start-Platform +Test-Smoke + +Write-Host "`n=============================================" -ForegroundColor Green +Write-Host ' Setup complete!' -ForegroundColor Green +Write-Host ' Platform: https://stella-ops.local' -ForegroundColor Green +Write-Host ' Docs: docs/dev/DEV_ENVIRONMENT_SETUP.md' -ForegroundColor Green +Write-Host '=============================================' -ForegroundColor Green diff --git a/scripts/setup.sh b/scripts/setup.sh new file mode 100644 index 000000000..6d104a135 --- /dev/null +++ b/scripts/setup.sh @@ -0,0 +1,294 @@ +#!/usr/bin/env bash +# Automated developer environment setup for Stella Ops (Linux/macOS). +# +# Usage: +# ./scripts/setup.sh [--skip-build] [--infra-only] [--images-only] [--skip-images] +set -euo pipefail + +# ─── Parse flags ──────────────────────────────────────────────────────────── + +SKIP_BUILD=false +INFRA_ONLY=false +IMAGES_ONLY=false +SKIP_IMAGES=false + +for arg in "$@"; do + case "$arg" in + --skip-build) SKIP_BUILD=true ;; + --infra-only) INFRA_ONLY=true ;; + --images-only) IMAGES_ONLY=true ;; + --skip-images) SKIP_IMAGES=true ;; + -h|--help) + echo "Usage: $0 [--skip-build] [--infra-only] [--images-only] [--skip-images]" + exit 0 + ;; + *) echo "Unknown flag: $arg" >&2; exit 1 ;; + esac +done + +ROOT=$(git rev-parse --show-toplevel 2>/dev/null || true) +if [[ -z "$ROOT" ]]; then + echo "ERROR: Not inside a git repository." >&2 + exit 1 +fi + +COMPOSE_DIR="${ROOT}/devops/compose" + +# ─── Helpers ──────────────────────────────────────────────────────────────── + +step() { printf '\n\033[1;36m>> %s\033[0m\n' "$1"; } +ok() { printf ' \033[0;32m[OK]\033[0m %s\n' "$1"; } +warn() { printf ' \033[0;33m[WARN]\033[0m %s\n' "$1"; } +fail() { printf ' \033[0;31m[FAIL]\033[0m %s\n' "$1"; } + +has_cmd() { command -v "$1" &>/dev/null; } + +# ─── 1. Check prerequisites ──────────────────────────────────────────────── + +check_prerequisites() { + step 'Checking prerequisites' + local all_good=true + + # dotnet + if has_cmd dotnet; then + local v; v=$(dotnet --version 2>/dev/null) + if [[ "$v" =~ ^10\. ]]; then + ok "dotnet $v" + else + fail "dotnet $v found, but 10.x is required" + all_good=false + fi + else + fail 'dotnet SDK not found. Install .NET 10 SDK.' + all_good=false + fi + + # node + if has_cmd node; then + local v; v=$(node --version 2>/dev/null | sed 's/^v//') + local major; major=$(echo "$v" | cut -d. -f1) + if (( major >= 20 )); then + ok "node $v" + else + fail "node $v found, but 20+ is required" + all_good=false + fi + else + fail 'node not found. Install Node.js 20+.' + all_good=false + fi + + # npm + if has_cmd npm; then + local v; v=$(npm --version 2>/dev/null) + local major; major=$(echo "$v" | cut -d. -f1) + if (( major >= 10 )); then + ok "npm $v" + else + fail "npm $v found, but 10+ is required" + all_good=false + fi + else + fail 'npm not found.' + all_good=false + fi + + # docker + if has_cmd docker; then + ok "docker: $(docker --version 2>/dev/null)" + else + fail 'docker not found. Install Docker.' + all_good=false + fi + + # docker compose + if docker compose version &>/dev/null; then + ok 'docker compose available' + else + fail 'docker compose not available. Install Compose V2.' + all_good=false + fi + + # git + if has_cmd git; then + ok "$(git --version 2>/dev/null)" + else + fail 'git not found.' 
+ all_good=false + fi + + if [[ "$all_good" != "true" ]]; then + echo 'ERROR: Prerequisites not met. Install missing tools and re-run.' >&2 + exit 1 + fi +} + +# ─── 2. Check hosts file ─────────────────────────────────────────────────── + +check_hosts() { + step 'Checking hosts file for stella-ops.local entries' + if grep -q 'stella-ops\.local' /etc/hosts 2>/dev/null; then + ok 'stella-ops.local entries found in /etc/hosts' + else + warn 'stella-ops.local entries NOT found in /etc/hosts.' + echo ' Add the hosts block from docs/dev/DEV_ENVIRONMENT_SETUP.md section 2' + echo ' to /etc/hosts (use sudo).' + fi +} + +# ─── 3. Ensure .env ──────────────────────────────────────────────────────── + +ensure_env() { + step 'Ensuring .env file exists' + local env_file="${COMPOSE_DIR}/.env" + local env_example="${COMPOSE_DIR}/env/stellaops.env.example" + + if [[ -f "$env_file" ]]; then + ok ".env already exists at $env_file" + elif [[ -f "$env_example" ]]; then + cp "$env_example" "$env_file" + ok "Copied $env_example -> $env_file" + warn 'Review .env and change POSTGRES_PASSWORD at minimum.' + else + fail "Neither .env nor env/stellaops.env.example found in $COMPOSE_DIR" + exit 1 + fi +} + +# ─── 4. Start infrastructure ─────────────────────────────────────────────── + +start_infra() { + step 'Starting infrastructure containers (docker-compose.dev.yml)' + cd "$COMPOSE_DIR" + + docker compose -f docker-compose.dev.yml up -d + + echo ' Waiting for containers to become healthy...' + local max_wait=120 + local elapsed=0 + while (( elapsed < max_wait )); do + local all_healthy=true + while IFS= read -r line; do + [[ -z "$line" ]] && continue + local health; health=$(echo "$line" | python3 -c "import sys,json; print(json.load(sys.stdin).get('Health',''))" 2>/dev/null || true) + if [[ -n "$health" && "$health" != "healthy" ]]; then + all_healthy=false + fi + done < <(docker compose -f docker-compose.dev.yml ps --format json 2>/dev/null) + + if [[ "$all_healthy" == "true" && $elapsed -gt 5 ]]; then + ok 'All infrastructure containers healthy' + cd "$ROOT" + return + fi + sleep 5 + elapsed=$((elapsed + 5)) + done + warn "Timed out waiting for healthy status after ${max_wait}s." + cd "$ROOT" +} + +# ─── 5. Build .NET solutions ─────────────────────────────────────────────── + +build_solutions() { + step 'Building all .NET solutions' + local script="${ROOT}/scripts/build-all-solutions.sh" + if [[ -x "$script" ]]; then + "$script" + ok '.NET solutions built successfully' + elif [[ -f "$script" ]]; then + bash "$script" + ok '.NET solutions built successfully' + else + warn "Build script not found at $script. Skipping .NET build." + fi +} + +# ─── 6. Build Docker images ──────────────────────────────────────────────── + +build_images() { + step 'Building Docker images' + local script="${ROOT}/devops/docker/build-all.sh" + if [[ -x "$script" ]]; then + "$script" + ok 'Docker images built successfully' + elif [[ -f "$script" ]]; then + bash "$script" + ok 'Docker images built successfully' + else + warn "Build script not found at $script. Skipping image build." + fi +} + +# ─── 7. Start full platform ──────────────────────────────────────────────── + +start_platform() { + step 'Starting full Stella Ops platform' + cd "$COMPOSE_DIR" + docker compose -f docker-compose.stella-ops.yml up -d + ok 'Platform services started' + cd "$ROOT" +} + +# ─── 8. 
Smoke test ───────────────────────────────────────────────────────── + +smoke_test() { + step 'Running smoke tests' + + if docker exec stellaops-dev-postgres pg_isready -U stellaops &>/dev/null; then + ok 'PostgreSQL' + else + warn 'PostgreSQL not responding' + fi + + local pong; pong=$(docker exec stellaops-dev-valkey valkey-cli ping 2>/dev/null || true) + if [[ "$pong" == "PONG" ]]; then + ok 'Valkey' + else + warn 'Valkey not responding' + fi +} + +# ─── Main ─────────────────────────────────────────────────────────────────── + +echo '=============================================' +echo ' Stella Ops Developer Environment Setup' +echo '=============================================' + +check_prerequisites +check_hosts + +if [[ "$IMAGES_ONLY" == "true" ]]; then + build_images + echo '' + echo 'Done (images only).' + exit 0 +fi + +ensure_env +start_infra + +if [[ "$INFRA_ONLY" == "true" ]]; then + smoke_test + echo '' + echo 'Done (infra only). Infrastructure is running.' + exit 0 +fi + +if [[ "$SKIP_BUILD" != "true" ]]; then + build_solutions +fi + +if [[ "$SKIP_IMAGES" != "true" ]]; then + build_images +fi + +start_platform +smoke_test + +echo '' +echo '=============================================' +echo ' Setup complete!' +echo ' Platform: https://stella-ops.local' +echo ' Docs: docs/dev/DEV_ENVIRONMENT_SETUP.md' +echo '=============================================' diff --git a/src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj b/src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj index 2044d657c..d1edbf49c 100644 --- a/src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj +++ b/src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj @@ -19,5 +19,6 @@ + diff --git a/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/Program.cs b/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/Program.cs index 3d607a76b..8c3f35ca3 100644 --- a/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/Program.cs +++ b/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/Program.cs @@ -4,8 +4,9 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using StellaOps.AdvisoryAI.Hosting; using StellaOps.AdvisoryAI.Worker.Services; +using StellaOps.Worker.Health; -var builder = Microsoft.Extensions.Hosting.Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Configuration .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) @@ -16,5 +17,8 @@ builder.Services.AddAdvisoryAiCore(builder.Configuration); builder.Services.AddSingleton(); builder.Services.AddHostedService(); -var host = builder.Build(); -await host.RunAsync(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync(); diff --git a/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj b/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj index 66c6ba7c7..97e001e9a 100644 --- a/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj +++ b/src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj @@ -6,12 +6,16 @@ enable true + + + + diff --git a/src/AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj b/src/AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj index 67924f025..865a2ae77 100644 --- a/src/AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj +++ 
b/src/AirGap/StellaOps.AirGap.Controller/StellaOps.AirGap.Controller.csproj @@ -10,5 +10,6 @@ + diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.Helpers.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.Helpers.cs new file mode 100644 index 000000000..c6757eb7b --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.Helpers.cs @@ -0,0 +1,61 @@ +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp; +using Microsoft.CodeAnalysis.Diagnostics; +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Net.Http; +using System.Reflection; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class HttpClientUsageAnalyzerTests +{ + private static async Task<ImmutableArray<Diagnostic>> AnalyzeAsync(string source, string assemblyName) + { + var compilation = CSharpCompilation.Create( + assemblyName, + new[] + { + CSharpSyntaxTree.ParseText(source), + CSharpSyntaxTree.ParseText(PolicyStubSource), + }, + CreateMetadataReferences(), + new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); + + var analyzer = new HttpClientUsageAnalyzer(); + var compilationWithAnalyzers = compilation.WithAnalyzers(ImmutableArray.Create<DiagnosticAnalyzer>(analyzer)); + return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync().ConfigureAwait(false); + } + + private static IEnumerable<MetadataReference> CreateMetadataReferences() + { + yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(Uri).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(HttpClient).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location); + } + + private const string PolicyStubSource = """ + namespace StellaOps.AirGap.Policy + { + public interface IEgressPolicy + { + void EnsureAllowed(EgressRequest request); + } + + public readonly record struct EgressRequest(string Component, System.Uri Destination, string Intent); + + public static class EgressHttpClientFactory + { + public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request) + => throw new System.NotImplementedException(); + + public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request, System.Func<System.Net.Http.HttpClient> clientFactory) + => throw new System.NotImplementedException(); + } + } + """; +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.cs index 1fe44fcd0..9a0279527 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/HttpClientUsageAnalyzerTests.cs @@ -1,28 +1,14 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Net.Http; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.CodeAnalysis; -using Microsoft.CodeAnalysis.CodeActions; -using Microsoft.CodeAnalysis.CodeFixes; -using
Microsoft.CodeAnalysis.CSharp; -using Microsoft.CodeAnalysis.Diagnostics; -using Microsoft.CodeAnalysis.Text; +using StellaOps.TestKit; +using System; using Xunit; - -using StellaOps.TestKit; namespace StellaOps.AirGap.Policy.Analyzers.Tests; -public sealed class HttpClientUsageAnalyzerTests +public sealed partial class HttpClientUsageAnalyzerTests { [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task ReportsDiagnostic_ForNewHttpClient() + [Fact] + public async Task ReportsDiagnostic_ForNewHttpClientAsync() { const string source = """ using System.Net.Http; @@ -43,8 +29,8 @@ public sealed class HttpClientUsageAnalyzerTests } [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task DoesNotReportDiagnostic_InsidePolicyAssembly() + [Fact] + public async Task DoesNotReportDiagnostic_InsidePolicyAssemblyAsync() { const string source = """ using System.Net.Http; @@ -62,8 +48,11 @@ public sealed class HttpClientUsageAnalyzerTests } [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task DoesNotReportDiagnostic_ForTestingAssemblyNames() + [Theory] + [InlineData("Sample.App.Testing")] + [InlineData("Sample.App.Test")] + [InlineData("Sample.App.Tests")] + public async Task DoesNotReportDiagnostic_ForTestAssemblyNamesAsync(string assemblyName) { const string source = """ using System.Net.Http; @@ -79,53 +68,7 @@ public sealed class HttpClientUsageAnalyzerTests } """; - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App.Testing"); + var diagnostics = await AnalyzeAsync(source, assemblyName); Assert.DoesNotContain(diagnostics, d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); } - - private static async Task> AnalyzeAsync(string source, string assemblyName) - { - var compilation = CSharpCompilation.Create( - assemblyName, - new[] - { - CSharpSyntaxTree.ParseText(source), - CSharpSyntaxTree.ParseText(PolicyStubSource), - }, - CreateMetadataReferences(), - new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); - - var analyzer = new HttpClientUsageAnalyzer(); - var compilationWithAnalyzers = compilation.WithAnalyzers(ImmutableArray.Create(analyzer)); - return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync(); - } - - private static IEnumerable CreateMetadataReferences() - { - yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(Uri).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(HttpClient).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location); - } - - private const string PolicyStubSource = """ - namespace StellaOps.AirGap.Policy - { - public interface IEgressPolicy - { - void EnsureAllowed(EgressRequest request); - } - - public readonly record struct EgressRequest(string Component, System.Uri Destination, string Intent); - - public static class EgressHttpClientFactory - { - public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request) - => throw new System.NotImplementedException(); - - public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request, System.Func clientFactory) - => throw new System.NotImplementedException(); - } - } - """; } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Allowed.cs 
b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Allowed.cs new file mode 100644 index 000000000..d11afb005 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Allowed.cs @@ -0,0 +1,89 @@ +using FluentAssertions; +using StellaOps.TestKit; +using System; +using Xunit; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task NoDiagnostic_ForHttpClientParameterAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + public void Run(HttpClient client) + { + client.GetStringAsync("https://example.com"); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, + "Using HttpClient as parameter should not trigger diagnostic"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task NoDiagnostic_ForHttpClientFieldAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + private HttpClient? _client; + + public void SetClient(HttpClient client) + { + _client = client; + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, + "Declaring HttpClient field should not trigger diagnostic"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task NoDiagnostic_ForFactoryMethodReturnAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public interface IHttpClientFactory + { + HttpClient CreateClient(string name); + } + + public sealed class Demo + { + private readonly IHttpClientFactory _factory; + + public Demo(IHttpClientFactory factory) => _factory = factory; + + public void Run() + { + var client = _factory.CreateClient("default"); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, + "Using factory method should not trigger diagnostic"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.AllowedAssemblies.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.AllowedAssemblies.cs new file mode 100644 index 000000000..e02b61aa1 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.AllowedAssemblies.cs @@ -0,0 +1,52 @@ +using FluentAssertions; +using StellaOps.TestKit; +using System; +using Xunit; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task NoDiagnostic_InTestAssemblyAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App.Tests; + + public sealed class DemoTests + { + public void TestMethod() + { + var client = new HttpClient(); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App.Tests"); + diagnostics.Should().NotContain(d 
=> d.Id == HttpClientUsageAnalyzer.DiagnosticId, + "Test assemblies should be exempt from diagnostic"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task NoDiagnostic_InPolicyAssemblyAsync() + { + const string source = """ + using System.Net.Http; + + namespace StellaOps.AirGap.Policy.Internal; + + internal static class Loopback + { + public static HttpClient Create() => new HttpClient(); + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "StellaOps.AirGap.Policy"); + diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, + "Policy assembly itself should be exempt"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Construction.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Construction.cs new file mode 100644 index 000000000..85cbfdb85 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Construction.cs @@ -0,0 +1,42 @@ +// ----------------------------------------------------------------------------- +// PolicyAnalyzerRoslynTests - AN1 Roslyn compilation tests for AirGap.Policy.Analyzers +// ----------------------------------------------------------------------------- +using FluentAssertions; +using StellaOps.TestKit; +using System; +using System.Linq; +using Xunit; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData("var client = new HttpClient();", true, "Direct construction should trigger diagnostic")] + [InlineData("var client = new System.Net.Http.HttpClient();", true, "Fully qualified construction should trigger diagnostic")] + [InlineData("HttpClient client = new();", true, "Target-typed new should trigger diagnostic")] + [InlineData("object client = new HttpClient();", true, "Implicit cast construction should trigger diagnostic")] + [InlineData("var client = new HttpClient(new HttpClientHandler());", true, "Handler construction should trigger diagnostic")] + public async Task DiagnosticTriggered_ForVariousHttpClientConstructionsAsync(string statement, bool shouldTrigger, string reason) + { + var source = $$""" + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + public void Run() + { + {{statement}} + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + var hasDiagnostic = diagnostics.Any(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); + + hasDiagnostic.Should().Be(shouldTrigger, reason); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Metadata.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Metadata.cs new file mode 100644 index 000000000..6544d83b3 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Diagnostics.Metadata.cs @@ -0,0 +1,96 @@ +using FluentAssertions; +using Microsoft.CodeAnalysis; +using StellaOps.TestKit; +using System; +using System.Linq; +using Xunit; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async 
Task Diagnostic_HasCorrectSeverityAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + public void Run() + { + var client = new HttpClient(); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + var airgapDiagnostic = diagnostics.Single(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); + + airgapDiagnostic.Severity.Should().Be(DiagnosticSeverity.Warning, + "Diagnostic should be a warning, not an error"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Diagnostic_HasCorrectLocationAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + public void Run() + { + var client = new HttpClient(); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + var airgapDiagnostic = diagnostics.Single(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); + + airgapDiagnostic.Location.IsInSource.Should().BeTrue(); + var lineSpan = airgapDiagnostic.Location.GetLineSpan(); + lineSpan.StartLinePosition.Line.Should().Be(8, "Diagnostic should point to line 9 (0-indexed: 8)"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task MultipleHttpClientUsages_ReportMultipleDiagnosticsAsync() + { + const string source = """ + using System.Net.Http; + + namespace Sample.App; + + public sealed class Demo + { + public void Method1() + { + var client = new HttpClient(); + } + + public void Method2() + { + var client = new HttpClient(); + } + + public void Method3() + { + var client = new HttpClient(); + } + } + """; + + var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); + var airgapDiagnostics = diagnostics.Where(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId).ToList(); + + airgapDiagnostics.Should().HaveCount(3, "Each new HttpClient() should trigger a separate diagnostic"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Golden.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Golden.cs new file mode 100644 index 000000000..874dfc8c5 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Golden.cs @@ -0,0 +1,19 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Analyzer_SupportedDiagnostics_ContainsExpectedId() + { + var analyzer = new HttpClientUsageAnalyzer(); + var supportedDiagnostics = analyzer.SupportedDiagnostics; + + supportedDiagnostics.Should().HaveCount(1); + supportedDiagnostics[0].Id.Should().Be("AIRGAP001"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Helpers.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Helpers.cs new file mode 100644 index 000000000..679d4640c --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.Helpers.cs @@ -0,0 +1,75 @@ +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp; +using Microsoft.CodeAnalysis.Diagnostics; +using System; +using System.Collections.Generic; +using 
System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Reflection; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Policy.Analyzers.Tests; + +public sealed partial class PolicyAnalyzerRoslynTests +{ + private static async Task<ImmutableArray<Diagnostic>> AnalyzeAsync(string source, string assemblyName) + { + var compilation = CSharpCompilation.Create( + assemblyName, + new[] + { + CSharpSyntaxTree.ParseText(source), + CSharpSyntaxTree.ParseText(PolicyStubSource), + }, + CreateMetadataReferences(), + new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); + + var analyzer = new HttpClientUsageAnalyzer(); + var compilationWithAnalyzers = compilation.WithAnalyzers(ImmutableArray.Create<DiagnosticAnalyzer>(analyzer)); + return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync().ConfigureAwait(false); + } + + private static IEnumerable<MetadataReference> CreateMetadataReferences() + { + yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(Uri).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(HttpClient).GetTypeInfo().Assembly.Location); + yield return MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location); + + var systemRuntimePath = Path.GetDirectoryName(typeof(object).GetTypeInfo().Assembly.Location); + if (!string.IsNullOrEmpty(systemRuntimePath)) + { + var netstandard = Path.Combine(systemRuntimePath, "netstandard.dll"); + if (File.Exists(netstandard)) + { + yield return MetadataReference.CreateFromFile(netstandard); + } + + var systemRuntime = Path.Combine(systemRuntimePath, "System.Runtime.dll"); + if (File.Exists(systemRuntime)) + { + yield return MetadataReference.CreateFromFile(systemRuntime); + } + } + } + + private const string PolicyStubSource = """ + namespace StellaOps.AirGap.Policy + { + public interface IEgressPolicy + { + void EnsureAllowed(EgressRequest request); + } + + public readonly record struct EgressRequest(string Component, System.Uri Destination, string Intent); + + public static class EgressHttpClientFactory + { + public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request) + => throw new System.NotImplementedException(); + } + } + """; +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.cs deleted file mode 100644 index e17e6666f..000000000 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/PolicyAnalyzerRoslynTests.cs +++ /dev/null @@ -1,356 +0,0 @@ -// ----------------------------------------------------------------------------- -// PolicyAnalyzerRoslynTests.cs -// Sprint: SPRINT_5100_0010_0004_airgap_tests -// Tasks: AIRGAP-5100-005, AIRGAP-5100-006 -// Description: AN1 Roslyn compilation tests for AirGap.Policy.Analyzers -// ----------------------------------------------------------------------------- - -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Net.Http; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.CodeAnalysis; -using Microsoft.CodeAnalysis.CodeActions; -using Microsoft.CodeAnalysis.CodeFixes; -using Microsoft.CodeAnalysis.CSharp; -using Microsoft.CodeAnalysis.Diagnostics; -using Microsoft.CodeAnalysis.Text;
-using Xunit; -using FluentAssertions; - - -using StellaOps.TestKit; -namespace StellaOps.AirGap.Policy.Analyzers.Tests; - -/// -/// AN1 Roslyn Compilation Tests for AirGap.Policy.Analyzers -/// Task AIRGAP-5100-005: Expected diagnostics, no false positives -/// Task AIRGAP-5100-006: Golden generated code tests for policy analyzers -/// -public sealed class PolicyAnalyzerRoslynTests -{ - #region AIRGAP-5100-005: Expected Diagnostics & No False Positives - - [Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData("var client = new HttpClient();", true, "Direct construction should trigger diagnostic")] - [InlineData("var client = new System.Net.Http.HttpClient();", true, "Fully qualified construction should trigger diagnostic")] - [InlineData("HttpClient client = new();", true, "Target-typed new should trigger diagnostic")] - [InlineData("object client = new HttpClient();", true, "Implicit cast construction should trigger diagnostic")] - public async Task DiagnosticTriggered_ForVariousHttpClientConstructions(string statement, bool shouldTrigger, string reason) - { - var source = $$""" - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - public void Run() - { - {{statement}} - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - var hasDiagnostic = diagnostics.Any(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); - - hasDiagnostic.Should().Be(shouldTrigger, reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task NoDiagnostic_ForHttpClientParameter() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - public void Run(HttpClient client) - { - // Using HttpClient as parameter - not constructing it - client.GetStringAsync("https://example.com"); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, - "Using HttpClient as parameter should not trigger diagnostic"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task NoDiagnostic_ForHttpClientField() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - private HttpClient? 
_client; - - public void SetClient(HttpClient client) - { - _client = client; - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, - "Declaring HttpClient field should not trigger diagnostic"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task NoDiagnostic_ForFactoryMethodReturn() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public interface IHttpClientFactory - { - HttpClient CreateClient(string name); - } - - public sealed class Demo - { - private readonly IHttpClientFactory _factory; - - public Demo(IHttpClientFactory factory) => _factory = factory; - - public void Run() - { - var client = _factory.CreateClient("default"); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, - "Using factory method should not trigger diagnostic"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task NoDiagnostic_InTestAssembly() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App.Tests; - - public sealed class DemoTests - { - public void TestMethod() - { - var client = new HttpClient(); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App.Tests"); - diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, - "Test assemblies should be exempt from diagnostic"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task NoDiagnostic_InPolicyAssembly() - { - const string source = """ - using System.Net.Http; - - namespace StellaOps.AirGap.Policy.Internal; - - internal static class Loopback - { - public static HttpClient Create() => new HttpClient(); - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "StellaOps.AirGap.Policy"); - diagnostics.Should().NotContain(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId, - "Policy assembly itself should be exempt"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Diagnostic_HasCorrectSeverity() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - public void Run() - { - var client = new HttpClient(); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - var airgapDiagnostic = diagnostics.Single(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); - - airgapDiagnostic.Severity.Should().Be(DiagnosticSeverity.Warning, - "Diagnostic should be a warning, not an error"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Diagnostic_HasCorrectLocation() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - public void Run() - { - var client = new HttpClient(); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - var airgapDiagnostic = diagnostics.Single(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId); - - airgapDiagnostic.Location.IsInSource.Should().BeTrue(); - var lineSpan = airgapDiagnostic.Location.GetLineSpan(); - lineSpan.StartLinePosition.Line.Should().Be(8, "Diagnostic should point to line 9 (0-indexed: 8)"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task 
MultipleHttpClientUsages_ReportMultipleDiagnostics() - { - const string source = """ - using System.Net.Http; - - namespace Sample.App; - - public sealed class Demo - { - public void Method1() - { - var client = new HttpClient(); - } - - public void Method2() - { - var client = new HttpClient(); - } - - public void Method3() - { - var client = new HttpClient(); - } - } - """; - - var diagnostics = await AnalyzeAsync(source, assemblyName: "Sample.App"); - var airgapDiagnostics = diagnostics.Where(d => d.Id == HttpClientUsageAnalyzer.DiagnosticId).ToList(); - - airgapDiagnostics.Should().HaveCount(3, "Each new HttpClient() should trigger a separate diagnostic"); - } - - #endregion - - #region AIRGAP-5100-006: Golden Generated Code Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Analyzer_SupportedDiagnostics_ContainsExpectedId() - { - var analyzer = new HttpClientUsageAnalyzer(); - var supportedDiagnostics = analyzer.SupportedDiagnostics; - - supportedDiagnostics.Should().HaveCount(1); - supportedDiagnostics[0].Id.Should().Be("AIRGAP001"); - } - - #endregion - - #region Test Helpers - - private static async Task> AnalyzeAsync(string source, string assemblyName) - { - var compilation = CSharpCompilation.Create( - assemblyName, - new[] - { - CSharpSyntaxTree.ParseText(source), - CSharpSyntaxTree.ParseText(PolicyStubSource), - }, - CreateMetadataReferences(), - new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); - - var analyzer = new HttpClientUsageAnalyzer(); - var compilationWithAnalyzers = compilation.WithAnalyzers(ImmutableArray.Create(analyzer)); - return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync(); - } - - private static IEnumerable CreateMetadataReferences() - { - // Core runtime references - yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(Uri).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(HttpClient).GetTypeInfo().Assembly.Location); - yield return MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location); - - // Add System.Runtime for target-typed new - var systemRuntimePath = Path.GetDirectoryName(typeof(object).GetTypeInfo().Assembly.Location); - if (!string.IsNullOrEmpty(systemRuntimePath)) - { - var netstandard = Path.Combine(systemRuntimePath, "netstandard.dll"); - if (File.Exists(netstandard)) - { - yield return MetadataReference.CreateFromFile(netstandard); - } - - var systemRuntime = Path.Combine(systemRuntimePath, "System.Runtime.dll"); - if (File.Exists(systemRuntime)) - { - yield return MetadataReference.CreateFromFile(systemRuntime); - } - } - } - - private const string PolicyStubSource = """ - namespace StellaOps.AirGap.Policy - { - public interface IEgressPolicy - { - void EnsureAllowed(EgressRequest request); - } - - public readonly record struct EgressRequest(string Component, System.Uri Destination, string Intent); - - public static class EgressHttpClientFactory - { - public static System.Net.Http.HttpClient Create(IEgressPolicy egressPolicy, EgressRequest request) - => throw new System.NotImplementedException(); - } - } - """; - - #endregion -} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/TASKS.md b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/TASKS.md index ddc67e48a..4d120f2b1 100644 --- 
a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/TASKS.md @@ -1,10 +1,12 @@ # AirGap Policy Analyzers Tests Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. | Task ID | Status | Notes | | --- | --- | --- | | AUDIT-0032-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. | | AUDIT-0032-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. | | AUDIT-0032-A | DONE | Waived (test project; revalidated 2026-01-06). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers.Tests/StellaOps.AirGap.Policy.Analyzers.Tests.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Analysis.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Analysis.cs new file mode 100644 index 000000000..588f0256c --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Analysis.cs @@ -0,0 +1,65 @@ +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.Diagnostics; +using Microsoft.CodeAnalysis.Operations; +using System; + +namespace StellaOps.AirGap.Policy.Analyzers; + +public sealed partial class HttpClientUsageAnalyzer +{ + private static void AnalyzeObjectCreation(OperationAnalysisContext context) + { + if (context.Operation is not IObjectCreationOperation creation) + { + return; + } + + var httpClientSymbol = context.Compilation.GetTypeByMetadataName(HttpClientMetadataName); + if (httpClientSymbol is null) + { + return; + } + + var createdType = creation.Type; + if (createdType is null || !SymbolEqualityComparer.Default.Equals(createdType, httpClientSymbol)) + { + return; + } + + if (IsWithinAllowedAssembly(context.ContainingSymbol)) + { + return; + } + + context.ReportDiagnostic(CreateDiagnostic(creation.Syntax.GetLocation())); + } + + private static bool IsWithinAllowedAssembly(ISymbol? 
symbol) + { + var containingAssembly = symbol?.ContainingAssembly; + if (containingAssembly is null) + { + return false; + } + + var assemblyName = containingAssembly.Name; + if (string.IsNullOrEmpty(assemblyName)) + { + return false; + } + + if (string.Equals(assemblyName, "StellaOps.AirGap.Policy", StringComparison.Ordinal)) + { + return true; + } + + if (assemblyName.EndsWith(".Tests", StringComparison.OrdinalIgnoreCase) || + assemblyName.EndsWith(".Test", StringComparison.OrdinalIgnoreCase) || + assemblyName.EndsWith(".Testing", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Diagnostics.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Diagnostics.cs new file mode 100644 index 000000000..0b7e14a00 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.Diagnostics.cs @@ -0,0 +1,23 @@ +using Microsoft.CodeAnalysis; + +namespace StellaOps.AirGap.Policy.Analyzers; + +public sealed partial class HttpClientUsageAnalyzer +{ + private const string HttpClientMetadataName = "System.Net.Http.HttpClient"; + private static readonly LocalizableString _title = "Replace raw HttpClient with EgressPolicy-aware client"; + private static readonly LocalizableString _messageFormat = "Instantiate HttpClient via StellaOps.AirGap.Policy wrappers to enforce sealed-mode egress controls"; + private static readonly LocalizableString _description = "Air-gapped environments must route outbound network calls through the EgressPolicy facade so requests are pre-authorised. Replace raw HttpClient usage with the shared factory helpers."; + + private static readonly DiagnosticDescriptor _rule = new( + DiagnosticId, + _title, + _messageFormat, + "Usage", + DiagnosticSeverity.Warning, + isEnabledByDefault: true, + description: _description); + + private static Diagnostic CreateDiagnostic(Location location) + => Diagnostic.Create(_rule, location); +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.cs index f471fa319..26b030f8d 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/HttpClientUsageAnalyzer.cs @@ -1,7 +1,5 @@ - using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; -using Microsoft.CodeAnalysis.Operations; using System; using System.Collections.Immutable; @@ -11,34 +9,21 @@ namespace StellaOps.AirGap.Policy.Analyzers; /// Flags direct new HttpClient() usage so services adopt the air-gap aware egress policy wrappers. /// [DiagnosticAnalyzer(LanguageNames.CSharp)] -public sealed class HttpClientUsageAnalyzer : DiagnosticAnalyzer +public sealed partial class HttpClientUsageAnalyzer : DiagnosticAnalyzer { /// /// Diagnostic identifier emitted when disallowed HttpClient usage is detected. 
/// public const string DiagnosticId = "AIRGAP001"; - private const string HttpClientMetadataName = "System.Net.Http.HttpClient"; - private static readonly LocalizableString Title = "Replace raw HttpClient with EgressPolicy-aware client"; - private static readonly LocalizableString MessageFormat = "Instantiate HttpClient via StellaOps.AirGap.Policy wrappers to enforce sealed-mode egress controls"; - private static readonly LocalizableString Description = "Air-gapped environments must route outbound network calls through the EgressPolicy facade so requests are pre-authorised. Replace raw HttpClient usage with the shared factory helpers."; - - private static readonly DiagnosticDescriptor Rule = new( - DiagnosticId, - Title, - MessageFormat, - "Usage", - DiagnosticSeverity.Warning, - isEnabledByDefault: true, - description: Description); - /// - public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule); + public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics + => ImmutableArray.Create(_rule); /// public override void Initialize(AnalysisContext context) { - if (context == null) + if (context is null) { throw new ArgumentNullException(nameof(context)); } @@ -47,61 +32,4 @@ public sealed class HttpClientUsageAnalyzer : DiagnosticAnalyzer context.EnableConcurrentExecution(); context.RegisterOperationAction(AnalyzeObjectCreation, OperationKind.ObjectCreation); } - - private static void AnalyzeObjectCreation(OperationAnalysisContext context) - { - if (context.Operation is not IObjectCreationOperation creation) - { - return; - } - - var httpClientSymbol = context.Compilation.GetTypeByMetadataName(HttpClientMetadataName); - if (httpClientSymbol is null) - { - return; - } - - var createdType = creation.Type; - if (createdType is null || !SymbolEqualityComparer.Default.Equals(createdType, httpClientSymbol)) - { - return; - } - - if (IsWithinAllowedAssembly(context.ContainingSymbol)) - { - return; - } - - var diagnostic = Diagnostic.Create(Rule, creation.Syntax.GetLocation()); - context.ReportDiagnostic(diagnostic); - } - - private static bool IsWithinAllowedAssembly(ISymbol? symbol) - { - var containingAssembly = symbol?.ContainingAssembly; - if (containingAssembly is null) - { - return false; - } - - var assemblyName = containingAssembly.Name; - if (string.IsNullOrEmpty(assemblyName)) - { - return false; - } - - if (string.Equals(assemblyName, "StellaOps.AirGap.Policy", StringComparison.Ordinal)) - { - return true; - } - - if (assemblyName.EndsWith(".Tests", StringComparison.OrdinalIgnoreCase) || - assemblyName.EndsWith(".Test", StringComparison.OrdinalIgnoreCase) || - assemblyName.EndsWith(".Testing", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - return false; - } } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/TASKS.md b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/TASKS.md index 794989ab6..60d034612 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/TASKS.md @@ -1,10 +1,12 @@ # AirGap Policy Analyzers Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes | | --- | --- | --- | | AUDIT-0031-M | DONE | Revalidated 2026-01-06; no new findings. | | AUDIT-0031-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0032. | | AUDIT-0031-A | DONE | Applied analyzer symbol match, test assembly exemptions, and code-fix preservation. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Analyzers/StellaOps.AirGap.Policy.Analyzers.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Helpers.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Helpers.cs new file mode 100644 index 000000000..6cc029a13 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Helpers.cs @@ -0,0 +1,45 @@ +using StellaOps.AirGap.Policy; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Policy.Tests; + +public sealed partial class EgressPolicyTests +{ + private sealed class RecordingPolicy : IEgressPolicy + { + public bool EnsureAllowedCalled { get; private set; } + + public bool IsSealed => true; + + public EgressPolicyMode Mode => EgressPolicyMode.Sealed; + + public EgressDecision Evaluate(EgressRequest request) + { + EnsureAllowedCalled = true; + return EgressDecision.Allowed; + } + + public ValueTask<EgressDecision> EvaluateAsync( + EgressRequest request, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + return new ValueTask<EgressDecision>(Evaluate(request)); + } + + public void EnsureAllowed(EgressRequest request) + { + EnsureAllowedCalled = true; + } + + public ValueTask EnsureAllowedAsync( + EgressRequest request, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + EnsureAllowed(request); + return ValueTask.CompletedTask; + } + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.HttpClientFactory.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.HttpClientFactory.cs new file mode 100644 index 000000000..24bcd0b6a --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.HttpClientFactory.cs @@ -0,0 +1,22 @@ +using StellaOps.AirGap.Policy; +using StellaOps.TestKit; +using System; +using Xunit; + +namespace StellaOps.AirGap.Policy.Tests; + +public sealed partial class EgressPolicyTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void EgressHttpClientFactory_Create_EnforcesPolicyBeforeReturningClient() + { + var recordingPolicy = new RecordingPolicy(); + var request = new EgressRequest("Component", new Uri("https://allowed.internal"), "mirror-sync"); + + using var client = EgressHttpClientFactory.Create(recordingPolicy, request); + + Assert.True(recordingPolicy.EnsureAllowedCalled); + Assert.NotNull(client); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Network.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Network.cs new file mode 100644 index 000000000..c475cdea4 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.Network.cs @@ -0,0 +1,58 @@ +using StellaOps.AirGap.Policy; +using StellaOps.TestKit; +using
System; +using Xunit; + +namespace StellaOps.AirGap.Policy.Tests; + +public sealed partial class EgressPolicyTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void EnsureAllowed_SealedEnvironment_AllowsLoopbackWhenConfigured() + { + var options = new EgressPolicyOptions + { + Mode = EgressPolicyMode.Sealed, + AllowLoopback = true, + }; + + var policy = new EgressPolicy(options); + var request = new EgressRequest("PolicyEngine", new Uri("http://127.0.0.1:9000/health"), "local-probe"); + + policy.EnsureAllowed(request); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void EnsureAllowed_SealedEnvironment_AllowsPrivateNetworkWhenConfigured() + { + var options = new EgressPolicyOptions + { + Mode = EgressPolicyMode.Sealed, + AllowPrivateNetworks = true, + }; + + var policy = new EgressPolicy(options); + var request = new EgressRequest("PolicyEngine", new Uri("https://10.10.0.5:8443/status"), "mirror-sync"); + + policy.EnsureAllowed(request); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void EnsureAllowed_SealedEnvironment_BlocksPrivateNetworkWhenNotConfigured() + { + var options = new EgressPolicyOptions + { + Mode = EgressPolicyMode.Sealed, + AllowPrivateNetworks = false, + }; + + var policy = new EgressPolicy(options); + var request = new EgressRequest("PolicyEngine", new Uri("https://10.10.0.5:8443/status"), "mirror-sync"); + + var exception = Assert.Throws<AirGapEgressBlockedException>(() => policy.EnsureAllowed(request)); + Assert.Contains("10.10.0.5", exception.Message, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.RuleMatching.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.RuleMatching.cs new file mode 100644 index 000000000..e5ea9188e --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.RuleMatching.cs @@ -0,0 +1,29 @@ +using StellaOps.AirGap.Policy; +using StellaOps.TestKit; +using System; +using Xunit; + +namespace StellaOps.AirGap.Policy.Tests; + +public sealed partial class EgressPolicyTests +{ + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData("https://api.example.com", true)] + [InlineData("https://sub.api.example.com", true)] + [InlineData("https://example.com", false)] + public void Evaluate_SealedEnvironmentWildcardHost_Matches(string url, bool expectedAllowed) + { + var options = new EgressPolicyOptions + { + Mode = EgressPolicyMode.Sealed, + }; + options.AddAllowRule("*.example.com", transport: EgressTransport.Https); + + var policy = new EgressPolicy(options); + var request = new EgressRequest("PolicyEngine", new Uri(url), "mirror-sync"); + + var decision = policy.Evaluate(request); + Assert.Equal(expectedAllowed, decision.IsAllowed); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.ServiceCollection.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.ServiceCollection.cs new file mode 100644 index 000000000..0805c5705 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.ServiceCollection.cs @@ -0,0 +1,89 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Policy; +using StellaOps.TestKit; +using System; +using System.Collections.Generic; +using System.Linq; +using Xunit; + +namespace
StellaOps.AirGap.Policy.Tests; + +public sealed partial class EgressPolicyTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ServiceCollection_AddAirGapEgressPolicy_RegistersService() + { + var services = new ServiceCollection(); + services.AddAirGapEgressPolicy(options => + { + options.Mode = EgressPolicyMode.Sealed; + options.AddAllowRule("mirror.internal", transport: EgressTransport.Https); + }); + + var descriptor = services.Single(service => service.ServiceType == typeof(IEgressPolicy)); + Assert.Equal(ServiceLifetime.Singleton, descriptor.Lifetime); + Assert.Equal(typeof(EgressPolicy), descriptor.ImplementationType); + + var configured = BuildConfiguredOptions(services); + var policy = new EgressPolicy(configured); + policy.EnsureAllowed(new EgressRequest("PolicyEngine", new Uri("https://mirror.internal"), "mirror-sync")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ServiceCollection_AddAirGapEgressPolicy_BindsFromConfiguration() + { + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + ["AirGap:Egress:Mode"] = "Sealed", + ["AirGap:Egress:AllowLoopback"] = "false", + ["AirGap:Egress:AllowPrivateNetworks"] = "true", + ["AirGap:Egress:RemediationDocumentationUrl"] = "https://docs.example/airgap", + ["AirGap:Egress:SupportContact"] = "airgap@example.org", + ["AirGap:Egress:Allowlist:0:HostPattern"] = "mirror.internal", + ["AirGap:Egress:Allowlist:0:Port"] = "443", + ["AirGap:Egress:Allowlist:0:Transport"] = "https", + ["AirGap:Egress:Allowlist:0:Description"] = "Primary mirror", + }) + .Build(); + + var services = new ServiceCollection(); + services.AddAirGapEgressPolicy(configuration); + + var configured = BuildConfiguredOptions(services); + Assert.Equal(EgressPolicyMode.Sealed, configured.Mode); + Assert.Equal("https://docs.example/airgap", configured.RemediationDocumentationUrl); + Assert.Equal("airgap@example.org", configured.SupportContact); + + var policy = new EgressPolicy(configured); + var decision = policy.Evaluate(new EgressRequest("ExportCenter", new Uri("https://mirror.internal/feeds"), "mirror-sync")); + Assert.True(decision.IsAllowed); + + var blocked = policy.Evaluate(new EgressRequest("ExportCenter", new Uri("https://external.example"), "mirror-sync")); + Assert.False(blocked.IsAllowed); + Assert.Contains("mirror.internal", blocked.Remediation, StringComparison.OrdinalIgnoreCase); + } + + private static EgressPolicyOptions BuildConfiguredOptions(IServiceCollection services) + { + var options = new EgressPolicyOptions(); + var configurators = services + .Where(descriptor => descriptor.ServiceType == typeof(IConfigureOptions<EgressPolicyOptions>)) + .Select(descriptor => descriptor.ImplementationInstance) + .OfType<IConfigureOptions<EgressPolicyOptions>>() + .ToArray(); + + Assert.NotEmpty(configurators); + + foreach (var configurator in configurators) + { + configurator.Configure(options); + } + + return options; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.cs index 285d9ff37..e45a884b4 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/EgressPolicyTests.cs @@ -1,21 +1,14 @@ -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using
Microsoft.Extensions.DependencyInjection; using StellaOps.AirGap.Policy; +using StellaOps.TestKit; +using System; using Xunit; - -using StellaOps.TestKit; namespace StellaOps.AirGap.Policy.Tests; -public sealed class EgressPolicyTests +public sealed partial class EgressPolicyTests { [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void Evaluate_UnsealedEnvironment_AllowsRequest() { var options = new EgressPolicyOptions @@ -33,7 +26,7 @@ public sealed class EgressPolicyTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void EnsureAllowed_SealedEnvironmentWithMatchingRule_Allows() { var options = new EgressPolicyOptions @@ -49,7 +42,7 @@ public sealed class EgressPolicyTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void EnsureAllowed_SealedEnvironmentWithoutRule_ThrowsWithGuidance() { var options = new EgressPolicyOptions @@ -71,171 +64,4 @@ public sealed class EgressPolicyTests Assert.Equal(options.RemediationDocumentationUrl, exception.DocumentationUrl); Assert.Equal(options.SupportContact, exception.SupportContact); } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void EnsureAllowed_SealedEnvironment_AllowsLoopbackWhenConfigured() - { - var options = new EgressPolicyOptions - { - Mode = EgressPolicyMode.Sealed, - AllowLoopback = true, - }; - - var policy = new EgressPolicy(options); - var request = new EgressRequest("PolicyEngine", new Uri("http://127.0.0.1:9000/health"), "local-probe"); - - policy.EnsureAllowed(request); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void EnsureAllowed_SealedEnvironment_AllowsPrivateNetworkWhenConfigured() - { - var options = new EgressPolicyOptions - { - Mode = EgressPolicyMode.Sealed, - AllowPrivateNetworks = true, - }; - - var policy = new EgressPolicy(options); - var request = new EgressRequest("PolicyEngine", new Uri("https://10.10.0.5:8443/status"), "mirror-sync"); - - policy.EnsureAllowed(request); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void EnsureAllowed_SealedEnvironment_BlocksPrivateNetworkWhenNotConfigured() - { - var options = new EgressPolicyOptions - { - Mode = EgressPolicyMode.Sealed, - AllowPrivateNetworks = false, - }; - - var policy = new EgressPolicy(options); - var request = new EgressRequest("PolicyEngine", new Uri("https://10.10.0.5:8443/status"), "mirror-sync"); - - var exception = Assert.Throws(() => policy.EnsureAllowed(request)); - Assert.Contains("10.10.0.5", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData("https://api.example.com", true)] - [InlineData("https://sub.api.example.com", true)] - [InlineData("https://example.com", false)] - public void Evaluate_SealedEnvironmentWildcardHost_Matches(string url, bool expectedAllowed) - { - var options = new EgressPolicyOptions - { - Mode = EgressPolicyMode.Sealed, - }; - options.AddAllowRule("*.example.com", transport: EgressTransport.Https); - - var policy = new EgressPolicy(options); - var request = new EgressRequest("PolicyEngine", new Uri(url), "mirror-sync"); - - var decision = policy.Evaluate(request); - Assert.Equal(expectedAllowed, decision.IsAllowed); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ServiceCollection_AddAirGapEgressPolicy_RegistersService() - { - var services = new ServiceCollection(); - services.AddAirGapEgressPolicy(options => - { - options.Mode = EgressPolicyMode.Sealed; - options.AddAllowRule("mirror.internal", transport: 
EgressTransport.Https); - }); - - using var provider = services.BuildServiceProvider(); - var policy = provider.GetRequiredService(); - - Assert.True(policy.IsSealed); - policy.EnsureAllowed(new EgressRequest("PolicyEngine", new Uri("https://mirror.internal"), "mirror-sync")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ServiceCollection_AddAirGapEgressPolicy_BindsFromConfiguration() - { - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["AirGap:Egress:Mode"] = "Sealed", - ["AirGap:Egress:AllowLoopback"] = "false", - ["AirGap:Egress:AllowPrivateNetworks"] = "true", - ["AirGap:Egress:RemediationDocumentationUrl"] = "https://docs.example/airgap", - ["AirGap:Egress:SupportContact"] = "airgap@example.org", - ["AirGap:Egress:Allowlist:0:HostPattern"] = "mirror.internal", - ["AirGap:Egress:Allowlist:0:Port"] = "443", - ["AirGap:Egress:Allowlist:0:Transport"] = "https", - ["AirGap:Egress:Allowlist:0:Description"] = "Primary mirror", - }) - .Build(); - - var services = new ServiceCollection(); - services.AddAirGapEgressPolicy(configuration); - - using var provider = services.BuildServiceProvider(); - var policy = provider.GetRequiredService(); - - Assert.True(policy.IsSealed); - var decision = policy.Evaluate(new EgressRequest("ExportCenter", new Uri("https://mirror.internal/feeds"), "mirror-sync")); - Assert.True(decision.IsAllowed); - - var blocked = policy.Evaluate(new EgressRequest("ExportCenter", new Uri("https://external.example"), "mirror-sync")); - Assert.False(blocked.IsAllowed); - Assert.Contains("mirror.internal", blocked.Remediation, StringComparison.OrdinalIgnoreCase); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void EgressHttpClientFactory_Create_EnforcesPolicyBeforeReturningClient() - { - var recordingPolicy = new RecordingPolicy(); - var request = new EgressRequest("Component", new Uri("https://allowed.internal"), "mirror-sync"); - - using var client = EgressHttpClientFactory.Create(recordingPolicy, request); - - Assert.True(recordingPolicy.EnsureAllowedCalled); - Assert.NotNull(client); - } - - private sealed class RecordingPolicy : IEgressPolicy - { - public bool EnsureAllowedCalled { get; private set; } - - public bool IsSealed => true; - - public EgressPolicyMode Mode => EgressPolicyMode.Sealed; - - public EgressDecision Evaluate(EgressRequest request) - { - EnsureAllowedCalled = true; - return EgressDecision.Allowed; - } - - public ValueTask EvaluateAsync(EgressRequest request, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - return new ValueTask(Evaluate(request)); - } - - public void EnsureAllowed(EgressRequest request) - { - EnsureAllowedCalled = true; - } - - public ValueTask EnsureAllowedAsync(EgressRequest request, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - EnsureAllowed(request); - return ValueTask.CompletedTask; - } - } } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/TASKS.md b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/TASKS.md index be54129f6..a5eff0def 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/TASKS.md @@ -1,10 +1,12 @@ # AirGap Policy Tests Task Board This board mirrors active sprint tasks for this module. 
-Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. | Task ID | Status | Notes | | --- | --- | --- | | AUDIT-0033-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. | | AUDIT-0033-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. | | AUDIT-0033-A | DONE | Waived (test project; revalidated 2026-01-06). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.Tests/StellaOps.AirGap.Policy.Tests.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.Message.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.Message.cs new file mode 100644 index 000000000..6b1a4c184 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.Message.cs @@ -0,0 +1,48 @@ +using System.Text; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class AirGapEgressBlockedException +{ + private static string BuildMessage( + EgressRequest request, + string reason, + string remediation, + string? documentationUrl, + string? supportContact) + { + var builder = new StringBuilder(); + builder.Append(ErrorCode) + .Append(": component '") + .Append(request.Component) + .Append("' attempted to reach '") + .Append(request.Destination) + .Append("' (intent: ") + .Append(request.Intent); + + if (!string.IsNullOrEmpty(request.Operation)) + { + builder.Append(", operation: ") + .Append(request.Operation); + } + + builder.Append("). Reason: ") + .Append(reason) + .Append(". Remediation: ") + .Append(remediation); + + if (!string.IsNullOrWhiteSpace(documentationUrl)) + { + builder.Append(" Documentation: ") + .Append(documentationUrl); + } + + if (!string.IsNullOrWhiteSpace(supportContact)) + { + builder.Append(" Contact: ") + .Append(supportContact); + } + + return builder.ToString(); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.cs index 7cf6047eb..498e1f47d 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/AirGapEgressBlockedException.cs @@ -1,12 +1,11 @@ using System; -using System.Text; namespace StellaOps.AirGap.Policy; /// /// Exception raised when an egress operation is blocked while sealed mode is active. /// -public sealed class AirGapEgressBlockedException : InvalidOperationException +public sealed partial class AirGapEgressBlockedException : InvalidOperationException { /// /// Error code surfaced to callers when egress is blocked. @@ -60,41 +59,4 @@ public sealed class AirGapEgressBlockedException : InvalidOperationException /// Gets an optional support contact (for example, an on-call alias). /// public string? SupportContact { get; } - - private static string BuildMessage(EgressRequest request, string reason, string remediation, string? documentationUrl, string? 
supportContact) - { - var builder = new StringBuilder(); - builder.Append(ErrorCode) - .Append(": component '") - .Append(request.Component) - .Append("' attempted to reach '") - .Append(request.Destination) - .Append("' (intent: ") - .Append(request.Intent); - - if (!string.IsNullOrEmpty(request.Operation)) - { - builder.Append(", operation: ") - .Append(request.Operation); - } - - builder.Append("). Reason: ") - .Append(reason) - .Append(". Remediation: ") - .Append(remediation); - - if (!string.IsNullOrWhiteSpace(documentationUrl)) - { - builder.Append(" Documentation: ") - .Append(documentationUrl); - } - - if (!string.IsNullOrWhiteSpace(supportContact)) - { - builder.Append(" Contact: ") - .Append(supportContact); - } - - return builder.ToString(); - } } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Evaluation.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Evaluation.cs new file mode 100644 index 000000000..c0b3b4ea0 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Evaluation.cs @@ -0,0 +1,91 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressPolicy +{ + /// + public EgressDecision Evaluate(EgressRequest request) + { + if (!HasValidDestination(request)) + { + return EgressDecision.Blocked( + "Egress request is missing a valid destination URI.", + BuildInvalidRequestRemediation(request)); + } + + var options = Volatile.Read(ref _options); + var rules = Volatile.Read(ref _rules); + + if (!IsSealed) + { + return EgressDecision.Allowed; + } + + if (options.AllowLoopback && IsLoopback(request.Destination)) + { + return EgressDecision.Allowed; + } + + if (options.AllowPrivateNetworks && IsPrivateNetwork(request.Destination)) + { + return EgressDecision.Allowed; + } + + foreach (var rule in rules) + { + if (rule.Allows(request)) + { + return EgressDecision.Allowed; + } + } + + var destinationLabel = request.Destination?.Host ?? "unknown-host"; + var reason = $"Destination '{destinationLabel}' is not present in the sealed-mode allow list."; + var remediation = BuildRemediation(request, rules); + return EgressDecision.Blocked(reason, remediation); + } + + /// + public ValueTask EvaluateAsync( + EgressRequest request, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + return ValueTask.FromResult(Evaluate(request)); + } + + /// + public void EnsureAllowed(EgressRequest request) + { + var decision = Evaluate(request); + if (decision.IsAllowed) + { + return; + } + + throw CreateException(request, decision); + } + + /// + public async ValueTask EnsureAllowedAsync( + EgressRequest request, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + var decision = await EvaluateAsync(request, cancellationToken).ConfigureAwait(false); + if (!decision.IsAllowed) + { + throw CreateException(request, decision); + } + } + + private AirGapEgressBlockedException CreateException(EgressRequest request, EgressDecision decision) + => new( + request, + decision.Reason ?? "Egress blocked.", + decision.Remediation ?? 
BuildRemediation(request, Volatile.Read(ref _rules)), + Volatile.Read(ref _options).RemediationDocumentationUrl, + Volatile.Read(ref _options).SupportContact); +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Network.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Network.cs new file mode 100644 index 000000000..6235cdee9 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Network.cs @@ -0,0 +1,54 @@ +using System; +using System.Net; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressPolicy +{ + private static bool HasValidDestination(EgressRequest request) + => request.Destination is { IsAbsoluteUri: true }; + + private static bool IsLoopback(Uri destination) + { + if (string.Equals(destination.Host, "localhost", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (IPAddress.TryParse(destination.Host, out var address)) + { + return IPAddress.IsLoopback(address); + } + + return false; + } + + private static bool IsPrivateNetwork(Uri destination) + { + if (!IPAddress.TryParse(destination.Host, out var address)) + { + return false; + } + + if (address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) + { + var bytes = address.GetAddressBytes(); + return bytes[0] switch + { + 10 => true, + 172 => bytes[1] >= 16 && bytes[1] <= 31, + 192 => bytes[1] == 168, + _ => false, + }; + } + + if (address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetworkV6) + { + var bytes = address.GetAddressBytes(); + var isUniqueLocal = bytes.Length > 0 && (bytes[0] & 0xFE) == 0xFC; // fc00::/7 + return address.IsIPv6LinkLocal || address.IsIPv6SiteLocal || isUniqueLocal; + } + + return false; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Options.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Options.cs new file mode 100644 index 000000000..606f01249 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Options.cs @@ -0,0 +1,16 @@ +using System; +using System.Threading; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressPolicy +{ + private void ApplyOptions(EgressPolicyOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + var rules = options.BuildRuleSet(); + Volatile.Write(ref _rules, rules); + Volatile.Write(ref _options, options); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Remediation.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Remediation.cs new file mode 100644 index 000000000..1ad3cda7d --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.Remediation.cs @@ -0,0 +1,70 @@ +using System; +using System.Globalization; +using System.Text; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressPolicy +{ + private string BuildRemediation(EgressRequest request, EgressRule[] rules) + { + var host = request.Destination?.Host; + if (string.IsNullOrWhiteSpace(host)) + { + host = "unknown-host"; + } + + var portSegment = request.Destination is { IsDefaultPort: false } + ? 
$":{request.Destination.Port.ToString(CultureInfo.InvariantCulture)}" + : string.Empty; + var transport = request.Transport.ToString().ToUpperInvariant(); + + var builder = new StringBuilder(); + builder.Append("Add '") + .Append(host) + .Append(portSegment) + .Append("' (") + .Append(transport) + .Append(") to the airgap.egressAllowlist configuration."); + + if (rules.Length == 0) + { + builder.Append(" No allow entries are currently configured; sealed mode blocks every external host."); + } + else + { + builder.Append(" Current allow list sample: "); + var limit = Math.Min(rules.Length, 3); + for (var i = 0; i < limit; i++) + { + if (i > 0) + { + builder.Append(", "); + } + + builder.Append(rules[i].HostPattern); + if (rules[i].Port is int port) + { + builder.Append(':') + .Append(port.ToString(CultureInfo.InvariantCulture)); + } + } + + if (rules.Length > limit) + { + builder.Append(", ..."); + } + + builder.Append(". Coordinate break-glass with platform operations before expanding access."); + } + + return builder.ToString(); + } + + private static string BuildInvalidRequestRemediation(EgressRequest request) + { + var component = string.IsNullOrWhiteSpace(request.Component) ? "unknown-component" : request.Component; + var intent = string.IsNullOrWhiteSpace(request.Intent) ? "unknown-intent" : request.Intent; + return $"Provide an absolute destination URI for component '{component}' (intent: {intent}) before evaluating sealed-mode egress."; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.cs index e7ae1d7e4..19deec915 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicy.cs @@ -1,17 +1,13 @@ - using Microsoft.Extensions.Options; using System; -using System.Globalization; -using System.Net; using System.Threading; -using System.Threading.Tasks; namespace StellaOps.AirGap.Policy; /// /// Default implementation of . /// -public sealed class EgressPolicy : IEgressPolicy +public sealed partial class EgressPolicy : IEgressPolicy { private readonly IDisposable? _optionsSubscription; private EgressRule[] _rules = Array.Empty(); @@ -27,7 +23,8 @@ public sealed class EgressPolicy : IEgressPolicy } /// - /// Initializes a new instance of the class with reload support. + /// Initializes a new instance of the class with + /// reload support. /// /// Options monitor that supplies updated policy settings. public EgressPolicy(IOptionsMonitor optionsMonitor) @@ -43,202 +40,4 @@ public sealed class EgressPolicy : IEgressPolicy /// public EgressPolicyMode Mode => Volatile.Read(ref _options).Mode; - - /// - public EgressDecision Evaluate(EgressRequest request) - { - if (!HasValidDestination(request)) - { - return EgressDecision.Blocked( - "Egress request is missing a valid destination URI.", - BuildInvalidRequestRemediation(request)); - } - - var options = Volatile.Read(ref _options); - var rules = Volatile.Read(ref _rules); - - if (!IsSealed) - { - return EgressDecision.Allowed; - } - - if (options.AllowLoopback && IsLoopback(request.Destination)) - { - return EgressDecision.Allowed; - } - - if (options.AllowPrivateNetworks && IsPrivateNetwork(request.Destination)) - { - return EgressDecision.Allowed; - } - - foreach (var rule in rules) - { - if (rule.Allows(request)) - { - return EgressDecision.Allowed; - } - } - - var destinationLabel = request.Destination?.Host ?? 
"unknown-host"; - var reason = $"Destination '{destinationLabel}' is not present in the sealed-mode allow list."; - var remediation = BuildRemediation(request, rules); - return EgressDecision.Blocked(reason, remediation); - } - - /// - public ValueTask EvaluateAsync(EgressRequest request, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - return ValueTask.FromResult(Evaluate(request)); - } - - /// - public void EnsureAllowed(EgressRequest request) - { - var decision = Evaluate(request); - if (decision.IsAllowed) - { - return; - } - - throw CreateException(request, decision); - } - - /// - public async ValueTask EnsureAllowedAsync(EgressRequest request, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - var decision = await EvaluateAsync(request, cancellationToken).ConfigureAwait(false); - if (!decision.IsAllowed) - { - throw CreateException(request, decision); - } - } - - private AirGapEgressBlockedException CreateException(EgressRequest request, EgressDecision decision) - => new( - request, - decision.Reason ?? "Egress blocked.", - decision.Remediation ?? BuildRemediation(request, Volatile.Read(ref _rules)), - Volatile.Read(ref _options).RemediationDocumentationUrl, - Volatile.Read(ref _options).SupportContact); - - private string BuildRemediation(EgressRequest request, EgressRule[] rules) - { - var host = request.Destination?.Host; - if (string.IsNullOrWhiteSpace(host)) - { - host = "unknown-host"; - } - - var portSegment = request.Destination is { IsDefaultPort: false } - ? $":{request.Destination.Port.ToString(CultureInfo.InvariantCulture)}" - : string.Empty; - var transport = request.Transport.ToString().ToUpperInvariant(); - - var builder = new System.Text.StringBuilder(); - builder.Append("Add '") - .Append(host) - .Append(portSegment) - .Append("' (") - .Append(transport) - .Append(") to the airgap.egressAllowlist configuration."); - - if (rules.Length == 0) - { - builder.Append(" No allow entries are currently configured; sealed mode blocks every external host."); - } - else - { - builder.Append(" Current allow list sample: "); - var limit = Math.Min(rules.Length, 3); - for (var i = 0; i < limit; i++) - { - if (i > 0) - { - builder.Append(", "); - } - - builder.Append(rules[i].HostPattern); - if (rules[i].Port is int port) - { - builder.Append(':') - .Append(port.ToString(CultureInfo.InvariantCulture)); - } - } - - if (rules.Length > limit) - { - builder.Append(", ..."); - } - - builder.Append(". Coordinate break-glass with platform operations before expanding access."); - } - - return builder.ToString(); - } - - private static string BuildInvalidRequestRemediation(EgressRequest request) - { - var component = string.IsNullOrWhiteSpace(request.Component) ? "unknown-component" : request.Component; - var intent = string.IsNullOrWhiteSpace(request.Intent) ? 
"unknown-intent" : request.Intent; - return $"Provide an absolute destination URI for component '{component}' (intent: {intent}) before evaluating sealed-mode egress."; - } - - private static bool HasValidDestination(EgressRequest request) - => request.Destination is { IsAbsoluteUri: true }; - - private static bool IsLoopback(Uri destination) - { - if (string.Equals(destination.Host, "localhost", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - if (IPAddress.TryParse(destination.Host, out var address)) - { - return IPAddress.IsLoopback(address); - } - - return false; - } - - private static bool IsPrivateNetwork(Uri destination) - { - if (!IPAddress.TryParse(destination.Host, out var address)) - { - return false; - } - - if (address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) - { - var bytes = address.GetAddressBytes(); - return bytes[0] switch - { - 10 => true, - 172 => bytes[1] >= 16 && bytes[1] <= 31, - 192 => bytes[1] == 168, - _ => false, - }; - } - - if (address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetworkV6) - { - var bytes = address.GetAddressBytes(); - var isUniqueLocal = bytes.Length > 0 && (bytes[0] & 0xFE) == 0xFC; // fc00::/7 - return address.IsIPv6LinkLocal || address.IsIPv6SiteLocal || isUniqueLocal; - } - - return false; - } - - private void ApplyOptions(EgressPolicyOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - var rules = options.BuildRuleSet(); - Volatile.Write(ref _rules, rules); - Volatile.Write(ref _options, options); - } } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Allowlist.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Allowlist.cs new file mode 100644 index 000000000..5fbae0bac --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Allowlist.cs @@ -0,0 +1,59 @@ +using Microsoft.Extensions.Configuration; +using System; +using System.Collections.Generic; + +namespace StellaOps.AirGap.Policy; + +public static partial class EgressPolicyServiceCollectionExtensions +{ + private static IEnumerable EnumerateAllowRuleSections( + IConfiguration effective, + IConfiguration primary, + IConfiguration root) + { + foreach (var rule in EnumerateAllowRuleSections(effective)) + { + yield return rule; + } + + if (!ReferenceEquals(primary, effective)) + { + foreach (var rule in EnumerateAllowRuleSections(primary)) + { + yield return rule; + } + } + + if (!ReferenceEquals(root, effective) && !ReferenceEquals(root, primary)) + { + foreach (var rule in EnumerateAllowRuleSections(root)) + { + yield return rule; + } + } + } + + private static IEnumerable EnumerateAllowRuleSections(IConfiguration configuration) + { + foreach (var candidate in EnumerateAllowlistContainers(configuration)) + { + if (!candidate.Exists()) + { + continue; + } + + foreach (var child in candidate.GetChildren()) + { + yield return child; + } + } + } + + private static IEnumerable EnumerateAllowlistContainers(IConfiguration configuration) + { + yield return configuration.GetSection("Allowlist"); + yield return configuration.GetSection("AllowList"); + yield return configuration.GetSection("EgressAllowlist"); + yield return configuration.GetSection("Allow"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Configuration.cs 
b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Configuration.cs new file mode 100644 index 000000000..05d8d1268 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.Configuration.cs @@ -0,0 +1,97 @@ +using Microsoft.Extensions.Configuration; +using System; +using System.Collections.Generic; + +namespace StellaOps.AirGap.Policy; + +public static partial class EgressPolicyServiceCollectionExtensions +{ + private static IConfiguration ResolveConfigurationSection(IConfiguration configuration, string? sectionName) + { + if (!string.IsNullOrWhiteSpace(sectionName)) + { + var namedSection = configuration.GetSection(sectionName); + if (namedSection.Exists()) + { + return namedSection; + } + } + + return configuration; + } + + private static void ApplyConfiguration( + EgressPolicyOptions options, + IConfiguration primarySection, + IConfiguration root) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(primarySection); + ArgumentNullException.ThrowIfNull(root); + + var effectiveSection = ResolveEffectiveSection(primarySection); + var searchOrder = BuildSearchOrder(effectiveSection, primarySection, root); + + var modeValue = GetStringValue(searchOrder, "Mode"); + if (!string.IsNullOrWhiteSpace(modeValue) && + Enum.TryParse(modeValue, ignoreCase: true, out EgressPolicyMode parsedMode)) + { + options.Mode = parsedMode; + } + + var allowLoopback = GetNullableBool(searchOrder, "AllowLoopback"); + if (allowLoopback.HasValue) + { + options.AllowLoopback = allowLoopback.Value; + } + + var allowPrivateNetworks = GetNullableBool(searchOrder, "AllowPrivateNetworks"); + if (allowPrivateNetworks.HasValue) + { + options.AllowPrivateNetworks = allowPrivateNetworks.Value; + } + + var remediationUrl = GetStringValue(searchOrder, "RemediationDocumentationUrl"); + if (!string.IsNullOrWhiteSpace(remediationUrl)) + { + options.RemediationDocumentationUrl = remediationUrl.Trim(); + } + + var supportContact = GetStringValue(searchOrder, "SupportContact"); + if (!string.IsNullOrWhiteSpace(supportContact)) + { + options.SupportContact = supportContact.Trim(); + } + + var rules = new List(); + var seenRules = new HashSet(); + foreach (var ruleSection in EnumerateAllowRuleSections(effectiveSection, primarySection, root)) + { + var hostPattern = ruleSection["HostPattern"] + ?? ruleSection["Host"] + ?? ruleSection["Pattern"] + ?? ruleSection.Value; + + if (string.IsNullOrWhiteSpace(hostPattern)) + { + continue; + } + + hostPattern = hostPattern.Trim(); + var port = TryReadPort(ruleSection); + var transport = ParseTransport(ruleSection["Transport"] ?? ruleSection["Protocol"]); + + var description = ruleSection["Description"] ?? ruleSection["Notes"]; + description = string.IsNullOrWhiteSpace(description) ? 
null : description.Trim(); + + var ruleKey = RuleKey.Create(hostPattern, port, transport); + if (seenRules.Add(ruleKey)) + { + rules.Add(new EgressRule(hostPattern, port, transport, description)); + } + } + + options.SetAllowRules(rules); + } + +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationHelpers.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationHelpers.cs new file mode 100644 index 000000000..266ef3205 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationHelpers.cs @@ -0,0 +1,32 @@ +using Microsoft.Extensions.Configuration; +using System; +using System.Collections.Generic; + +namespace StellaOps.AirGap.Policy; + +public static partial class EgressPolicyServiceCollectionExtensions +{ + private static IConfiguration ResolveEffectiveSection(IConfiguration configuration) + { + var egressSection = configuration.GetSection("Egress"); + return egressSection.Exists() ? egressSection : configuration; + } + + private static IEnumerable BuildSearchOrder( + IConfiguration effective, + IConfiguration primary, + IConfiguration root) + { + yield return effective; + + if (!ReferenceEquals(primary, effective)) + { + yield return primary; + } + + if (!ReferenceEquals(root, effective) && !ReferenceEquals(root, primary)) + { + yield return root; + } + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationValues.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationValues.cs new file mode 100644 index 000000000..2fb3b6ade --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.ConfigurationValues.cs @@ -0,0 +1,73 @@ +using Microsoft.Extensions.Configuration; +using System; +using System.Collections.Generic; +using System.Globalization; + +namespace StellaOps.AirGap.Policy; + +public static partial class EgressPolicyServiceCollectionExtensions +{ + private static string? GetStringValue(IEnumerable sections, string key) + { + foreach (var section in sections) + { + var value = section[key]; + if (!string.IsNullOrWhiteSpace(value)) + { + return value; + } + } + + return null; + } + + private static bool? GetNullableBool(IEnumerable sections, string key) + { + foreach (var section in sections) + { + var value = section[key]; + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + if (bool.TryParse(value, out var parsed)) + { + return parsed; + } + } + + return null; + } + + private static int? TryReadPort(IConfiguration section) + { + var raw = section["Port"]; + if (string.IsNullOrWhiteSpace(raw)) + { + return null; + } + + return int.TryParse(raw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) + ? parsed + : null; + } + + private static EgressTransport ParseTransport(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return EgressTransport.Any; + } + + return Enum.TryParse(value, ignoreCase: true, out EgressTransport parsed) + ? parsed + : EgressTransport.Any; + } + + private readonly record struct RuleKey(string HostPattern, int? Port, EgressTransport Transport) + { + public static RuleKey Create(string hostPattern, int? 
port, EgressTransport transport) + => new(hostPattern.Trim().ToLowerInvariant(), port, transport); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.cs index b9fac4913..efd9b9fbc 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressPolicyServiceCollectionExtensions.cs @@ -1,18 +1,14 @@ - using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; using System; -using System.Collections.Generic; -using System.Globalization; namespace StellaOps.AirGap.Policy; /// /// Dependency injection helpers for configuring the air-gap egress policy. /// -public static class EgressPolicyServiceCollectionExtensions +public static partial class EgressPolicyServiceCollectionExtensions { /// /// Registers using the provided configuration delegate. @@ -20,7 +16,9 @@ public static class EgressPolicyServiceCollectionExtensions /// Service collection that will be updated. /// Optional configuration delegate. /// The original . - public static IServiceCollection AddAirGapEgressPolicy(this IServiceCollection services, Action? configure = null) + public static IServiceCollection AddAirGapEgressPolicy( + this IServiceCollection services, + Action? configure = null) { ArgumentNullException.ThrowIfNull(services); @@ -33,11 +31,7 @@ public static class EgressPolicyServiceCollectionExtensions services.AddOptions().Configure(configure); } - services.TryAddSingleton(sp => - { - var optionsMonitor = sp.GetRequiredService>(); - return new EgressPolicy(optionsMonitor); - }); + services.TryAddSingleton(); return services; } @@ -67,228 +61,4 @@ public static class EgressPolicyServiceCollectionExtensions ApplyConfiguration(options, targetSection, configuration); }); } - - private static IConfiguration ResolveConfigurationSection(IConfiguration configuration, string? 
sectionName) - { - if (!string.IsNullOrWhiteSpace(sectionName)) - { - var namedSection = configuration.GetSection(sectionName); - if (namedSection.Exists()) - { - return namedSection; - } - } - - return configuration; - } - - private static void ApplyConfiguration(EgressPolicyOptions options, IConfiguration primarySection, IConfiguration root) - { - ArgumentNullException.ThrowIfNull(options); - ArgumentNullException.ThrowIfNull(primarySection); - ArgumentNullException.ThrowIfNull(root); - - var effectiveSection = ResolveEffectiveSection(primarySection); - var searchOrder = BuildSearchOrder(effectiveSection, primarySection, root); - - var modeValue = GetStringValue(searchOrder, "Mode"); - if (!string.IsNullOrWhiteSpace(modeValue) && - Enum.TryParse(modeValue, ignoreCase: true, out EgressPolicyMode parsedMode)) - { - options.Mode = parsedMode; - } - - var allowLoopback = GetNullableBool(searchOrder, "AllowLoopback"); - if (allowLoopback.HasValue) - { - options.AllowLoopback = allowLoopback.Value; - } - - var allowPrivateNetworks = GetNullableBool(searchOrder, "AllowPrivateNetworks"); - if (allowPrivateNetworks.HasValue) - { - options.AllowPrivateNetworks = allowPrivateNetworks.Value; - } - - var remediationUrl = GetStringValue(searchOrder, "RemediationDocumentationUrl"); - if (!string.IsNullOrWhiteSpace(remediationUrl)) - { - options.RemediationDocumentationUrl = remediationUrl.Trim(); - } - - var supportContact = GetStringValue(searchOrder, "SupportContact"); - if (!string.IsNullOrWhiteSpace(supportContact)) - { - options.SupportContact = supportContact.Trim(); - } - - var rules = new List(); - var seenRules = new HashSet(); - foreach (var ruleSection in EnumerateAllowRuleSections(effectiveSection, primarySection, root)) - { - var hostPattern = ruleSection["HostPattern"] - ?? ruleSection["Host"] - ?? ruleSection["Pattern"] - ?? ruleSection.Value; - - if (string.IsNullOrWhiteSpace(hostPattern)) - { - continue; - } - - hostPattern = hostPattern.Trim(); - var port = TryReadPort(ruleSection); - var transport = ParseTransport(ruleSection["Transport"] ?? ruleSection["Protocol"]); - - var description = ruleSection["Description"] ?? ruleSection["Notes"]; - description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(); - - var ruleKey = RuleKey.Create(hostPattern, port, transport); - if (seenRules.Add(ruleKey)) - { - rules.Add(new EgressRule(hostPattern, port, transport, description)); - } - } - - options.SetAllowRules(rules); - } - - private static IConfiguration ResolveEffectiveSection(IConfiguration configuration) - { - var egressSection = configuration.GetSection("Egress"); - return egressSection.Exists() ? egressSection : configuration; - } - - private static IEnumerable BuildSearchOrder( - IConfiguration effective, - IConfiguration primary, - IConfiguration root) - { - yield return effective; - - if (!ReferenceEquals(primary, effective)) - { - yield return primary; - } - - if (!ReferenceEquals(root, effective) && !ReferenceEquals(root, primary)) - { - yield return root; - } - } - - private static string? GetStringValue(IEnumerable sections, string key) - { - foreach (var section in sections) - { - var value = section[key]; - if (!string.IsNullOrWhiteSpace(value)) - { - return value; - } - } - - return null; - } - - private static bool? 
GetNullableBool(IEnumerable sections, string key) - { - foreach (var section in sections) - { - var value = section[key]; - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - if (bool.TryParse(value, out var parsed)) - { - return parsed; - } - } - - return null; - } - - private static IEnumerable EnumerateAllowRuleSections( - IConfiguration effective, - IConfiguration primary, - IConfiguration root) - { - foreach (var rule in EnumerateAllowRuleSections(effective)) - { - yield return rule; - } - - if (!ReferenceEquals(primary, effective)) - { - foreach (var rule in EnumerateAllowRuleSections(primary)) - { - yield return rule; - } - } - - if (!ReferenceEquals(root, effective) && !ReferenceEquals(root, primary)) - { - foreach (var rule in EnumerateAllowRuleSections(root)) - { - yield return rule; - } - } - } - - private static IEnumerable EnumerateAllowRuleSections(IConfiguration configuration) - { - foreach (var candidate in EnumerateAllowlistContainers(configuration)) - { - if (!candidate.Exists()) - { - continue; - } - - foreach (var child in candidate.GetChildren()) - { - yield return child; - } - } - } - - private static IEnumerable EnumerateAllowlistContainers(IConfiguration configuration) - { - yield return configuration.GetSection("Allowlist"); - yield return configuration.GetSection("AllowList"); - yield return configuration.GetSection("EgressAllowlist"); - yield return configuration.GetSection("Allow"); - } - - private static int? TryReadPort(IConfiguration section) - { - var raw = section["Port"]; - if (string.IsNullOrWhiteSpace(raw)) - { - return null; - } - - return int.TryParse(raw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) - ? parsed - : null; - } - - private static EgressTransport ParseTransport(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return EgressTransport.Any; - } - - return Enum.TryParse(value, ignoreCase: true, out EgressTransport parsed) - ? parsed - : EgressTransport.Any; - } - - private readonly record struct RuleKey(string HostPattern, int? Port, EgressTransport Transport) - { - public static RuleKey Create(string hostPattern, int? port, EgressTransport transport) - => new(hostPattern.Trim().ToLowerInvariant(), port, transport); - } } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Format.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Format.cs new file mode 100644 index 000000000..1119299a0 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Format.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressRule +{ + /// + public override string ToString() + => Port is null + ? $"{_hostPattern} ({Transport})" + : $"{_hostPattern}:{Port} ({Transport})"; +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Match.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Match.cs new file mode 100644 index 000000000..1e645ecc3 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.Match.cs @@ -0,0 +1,65 @@ +using System; + +namespace StellaOps.AirGap.Policy; + +public sealed partial class EgressRule +{ + /// + /// Determines whether the rule allows the supplied request. + /// + /// The request that will be evaluated. + /// when the request is allowed; otherwise . 
+ public bool Allows(EgressRequest request) + { + if (request.Destination is null) + { + return false; + } + + if (Transport != EgressTransport.Any && Transport != request.Transport) + { + return false; + } + + if (!HostMatches(request.Destination.Host)) + { + return false; + } + + if (Port is null) + { + return true; + } + + var requestPort = request.Destination.Port; + return requestPort == Port.Value; + } + + private bool HostMatches(string host) + { + if (string.IsNullOrEmpty(host)) + { + return false; + } + + var normalized = host.ToLowerInvariant(); + + if (_wildcardAnyHost) + { + return true; + } + + if (_wildcardSuffix is not null) + { + if (!normalized.EndsWith(_wildcardSuffix, StringComparison.Ordinal)) + { + return false; + } + + var remainderLength = normalized.Length - _wildcardSuffix.Length; + return remainderLength > 0; + } + + return string.Equals(normalized, _hostPattern, StringComparison.Ordinal); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.cs b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.cs index fbff489b1..bcaf217b0 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.cs +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/EgressRule.cs @@ -5,7 +5,7 @@ namespace StellaOps.AirGap.Policy; /// /// Represents a single allow entry used when sealed mode is active. /// -public sealed class EgressRule +public sealed partial class EgressRule { private readonly string _hostPattern; private readonly string? _wildcardSuffix; @@ -59,69 +59,4 @@ public sealed class EgressRule /// Gets the transport classification required for the rule. /// public EgressTransport Transport { get; } - - /// - /// Determines whether the rule allows the supplied request. - /// - /// The request that will be evaluated. - /// when the request is allowed; otherwise . - public bool Allows(EgressRequest request) - { - if (request.Destination is null) - { - return false; - } - - if (Transport != EgressTransport.Any && Transport != request.Transport) - { - return false; - } - - if (!HostMatches(request.Destination.Host)) - { - return false; - } - - if (Port is null) - { - return true; - } - - var requestPort = request.Destination.Port; - return requestPort == Port.Value; - } - - private bool HostMatches(string host) - { - if (string.IsNullOrEmpty(host)) - { - return false; - } - - var normalized = host.ToLowerInvariant(); - - if (_wildcardAnyHost) - { - return true; - } - - if (_wildcardSuffix is not null) - { - if (!normalized.EndsWith(_wildcardSuffix, StringComparison.Ordinal)) - { - return false; - } - - var remainderLength = normalized.Length - _wildcardSuffix.Length; - return remainderLength > 0; - } - - return string.Equals(normalized, _hostPattern, StringComparison.Ordinal); - } - - /// - public override string ToString() - => Port is null - ? $"{_hostPattern} ({Transport})" - : $"{_hostPattern}:{Port} ({Transport})"; } diff --git a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/TASKS.md b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/TASKS.md index 3139266cb..a3d623d82 100644 --- a/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/TASKS.md @@ -1,10 +1,12 @@ # AirGap Policy Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. 
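Behavior sketch (editor-added, not part of the patch): the wildcard matching that EgressRule.HostMatches (now in EgressRule.Match.cs above) applies -- a "*.example.com" rule admits subdomains but never the apex host, matching the wildcard theory in the test file earlier in this patch. The component and intent strings below are placeholders.

// Illustrative sketch only -- not part of the patch.
using System;
using StellaOps.AirGap.Policy;

var options = new EgressPolicyOptions { Mode = EgressPolicyMode.Sealed };
options.AddAllowRule("*.example.com", transport: EgressTransport.Https);
var policy = new EgressPolicy(options);

// A subdomain satisfies the "*.example.com" suffix rule...
var sub = policy.Evaluate(new EgressRequest("Demo", new Uri("https://api.example.com"), "demo"));
Console.WriteLine(sub.IsAllowed);   // expected: True

// ...but the apex host does not: the wildcard requires a non-empty prefix before the suffix.
var apex = policy.Evaluate(new EgressRequest("Demo", new Uri("https://example.com"), "demo"));
Console.WriteLine(apex.IsAllowed);  // expected: False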
+Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. | Task ID | Status | Notes | | --- | --- | --- | | AUDIT-0030-M | DONE | Revalidated 2026-01-06; new findings recorded in audit report. | | AUDIT-0030-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0033. | | AUDIT-0030-A | TODO | Replace direct new HttpClient usage in EgressHttpClientFactory. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/StellaOps.AirGap.Time/Controllers/TimeStatusController.cs b/src/AirGap/StellaOps.AirGap.Time/Controllers/TimeStatusController.cs index 96a7c64a8..dbbbd15f8 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Controllers/TimeStatusController.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Controllers/TimeStatusController.cs @@ -29,19 +29,19 @@ public class TimeStatusController : ControllerBase } [HttpGet("status")] - public async Task> GetStatus([FromQuery] string tenantId) + public async Task> GetStatusAsync([FromQuery] string tenantId) { if (string.IsNullOrWhiteSpace(tenantId)) { return BadRequest("tenantId-required"); } - var status = await _statusService.GetStatusAsync(tenantId, _timeProvider.GetUtcNow(), HttpContext.RequestAborted); + var status = await _statusService.GetStatusAsync(tenantId, _timeProvider.GetUtcNow(), HttpContext.RequestAborted).ConfigureAwait(false); return Ok(TimeStatusDto.FromStatus(status)); } [HttpPost("anchor")] - public async Task> SetAnchor([FromBody] SetAnchorRequest request) + public async Task> SetAnchorAsync([FromBody] SetAnchorRequest request) { if (!ModelState.IsValid) { @@ -78,9 +78,9 @@ public class TimeStatusController : ControllerBase request.WarningSeconds ?? StalenessBudget.Default.WarningSeconds, request.BreachSeconds ?? 
StalenessBudget.Default.BreachSeconds); - await _statusService.SetAnchorAsync(request.TenantId, anchor, budget, HttpContext.RequestAborted); + await _statusService.SetAnchorAsync(request.TenantId, anchor, budget, HttpContext.RequestAborted).ConfigureAwait(false); _logger.LogInformation("Time anchor set for tenant {Tenant} format={Format} digest={Digest} warning={Warning}s breach={Breach}s", request.TenantId, anchor.Format, anchor.TokenDigest, budget.WarningSeconds, budget.BreachSeconds); - var status = await _statusService.GetStatusAsync(request.TenantId, _timeProvider.GetUtcNow(), HttpContext.RequestAborted); + var status = await _statusService.GetStatusAsync(request.TenantId, _timeProvider.GetUtcNow(), HttpContext.RequestAborted).ConfigureAwait(false); return Ok(TimeStatusDto.FromStatus(status)); } } diff --git a/src/AirGap/StellaOps.AirGap.Time/Health/TimeAnchorHealthCheck.cs b/src/AirGap/StellaOps.AirGap.Time/Health/TimeAnchorHealthCheck.cs index a29aefcf2..3b189f1ec 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Health/TimeAnchorHealthCheck.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Health/TimeAnchorHealthCheck.cs @@ -21,7 +21,7 @@ public sealed class TimeAnchorHealthCheck : IHealthCheck public async Task CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default) { var opts = _options.Value; - var status = await _statusService.GetStatusAsync(opts.TenantId, _timeProvider.GetUtcNow(), cancellationToken); + var status = await _statusService.GetStatusAsync(opts.TenantId, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); if (status.Anchor == TimeAnchor.Unknown) { diff --git a/src/AirGap/StellaOps.AirGap.Time/Hooks/SealedStartupHostedService.cs b/src/AirGap/StellaOps.AirGap.Time/Hooks/SealedStartupHostedService.cs new file mode 100644 index 000000000..0621d9ee9 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Hooks/SealedStartupHostedService.cs @@ -0,0 +1,58 @@ +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Hooks; + +public sealed class SealedStartupHostedService : IHostedService +{ + private readonly SealedStartupValidator _validator; + private readonly IOptions _options; + private readonly ILogger _logger; + + public SealedStartupHostedService( + SealedStartupValidator validator, + IOptions options, + ILogger logger) + { + _validator = validator; + _options = options; + _logger = logger; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + var opts = _options.Value; + var tenantId = opts.TenantId; + var budget = new StalenessBudget(opts.Staleness.WarningSeconds, opts.Staleness.BreachSeconds); + + _logger.LogInformation( + "AirGap Time starting for tenant {Tenant} with budgets warning={Warning}s breach={Breach}s", + tenantId, + budget.WarningSeconds, + budget.BreachSeconds); + + var result = await _validator.ValidateAsync(tenantId, budget, cancellationToken).ConfigureAwait(false); + if (!result.IsValid) + { + _logger.LogCritical( + "AirGap time validation failed: {Reason} (tenant {TenantId})", + result.Reason, + tenantId); + throw new InvalidOperationException($"sealed-startup-blocked:{result.Reason}"); + } + + _logger.LogInformation( + "AirGap time validation passed: anchor={Anchor} age={Age}s tenant={Tenant}", + result.Status?.Anchor.TokenDigest, + 
result.Status?.Staleness.AgeSeconds, + tenantId); + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Hooks/StartupValidationExtensions.cs b/src/AirGap/StellaOps.AirGap.Time/Hooks/StartupValidationExtensions.cs deleted file mode 100644 index 6d26241be..000000000 --- a/src/AirGap/StellaOps.AirGap.Time/Hooks/StartupValidationExtensions.cs +++ /dev/null @@ -1,31 +0,0 @@ - -using Microsoft.AspNetCore.Builder; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using StellaOps.AirGap.Time.Models; -using StellaOps.AirGap.Time.Services; - -namespace StellaOps.AirGap.Time.Hooks; - -public static class StartupValidationExtensions -{ - /// - /// Runs sealed-mode time anchor validation during app startup; aborts if missing or stale. - /// - public static IHost ValidateTimeAnchorOnStart(this IHost host, string tenantId, StalenessBudget budget) - { - using var scope = host.Services.CreateScope(); - var validator = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("AirGap.Time.Startup"); - - var result = validator.ValidateAsync(tenantId, budget, CancellationToken.None).GetAwaiter().GetResult(); - if (!result.IsValid) - { - logger.LogCritical("AirGap time validation failed: {Reason} (tenant {TenantId})", result.Reason, tenantId); - throw new InvalidOperationException($"sealed-startup-blocked:{result.Reason}"); - } - - logger.LogInformation("AirGap time validation passed: anchor={Anchor} age={Age}s tenant={Tenant}", result.Status?.Anchor.TokenDigest, result.Status?.Staleness.AgeSeconds, tenantId); - return host; - } -} diff --git a/src/AirGap/StellaOps.AirGap.Time/Program.cs b/src/AirGap/StellaOps.AirGap.Time/Program.cs index 0f172ec48..1c7ef873a 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Program.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Program.cs @@ -23,6 +23,7 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); +builder.Services.AddHostedService(); // AIRGAP-TIME-57-001: Time-anchor policy service builder.Services.Configure(builder.Configuration.GetSection("AirGap:Policy")); @@ -44,13 +45,4 @@ app.LogStellaOpsLocalHostname("airgap-time"); app.UseStellaOpsCors(); app.MapControllers(); app.MapHealthChecks("/healthz/ready"); - -var opts = app.Services.GetRequiredService>().Value; -var tenantId = opts.TenantId; -var budget = new StalenessBudget(opts.Staleness.WarningSeconds, opts.Staleness.BreachSeconds); - -app.Services.GetRequiredService>() - .LogInformation("AirGap Time starting for tenant {Tenant} with budgets warning={Warning}s breach={Breach}s", tenantId, budget.WarningSeconds, budget.BreachSeconds); - -app.ValidateTimeAnchorOnStart(tenantId, budget); app.Run(); diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Ed25519.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Ed25519.cs new file mode 100644 index 000000000..98ce088f7 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Ed25519.cs @@ -0,0 +1,29 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Time.Services; + +internal static class Ed25519 +{ + public static bool Verify(byte[] publicKey, byte[] message, byte[] signature) + { + try + { + using var ecdsa = ECDsa.Create(ECCurve.CreateFromValue("1.3.101.112")); + ecdsa.ImportSubjectPublicKeyInfo(CreateEd25519Spki(publicKey), out _); + return ecdsa.VerifyData(message, signature, 
HashAlgorithmName.SHA512); + } + catch + { + return false; + } + } + + private static byte[] CreateEd25519Spki(byte[] publicKey) + { + var spki = new byte[44]; + new byte[] { 0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70, 0x03, 0x21, 0x00 } + .CopyTo(spki, 0); + publicKey.CopyTo(spki, 12); + return spki; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/ITimeAnchorPolicyService.cs b/src/AirGap/StellaOps.AirGap.Time/Services/ITimeAnchorPolicyService.cs new file mode 100644 index 000000000..0ffe3cb16 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/ITimeAnchorPolicyService.cs @@ -0,0 +1,29 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Services; + +/// +/// Policy enforcement service for time anchors. +/// +public interface ITimeAnchorPolicyService +{ + Task ValidateTimeAnchorAsync(string tenantId, CancellationToken cancellationToken = default); + + Task EnforceBundleImportPolicyAsync( + string tenantId, + string bundleId, + DateTimeOffset? bundleTimestamp, + CancellationToken cancellationToken = default); + + Task EnforceOperationPolicyAsync( + string tenantId, + string operation, + CancellationToken cancellationToken = default); + + Task CalculateDriftAsync( + string tenantId, + DateTimeOffset targetTime, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Parsing.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Parsing.cs new file mode 100644 index 000000000..7658b6f8b --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Parsing.cs @@ -0,0 +1,70 @@ +using System; +using System.Formats.Asn1; +using System.Security.Cryptography; +using System.Security.Cryptography.Pkcs; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class Rfc3161Verifier +{ + private static readonly Oid _tstInfoOid = new("1.2.840.113549.1.9.16.1.4"); + private static readonly Oid _signingTimeOid = new("1.2.840.113549.1.9.5"); + + private static DateTimeOffset? ExtractSigningTime(SignedCms signedCms, SignerInfo signerInfo) + { + foreach (var attr in signerInfo.SignedAttributes) + { + if (attr.Oid.Value == _signingTimeOid.Value) + { + try + { + var reader = new AsnReader(attr.Values[0].RawData, AsnEncodingRules.DER); + return reader.ReadUtcTime(); + } + catch + { + continue; + } + } + } + + try + { + var content = signedCms.ContentInfo; + if (content.ContentType.Value == _tstInfoOid.Value) + { + var tstInfo = ParseTstInfo(content.Content); + if (tstInfo.HasValue) + { + return tstInfo.Value; + } + } + } + catch + { + return null; + } + + return null; + } + + private static DateTimeOffset? 
ParseTstInfo(ReadOnlyMemory tstInfoBytes) + { + try + { + var reader = new AsnReader(tstInfoBytes, AsnEncodingRules.DER); + var sequenceReader = reader.ReadSequence(); + + sequenceReader.ReadInteger(); + sequenceReader.ReadObjectIdentifier(); + sequenceReader.ReadSequence(); + sequenceReader.ReadInteger(); + + return sequenceReader.ReadGeneralizedTime(); + } + catch + { + return null; + } + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Revocation.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Revocation.cs new file mode 100644 index 000000000..bda5672ce --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Revocation.cs @@ -0,0 +1,76 @@ +using System; +using System.Formats.Asn1; +using System.Linq; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class Rfc3161Verifier +{ + private static bool TryVerifyOfflineRevocation( + TimeTokenVerificationOptions options, + out string reason) + { + var hasOcsp = options.OcspResponses.Count > 0; + var hasCrl = options.Crls.Count > 0; + + if (!hasOcsp && !hasCrl) + { + reason = "rfc3161-revocation-missing"; + return false; + } + + if (hasOcsp && options.OcspResponses.Any(IsOcspSuccess)) + { + reason = "rfc3161-revocation-ocsp"; + return true; + } + + if (hasCrl && options.Crls.Any(IsCrlParseable)) + { + reason = "rfc3161-revocation-crl"; + return true; + } + + reason = "rfc3161-revocation-invalid"; + return false; + } + + private static bool IsOcspSuccess(byte[] response) + { + try + { + var reader = new AsnReader(response, AsnEncodingRules.DER); + var sequence = reader.ReadSequence(); + var status = sequence.ReadEnumeratedValue(); + return status == OcspResponseStatus.Successful; + } + catch + { + return false; + } + } + + private static bool IsCrlParseable(byte[] crl) + { + try + { + var reader = new AsnReader(crl, AsnEncodingRules.DER); + reader.ReadSequence(); + return true; + } + catch + { + return false; + } + } + + private enum OcspResponseStatus + { + Successful = 0, + MalformedRequest = 1, + InternalError = 2, + TryLater = 3, + SigRequired = 5, + Unauthorized = 6 + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Trust.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Trust.cs new file mode 100644 index 000000000..8d3e892d4 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Trust.cs @@ -0,0 +1,78 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Cryptography.X509Certificates; +using System.Security.Cryptography.Pkcs; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class Rfc3161Verifier +{ + private static TimeTrustRoot? 
ValidateAgainstTrustRoots( + X509Certificate2 signerCert, + IReadOnlyList trustRoots, + IReadOnlyList extraCertificates, + DateTimeOffset verificationTime) + { + foreach (var root in trustRoots) + { + try + { + var rootCert = X509CertificateLoader.LoadCertificate(root.PublicKey); + if (signerCert.Thumbprint.Equals(rootCert.Thumbprint, StringComparison.OrdinalIgnoreCase)) + { + return root; + } + + using var chain = new X509Chain(); + chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + chain.ChainPolicy.CustomTrustStore.Add(rootCert); + chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; + chain.ChainPolicy.VerificationTime = verificationTime.UtcDateTime; + + foreach (var cert in extraCertificates) + { + if (!string.Equals(cert.Thumbprint, rootCert.Thumbprint, StringComparison.OrdinalIgnoreCase)) + { + chain.ChainPolicy.ExtraStore.Add(cert); + } + } + + if (chain.Build(signerCert)) + { + return root; + } + } + catch + { + continue; + } + } + + return null; + } + + private static IReadOnlyList BuildExtraCertificates( + SignedCms signedCms, + TimeTokenVerificationOptions? options) + { + var extra = new List(); + if (options?.CertificateChain is { Count: > 0 }) + { + extra.AddRange(options.CertificateChain); + } + + foreach (var cert in signedCms.Certificates.Cast()) + { + if (!extra.Any(existing => existing.Thumbprint.Equals(cert.Thumbprint, StringComparison.OrdinalIgnoreCase))) + { + extra.Add(cert); + } + } + + return extra; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Verify.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Verify.cs new file mode 100644 index 000000000..6c82a35aa --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.Verify.cs @@ -0,0 +1,104 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Security.Cryptography.Pkcs; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class Rfc3161Verifier +{ + public TimeAnchorValidationResult Verify( + ReadOnlySpan tokenBytes, + IReadOnlyList trustRoots, + out TimeAnchor anchor, + TimeTokenVerificationOptions? 
options = null) + { + anchor = TimeAnchor.Unknown; + + if (trustRoots.Count == 0) + { + return TimeAnchorValidationResult.Failure("rfc3161-trust-roots-required"); + } + + if (tokenBytes.IsEmpty) + { + return TimeAnchorValidationResult.Failure("rfc3161-token-empty"); + } + + var tokenDigest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant(); + + try + { + var signedCms = new SignedCms(); + signedCms.Decode(tokenBytes.ToArray()); + + try + { + signedCms.CheckSignature(verifySignatureOnly: true); + } + catch (CryptographicException ex) + { + return TimeAnchorValidationResult.Failure($"rfc3161-signature-invalid:{ex.Message}"); + } + + if (signedCms.SignerInfos.Count == 0) + { + return TimeAnchorValidationResult.Failure("rfc3161-no-signer"); + } + + var signerInfo = signedCms.SignerInfos[0]; + var signerCert = signerInfo.Certificate; + if (signerCert is null) + { + return TimeAnchorValidationResult.Failure("rfc3161-no-signer-certificate"); + } + + var signingTime = ExtractSigningTime(signedCms, signerInfo); + if (signingTime is null) + { + return TimeAnchorValidationResult.Failure("rfc3161-no-signing-time"); + } + + var extraCertificates = BuildExtraCertificates(signedCms, options); + var verificationTime = options?.VerificationTime ?? signingTime.Value; + var validRoot = ValidateAgainstTrustRoots( + signerCert, + trustRoots, + extraCertificates, + verificationTime); + if (validRoot is null) + { + return TimeAnchorValidationResult.Failure("rfc3161-certificate-not-trusted"); + } + + if (options?.Offline == true) + { + if (!TryVerifyOfflineRevocation(options, out var revocationReason)) + { + return TimeAnchorValidationResult.Failure(revocationReason); + } + } + + var certFingerprint = Convert.ToHexString(SHA256.HashData(signerCert.RawData)).ToLowerInvariant()[..16]; + + anchor = new TimeAnchor( + signingTime.Value, + $"rfc3161:{validRoot.KeyId}", + "RFC3161", + certFingerprint, + tokenDigest); + + return TimeAnchorValidationResult.Success("rfc3161-verified"); + } + catch (CryptographicException ex) + { + return TimeAnchorValidationResult.Failure($"rfc3161-decode-error:{ex.Message}"); + } + catch (Exception ex) + { + return TimeAnchorValidationResult.Failure($"rfc3161-error:{ex.Message}"); + } + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.cs b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.cs index 94445988e..a76766581 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/Rfc3161Verifier.cs @@ -1,10 +1,4 @@ - -using StellaOps.AirGap.Time.Models; using StellaOps.AirGap.Time.Parsing; -using System.Formats.Asn1; -using System.Security.Cryptography; -using System.Security.Cryptography.Pkcs; -using System.Security.Cryptography.X509Certificates; namespace StellaOps.AirGap.Time.Services; @@ -12,329 +6,7 @@ namespace StellaOps.AirGap.Time.Services; /// Verifies RFC 3161 timestamp tokens using SignedCms and X509 certificate chain validation. /// Per AIRGAP-TIME-57-001: Provides trusted time-anchor service with real crypto verification. 
/// -public sealed class Rfc3161Verifier : ITimeTokenVerifier +public sealed partial class Rfc3161Verifier : ITimeTokenVerifier { - // RFC 3161 OIDs - private static readonly Oid TstInfoOid = new("1.2.840.113549.1.9.16.1.4"); // id-ct-TSTInfo - private static readonly Oid SigningTimeOid = new("1.2.840.113549.1.9.5"); - public TimeTokenFormat Format => TimeTokenFormat.Rfc3161; - - public TimeAnchorValidationResult Verify( - ReadOnlySpan tokenBytes, - IReadOnlyList trustRoots, - out TimeAnchor anchor, - TimeTokenVerificationOptions? options = null) - { - anchor = TimeAnchor.Unknown; - - if (trustRoots.Count == 0) - { - return TimeAnchorValidationResult.Failure("rfc3161-trust-roots-required"); - } - - if (tokenBytes.IsEmpty) - { - return TimeAnchorValidationResult.Failure("rfc3161-token-empty"); - } - - // Compute token digest for reference - var tokenDigest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant(); - - try - { - // Parse the SignedCms structure - var signedCms = new SignedCms(); - signedCms.Decode(tokenBytes.ToArray()); - - // Verify signature (basic check without chain building) - try - { - signedCms.CheckSignature(verifySignatureOnly: true); - } - catch (CryptographicException ex) - { - return TimeAnchorValidationResult.Failure($"rfc3161-signature-invalid:{ex.Message}"); - } - - // Extract the signing certificate - if (signedCms.SignerInfos.Count == 0) - { - return TimeAnchorValidationResult.Failure("rfc3161-no-signer"); - } - - var signerInfo = signedCms.SignerInfos[0]; - var signerCert = signerInfo.Certificate; - - if (signerCert is null) - { - return TimeAnchorValidationResult.Failure("rfc3161-no-signer-certificate"); - } - - // Extract signing time from the TSTInfo or signed attributes - var signingTime = ExtractSigningTime(signedCms, signerInfo); - if (signingTime is null) - { - return TimeAnchorValidationResult.Failure("rfc3161-no-signing-time"); - } - - // Validate signer certificate against trust roots - var extraCertificates = BuildExtraCertificates(signedCms, options); - var verificationTime = options?.VerificationTime ?? signingTime.Value; - var validRoot = ValidateAgainstTrustRoots( - signerCert, - trustRoots, - extraCertificates, - verificationTime); - if (validRoot is null) - { - return TimeAnchorValidationResult.Failure("rfc3161-certificate-not-trusted"); - } - - if (options?.Offline == true) - { - if (!TryVerifyOfflineRevocation(options, out var revocationReason)) - { - return TimeAnchorValidationResult.Failure(revocationReason); - } - } - - // Compute certificate fingerprint - var certFingerprint = Convert.ToHexString(SHA256.HashData(signerCert.RawData)).ToLowerInvariant()[..16]; - - anchor = new TimeAnchor( - signingTime.Value, - $"rfc3161:{validRoot.KeyId}", - "RFC3161", - certFingerprint, - tokenDigest); - - return TimeAnchorValidationResult.Success("rfc3161-verified"); - } - catch (CryptographicException ex) - { - return TimeAnchorValidationResult.Failure($"rfc3161-decode-error:{ex.Message}"); - } - catch (Exception ex) - { - return TimeAnchorValidationResult.Failure($"rfc3161-error:{ex.Message}"); - } - } - - private static TimeTrustRoot? 
ValidateAgainstTrustRoots( - X509Certificate2 signerCert, - IReadOnlyList trustRoots, - IReadOnlyList extraCertificates, - DateTimeOffset verificationTime) - { - foreach (var root in trustRoots) - { - // Match by certificate thumbprint or subject key identifier - try - { - // Try direct certificate match - var rootCert = X509CertificateLoader.LoadCertificate(root.PublicKey); - if (signerCert.Thumbprint.Equals(rootCert.Thumbprint, StringComparison.OrdinalIgnoreCase)) - { - return root; - } - - // Try chain validation against root - using var chain = new X509Chain(); - chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - chain.ChainPolicy.CustomTrustStore.Add(rootCert); - chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; // Offline mode - chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; - chain.ChainPolicy.VerificationTime = verificationTime.UtcDateTime; - - foreach (var cert in extraCertificates) - { - if (!string.Equals(cert.Thumbprint, rootCert.Thumbprint, StringComparison.OrdinalIgnoreCase)) - { - chain.ChainPolicy.ExtraStore.Add(cert); - } - } - - if (chain.Build(signerCert)) - { - return root; - } - } - catch - { - // Invalid root certificate format, try next - continue; - } - } - - return null; - } - - private static IReadOnlyList BuildExtraCertificates( - SignedCms signedCms, - TimeTokenVerificationOptions? options) - { - var extra = new List(); - if (options?.CertificateChain is { Count: > 0 }) - { - extra.AddRange(options.CertificateChain); - } - - foreach (var cert in signedCms.Certificates.Cast()) - { - if (!extra.Any(existing => - existing.Thumbprint.Equals(cert.Thumbprint, StringComparison.OrdinalIgnoreCase))) - { - extra.Add(cert); - } - } - - return extra; - } - - private static bool TryVerifyOfflineRevocation( - TimeTokenVerificationOptions options, - out string reason) - { - var hasOcsp = options.OcspResponses.Count > 0; - var hasCrl = options.Crls.Count > 0; - - if (!hasOcsp && !hasCrl) - { - reason = "rfc3161-revocation-missing"; - return false; - } - - if (hasOcsp && options.OcspResponses.Any(IsOcspSuccess)) - { - reason = "rfc3161-revocation-ocsp"; - return true; - } - - if (hasCrl && options.Crls.Any(IsCrlParseable)) - { - reason = "rfc3161-revocation-crl"; - return true; - } - - reason = "rfc3161-revocation-invalid"; - return false; - } - - private static bool IsOcspSuccess(byte[] response) - { - try - { - var reader = new AsnReader(response, AsnEncodingRules.DER); - var sequence = reader.ReadSequence(); - var status = sequence.ReadEnumeratedValue(); - return status == OcspResponseStatus.Successful; - } - catch - { - return false; - } - } - - private static bool IsCrlParseable(byte[] crl) - { - try - { - var reader = new AsnReader(crl, AsnEncodingRules.DER); - reader.ReadSequence(); - return true; - } - catch - { - return false; - } - } - - private static DateTimeOffset? 
ExtractSigningTime(SignedCms signedCms, SignerInfo signerInfo) - { - // Try to get signing time from signed attributes - foreach (var attr in signerInfo.SignedAttributes) - { - if (attr.Oid.Value == SigningTimeOid.Value) - { - try - { - var reader = new AsnReader(attr.Values[0].RawData, AsnEncodingRules.DER); - var time = reader.ReadUtcTime(); - return time; - } - catch - { - continue; - } - } - } - - // Try to extract from TSTInfo content - try - { - var content = signedCms.ContentInfo; - if (content.ContentType.Value == TstInfoOid.Value) - { - var tstInfo = ParseTstInfo(content.Content); - if (tstInfo.HasValue) - { - return tstInfo.Value; - } - } - } - catch - { - // Fall through - } - - return null; - } - - private static DateTimeOffset? ParseTstInfo(ReadOnlyMemory tstInfoBytes) - { - // TSTInfo ::= SEQUENCE { - // version INTEGER, - // policy OBJECT IDENTIFIER, - // messageImprint MessageImprint, - // serialNumber INTEGER, - // genTime GeneralizedTime, - // ... - // } - try - { - var reader = new AsnReader(tstInfoBytes, AsnEncodingRules.DER); - var sequenceReader = reader.ReadSequence(); - - // Skip version - sequenceReader.ReadInteger(); - - // Skip policy OID - sequenceReader.ReadObjectIdentifier(); - - // Skip messageImprint (SEQUENCE) - sequenceReader.ReadSequence(); - - // Skip serialNumber - sequenceReader.ReadInteger(); - - // Read genTime (GeneralizedTime) - var genTime = sequenceReader.ReadGeneralizedTime(); - return genTime; - } - catch - { - return null; - } - } - - private enum OcspResponseStatus - { - Successful = 0, - MalformedRequest = 1, - InternalError = 2, - TryLater = 3, - SigRequired = 5, - Unauthorized = 6 - } } diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseResponse.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseResponse.cs new file mode 100644 index 000000000..6f4f3bb26 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseResponse.cs @@ -0,0 +1,106 @@ +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Buffers.Binary; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class RoughtimeVerifier +{ + private static TimeAnchorValidationResult ParseRoughtimeResponse( + ReadOnlySpan data, + out long midpointMicros, + out uint radiusMicros, + out ReadOnlySpan signature, + out ReadOnlySpan signedMessage) + { + midpointMicros = 0; + radiusMicros = 0; + signature = ReadOnlySpan.Empty; + signedMessage = ReadOnlySpan.Empty; + + if (data.Length < 8) + { + return TimeAnchorValidationResult.Failure("roughtime-message-too-short"); + } + + var numTags = BinaryPrimitives.ReadUInt32LittleEndian(data); + if (numTags == 0 || numTags > 100) + { + return TimeAnchorValidationResult.Failure("roughtime-invalid-tag-count"); + } + + var headerSize = 4 + (4 * ((int)numTags - 1)) + (4 * (int)numTags); + if (data.Length < headerSize) + { + return TimeAnchorValidationResult.Failure("roughtime-header-incomplete"); + } + + var offsetsStart = 4; + var tagsStart = offsetsStart + (4 * ((int)numTags - 1)); + var valuesStart = headerSize; + + ReadOnlySpan sigBytes = ReadOnlySpan.Empty; + ReadOnlySpan srepBytes = ReadOnlySpan.Empty; + + for (var i = 0; i < (int)numTags; i++) + { + var tag = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(tagsStart + (i * 4))); + var valueStart = valuesStart; + var valueEnd = data.Length; + + if (i > 0) + { + valueStart = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian( + data.Slice(offsetsStart + ((i - 1) * 4))); + } + + if (i < 
(int)numTags - 1) + { + valueEnd = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian( + data.Slice(offsetsStart + (i * 4))); + } + + if (valueStart < 0 || valueEnd > data.Length || valueStart > valueEnd) + { + return TimeAnchorValidationResult.Failure("roughtime-invalid-value-bounds"); + } + + var value = data.Slice(valueStart, valueEnd - valueStart); + + switch (tag) + { + case TagSig: + if (value.Length != Ed25519SignatureLength) + { + return TimeAnchorValidationResult.Failure("roughtime-invalid-signature-length"); + } + sigBytes = value; + break; + case TagSrep: + srepBytes = value; + break; + } + } + + if (sigBytes.IsEmpty) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-signature"); + } + + if (srepBytes.IsEmpty) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-srep"); + } + + var srepResult = ParseSignedResponse(srepBytes, out midpointMicros, out radiusMicros, out _, out _, out _); + if (!srepResult.IsValid) + { + return srepResult; + } + + signature = sigBytes; + signedMessage = srepBytes; + + return TimeAnchorValidationResult.Success("roughtime-parsed"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.Tags.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.Tags.cs new file mode 100644 index 000000000..76d835cfe --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.Tags.cs @@ -0,0 +1,104 @@ +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Buffers.Binary; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class RoughtimeVerifier +{ + private static TimeAnchorValidationResult ReadSignedResponseTags( + ReadOnlySpan data, + uint numTags, + int offsetsStart, + int tagsStart, + int valuesStart, + out long midpointMicros, + out uint radiusMicros, + out ReadOnlySpan rootBytes, + out ReadOnlySpan pathBytes, + out uint index, + out bool hasMidp, + out bool hasRadi, + out bool hasRoot, + out bool hasPath, + out bool hasIndex) + { + midpointMicros = 0; + radiusMicros = 0; + rootBytes = ReadOnlySpan.Empty; + pathBytes = ReadOnlySpan.Empty; + index = 0; + hasMidp = false; + hasRadi = false; + hasRoot = false; + hasPath = false; + hasIndex = false; + + for (var i = 0; i < (int)numTags; i++) + { + var tag = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(tagsStart + (i * 4))); + var valueStart = valuesStart; + var valueEnd = data.Length; + + if (i > 0) + { + valueStart = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian( + data.Slice(offsetsStart + ((i - 1) * 4))); + } + + if (i < (int)numTags - 1) + { + valueEnd = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian( + data.Slice(offsetsStart + (i * 4))); + } + + if (valueStart < 0 || valueEnd > data.Length || valueStart > valueEnd) + { + continue; + } + + var value = data.Slice(valueStart, valueEnd - valueStart); + + switch (tag) + { + case TagMidp: + if (value.Length == 8) + { + midpointMicros = BinaryPrimitives.ReadInt64LittleEndian(value); + hasMidp = true; + } + break; + case TagRadi: + if (value.Length == 4) + { + radiusMicros = BinaryPrimitives.ReadUInt32LittleEndian(value); + hasRadi = true; + } + break; + case TagRoot: + if (value.Length == MerkleNodeLength) + { + rootBytes = value; + hasRoot = true; + } + break; + case TagPath: + if (!value.IsEmpty && value.Length % MerkleNodeLength == 0) + { + pathBytes = value; + hasPath = true; + } + break; + case TagIndx: + if (value.Length == MerkleIndexLength) + { + 
index = BinaryPrimitives.ReadUInt32LittleEndian(value); + hasIndex = true; + } + break; + } + } + + return TimeAnchorValidationResult.Success("roughtime-srep-tags-read"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.cs new file mode 100644 index 000000000..5bcf9e542 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.ParseSignedResponse.cs @@ -0,0 +1,98 @@ +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Buffers.Binary; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class RoughtimeVerifier +{ + private static TimeAnchorValidationResult ParseSignedResponse( + ReadOnlySpan data, + out long midpointMicros, + out uint radiusMicros, + out ReadOnlySpan rootBytes, + out ReadOnlySpan pathBytes, + out uint index) + { + midpointMicros = 0; + radiusMicros = 0; + rootBytes = ReadOnlySpan.Empty; + pathBytes = ReadOnlySpan.Empty; + index = 0; + + if (data.Length < 8) + { + return TimeAnchorValidationResult.Failure("roughtime-srep-too-short"); + } + + var numTags = BinaryPrimitives.ReadUInt32LittleEndian(data); + if (numTags == 0 || numTags > 50) + { + return TimeAnchorValidationResult.Failure("roughtime-srep-invalid-tag-count"); + } + + var headerSize = 4 + (4 * ((int)numTags - 1)) + (4 * (int)numTags); + if (data.Length < headerSize) + { + return TimeAnchorValidationResult.Failure("roughtime-srep-header-incomplete"); + } + + var offsetsStart = 4; + var tagsStart = offsetsStart + (4 * ((int)numTags - 1)); + var valuesStart = headerSize; + + var readResult = ReadSignedResponseTags( + data, + numTags, + offsetsStart, + tagsStart, + valuesStart, + out midpointMicros, + out radiusMicros, + out rootBytes, + out pathBytes, + out index, + out var hasMidp, + out var hasRadi, + out var hasRoot, + out var hasPath, + out var hasIndex); + if (!readResult.IsValid) + { + return readResult; + } + + if (!hasMidp) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-midpoint"); + } + + if (!hasRoot) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-root"); + } + + if (!hasPath) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-path"); + } + + if (!hasIndex) + { + return TimeAnchorValidationResult.Failure("roughtime-missing-index"); + } + + var pathValidation = ValidateMerklePathStructure(rootBytes, pathBytes, index); + if (!pathValidation.IsValid) + { + return pathValidation; + } + + if (!hasRadi) + { + radiusMicros = 1_000_000; + } + + return TimeAnchorValidationResult.Success("roughtime-srep-parsed"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Validation.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Validation.cs new file mode 100644 index 000000000..3807b0f94 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Validation.cs @@ -0,0 +1,54 @@ +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class RoughtimeVerifier +{ + private static TimeAnchorValidationResult ValidateMerklePathStructure( + ReadOnlySpan rootBytes, + ReadOnlySpan pathBytes, + uint index) + { + if (rootBytes.Length != MerkleNodeLength) + { + return TimeAnchorValidationResult.Failure("roughtime-invalid-root-length"); + } + + if (pathBytes.IsEmpty || pathBytes.Length % MerkleNodeLength != 0) + { + return 
TimeAnchorValidationResult.Failure("roughtime-invalid-path-length"); + } + + var depth = pathBytes.Length / MerkleNodeLength; + if (depth <= 31) + { + var maxIndex = 1u << depth; + if (index >= maxIndex) + { + return TimeAnchorValidationResult.Failure("roughtime-invalid-index"); + } + } + + return TimeAnchorValidationResult.Success("roughtime-merkle-structure-valid"); + } + + private static bool VerifyEd25519Signature(ReadOnlySpan message, ReadOnlySpan signature, byte[] publicKey) + { + try + { + const string ContextPrefix = "RoughTime v1 response signature\0"; + var prefixBytes = System.Text.Encoding.ASCII.GetBytes(ContextPrefix); + var signedData = new byte[prefixBytes.Length + message.Length]; + prefixBytes.CopyTo(signedData, 0); + message.CopyTo(signedData.AsSpan(prefixBytes.Length)); + + return Ed25519.Verify(publicKey, signedData, signature.ToArray()); + } + catch + { + return false; + } + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Verify.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Verify.cs new file mode 100644 index 000000000..8645c38c1 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.Verify.cs @@ -0,0 +1,79 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Parsing; +using System; +using System.Collections.Generic; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class RoughtimeVerifier +{ + public TimeAnchorValidationResult Verify( + ReadOnlySpan tokenBytes, + IReadOnlyList trustRoots, + out TimeAnchor anchor, + TimeTokenVerificationOptions? options = null) + { + anchor = TimeAnchor.Unknown; + + if (trustRoots.Count == 0) + { + return TimeAnchorValidationResult.Failure("roughtime-trust-roots-required"); + } + + if (tokenBytes.IsEmpty) + { + return TimeAnchorValidationResult.Failure("roughtime-token-empty"); + } + + var tokenDigest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant(); + + var parseResult = ParseRoughtimeResponse( + tokenBytes, + out var midpointMicros, + out var radiusMicros, + out var signature, + out var signedMessage); + if (!parseResult.IsValid) + { + return parseResult; + } + + TimeTrustRoot? 
validRoot = null; + foreach (var root in trustRoots) + { + if (!string.Equals(root.Algorithm, "ed25519", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (root.PublicKey.Length != Ed25519PublicKeyLength) + { + continue; + } + + if (VerifyEd25519Signature(signedMessage, signature, root.PublicKey)) + { + validRoot = root; + break; + } + } + + if (validRoot is null) + { + return TimeAnchorValidationResult.Failure("roughtime-signature-invalid"); + } + + var anchorTime = DateTimeOffset.UnixEpoch.AddMicroseconds(midpointMicros); + var keyFingerprint = Convert.ToHexString(SHA256.HashData(validRoot.PublicKey)).ToLowerInvariant()[..16]; + + anchor = new TimeAnchor( + anchorTime, + $"roughtime:{validRoot.KeyId}", + "Roughtime", + keyFingerprint, + tokenDigest); + + return TimeAnchorValidationResult.Success($"roughtime-verified:radius={radiusMicros}us"); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.cs b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.cs index b303e6916..512e73676 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/RoughtimeVerifier.cs @@ -1,8 +1,4 @@ - -using StellaOps.AirGap.Time.Models; using StellaOps.AirGap.Time.Parsing; -using System.Buffers.Binary; -using System.Security.Cryptography; namespace StellaOps.AirGap.Time.Services; @@ -10,422 +6,20 @@ namespace StellaOps.AirGap.Time.Services; /// Verifies Roughtime tokens using Ed25519 signature verification. /// Per AIRGAP-TIME-57-001: Provides trusted time-anchor service with real crypto verification. /// -public sealed class RoughtimeVerifier : ITimeTokenVerifier +public sealed partial class RoughtimeVerifier : ITimeTokenVerifier { - // Roughtime wire format tag constants (32-bit little-endian ASCII codes) - private const uint TagSig = 0x00474953; // "SIG\0" - Signature - private const uint TagMidp = 0x5044494D; // "MIDP" - Midpoint - private const uint TagRadi = 0x49444152; // "RADI" - Radius - private const uint TagRoot = 0x544F4F52; // "ROOT" - Merkle root - private const uint TagPath = 0x48544150; // "PATH" - Merkle path - private const uint TagIndx = 0x58444E49; // "INDX" - Index - private const uint TagSrep = 0x50455253; // "SREP" - Signed response + private const uint TagSig = 0x00474953; + private const uint TagMidp = 0x5044494D; + private const uint TagRadi = 0x49444152; + private const uint TagRoot = 0x544F4F52; + private const uint TagPath = 0x48544150; + private const uint TagIndx = 0x58444E49; + private const uint TagSrep = 0x50455253; - // Ed25519 constants private const int Ed25519SignatureLength = 64; private const int Ed25519PublicKeyLength = 32; private const int MerkleNodeLength = 32; private const int MerkleIndexLength = 4; public TimeTokenFormat Format => TimeTokenFormat.Roughtime; - - public TimeAnchorValidationResult Verify( - ReadOnlySpan tokenBytes, - IReadOnlyList trustRoots, - out TimeAnchor anchor, - TimeTokenVerificationOptions? 
options = null) - { - anchor = TimeAnchor.Unknown; - - if (trustRoots.Count == 0) - { - return TimeAnchorValidationResult.Failure("roughtime-trust-roots-required"); - } - - if (tokenBytes.IsEmpty) - { - return TimeAnchorValidationResult.Failure("roughtime-token-empty"); - } - - // Compute token digest for reference - var tokenDigest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant(); - - // Parse Roughtime wire format - var parseResult = ParseRoughtimeResponse(tokenBytes, out var midpointMicros, out var radiusMicros, out var signature, out var signedMessage); - - if (!parseResult.IsValid) - { - return parseResult; - } - - // Find a valid trust root with Ed25519 key - TimeTrustRoot? validRoot = null; - foreach (var root in trustRoots) - { - if (!string.Equals(root.Algorithm, "ed25519", StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (root.PublicKey.Length != Ed25519PublicKeyLength) - { - continue; - } - - // Verify Ed25519 signature - if (VerifyEd25519Signature(signedMessage, signature, root.PublicKey)) - { - validRoot = root; - break; - } - } - - if (validRoot is null) - { - return TimeAnchorValidationResult.Failure("roughtime-signature-invalid"); - } - - // Convert midpoint from microseconds to DateTimeOffset - var anchorTime = DateTimeOffset.UnixEpoch.AddMicroseconds(midpointMicros); - - // Compute signature fingerprint from the public key - var keyFingerprint = Convert.ToHexString(SHA256.HashData(validRoot.PublicKey)).ToLowerInvariant()[..16]; - - anchor = new TimeAnchor( - anchorTime, - $"roughtime:{validRoot.KeyId}", - "Roughtime", - keyFingerprint, - tokenDigest); - - return TimeAnchorValidationResult.Success($"roughtime-verified:radius={radiusMicros}us"); - } - - private static TimeAnchorValidationResult ParseRoughtimeResponse( - ReadOnlySpan data, - out long midpointMicros, - out uint radiusMicros, - out ReadOnlySpan signature, - out ReadOnlySpan signedMessage) - { - midpointMicros = 0; - radiusMicros = 0; - signature = ReadOnlySpan.Empty; - signedMessage = ReadOnlySpan.Empty; - - // Roughtime wire format: [num_tags:u32] [offsets:u32[]] [tags:u32[]] [values...] 
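// Illustrative sketch, not from the patch: packing a two-tag message in the layout described above
// (num_tags, numTags-1 offsets, numTags tags, then the concatenated values), i.e. the shape
// ParseRoughtimeResponse expects; requires System.Buffers.Binary.
static byte[] PackSigAndSrep(byte[] sig, byte[] srep)
{
    const int header = 4 + 4 + 8;                                                  // num_tags + 1 offset + 2 tags
    var message = new byte[header + sig.Length + srep.Length];
    BinaryPrimitives.WriteUInt32LittleEndian(message, 2);                          // num_tags = 2
    BinaryPrimitives.WriteUInt32LittleEndian(message.AsSpan(4), (uint)sig.Length); // offset where the second value starts
    BinaryPrimitives.WriteUInt32LittleEndian(message.AsSpan(8), 0x00474953);       // TagSig  ("SIG\0")
    BinaryPrimitives.WriteUInt32LittleEndian(message.AsSpan(12), 0x50455253);      // TagSrep ("SREP")
    sig.CopyTo(message, header);
    srep.CopyTo(message, header + sig.Length);
    return message;
}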
- // Minimum size: 4 (num_tags) + at least one tag - if (data.Length < 8) - { - return TimeAnchorValidationResult.Failure("roughtime-message-too-short"); - } - - var numTags = BinaryPrimitives.ReadUInt32LittleEndian(data); - - if (numTags == 0 || numTags > 100) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-tag-count"); - } - - // Header size: 4 + 4*(numTags-1) offsets + 4*numTags tags - var headerSize = 4 + (4 * ((int)numTags - 1)) + (4 * (int)numTags); - - if (data.Length < headerSize) - { - return TimeAnchorValidationResult.Failure("roughtime-header-incomplete"); - } - - // Parse tags and extract required fields - var offsetsStart = 4; - var tagsStart = offsetsStart + (4 * ((int)numTags - 1)); - var valuesStart = headerSize; - - ReadOnlySpan sigBytes = ReadOnlySpan.Empty; - ReadOnlySpan srepBytes = ReadOnlySpan.Empty; - - for (var i = 0; i < (int)numTags; i++) - { - var tag = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(tagsStart + (i * 4))); - - // Calculate value bounds - var valueStart = valuesStart; - var valueEnd = data.Length; - - if (i > 0) - { - valueStart = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(offsetsStart + ((i - 1) * 4))); - } - - if (i < (int)numTags - 1) - { - valueEnd = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(offsetsStart + (i * 4))); - } - - if (valueStart < 0 || valueEnd > data.Length || valueStart > valueEnd) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-value-bounds"); - } - - var value = data.Slice(valueStart, valueEnd - valueStart); - - switch (tag) - { - case TagSig: - if (value.Length != Ed25519SignatureLength) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-signature-length"); - } - sigBytes = value; - break; - case TagSrep: - srepBytes = value; - break; - } - } - - if (sigBytes.IsEmpty) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-signature"); - } - - if (srepBytes.IsEmpty) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-srep"); - } - - // Parse SREP (signed response) for MIDP and RADI - var srepResult = ParseSignedResponse(srepBytes, out midpointMicros, out radiusMicros, out _, out _, out _); - if (!srepResult.IsValid) - { - return srepResult; - } - - signature = sigBytes; - signedMessage = srepBytes; - - return TimeAnchorValidationResult.Success("roughtime-parsed"); - } - - private static TimeAnchorValidationResult ParseSignedResponse( - ReadOnlySpan data, - out long midpointMicros, - out uint radiusMicros, - out ReadOnlySpan rootBytes, - out ReadOnlySpan pathBytes, - out uint index) - { - midpointMicros = 0; - radiusMicros = 0; - rootBytes = ReadOnlySpan.Empty; - pathBytes = ReadOnlySpan.Empty; - index = 0; - - if (data.Length < 8) - { - return TimeAnchorValidationResult.Failure("roughtime-srep-too-short"); - } - - var numTags = BinaryPrimitives.ReadUInt32LittleEndian(data); - - if (numTags == 0 || numTags > 50) - { - return TimeAnchorValidationResult.Failure("roughtime-srep-invalid-tag-count"); - } - - var headerSize = 4 + (4 * ((int)numTags - 1)) + (4 * (int)numTags); - - if (data.Length < headerSize) - { - return TimeAnchorValidationResult.Failure("roughtime-srep-header-incomplete"); - } - - var offsetsStart = 4; - var tagsStart = offsetsStart + (4 * ((int)numTags - 1)); - var valuesStart = headerSize; - - var hasMidp = false; - var hasRadi = false; - var hasRoot = false; - var hasPath = false; - var hasIndex = false; - - for (var i = 0; i < (int)numTags; i++) - { - var tag = 
BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(tagsStart + (i * 4))); - - var valueStart = valuesStart; - var valueEnd = data.Length; - - if (i > 0) - { - valueStart = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(offsetsStart + ((i - 1) * 4))); - } - - if (i < (int)numTags - 1) - { - valueEnd = valuesStart + (int)BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(offsetsStart + (i * 4))); - } - - if (valueStart < 0 || valueEnd > data.Length || valueStart > valueEnd) - { - continue; - } - - var value = data.Slice(valueStart, valueEnd - valueStart); - - switch (tag) - { - case TagMidp: - if (value.Length == 8) - { - midpointMicros = BinaryPrimitives.ReadInt64LittleEndian(value); - hasMidp = true; - } - break; - case TagRadi: - if (value.Length == 4) - { - radiusMicros = BinaryPrimitives.ReadUInt32LittleEndian(value); - hasRadi = true; - } - break; - case TagRoot: - if (value.Length == MerkleNodeLength) - { - rootBytes = value; - hasRoot = true; - } - break; - case TagPath: - if (!value.IsEmpty && value.Length % MerkleNodeLength == 0) - { - pathBytes = value; - hasPath = true; - } - break; - case TagIndx: - if (value.Length == MerkleIndexLength) - { - index = BinaryPrimitives.ReadUInt32LittleEndian(value); - hasIndex = true; - } - break; - } - } - - if (!hasMidp) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-midpoint"); - } - - if (!hasRoot) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-root"); - } - - if (!hasPath) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-path"); - } - - if (!hasIndex) - { - return TimeAnchorValidationResult.Failure("roughtime-missing-index"); - } - - var pathValidation = ValidateMerklePathStructure(rootBytes, pathBytes, index); - if (!pathValidation.IsValid) - { - return pathValidation; - } - - if (!hasRadi) - { - // RADI is optional, default to 1 second uncertainty - radiusMicros = 1_000_000; - } - - return TimeAnchorValidationResult.Success("roughtime-srep-parsed"); - } - - private static TimeAnchorValidationResult ValidateMerklePathStructure(ReadOnlySpan rootBytes, ReadOnlySpan pathBytes, uint index) - { - if (rootBytes.Length != MerkleNodeLength) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-root-length"); - } - - if (pathBytes.IsEmpty || pathBytes.Length % MerkleNodeLength != 0) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-path-length"); - } - - var depth = pathBytes.Length / MerkleNodeLength; - if (depth <= 31) - { - var maxIndex = 1u << depth; - if (index >= maxIndex) - { - return TimeAnchorValidationResult.Failure("roughtime-invalid-index"); - } - } - - return TimeAnchorValidationResult.Success("roughtime-merkle-structure-valid"); - } - - private static bool VerifyEd25519Signature(ReadOnlySpan message, ReadOnlySpan signature, byte[] publicKey) - { - try - { - // Roughtime signs the context-prefixed message: "RoughTime v1 response signature\0" || SREP - const string ContextPrefix = "RoughTime v1 response signature\0"; - var prefixBytes = System.Text.Encoding.ASCII.GetBytes(ContextPrefix); - var signedData = new byte[prefixBytes.Length + message.Length]; - prefixBytes.CopyTo(signedData, 0); - message.CopyTo(signedData.AsSpan(prefixBytes.Length)); - - // Use .NET's Ed25519 verification - // Note: .NET 10 supports Ed25519 natively via ECDsa with curve Ed25519 - return Ed25519.Verify(publicKey, signedData, signature.ToArray()); - } - catch - { - return false; - } - } -} - -/// -/// Ed25519 signature verification helper using .NET 
cryptography. -/// -internal static class Ed25519 -{ - public static bool Verify(byte[] publicKey, byte[] message, byte[] signature) - { - try - { - // .NET 10 has native Ed25519 support via ECDsa - using var ecdsa = ECDsa.Create(ECCurve.CreateFromValue("1.3.101.112")); // Ed25519 OID - ecdsa.ImportSubjectPublicKeyInfo(CreateEd25519Spki(publicKey), out _); - return ecdsa.VerifyData(message, signature, HashAlgorithmName.SHA512); - } - catch - { - // Fallback: if Ed25519 curve not available, return false - return false; - } - } - - private static byte[] CreateEd25519Spki(byte[] publicKey) - { - // Ed25519 SPKI format: - // 30 2a - SEQUENCE (42 bytes) - // 30 05 - SEQUENCE (5 bytes) - // 06 03 2b 65 70 - OID 1.3.101.112 (Ed25519) - // 03 21 00 [32 bytes public key] - var spki = new byte[44]; - new byte[] { 0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70, 0x03, 0x21, 0x00 }.CopyTo(spki, 0); - publicKey.CopyTo(spki, 12); - return spki; - } } diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/SealedStartupValidator.cs b/src/AirGap/StellaOps.AirGap.Time/Services/SealedStartupValidator.cs index a593ced6b..dcacb19c7 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/SealedStartupValidator.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/SealedStartupValidator.cs @@ -25,7 +25,7 @@ public sealed class SealedStartupValidator public async Task ValidateAsync(string tenantId, StalenessBudget budget, CancellationToken cancellationToken) { - var status = await _statusService.GetStatusAsync(tenantId, _timeProvider.GetUtcNow(), cancellationToken); + var status = await _statusService.GetStatusAsync(tenantId, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); if (status.Anchor == TimeAnchor.Unknown) { diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorDriftResult.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorDriftResult.cs new file mode 100644 index 000000000..7ef7bc141 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorDriftResult.cs @@ -0,0 +1,12 @@ +using System; + +namespace StellaOps.AirGap.Time.Services; + +/// +/// Result of time drift calculation. +/// +public sealed record TimeAnchorDriftResult( + bool HasAnchor, + TimeSpan Drift, + bool DriftExceedsThreshold, + DateTimeOffset? AnchorTime); diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyErrorCodes.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyErrorCodes.cs new file mode 100644 index 000000000..cc8cf4da8 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyErrorCodes.cs @@ -0,0 +1,13 @@ +namespace StellaOps.AirGap.Time.Services; + +/// +/// Error codes for time-anchor policy violations. +/// +public static class TimeAnchorPolicyErrorCodes +{ + public const string AnchorMissing = "TIME_ANCHOR_MISSING"; + public const string AnchorStale = "TIME_ANCHOR_STALE"; + public const string AnchorBreached = "TIME_ANCHOR_BREACHED"; + public const string DriftExceeded = "TIME_ANCHOR_DRIFT_EXCEEDED"; + public const string PolicyViolation = "TIME_ANCHOR_POLICY_VIOLATION"; +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyOptions.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyOptions.cs new file mode 100644 index 000000000..262c9c3fa --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyOptions.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; + +namespace StellaOps.AirGap.Time.Services; + +/// +/// Policy configuration for time anchors. 
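A small sketch of overriding these defaults in code (the values are illustrative; the properties have public setters):

var policyOptions = new TimeAnchorPolicyOptions
{
    StrictEnforcement = true,
    MaxDriftSeconds = 3600,                    // tighten the default 24 h drift window to one hour
    AllowMissingAnchorInUnsealedMode = false
};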
+/// +public sealed class TimeAnchorPolicyOptions +{ + public bool StrictEnforcement { get; set; } = true; + + public int MaxDriftSeconds { get; set; } = 86400; + + public bool AllowMissingAnchorInUnsealedMode { get; set; } = true; + + public IReadOnlyList StrictOperations { get; set; } = new[] + { + "bundle.import", + "attestation.sign", + "audit.record" + }; +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyResult.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyResult.cs new file mode 100644 index 000000000..f36922408 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyResult.cs @@ -0,0 +1,13 @@ +using StellaOps.AirGap.Time.Models; + +namespace StellaOps.AirGap.Time.Services; + +/// +/// Result of time-anchor policy evaluation. +/// +public sealed record TimeAnchorPolicyResult( + bool Allowed, + string? ErrorCode, + string? Reason, + string? Remediation, + StalenessEvaluation? Staleness); diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Bundle.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Bundle.cs new file mode 100644 index 000000000..8699070a9 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Bundle.cs @@ -0,0 +1,45 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class TimeAnchorPolicyService +{ + public async Task EnforceBundleImportPolicyAsync( + string tenantId, + string bundleId, + DateTimeOffset? bundleTimestamp, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleId); + + var baseResult = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); + if (!baseResult.Allowed) + { + return baseResult; + } + + if (bundleTimestamp.HasValue) + { + var driftResult = await CalculateDriftAsync(tenantId, bundleTimestamp.Value, cancellationToken).ConfigureAwait(false); + if (driftResult.DriftExceedsThreshold) + { + _logger.LogWarning( + "Bundle {BundleId} timestamp drift exceeds threshold for tenant {TenantId}: drift={DriftSeconds}s > max={MaxDriftSeconds}s [{ErrorCode}]", + bundleId, tenantId, driftResult.Drift.TotalSeconds, _options.MaxDriftSeconds, TimeAnchorPolicyErrorCodes.DriftExceeded); + + return new TimeAnchorPolicyResult( + Allowed: false, + ErrorCode: TimeAnchorPolicyErrorCodes.DriftExceeded, + Reason: $"Bundle timestamp drift exceeds maximum ({driftResult.Drift.TotalSeconds:F0}s > {_options.MaxDriftSeconds}s)", + Remediation: "Bundle is too old or time anchor is significantly out of sync. 
Refresh the time anchor or use a more recent bundle.", + Staleness: baseResult.Staleness); + } + } + + _logger.LogDebug("Bundle import policy passed for tenant {TenantId}, bundle {BundleId}", tenantId, bundleId); + return baseResult; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Drift.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Drift.cs new file mode 100644 index 000000000..ab7360da3 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Drift.cs @@ -0,0 +1,38 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class TimeAnchorPolicyService +{ + public async Task CalculateDriftAsync( + string tenantId, + DateTimeOffset targetTime, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var now = _timeProvider.GetUtcNow(); + var status = await _statusService.GetStatusAsync(tenantId, now, cancellationToken).ConfigureAwait(false); + + if (!status.HasAnchor) + { + return new TimeAnchorDriftResult( + HasAnchor: false, + Drift: TimeSpan.Zero, + DriftExceedsThreshold: false, + AnchorTime: null); + } + + var drift = targetTime - status.Anchor!.AnchorTime; + var absDriftSeconds = Math.Abs(drift.TotalSeconds); + var exceedsThreshold = absDriftSeconds > _options.MaxDriftSeconds; + + return new TimeAnchorDriftResult( + HasAnchor: true, + Drift: drift, + DriftExceedsThreshold: exceedsThreshold, + AnchorTime: status.Anchor.AnchorTime); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Operation.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Operation.cs new file mode 100644 index 000000000..023459235 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Operation.cs @@ -0,0 +1,49 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class TimeAnchorPolicyService +{ + public async Task EnforceOperationPolicyAsync( + string tenantId, + string operation, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(operation); + + var isStrictOperation = _options.StrictOperations.Contains(operation, StringComparer.OrdinalIgnoreCase); + + if (isStrictOperation) + { + var result = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); + if (!result.Allowed) + { + _logger.LogWarning( + "Strict operation {Operation} blocked for tenant {TenantId}: {Reason} [{ErrorCode}]", + operation, tenantId, result.Reason, result.ErrorCode); + } + return result; + } + + var baseResult = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); + + if (!baseResult.Allowed && !_options.StrictEnforcement) + { + _logger.LogDebug( + "Non-strict operation {Operation} allowed for tenant {TenantId} despite policy issue: {Reason}", + operation, tenantId, baseResult.Reason); + + return new TimeAnchorPolicyResult( + Allowed: true, + ErrorCode: baseResult.ErrorCode, + Reason: $"operation-allowed-with-warning:{baseResult.Reason}", + Remediation: baseResult.Remediation, + Staleness: baseResult.Staleness); + } + + return baseResult; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Validation.cs 
b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Validation.cs new file mode 100644 index 000000000..69bcccbbb --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.Validation.cs @@ -0,0 +1,71 @@ +using StellaOps.AirGap.Time.Models; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.AirGap.Time.Services; + +public sealed partial class TimeAnchorPolicyService +{ + public async Task ValidateTimeAnchorAsync(string tenantId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var now = _timeProvider.GetUtcNow(); + var status = await _statusService.GetStatusAsync(tenantId, now, cancellationToken).ConfigureAwait(false); + + if (!status.HasAnchor) + { + if (_options.AllowMissingAnchorInUnsealedMode && !_options.StrictEnforcement) + { + _logger.LogDebug("Time anchor missing for tenant {TenantId}, allowed in non-strict mode", tenantId); + return new TimeAnchorPolicyResult( + Allowed: true, + ErrorCode: null, + Reason: "time-anchor-missing-allowed", + Remediation: null, + Staleness: null); + } + + _logger.LogWarning("Time anchor missing for tenant {TenantId} [{ErrorCode}]", + tenantId, TimeAnchorPolicyErrorCodes.AnchorMissing); + + return new TimeAnchorPolicyResult( + Allowed: false, + ErrorCode: TimeAnchorPolicyErrorCodes.AnchorMissing, + Reason: "No time anchor configured for tenant", + Remediation: "Set a time anchor using POST /api/v1/time/anchor with a valid Roughtime or RFC3161 token", + Staleness: null); + } + + var staleness = status.Staleness; + + if (staleness.IsBreach) + { + _logger.LogWarning( + "Time anchor staleness breached for tenant {TenantId}: age={AgeSeconds}s > breach={BreachSeconds}s [{ErrorCode}]", + tenantId, staleness.AgeSeconds, staleness.BreachSeconds, TimeAnchorPolicyErrorCodes.AnchorBreached); + + return new TimeAnchorPolicyResult( + Allowed: false, + ErrorCode: TimeAnchorPolicyErrorCodes.AnchorBreached, + Reason: $"Time anchor staleness breached ({staleness.AgeSeconds}s > {staleness.BreachSeconds}s)", + Remediation: "Refresh time anchor with a new token to continue operations", + Staleness: staleness); + } + + if (staleness.IsWarning) + { + _logger.LogWarning( + "Time anchor staleness warning for tenant {TenantId}: age={AgeSeconds}s approaching breach at {BreachSeconds}s [{ErrorCode}]", + tenantId, staleness.AgeSeconds, staleness.BreachSeconds, TimeAnchorPolicyErrorCodes.AnchorStale); + } + + return new TimeAnchorPolicyResult( + Allowed: true, + ErrorCode: null, + Reason: staleness.IsWarning ? "time-anchor-warning" : "time-anchor-valid", + Remediation: staleness.IsWarning ? "Consider refreshing time anchor soon" : null, + Staleness: staleness); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.cs index 41004a63e..4453fea45 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeAnchorPolicyService.cs @@ -1,113 +1,13 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using StellaOps.AirGap.Time.Models; +using System; namespace StellaOps.AirGap.Time.Services; -/// -/// Policy enforcement service for time anchors. -/// Per AIRGAP-TIME-57-001: Enforces time-anchor requirements in sealed-mode operations. 
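A minimal enforcement call-site sketch; how the service instance is resolved and the tenant/operation names are illustrative assumptions, not part of this patch:

var decision = await policyService
    .EnforceOperationPolicyAsync("tenant-a", "attestation.sign", cancellationToken)
    .ConfigureAwait(false);

if (!decision.Allowed)
{
    // ErrorCode is one of the TimeAnchorPolicyErrorCodes constants (e.g. TIME_ANCHOR_BREACHED) and
    // Remediation carries the operator-facing hint such as "Refresh time anchor with a new token".
    throw new InvalidOperationException($"{decision.ErrorCode}: {decision.Reason}");
}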
-/// -public interface ITimeAnchorPolicyService -{ - /// - /// Validates that a valid time anchor exists and is not stale. - /// - Task ValidateTimeAnchorAsync(string tenantId, CancellationToken cancellationToken = default); - - /// - /// Enforces time-anchor requirements before bundle import. - /// - Task EnforceBundleImportPolicyAsync( - string tenantId, - string bundleId, - DateTimeOffset? bundleTimestamp, - CancellationToken cancellationToken = default); - - /// - /// Enforces time-anchor requirements before operations that require trusted time. - /// - Task EnforceOperationPolicyAsync( - string tenantId, - string operation, - CancellationToken cancellationToken = default); - - /// - /// Gets the time drift between the anchor and a given timestamp. - /// - Task CalculateDriftAsync( - string tenantId, - DateTimeOffset targetTime, - CancellationToken cancellationToken = default); -} - -/// -/// Result of time-anchor policy evaluation. -/// -public sealed record TimeAnchorPolicyResult( - bool Allowed, - string? ErrorCode, - string? Reason, - string? Remediation, - StalenessEvaluation? Staleness); - -/// -/// Result of time drift calculation. -/// -public sealed record TimeAnchorDriftResult( - bool HasAnchor, - TimeSpan Drift, - bool DriftExceedsThreshold, - DateTimeOffset? AnchorTime); - -/// -/// Policy configuration for time anchors. -/// -public sealed class TimeAnchorPolicyOptions -{ - /// - /// Whether to enforce strict time-anchor requirements. - /// When true, operations fail if time anchor is missing or stale. - /// - public bool StrictEnforcement { get; set; } = true; - - /// - /// Maximum allowed drift between anchor time and operation time in seconds. - /// - public int MaxDriftSeconds { get; set; } = 86400; // 24 hours - - /// - /// Whether to allow operations when no time anchor exists (unsealed mode only). - /// - public bool AllowMissingAnchorInUnsealedMode { get; set; } = true; - - /// - /// Operations that require strict time-anchor enforcement regardless of mode. - /// - public IReadOnlyList StrictOperations { get; set; } = new[] - { - "bundle.import", - "attestation.sign", - "audit.record" - }; -} - -/// -/// Error codes for time-anchor policy violations. -/// -public static class TimeAnchorPolicyErrorCodes -{ - public const string AnchorMissing = "TIME_ANCHOR_MISSING"; - public const string AnchorStale = "TIME_ANCHOR_STALE"; - public const string AnchorBreached = "TIME_ANCHOR_BREACHED"; - public const string DriftExceeded = "TIME_ANCHOR_DRIFT_EXCEEDED"; - public const string PolicyViolation = "TIME_ANCHOR_POLICY_VIOLATION"; -} - /// /// Implementation of time-anchor policy service. /// -public sealed class TimeAnchorPolicyService : ITimeAnchorPolicyService +public sealed partial class TimeAnchorPolicyService : ITimeAnchorPolicyService { private readonly TimeStatusService _statusService; private readonly TimeAnchorPolicyOptions _options; @@ -125,182 +25,4 @@ public sealed class TimeAnchorPolicyService : ITimeAnchorPolicyService _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _timeProvider = timeProvider ?? 
TimeProvider.System; } - - public async Task ValidateTimeAnchorAsync(string tenantId, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - - var now = _timeProvider.GetUtcNow(); - var status = await _statusService.GetStatusAsync(tenantId, now, cancellationToken).ConfigureAwait(false); - - // Check if anchor exists - if (!status.HasAnchor) - { - if (_options.AllowMissingAnchorInUnsealedMode && !_options.StrictEnforcement) - { - _logger.LogDebug("Time anchor missing for tenant {TenantId}, allowed in non-strict mode", tenantId); - return new TimeAnchorPolicyResult( - Allowed: true, - ErrorCode: null, - Reason: "time-anchor-missing-allowed", - Remediation: null, - Staleness: null); - } - - _logger.LogWarning("Time anchor missing for tenant {TenantId} [{ErrorCode}]", - tenantId, TimeAnchorPolicyErrorCodes.AnchorMissing); - - return new TimeAnchorPolicyResult( - Allowed: false, - ErrorCode: TimeAnchorPolicyErrorCodes.AnchorMissing, - Reason: "No time anchor configured for tenant", - Remediation: "Set a time anchor using POST /api/v1/time/anchor with a valid Roughtime or RFC3161 token", - Staleness: null); - } - - // Evaluate staleness - var staleness = status.Staleness; - - // Check for breach - if (staleness.IsBreach) - { - _logger.LogWarning( - "Time anchor staleness breached for tenant {TenantId}: age={AgeSeconds}s > breach={BreachSeconds}s [{ErrorCode}]", - tenantId, staleness.AgeSeconds, staleness.BreachSeconds, TimeAnchorPolicyErrorCodes.AnchorBreached); - - return new TimeAnchorPolicyResult( - Allowed: false, - ErrorCode: TimeAnchorPolicyErrorCodes.AnchorBreached, - Reason: $"Time anchor staleness breached ({staleness.AgeSeconds}s > {staleness.BreachSeconds}s)", - Remediation: "Refresh time anchor with a new token to continue operations", - Staleness: staleness); - } - - // Check for warning (allowed but logged) - if (staleness.IsWarning) - { - _logger.LogWarning( - "Time anchor staleness warning for tenant {TenantId}: age={AgeSeconds}s approaching breach at {BreachSeconds}s [{ErrorCode}]", - tenantId, staleness.AgeSeconds, staleness.BreachSeconds, TimeAnchorPolicyErrorCodes.AnchorStale); - } - - return new TimeAnchorPolicyResult( - Allowed: true, - ErrorCode: null, - Reason: staleness.IsWarning ? "time-anchor-warning" : "time-anchor-valid", - Remediation: staleness.IsWarning ? "Consider refreshing time anchor soon" : null, - Staleness: staleness); - } - - public async Task EnforceBundleImportPolicyAsync( - string tenantId, - string bundleId, - DateTimeOffset? 
bundleTimestamp, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(bundleId); - - // First validate basic time anchor requirements - var baseResult = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); - if (!baseResult.Allowed) - { - return baseResult; - } - - // If bundle has a timestamp, check drift - if (bundleTimestamp.HasValue) - { - var driftResult = await CalculateDriftAsync(tenantId, bundleTimestamp.Value, cancellationToken).ConfigureAwait(false); - if (driftResult.DriftExceedsThreshold) - { - _logger.LogWarning( - "Bundle {BundleId} timestamp drift exceeds threshold for tenant {TenantId}: drift={DriftSeconds}s > max={MaxDriftSeconds}s [{ErrorCode}]", - bundleId, tenantId, driftResult.Drift.TotalSeconds, _options.MaxDriftSeconds, TimeAnchorPolicyErrorCodes.DriftExceeded); - - return new TimeAnchorPolicyResult( - Allowed: false, - ErrorCode: TimeAnchorPolicyErrorCodes.DriftExceeded, - Reason: $"Bundle timestamp drift exceeds maximum ({driftResult.Drift.TotalSeconds:F0}s > {_options.MaxDriftSeconds}s)", - Remediation: "Bundle is too old or time anchor is significantly out of sync. Refresh the time anchor or use a more recent bundle.", - Staleness: baseResult.Staleness); - } - } - - _logger.LogDebug("Bundle import policy passed for tenant {TenantId}, bundle {BundleId}", tenantId, bundleId); - return baseResult; - } - - public async Task EnforceOperationPolicyAsync( - string tenantId, - string operation, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(operation); - - var isStrictOperation = _options.StrictOperations.Contains(operation, StringComparer.OrdinalIgnoreCase); - - // For strict operations, always require valid time anchor - if (isStrictOperation) - { - var result = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); - if (!result.Allowed) - { - _logger.LogWarning( - "Strict operation {Operation} blocked for tenant {TenantId}: {Reason} [{ErrorCode}]", - operation, tenantId, result.Reason, result.ErrorCode); - } - return result; - } - - // For non-strict operations, allow with warning if anchor is missing/stale - var baseResult = await ValidateTimeAnchorAsync(tenantId, cancellationToken).ConfigureAwait(false); - - if (!baseResult.Allowed && !_options.StrictEnforcement) - { - _logger.LogDebug( - "Non-strict operation {Operation} allowed for tenant {TenantId} despite policy issue: {Reason}", - operation, tenantId, baseResult.Reason); - - return new TimeAnchorPolicyResult( - Allowed: true, - ErrorCode: baseResult.ErrorCode, - Reason: $"operation-allowed-with-warning:{baseResult.Reason}", - Remediation: baseResult.Remediation, - Staleness: baseResult.Staleness); - } - - return baseResult; - } - - public async Task CalculateDriftAsync( - string tenantId, - DateTimeOffset targetTime, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - - var now = _timeProvider.GetUtcNow(); - var status = await _statusService.GetStatusAsync(tenantId, now, cancellationToken).ConfigureAwait(false); - - if (!status.HasAnchor) - { - return new TimeAnchorDriftResult( - HasAnchor: false, - Drift: TimeSpan.Zero, - DriftExceedsThreshold: false, - AnchorTime: null); - } - - var drift = targetTime - status.Anchor!.AnchorTime; - var absDriftSeconds = Math.Abs(drift.TotalSeconds); - var 
exceedsThreshold = absDriftSeconds > _options.MaxDriftSeconds; - - return new TimeAnchorDriftResult( - HasAnchor: true, - Drift: drift, - DriftExceedsThreshold: exceedsThreshold, - AnchorTime: status.Anchor.AnchorTime); - } } diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeStatusService.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeStatusService.cs index 41b01aaf9..06ee1b346 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/TimeStatusService.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeStatusService.cs @@ -28,12 +28,12 @@ public sealed class TimeStatusService public async Task SetAnchorAsync(string tenantId, TimeAnchor anchor, StalenessBudget budget, CancellationToken cancellationToken = default) { budget.Validate(); - await _store.SetAsync(tenantId, anchor, budget, cancellationToken); + await _store.SetAsync(tenantId, anchor, budget, cancellationToken).ConfigureAwait(false); } public async Task GetStatusAsync(string tenantId, DateTimeOffset nowUtc, CancellationToken cancellationToken = default) { - var (anchor, budget) = await _store.GetAsync(tenantId, cancellationToken); + var (anchor, budget) = await _store.GetAsync(tenantId, cancellationToken).ConfigureAwait(false); var eval = _calculator.Evaluate(anchor, budget, nowUtc); var content = _calculator.EvaluateContent(anchor, _contentBudgets, nowUtc); var status = new TimeStatus(anchor, eval, budget, content, nowUtc); diff --git a/src/AirGap/StellaOps.AirGap.Time/Services/TimeTelemetry.cs b/src/AirGap/StellaOps.AirGap.Time/Services/TimeTelemetry.cs index a588768ff..567f9d917 100644 --- a/src/AirGap/StellaOps.AirGap.Time/Services/TimeTelemetry.cs +++ b/src/AirGap/StellaOps.AirGap.Time/Services/TimeTelemetry.cs @@ -6,7 +6,7 @@ namespace StellaOps.AirGap.Time.Services; public sealed class TimeTelemetry { - private static readonly Meter Meter = new("StellaOps.AirGap.Time", "1.0.0"); + private static readonly Meter _meter = new("StellaOps.AirGap.Time", "1.0.0"); private const int MaxEntries = 1024; // Bound eviction queue to 3x max entries to prevent unbounded memory growth private const int MaxEvictionQueueSize = MaxEntries * 3; @@ -16,13 +16,13 @@ public sealed class TimeTelemetry private readonly ObservableGauge _anchorAgeGauge; - private static readonly Counter StatusCounter = Meter.CreateCounter("airgap_time_anchor_status_total"); - private static readonly Counter WarningCounter = Meter.CreateCounter("airgap_time_anchor_warning_total"); - private static readonly Counter BreachCounter = Meter.CreateCounter("airgap_time_anchor_breach_total"); + private static readonly Counter _statusCounter = _meter.CreateCounter("airgap_time_anchor_status_total"); + private static readonly Counter _warningCounter = _meter.CreateCounter("airgap_time_anchor_warning_total"); + private static readonly Counter _breachCounter = _meter.CreateCounter("airgap_time_anchor_breach_total"); public TimeTelemetry() { - _anchorAgeGauge = Meter.CreateObservableGauge( + _anchorAgeGauge = _meter.CreateObservableGauge( "airgap_time_anchor_age_seconds", () => _latest.Select(kvp => new Measurement(kvp.Value.AgeSeconds, new KeyValuePair("tenant", kvp.Key)))); } @@ -47,17 +47,17 @@ public sealed class TimeTelemetry { "is_breach", status.Staleness.IsBreach } }; - StatusCounter.Add(1, tags); + _statusCounter.Add(1, tags); if (status.Staleness.IsWarning) { - WarningCounter.Add(1, tags); + _warningCounter.Add(1, tags); } if (status.Staleness.IsBreach) { - BreachCounter.Add(1, tags); - } + _breachCounter.Add(1, tags); + } } public Snapshot? 
GetLatest(string tenantId) diff --git a/src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj b/src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj index 6b8cc34f0..8e08f99fb 100644 --- a/src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj +++ b/src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj @@ -12,5 +12,6 @@ + diff --git a/src/AirGap/StellaOps.AirGap.Time/TASKS.md b/src/AirGap/StellaOps.AirGap.Time/TASKS.md index 92023fa18..4fa449369 100644 --- a/src/AirGap/StellaOps.AirGap.Time/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Time/TASKS.md @@ -1,7 +1,7 @@ # AirGap Time Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. | Task ID | Status | Notes | | --- | --- | --- | @@ -9,3 +9,5 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0034-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0035. | | AUDIT-0034-A | TODO | Address TimeTelemetry queue growth, TimeTokenParser endianness, and default store wiring. | | TASK-029-002 | DONE | Offline RFC3161 verification using bundled TSA chain/OCSP/CRL. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/StellaOps.AirGap.Time/StellaOps.AirGap.Time.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.All.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.All.cs new file mode 100644 index 000000000..be99754c0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.All.cs @@ -0,0 +1,73 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class AdvisorySnapshotExtractor +{ + /// + /// Extracts advisories from all configured feeds. + /// + public async Task ExtractAllAsync( + AdvisoryExtractionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var contents = new List(); + var errors = new List(); + var totalRecords = 0; + + try + { + var feeds = await _dataSource.GetAvailableFeedsAsync(cancellationToken) + .ConfigureAwait(false); + + // Sort feeds for deterministic output. 
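A minimal usage sketch for the extractor split out above (hypothetical caller; feed ids and limits are illustrative, and ExtractAllAsync is assumed to return Task<AdvisoryExtractionResult> with a data source wired up elsewhere):

static async Task<int> SnapshotRecentAdvisoriesAsync(
    IAdvisorySnapshotExtractor extractor,
    CancellationToken ct)
{
    var request = new AdvisoryExtractionRequest
    {
        FeedIds = new[] { "ghsa", "nvd" },          // hypothetical feed ids; null/empty means all feeds
        Since = DateTimeOffset.UtcNow.AddDays(-30), // incremental snapshot window
        MaxRecords = 10_000
    };

    var result = await extractor.ExtractAllAsync(request, ct).ConfigureAwait(false);

    // Per-feed failures are accumulated rather than thrown; Success is true only when Errors is empty.
    foreach (var error in result.Errors)
    {
        Console.Error.WriteLine(error);
    }

    return result.TotalRecordCount;
}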
+ var sortedFeeds = feeds.OrderBy(f => f.FeedId, StringComparer.Ordinal).ToList(); + + foreach (var feed in sortedFeeds) + { + if (request.FeedIds is { Count: > 0 } && !request.FeedIds.Contains(feed.FeedId)) + { + continue; + } + + try + { + var feedResult = await ExtractFeedAsync(feed.FeedId, request, cancellationToken) + .ConfigureAwait(false); + if (feedResult.Success && feedResult.Content is not null) + { + contents.Add(feedResult.Content); + totalRecords += feedResult.RecordCount; + } + else if (!string.IsNullOrEmpty(feedResult.Error)) + { + errors.Add($"{feed.FeedId}: {feedResult.Error}"); + } + } + catch (Exception ex) + { + errors.Add($"{feed.FeedId}: {ex.Message}"); + } + } + + return new AdvisoryExtractionResult + { + Success = errors.Count == 0, + Advisories = contents, + TotalRecordCount = totalRecords, + Errors = errors + }; + } + catch (Exception ex) + { + return new AdvisoryExtractionResult + { + Success = false, + Advisories = [], + Errors = [$"Extraction failed: {ex.Message}"] + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.Feed.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.Feed.cs new file mode 100644 index 000000000..ba1b7c311 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.Feed.cs @@ -0,0 +1,73 @@ +using StellaOps.AirGap.Bundle.Services; +using System.Globalization; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class AdvisorySnapshotExtractor +{ + /// + /// Extracts advisories from a specific feed. + /// + public async Task ExtractFeedAsync( + string feedId, + AdvisoryExtractionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(feedId); + + try + { + var advisories = await _dataSource.GetAdvisoriesAsync( + feedId, + request.Since, + request.MaxRecords, + cancellationToken).ConfigureAwait(false); + + if (advisories.Count == 0) + { + return new FeedExtractionResult + { + Success = true, + RecordCount = 0 + }; + } + + var snapshotAt = _timeProvider.GetUtcNow(); + + // Serialize advisories to NDJSON format for deterministic output. 
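For illustration, with the camelCase serializer options above each advisory becomes one JSON object per line, and the file is named <feedId>-<yyyyMMddHHmmss>.ndjson; a hypothetical record might serialize roughly as (null members elided here for brevity):

{"id":"GHSA-xxxx-xxxx-xxxx","feedId":"ghsa","cveId":"CVE-2026-0001","severity":"high","modifiedAt":"2026-01-15T00:00:00+00:00"}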
+ var contentBuilder = new StringBuilder(); + foreach (var advisory in advisories.OrderBy(a => a.Id, StringComparer.Ordinal)) + { + var json = JsonSerializer.Serialize(advisory, _jsonOptions); + contentBuilder.AppendLine(json); + } + + var contentBytes = Encoding.UTF8.GetBytes(contentBuilder.ToString()); + var fileName = $"{feedId}-{snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture)}.ndjson"; + + return new FeedExtractionResult + { + Success = true, + RecordCount = advisories.Count, + Content = new AdvisoryContent + { + FeedId = feedId, + FileName = fileName, + Content = contentBytes, + SnapshotAt = snapshotAt, + RecordCount = advisories.Count + } + }; + } + catch (Exception ex) + { + return new FeedExtractionResult + { + Success = false, + Error = ex.Message + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.cs index 0fcd260ba..914f18f46 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotExtractor.cs @@ -7,8 +7,6 @@ using StellaOps.AirGap.Bundle.Services; -using System.Globalization; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Extractors; @@ -16,9 +14,9 @@ namespace StellaOps.AirGap.Bundle.Extractors; /// /// Extracts advisory data from Concelier database for inclusion in knowledge snapshot bundles. /// -public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor +public sealed partial class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase @@ -38,233 +36,4 @@ public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); } - /// - /// Extracts advisories from all configured feeds. 
- /// - public async Task ExtractAllAsync( - AdvisoryExtractionRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var contents = new List(); - var errors = new List(); - var totalRecords = 0; - - try - { - var feeds = await _dataSource.GetAvailableFeedsAsync(cancellationToken); - - // Sort feeds for deterministic output - var sortedFeeds = feeds.OrderBy(f => f.FeedId, StringComparer.Ordinal).ToList(); - - foreach (var feed in sortedFeeds) - { - // Skip if specific feeds are requested and this isn't one of them - if (request.FeedIds is { Count: > 0 } && !request.FeedIds.Contains(feed.FeedId)) - { - continue; - } - - try - { - var feedResult = await ExtractFeedAsync(feed.FeedId, request, cancellationToken); - if (feedResult.Success && feedResult.Content is not null) - { - contents.Add(feedResult.Content); - totalRecords += feedResult.RecordCount; - } - else if (!string.IsNullOrEmpty(feedResult.Error)) - { - errors.Add($"{feed.FeedId}: {feedResult.Error}"); - } - } - catch (Exception ex) - { - errors.Add($"{feed.FeedId}: {ex.Message}"); - } - } - - return new AdvisoryExtractionResult - { - Success = errors.Count == 0, - Advisories = contents, - TotalRecordCount = totalRecords, - Errors = errors - }; - } - catch (Exception ex) - { - return new AdvisoryExtractionResult - { - Success = false, - Advisories = [], - Errors = [$"Extraction failed: {ex.Message}"] - }; - } - } - - /// - /// Extracts advisories from a specific feed. - /// - public async Task ExtractFeedAsync( - string feedId, - AdvisoryExtractionRequest request, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(feedId); - - try - { - var advisories = await _dataSource.GetAdvisoriesAsync( - feedId, - request.Since, - request.MaxRecords, - cancellationToken); - - if (advisories.Count == 0) - { - return new FeedExtractionResult - { - Success = true, - RecordCount = 0 - }; - } - - var snapshotAt = _timeProvider.GetUtcNow(); - - // Serialize advisories to NDJSON format for deterministic output - var contentBuilder = new StringBuilder(); - foreach (var advisory in advisories.OrderBy(a => a.Id, StringComparer.Ordinal)) - { - var json = JsonSerializer.Serialize(advisory, JsonOptions); - contentBuilder.AppendLine(json); - } - - var contentBytes = Encoding.UTF8.GetBytes(contentBuilder.ToString()); - // Use invariant culture for deterministic filename formatting - var fileName = $"{feedId}-{snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture)}.ndjson"; - - return new FeedExtractionResult - { - Success = true, - RecordCount = advisories.Count, - Content = new AdvisoryContent - { - FeedId = feedId, - FileName = fileName, - Content = contentBytes, - SnapshotAt = snapshotAt, - RecordCount = advisories.Count - } - }; - } - catch (Exception ex) - { - return new FeedExtractionResult - { - Success = false, - Error = ex.Message - }; - } - } } - -/// -/// Interface for advisory snapshot extraction. -/// -public interface IAdvisorySnapshotExtractor -{ - Task ExtractAllAsync( - AdvisoryExtractionRequest request, - CancellationToken cancellationToken = default); - - Task ExtractFeedAsync( - string feedId, - AdvisoryExtractionRequest request, - CancellationToken cancellationToken = default); -} - -/// -/// Interface for advisory data access. -/// This should be implemented by Concelier to provide advisory data. 
-/// -public interface IAdvisoryDataSource -{ - Task> GetAvailableFeedsAsync(CancellationToken cancellationToken = default); - - Task> GetAdvisoriesAsync( - string feedId, - DateTimeOffset? since = null, - int? maxRecords = null, - CancellationToken cancellationToken = default); -} - -#region Data Models - -/// -/// Information about an available feed. -/// -public sealed record FeedInfo(string FeedId, string Name, string? Ecosystem); - -/// -/// A single advisory record. -/// -public sealed record AdvisoryRecord -{ - public required string Id { get; init; } - public required string FeedId { get; init; } - public string? CveId { get; init; } - public string? Summary { get; init; } - public string? Severity { get; init; } - public double? CvssScore { get; init; } - public DateTimeOffset? PublishedAt { get; init; } - public DateTimeOffset? ModifiedAt { get; init; } - public IReadOnlyList? AffectedPackages { get; init; } - public IReadOnlyDictionary? RawData { get; init; } -} - -/// -/// Request for extracting advisories. -/// -public sealed record AdvisoryExtractionRequest -{ - /// - /// Specific feed IDs to extract. Empty means all feeds. - /// - public IReadOnlyList? FeedIds { get; init; } - - /// - /// Only extract advisories modified since this time. - /// - public DateTimeOffset? Since { get; init; } - - /// - /// Maximum records per feed. - /// - public int? MaxRecords { get; init; } -} - -/// -/// Result of extracting advisories from all feeds. -/// -public sealed record AdvisoryExtractionResult -{ - public bool Success { get; init; } - public IReadOnlyList Advisories { get; init; } = []; - public int TotalRecordCount { get; init; } - public IReadOnlyList Errors { get; init; } = []; -} - -/// -/// Result of extracting a single feed. -/// -public sealed record FeedExtractionResult -{ - public bool Success { get; init; } - public int RecordCount { get; init; } - public AdvisoryContent? Content { get; init; } - public string? Error { get; init; } -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotModels.cs new file mode 100644 index 000000000..1e6737c12 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/AdvisorySnapshotModels.cs @@ -0,0 +1,68 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Information about an available feed. +/// +public sealed record FeedInfo(string FeedId, string Name, string? Ecosystem); + +/// +/// A single advisory record. +/// +public sealed record AdvisoryRecord +{ + public required string Id { get; init; } + public required string FeedId { get; init; } + public string? CveId { get; init; } + public string? Summary { get; init; } + public string? Severity { get; init; } + public double? CvssScore { get; init; } + public DateTimeOffset? PublishedAt { get; init; } + public DateTimeOffset? ModifiedAt { get; init; } + public IReadOnlyList? AffectedPackages { get; init; } + public IReadOnlyDictionary? RawData { get; init; } +} + +/// +/// Request for extracting advisories. +/// +public sealed record AdvisoryExtractionRequest +{ + /// + /// Specific feed IDs to extract. Empty means all feeds. + /// + public IReadOnlyList? FeedIds { get; init; } + + /// + /// Only extract advisories modified since this time. + /// + public DateTimeOffset? Since { get; init; } + + /// + /// Maximum records per feed. + /// + public int? 
MaxRecords { get; init; } +} + +/// +/// Result of extracting advisories from all feeds. +/// +public sealed record AdvisoryExtractionResult +{ + public bool Success { get; init; } + public IReadOnlyList Advisories { get; init; } = []; + public int TotalRecordCount { get; init; } + public IReadOnlyList Errors { get; init; } = []; +} + +/// +/// Result of extracting a single feed. +/// +public sealed record FeedExtractionResult +{ + public bool Success { get; init; } + public int RecordCount { get; init; } + public AdvisoryContent? Content { get; init; } + public string? Error { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisoryDataSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisoryDataSource.cs new file mode 100644 index 000000000..1d37b3200 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisoryDataSource.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for advisory data access. +/// This should be implemented by Concelier to provide advisory data. +/// +public interface IAdvisoryDataSource +{ + Task> GetAvailableFeedsAsync(CancellationToken cancellationToken = default); + + Task> GetAdvisoriesAsync( + string feedId, + DateTimeOffset? since = null, + int? maxRecords = null, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisorySnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisorySnapshotExtractor.cs new file mode 100644 index 000000000..ffd4dfb3c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IAdvisorySnapshotExtractor.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for advisory snapshot extraction. +/// +public interface IAdvisorySnapshotExtractor +{ + Task ExtractAllAsync( + AdvisoryExtractionRequest request, + CancellationToken cancellationToken = default); + + Task ExtractFeedAsync( + string feedId, + AdvisoryExtractionRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicyDataSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicyDataSource.cs new file mode 100644 index 000000000..73f32a827 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicyDataSource.cs @@ -0,0 +1,14 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for policy data access. +/// This should be implemented by the Policy module to provide policy data. +/// +public interface IPolicyDataSource +{ + Task> GetAvailablePoliciesAsync(CancellationToken cancellationToken = default); + + Task GetPolicyInfoAsync(string policyId, CancellationToken cancellationToken = default); + + Task GetPolicyContentAsync(string policyId, CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicySnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicySnapshotExtractor.cs new file mode 100644 index 000000000..3ec6ea13b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IPolicySnapshotExtractor.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for policy snapshot extraction. 
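The extracted seams above are what Concelier is expected to implement; as a rough sketch (assuming the methods return Task<IReadOnlyList<FeedInfo>> and Task<IReadOnlyList<AdvisoryRecord>>, since the generic parameters are not visible in this patch), an in-memory stub for tests might look like:

using System.Linq;

internal sealed class InMemoryAdvisoryDataSource : IAdvisoryDataSource
{
    private readonly IReadOnlyDictionary<string, IReadOnlyList<AdvisoryRecord>> _byFeed;

    public InMemoryAdvisoryDataSource(IReadOnlyDictionary<string, IReadOnlyList<AdvisoryRecord>> byFeed)
        => _byFeed = byFeed;

    public Task<IReadOnlyList<FeedInfo>> GetAvailableFeedsAsync(CancellationToken cancellationToken = default)
        => Task.FromResult<IReadOnlyList<FeedInfo>>(
            _byFeed.Keys.Select(id => new FeedInfo(id, id, Ecosystem: null)).ToList());

    public Task<IReadOnlyList<AdvisoryRecord>> GetAdvisoriesAsync(
        string feedId,
        DateTimeOffset? since = null,
        int? maxRecords = null,
        CancellationToken cancellationToken = default)
    {
        IEnumerable<AdvisoryRecord> records =
            _byFeed.TryGetValue(feedId, out var list) ? list : Enumerable.Empty<AdvisoryRecord>();
        if (since is not null) records = records.Where(r => r.ModifiedAt >= since);
        if (maxRecords is not null) records = records.Take(maxRecords.Value);
        return Task.FromResult<IReadOnlyList<AdvisoryRecord>>(records.ToList());
    }
}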
+/// +public interface IPolicySnapshotExtractor +{ + Task ExtractAllAsync( + PolicyExtractionRequest request, + CancellationToken cancellationToken = default); + + Task ExtractPolicyAsync( + string policyId, + PolicyExtractionRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexDataSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexDataSource.cs new file mode 100644 index 000000000..eb89621f4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexDataSource.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for VEX data access. +/// This should be implemented by Excititor to provide VEX data. +/// +public interface IVexDataSource +{ + Task> GetAvailableSourcesAsync(CancellationToken cancellationToken = default); + + Task> GetStatementsAsync( + string sourceId, + DateTimeOffset? since = null, + int? maxStatements = null, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexSnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexSnapshotExtractor.cs new file mode 100644 index 000000000..27a0ec0e2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/IVexSnapshotExtractor.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Interface for VEX snapshot extraction. +/// +public interface IVexSnapshotExtractor +{ + Task ExtractAllAsync( + VexExtractionRequest request, + CancellationToken cancellationToken = default); + + Task ExtractSourceAsync( + string sourceId, + VexExtractionRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Models.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Models.cs new file mode 100644 index 000000000..cd62e5988 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Models.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class PolicySnapshotExtractor +{ + private sealed record OpaBundleManifest + { + public required string Revision { get; init; } + public required string[] Roots { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Packaging.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Packaging.cs new file mode 100644 index 000000000..f9be91697 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Packaging.cs @@ -0,0 +1,63 @@ +using System.IO.Compression; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class PolicySnapshotExtractor +{ + private static readonly JsonSerializerOptions _jsonOptions = new() + { + WriteIndented = false, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private static async Task PackageRegoBundleAsync( + PolicyInfo policyInfo, + byte[] policyContent, + CancellationToken cancellationToken) + { + await Task.CompletedTask.ConfigureAwait(false); // Operations below are synchronous + + using var outputStream = new MemoryStream(); + using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal); + + // Write a simple tar with the rego 
file + // Note: This is a minimal implementation; a full implementation would use System.Formats.Tar + var header = CreateTarHeader($"{policyInfo.PolicyId}/policy.rego", policyContent.Length); + gzipStream.Write(header); + gzipStream.Write(policyContent); + + // Pad to 512-byte boundary + var padding = 512 - (policyContent.Length % 512); + if (padding < 512) + { + gzipStream.Write(new byte[padding]); + } + + // Add manifest.json + var manifest = new OpaBundleManifest + { + Revision = policyInfo.Version, + Roots = [policyInfo.PolicyId] + }; + var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, _jsonOptions); + + var manifestHeader = CreateTarHeader(".manifest", manifestBytes.Length); + gzipStream.Write(manifestHeader); + gzipStream.Write(manifestBytes); + + padding = 512 - (manifestBytes.Length % 512); + if (padding < 512) + { + gzipStream.Write(new byte[padding]); + } + + // Write tar end-of-archive marker (two 512-byte zero blocks) + gzipStream.Write(new byte[1024]); + + gzipStream.Close(); + return outputStream.ToArray(); + } + +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.Build.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.Build.cs new file mode 100644 index 000000000..4a5f88f85 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.Build.cs @@ -0,0 +1,46 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class PolicySnapshotExtractor +{ + private static async Task BuildPolicyContentAsync( + PolicyInfo policyInfo, + byte[] policyContent, + CancellationToken cancellationToken) + { + byte[] contentBytes; + string fileName; + + switch (policyInfo.Type) + { + case "OpaRego": + contentBytes = await PackageRegoBundleAsync(policyInfo, policyContent, cancellationToken) + .ConfigureAwait(false); + fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.tar.gz"; + break; + + case "LatticeRules": + case "UnknownBudgets": + case "ScoringWeights": + contentBytes = policyContent; + fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json"; + break; + + default: + contentBytes = policyContent; + fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.bin"; + break; + } + + return new PolicyContent + { + PolicyId = policyInfo.PolicyId, + Name = policyInfo.Name, + Version = policyInfo.Version, + FileName = fileName, + Content = contentBytes, + Type = policyInfo.Type + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.cs new file mode 100644 index 000000000..c93509351 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Policy.cs @@ -0,0 +1,57 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class PolicySnapshotExtractor +{ + /// + /// Extracts a specific policy. 
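The note above already says a full implementation would use System.Formats.Tar; purely as a hedged alternative sketch (not part of this change), the same policy.rego + .manifest layout with the fixed mtime could be produced like this:

using System.Formats.Tar;
using System.IO.Compression;

static byte[] PackageRegoBundleWithTarWriter(string policyId, byte[] rego, byte[] manifestJson)
{
    using var output = new MemoryStream();
    using (var gzip = new GZipStream(output, CompressionLevel.Optimal, leaveOpen: true))
    using (var tar = new TarWriter(gzip, TarEntryFormat.Ustar, leaveOpen: false))
    {
        var fixedMtime = DateTimeOffset.FromUnixTimeSeconds(1704067200); // same value as DeterministicMtime

        tar.WriteEntry(new UstarTarEntry(TarEntryType.RegularFile, $"{policyId}/policy.rego")
        {
            DataStream = new MemoryStream(rego),
            ModificationTime = fixedMtime
        });

        tar.WriteEntry(new UstarTarEntry(TarEntryType.RegularFile, ".manifest")
        {
            DataStream = new MemoryStream(manifestJson),
            ModificationTime = fixedMtime
        });
    }

    return output.ToArray();
}

Under the camelCase options, the .manifest entry serializes as {"revision":"<version>","roots":["<policyId>"]}.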
+ /// + public async Task ExtractPolicyAsync( + string policyId, + PolicyExtractionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(policyId); + + try + { + var policyInfo = await _dataSource.GetPolicyInfoAsync(policyId, cancellationToken) + .ConfigureAwait(false); + if (policyInfo is null) + { + return new PolicySingleExtractionResult + { + Success = false, + Error = "Policy not found" + }; + } + + var policyContent = await _dataSource.GetPolicyContentAsync(policyId, cancellationToken) + .ConfigureAwait(false); + if (policyContent is null || policyContent.Length == 0) + { + return new PolicySingleExtractionResult + { + Success = false, + Error = "Policy content is empty" + }; + } + + return new PolicySingleExtractionResult + { + Success = true, + Content = await BuildPolicyContentAsync(policyInfo, policyContent, cancellationToken) + .ConfigureAwait(false) + }; + } + catch (Exception ex) + { + return new PolicySingleExtractionResult + { + Success = false, + Error = ex.Message + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Tar.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Tar.cs new file mode 100644 index 000000000..07c87b2d2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.Tar.cs @@ -0,0 +1,54 @@ +using System.Text; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class PolicySnapshotExtractor +{ + /// + /// Fixed mtime for deterministic tar headers (2024-01-01 00:00:00 UTC). + /// + private const long DeterministicMtime = 1704067200; + + private static byte[] CreateTarHeader(string fileName, long fileSize) + { + var header = new byte[512]; + var nameBytes = Encoding.ASCII.GetBytes(fileName); + Array.Copy(nameBytes, header, Math.Min(nameBytes.Length, 100)); + + // Mode (100-107) - 0644 + Encoding.ASCII.GetBytes("0000644").CopyTo(header, 100); + + // Owner/group UID/GID (108-123) - zeros + Encoding.ASCII.GetBytes("0000000").CopyTo(header, 108); + Encoding.ASCII.GetBytes("0000000").CopyTo(header, 116); + + // File size in octal (124-135) + Encoding.ASCII.GetBytes(Convert.ToString(fileSize, 8).PadLeft(11, '0')).CopyTo(header, 124); + + // Modification time (136-147) - use deterministic mtime for reproducible output + Encoding.ASCII.GetBytes(Convert.ToString(DeterministicMtime, 8).PadLeft(11, '0')).CopyTo(header, 136); + + // Checksum placeholder (148-155) - spaces + for (var i = 148; i < 156; i++) + { + header[i] = 0x20; + } + + // Type flag (156) - regular file + header[156] = (byte)'0'; + + // USTAR magic (257-264) + Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257); + Encoding.ASCII.GetBytes("00").CopyTo(header, 263); + + // Calculate and set checksum + var checksum = 0; + foreach (var b in header) + { + checksum += b; + } + Encoding.ASCII.GetBytes(Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ").CopyTo(header, 148); + + return header; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.cs index 8652ac2cf..b74f464f5 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotExtractor.cs @@ -4,31 +4,15 @@ // Task: SEAL-008 - Implement policy bundle extractor // Description: Extracts 
policy bundle data for knowledge snapshot bundles. // ----------------------------------------------------------------------------- - - using StellaOps.AirGap.Bundle.Services; -using System.IO.Compression; -using System.Text; -using System.Text.Json; namespace StellaOps.AirGap.Bundle.Extractors; /// /// Extracts policy bundles from the Policy registry for inclusion in knowledge snapshot bundles. /// -public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor +public sealed partial class PolicySnapshotExtractor : IPolicySnapshotExtractor { - private static readonly JsonSerializerOptions JsonOptions = new() - { - WriteIndented = false, - PropertyNamingPolicy = JsonNamingPolicy.CamelCase - }; - - /// - /// Fixed mtime for deterministic tar headers (2024-01-01 00:00:00 UTC). - /// - private const long DeterministicMtime = 1704067200; - private readonly IPolicyDataSource _dataSource; private readonly TimeProvider _timeProvider; @@ -57,7 +41,8 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor try { - var policies = await _dataSource.GetAvailablePoliciesAsync(cancellationToken); + var policies = await _dataSource.GetAvailablePoliciesAsync(cancellationToken) + .ConfigureAwait(false); // Sort policies for deterministic output var sortedPolicies = policies.OrderBy(p => p.PolicyId, StringComparer.Ordinal).ToList(); @@ -72,7 +57,8 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor try { - var policyResult = await ExtractPolicyAsync(policy.PolicyId, request, cancellationToken); + var policyResult = await ExtractPolicyAsync(policy.PolicyId, request, cancellationToken) + .ConfigureAwait(false); if (policyResult.Success && policyResult.Content is not null) { contents.Add(policyResult.Content); @@ -105,271 +91,4 @@ public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor }; } } - - /// - /// Extracts a specific policy. 
- /// - public async Task ExtractPolicyAsync( - string policyId, - PolicyExtractionRequest request, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(policyId); - - try - { - var policyInfo = await _dataSource.GetPolicyInfoAsync(policyId, cancellationToken); - if (policyInfo is null) - { - return new PolicySingleExtractionResult - { - Success = false, - Error = "Policy not found" - }; - } - - var policyContent = await _dataSource.GetPolicyContentAsync(policyId, cancellationToken); - if (policyContent is null || policyContent.Length == 0) - { - return new PolicySingleExtractionResult - { - Success = false, - Error = "Policy content is empty" - }; - } - - // Package policy based on type - byte[] contentBytes; - string fileName; - - switch (policyInfo.Type) - { - case "OpaRego": - // Package Rego files as a tar.gz bundle - contentBytes = await PackageRegoBundle(policyInfo, policyContent, cancellationToken); - fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.tar.gz"; - break; - - case "LatticeRules": - // LatticeRules are JSON files - contentBytes = policyContent; - fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json"; - break; - - case "UnknownBudgets": - // Unknown budgets are JSON files - contentBytes = policyContent; - fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json"; - break; - - case "ScoringWeights": - // Scoring weights are JSON files - contentBytes = policyContent; - fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json"; - break; - - default: - // Unknown types are passed through as-is - contentBytes = policyContent; - fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.bin"; - break; - } - - return new PolicySingleExtractionResult - { - Success = true, - Content = new PolicyContent - { - PolicyId = policyInfo.PolicyId, - Name = policyInfo.Name, - Version = policyInfo.Version, - FileName = fileName, - Content = contentBytes, - Type = policyInfo.Type - } - }; - } - catch (Exception ex) - { - return new PolicySingleExtractionResult - { - Success = false, - Error = ex.Message - }; - } - } - - private static async Task PackageRegoBundle( - PolicyInfo policyInfo, - byte[] policyContent, - CancellationToken cancellationToken) - { - await Task.CompletedTask; // Operations below are synchronous - - using var outputStream = new MemoryStream(); - using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal); - - // Write a simple tar with the rego file - // Note: This is a minimal implementation; a full implementation would use System.Formats.Tar - var header = CreateTarHeader($"{policyInfo.PolicyId}/policy.rego", policyContent.Length); - gzipStream.Write(header); - gzipStream.Write(policyContent); - - // Pad to 512-byte boundary - var padding = 512 - (policyContent.Length % 512); - if (padding < 512) - { - gzipStream.Write(new byte[padding]); - } - - // Add manifest.json - var manifest = new OpaBundleManifest - { - Revision = policyInfo.Version, - Roots = [policyInfo.PolicyId] - }; - var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions); - - var manifestHeader = CreateTarHeader(".manifest", manifestBytes.Length); - gzipStream.Write(manifestHeader); - gzipStream.Write(manifestBytes); - - padding = 512 - (manifestBytes.Length % 512); - if (padding < 512) - { - gzipStream.Write(new byte[padding]); - } - - // Write tar end-of-archive marker (two 512-byte zero blocks) - gzipStream.Write(new byte[1024]); - - gzipStream.Close(); - return outputStream.ToArray(); - } - - 
private static byte[] CreateTarHeader(string fileName, long fileSize) - { - var header = new byte[512]; - var nameBytes = Encoding.ASCII.GetBytes(fileName); - Array.Copy(nameBytes, header, Math.Min(nameBytes.Length, 100)); - - // Mode (100-107) - 0644 - Encoding.ASCII.GetBytes("0000644").CopyTo(header, 100); - - // Owner/group UID/GID (108-123) - zeros - Encoding.ASCII.GetBytes("0000000").CopyTo(header, 108); - Encoding.ASCII.GetBytes("0000000").CopyTo(header, 116); - - // File size in octal (124-135) - Encoding.ASCII.GetBytes(Convert.ToString(fileSize, 8).PadLeft(11, '0')).CopyTo(header, 124); - - // Modification time (136-147) - use deterministic mtime for reproducible output - Encoding.ASCII.GetBytes(Convert.ToString(DeterministicMtime, 8).PadLeft(11, '0')).CopyTo(header, 136); - - // Checksum placeholder (148-155) - spaces - for (var i = 148; i < 156; i++) - { - header[i] = 0x20; - } - - // Type flag (156) - regular file - header[156] = (byte)'0'; - - // USTAR magic (257-264) - Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257); - Encoding.ASCII.GetBytes("00").CopyTo(header, 263); - - // Calculate and set checksum - var checksum = 0; - foreach (var b in header) - { - checksum += b; - } - Encoding.ASCII.GetBytes(Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ").CopyTo(header, 148); - - return header; - } - - private sealed record OpaBundleManifest - { - public required string Revision { get; init; } - public required string[] Roots { get; init; } - } } - -/// -/// Interface for policy snapshot extraction. -/// -public interface IPolicySnapshotExtractor -{ - Task ExtractAllAsync( - PolicyExtractionRequest request, - CancellationToken cancellationToken = default); - - Task ExtractPolicyAsync( - string policyId, - PolicyExtractionRequest request, - CancellationToken cancellationToken = default); -} - -/// -/// Interface for policy data access. -/// This should be implemented by the Policy module to provide policy data. -/// -public interface IPolicyDataSource -{ - Task> GetAvailablePoliciesAsync(CancellationToken cancellationToken = default); - - Task GetPolicyInfoAsync(string policyId, CancellationToken cancellationToken = default); - - Task GetPolicyContentAsync(string policyId, CancellationToken cancellationToken = default); -} - -#region Data Models - -/// -/// Information about a policy. -/// -public sealed record PolicyInfo -{ - public required string PolicyId { get; init; } - public required string Name { get; init; } - public required string Version { get; init; } - public required string Type { get; init; } - public string? Description { get; init; } - public DateTimeOffset? CreatedAt { get; init; } - public DateTimeOffset? ModifiedAt { get; init; } -} - -/// -/// Request for extracting policies. -/// -public sealed record PolicyExtractionRequest -{ - /// - /// Specific policy types to extract. Empty means all types. - /// - public IReadOnlyList? Types { get; init; } -} - -/// -/// Result of extracting policies. -/// -public sealed record PolicyExtractionResult -{ - public bool Success { get; init; } - public IReadOnlyList Policies { get; init; } = []; - public IReadOnlyList Errors { get; init; } = []; -} - -/// -/// Result of extracting a single policy. -/// -public sealed record PolicySingleExtractionResult -{ - public bool Success { get; init; } - public PolicyContent? Content { get; init; } - public string? 
Error { get; init; } -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotModels.cs new file mode 100644 index 000000000..348053511 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/PolicySnapshotModels.cs @@ -0,0 +1,48 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Information about a policy. +/// +public sealed record PolicyInfo +{ + public required string PolicyId { get; init; } + public required string Name { get; init; } + public required string Version { get; init; } + public required string Type { get; init; } + public string? Description { get; init; } + public DateTimeOffset? CreatedAt { get; init; } + public DateTimeOffset? ModifiedAt { get; init; } +} + +/// +/// Request for extracting policies. +/// +public sealed record PolicyExtractionRequest +{ + /// + /// Specific policy types to extract. Empty means all types. + /// + public IReadOnlyList? Types { get; init; } +} + +/// +/// Result of extracting policies. +/// +public sealed record PolicyExtractionResult +{ + public bool Success { get; init; } + public IReadOnlyList Policies { get; init; } = []; + public IReadOnlyList Errors { get; init; } = []; +} + +/// +/// Result of extracting a single policy. +/// +public sealed record PolicySingleExtractionResult +{ + public bool Success { get; init; } + public PolicyContent? Content { get; init; } + public string? Error { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.All.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.All.cs new file mode 100644 index 000000000..a99f97a3e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.All.cs @@ -0,0 +1,73 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class VexSnapshotExtractor +{ + /// + /// Extracts VEX statements from all configured sources. + /// + public async Task ExtractAllAsync( + VexExtractionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var contents = new List(); + var errors = new List(); + var totalStatements = 0; + + try + { + var sources = await _dataSource.GetAvailableSourcesAsync(cancellationToken) + .ConfigureAwait(false); + + // Sort sources for deterministic output. 
+ var sortedSources = sources.OrderBy(s => s.SourceId, StringComparer.Ordinal).ToList(); + + foreach (var source in sortedSources) + { + if (request.SourceIds is { Count: > 0 } && !request.SourceIds.Contains(source.SourceId)) + { + continue; + } + + try + { + var sourceResult = await ExtractSourceAsync(source.SourceId, request, cancellationToken) + .ConfigureAwait(false); + if (sourceResult.Success && sourceResult.Content is not null) + { + contents.Add(sourceResult.Content); + totalStatements += sourceResult.StatementCount; + } + else if (!string.IsNullOrEmpty(sourceResult.Error)) + { + errors.Add($"{source.SourceId}: {sourceResult.Error}"); + } + } + catch (Exception ex) + { + errors.Add($"{source.SourceId}: {ex.Message}"); + } + } + + return new VexExtractionResult + { + Success = errors.Count == 0, + VexStatements = contents, + TotalStatementCount = totalStatements, + Errors = errors + }; + } + catch (Exception ex) + { + return new VexExtractionResult + { + Success = false, + VexStatements = [], + Errors = [$"Extraction failed: {ex.Message}"] + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.Source.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.Source.cs new file mode 100644 index 000000000..65f9fb6e4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.Source.cs @@ -0,0 +1,77 @@ +using System.Globalization; +using StellaOps.AirGap.Bundle.Services; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Extractors; + +public sealed partial class VexSnapshotExtractor +{ + /// + /// Extracts VEX statements from a specific source. + /// + public async Task ExtractSourceAsync( + string sourceId, + VexExtractionRequest request, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sourceId); + + try + { + var statements = await _dataSource.GetStatementsAsync( + sourceId, + request.Since, + request.MaxStatements, + cancellationToken) + .ConfigureAwait(false); + + if (statements.Count == 0) + { + return new VexSourceExtractionResult + { + Success = true, + StatementCount = 0 + }; + } + + var snapshotAt = _timeProvider.GetUtcNow(); + var timestampStr = snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture); + + // Serialize statements to OpenVEX format. 
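For illustration, the document built below serializes (camelCase; null members and product details elided here for brevity, and the source id is hypothetical) to roughly:

{"context":"https://openvex.dev/ns","id":"urn:stellaops:vex:redhat:20260115120000","author":"redhat","timestamp":"2026-01-15T12:00:00+00:00","version":1,"statements":[{"vulnerabilityId":"CVE-2026-0001","status":"not_affected","justification":"vulnerable_code_not_present"}]}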
+ var document = new OpenVexDocument + { + Context = "https://openvex.dev/ns", + Id = $"urn:stellaops:vex:{sourceId}:{timestampStr}", + Author = sourceId, + Timestamp = snapshotAt, + Version = 1, + Statements = statements.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal).ToList() + }; + + var contentBytes = JsonSerializer.SerializeToUtf8Bytes(document, _jsonOptions); + var fileName = $"{sourceId}-{timestampStr}.json"; + + return new VexSourceExtractionResult + { + Success = true, + StatementCount = statements.Count, + Content = new VexContent + { + SourceId = sourceId, + FileName = fileName, + Content = contentBytes, + SnapshotAt = snapshotAt, + StatementCount = statements.Count + } + }; + } + catch (Exception ex) + { + return new VexSourceExtractionResult + { + Success = false, + Error = ex.Message + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.cs index 7cee91f19..0a14d7e75 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotExtractor.cs @@ -4,11 +4,6 @@ // Task: SEAL-007 - Implement VEX snapshot extractor // Description: Extracts VEX statement data from Excititor for knowledge snapshot bundles. // ----------------------------------------------------------------------------- - - -using StellaOps.AirGap.Bundle.Services; -using System.Globalization; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Extractors; @@ -17,9 +12,9 @@ namespace StellaOps.AirGap.Bundle.Extractors; /// Extracts VEX (Vulnerability Exploitability eXchange) statements from Excititor /// database for inclusion in knowledge snapshot bundles. /// -public sealed class VexSnapshotExtractor : IVexSnapshotExtractor +public sealed partial class VexSnapshotExtractor : IVexSnapshotExtractor { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase @@ -39,258 +34,4 @@ public sealed class VexSnapshotExtractor : IVexSnapshotExtractor _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); } - /// - /// Extracts VEX statements from all configured sources. 
- /// - public async Task ExtractAllAsync( - VexExtractionRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var contents = new List(); - var errors = new List(); - var totalStatements = 0; - - try - { - var sources = await _dataSource.GetAvailableSourcesAsync(cancellationToken); - - // Sort sources for deterministic output - var sortedSources = sources.OrderBy(s => s.SourceId, StringComparer.Ordinal).ToList(); - - foreach (var source in sortedSources) - { - // Skip if specific sources are requested and this isn't one of them - if (request.SourceIds is { Count: > 0 } && !request.SourceIds.Contains(source.SourceId)) - { - continue; - } - - try - { - var sourceResult = await ExtractSourceAsync(source.SourceId, request, cancellationToken); - if (sourceResult.Success && sourceResult.Content is not null) - { - contents.Add(sourceResult.Content); - totalStatements += sourceResult.StatementCount; - } - else if (!string.IsNullOrEmpty(sourceResult.Error)) - { - errors.Add($"{source.SourceId}: {sourceResult.Error}"); - } - } - catch (Exception ex) - { - errors.Add($"{source.SourceId}: {ex.Message}"); - } - } - - return new VexExtractionResult - { - Success = errors.Count == 0, - VexStatements = contents, - TotalStatementCount = totalStatements, - Errors = errors - }; - } - catch (Exception ex) - { - return new VexExtractionResult - { - Success = false, - VexStatements = [], - Errors = [$"Extraction failed: {ex.Message}"] - }; - } - } - - /// - /// Extracts VEX statements from a specific source. - /// - public async Task ExtractSourceAsync( - string sourceId, - VexExtractionRequest request, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(sourceId); - - try - { - var statements = await _dataSource.GetStatementsAsync( - sourceId, - request.Since, - request.MaxStatements, - cancellationToken); - - if (statements.Count == 0) - { - return new VexSourceExtractionResult - { - Success = true, - StatementCount = 0 - }; - } - - var snapshotAt = _timeProvider.GetUtcNow(); - var timestampStr = snapshotAt.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture); - - // Serialize statements to OpenVEX format - var document = new OpenVexDocument - { - Context = "https://openvex.dev/ns", - Id = $"urn:stellaops:vex:{sourceId}:{timestampStr}", - Author = sourceId, - Timestamp = snapshotAt, - Version = 1, - Statements = statements.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal).ToList() - }; - - var contentBytes = JsonSerializer.SerializeToUtf8Bytes(document, JsonOptions); - var fileName = $"{sourceId}-{timestampStr}.json"; - - return new VexSourceExtractionResult - { - Success = true, - StatementCount = statements.Count, - Content = new VexContent - { - SourceId = sourceId, - FileName = fileName, - Content = contentBytes, - SnapshotAt = snapshotAt, - StatementCount = statements.Count - } - }; - } - catch (Exception ex) - { - return new VexSourceExtractionResult - { - Success = false, - Error = ex.Message - }; - } - } } - -/// -/// Interface for VEX snapshot extraction. -/// -public interface IVexSnapshotExtractor -{ - Task ExtractAllAsync( - VexExtractionRequest request, - CancellationToken cancellationToken = default); - - Task ExtractSourceAsync( - string sourceId, - VexExtractionRequest request, - CancellationToken cancellationToken = default); -} - -/// -/// Interface for VEX data access. -/// This should be implemented by Excititor to provide VEX data. 
-/// -public interface IVexDataSource -{ - Task> GetAvailableSourcesAsync(CancellationToken cancellationToken = default); - - Task> GetStatementsAsync( - string sourceId, - DateTimeOffset? since = null, - int? maxStatements = null, - CancellationToken cancellationToken = default); -} - -#region Data Models - -/// -/// Information about an available VEX source. -/// -public sealed record VexSourceInfo(string SourceId, string Name, string? Publisher); - -/// -/// A VEX statement following OpenVEX format. -/// -public sealed record VexStatement -{ - public required string VulnerabilityId { get; init; } - public required string Status { get; init; } - public string? Justification { get; init; } - public string? ImpactStatement { get; init; } - public string? ActionStatement { get; init; } - public DateTimeOffset? Timestamp { get; init; } - public IReadOnlyList? Products { get; init; } -} - -/// -/// A product reference in a VEX statement. -/// -public sealed record VexProduct -{ - public required string Id { get; init; } - public string? Name { get; init; } - public string? Version { get; init; } - public string? Purl { get; init; } - public IReadOnlyList? Hashes { get; init; } -} - -/// -/// OpenVEX document format. -/// -public sealed record OpenVexDocument -{ - public required string Context { get; init; } - public required string Id { get; init; } - public required string Author { get; init; } - public required DateTimeOffset Timestamp { get; init; } - public required int Version { get; init; } - public required IReadOnlyList Statements { get; init; } -} - -/// -/// Request for extracting VEX statements. -/// -public sealed record VexExtractionRequest -{ - /// - /// Specific source IDs to extract. Empty means all sources. - /// - public IReadOnlyList? SourceIds { get; init; } - - /// - /// Only extract statements modified since this time. - /// - public DateTimeOffset? Since { get; init; } - - /// - /// Maximum statements per source. - /// - public int? MaxStatements { get; init; } -} - -/// -/// Result of extracting VEX statements from all sources. -/// -public sealed record VexExtractionResult -{ - public bool Success { get; init; } - public IReadOnlyList VexStatements { get; init; } = []; - public int TotalStatementCount { get; init; } - public IReadOnlyList Errors { get; init; } = []; -} - -/// -/// Result of extracting a single VEX source. -/// -public sealed record VexSourceExtractionResult -{ - public bool Success { get; init; } - public int StatementCount { get; init; } - public VexContent? Content { get; init; } - public string? Error { get; init; } -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotModels.cs new file mode 100644 index 000000000..dc081e3bb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Extractors/VexSnapshotModels.cs @@ -0,0 +1,90 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Extractors; + +/// +/// Information about an available VEX source. +/// +public sealed record VexSourceInfo(string SourceId, string Name, string? Publisher); + +/// +/// A VEX statement following OpenVEX format. +/// +public sealed record VexStatement +{ + public required string VulnerabilityId { get; init; } + public required string Status { get; init; } + public string? Justification { get; init; } + public string? ImpactStatement { get; init; } + public string? 
ActionStatement { get; init; } + public DateTimeOffset? Timestamp { get; init; } + public IReadOnlyList? Products { get; init; } +} + +/// +/// A product reference in a VEX statement. +/// +public sealed record VexProduct +{ + public required string Id { get; init; } + public string? Name { get; init; } + public string? Version { get; init; } + public string? Purl { get; init; } + public IReadOnlyList? Hashes { get; init; } +} + +/// +/// OpenVEX document format. +/// +public sealed record OpenVexDocument +{ + public required string Context { get; init; } + public required string Id { get; init; } + public required string Author { get; init; } + public required DateTimeOffset Timestamp { get; init; } + public required int Version { get; init; } + public required IReadOnlyList Statements { get; init; } +} + +/// +/// Request for extracting VEX statements. +/// +public sealed record VexExtractionRequest +{ + /// + /// Specific source IDs to extract. Empty means all sources. + /// + public IReadOnlyList? SourceIds { get; init; } + + /// + /// Only extract statements modified since this time. + /// + public DateTimeOffset? Since { get; init; } + + /// + /// Maximum statements per source. + /// + public int? MaxStatements { get; init; } +} + +/// +/// Result of extracting VEX statements from all sources. +/// +public sealed record VexExtractionResult +{ + public bool Success { get; init; } + public IReadOnlyList VexStatements { get; init; } = []; + public int TotalStatementCount { get; init; } + public IReadOnlyList Errors { get; init; } = []; +} + +/// +/// Result of extracting a single VEX source. +/// +public sealed record VexSourceExtractionResult +{ + public bool Success { get; init; } + public int StatementCount { get; init; } + public VexContent? Content { get; init; } + public string? Error { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Configs.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Configs.cs new file mode 100644 index 000000000..8b10d5493 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Configs.cs @@ -0,0 +1,92 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.FunctionMap; + +public static partial class FunctionMapBundleIntegration +{ + /// + /// Creates a bundle artifact build config for a function map predicate file. + /// + /// Path to the function map JSON file on disk. + /// Service name for the function map (used in bundle path). + /// A configured . + public static BundleArtifactBuildConfig CreateFunctionMapConfig(string sourcePath, string serviceName) + { + var fileName = $"{SanitizeName(serviceName)}-function-map.json"; + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.FunctionMap, + ContentType = MediaTypes.FunctionMap, + SourcePath = sourcePath, + RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" + }; + } + + /// + /// Creates a bundle artifact build config for a DSSE-signed function map. + /// + /// Path to the DSSE envelope JSON file on disk. + /// Service name for the function map (used in bundle path). + /// A configured . 
+ public static BundleArtifactBuildConfig CreateFunctionMapDsseConfig(string sourcePath, string serviceName) + { + var fileName = $"{SanitizeName(serviceName)}-function-map.dsse.json"; + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.FunctionMapDsse, + ContentType = MediaTypes.FunctionMapDsse, + SourcePath = sourcePath, + RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" + }; + } + + /// + /// Creates a bundle artifact build config for a runtime observations file. + /// + /// Path to the NDJSON observations file on disk. + /// Date label for the observations file (e.g., "2026-01-22"). + /// A configured . + public static BundleArtifactBuildConfig CreateObservationsConfig(string sourcePath, string dateLabel) + { + var fileName = $"observations-{SanitizeName(dateLabel)}.ndjson"; + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.Observations, + ContentType = MediaTypes.Observations, + SourcePath = sourcePath, + RelativePath = $"{BundlePaths.ObservationsDir}/{fileName}" + }; + } + + /// + /// Creates a bundle artifact build config for a verification report. + /// + /// Path to the verification report JSON file on disk. + /// A configured . + public static BundleArtifactBuildConfig CreateVerificationReportConfig(string sourcePath) + { + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.VerificationReport, + ContentType = MediaTypes.VerificationReport, + SourcePath = sourcePath, + RelativePath = $"{BundlePaths.VerificationDir}/verification-report.json" + }; + } + + /// + /// Creates a bundle artifact build config for a DSSE-signed verification report. + /// + /// Path to the DSSE envelope JSON file on disk. + /// A configured . + public static BundleArtifactBuildConfig CreateVerificationReportDsseConfig(string sourcePath) + { + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.VerificationReportDsse, + ContentType = MediaTypes.FunctionMapDsse, + SourcePath = sourcePath, + RelativePath = $"{BundlePaths.VerificationDir}/verification-report.dsse.json" + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.ContentConfigs.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.ContentConfigs.cs new file mode 100644 index 000000000..a5cdcad13 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.ContentConfigs.cs @@ -0,0 +1,42 @@ +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.FunctionMap; + +public static partial class FunctionMapBundleIntegration +{ + /// + /// Creates a bundle artifact build config from in-memory function map content. + /// + /// Function map predicate JSON bytes. + /// Service name for the function map. + /// A configured . + public static BundleArtifactBuildConfig CreateFunctionMapFromContent(byte[] content, string serviceName) + { + var fileName = $"{SanitizeName(serviceName)}-function-map.json"; + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.FunctionMap, + ContentType = MediaTypes.FunctionMap, + Content = content, + RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" + }; + } + + /// + /// Creates a bundle artifact build config from in-memory observations content. + /// + /// Observations NDJSON bytes. + /// Date label for the observations file. + /// A configured . 
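As a hypothetical assembly step (file paths and service name below are illustrative), a bundle builder could combine the path-based and in-memory factories; each config carries the artifact type, media type, and a deterministic RelativePath under BundlePaths:

// Illustrative only; the source paths are placeholders, not real deployment locations.
var configs = new List<BundleArtifactBuildConfig>
{
    FunctionMapBundleIntegration.CreateFunctionMapConfig(
        "/var/lib/stellaops/function-maps/scanner-web.json", "scanner-web"),
    FunctionMapBundleIntegration.CreateFunctionMapDsseConfig(
        "/var/lib/stellaops/function-maps/scanner-web.dsse.json", "scanner-web"),
    FunctionMapBundleIntegration.CreateObservationsConfig(
        "/var/lib/stellaops/observations/2026-01-22.ndjson", "2026-01-22"),
    FunctionMapBundleIntegration.CreateVerificationReportConfig(
        "/var/lib/stellaops/verification/verification-report.json")
};

// In-memory variant when the function map predicate is produced in-process
// (predicateBytes is a hypothetical byte[] holding the predicate JSON).
configs.Add(FunctionMapBundleIntegration.CreateFunctionMapFromContent(predicateBytes, "scanner-web"));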
+ public static BundleArtifactBuildConfig CreateObservationsFromContent(byte[] content, string dateLabel) + { + var fileName = $"observations-{SanitizeName(dateLabel)}.ndjson"; + return new BundleArtifactBuildConfig + { + Type = ArtifactTypes.Observations, + ContentType = MediaTypes.Observations, + Content = content, + RelativePath = $"{BundlePaths.ObservationsDir}/{fileName}" + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Validation.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Validation.cs new file mode 100644 index 000000000..00f88684e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.Validation.cs @@ -0,0 +1,25 @@ +namespace StellaOps.AirGap.Bundle.FunctionMap; + +public static partial class FunctionMapBundleIntegration +{ + /// + /// Checks if the given artifact type string represents a function-map related artifact. + /// + public static bool IsFunctionMapArtifact(string? artifactType) + { + return artifactType is ArtifactTypes.FunctionMap + or ArtifactTypes.FunctionMapDsse + or ArtifactTypes.Observations + or ArtifactTypes.VerificationReport + or ArtifactTypes.VerificationReportDsse; + } + + /// + /// Checks if the given artifact type is a DSSE-signed artifact that should be verified. + /// + public static bool IsDsseArtifact(string? artifactType) + { + return artifactType is ArtifactTypes.FunctionMapDsse + or ArtifactTypes.VerificationReportDsse; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.cs index df26b1595..060f10a07 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/FunctionMap/FunctionMapBundleIntegration.cs @@ -2,10 +2,6 @@ // Copyright (c) 2025 StellaOps // Sprint: SPRINT_20260122_039_Scanner_runtime_linkage_verification // Task: RLV-011 - Bundle Integration: function_map Artifact Type - -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Services; - namespace StellaOps.AirGap.Bundle.FunctionMap; /// @@ -13,7 +9,7 @@ namespace StellaOps.AirGap.Bundle.FunctionMap; /// Provides standardized artifact type strings, media types, and factory methods /// for building function-map bundle configurations. /// -public static class FunctionMapBundleIntegration +public static partial class FunctionMapBundleIntegration { /// /// Artifact type strings for bundle manifest entries. @@ -69,149 +65,6 @@ public static class FunctionMapBundleIntegration public const string VerificationDir = "verification"; } - /// - /// Creates a bundle artifact build config for a function map predicate file. - /// - /// Path to the function map JSON file on disk. - /// Service name for the function map (used in bundle path). - /// A configured . - public static BundleArtifactBuildConfig CreateFunctionMapConfig(string sourcePath, string serviceName) - { - var fileName = $"{SanitizeName(serviceName)}-function-map.json"; - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.FunctionMap, - ContentType = MediaTypes.FunctionMap, - SourcePath = sourcePath, - RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" - }; - } - - /// - /// Creates a bundle artifact build config for a DSSE-signed function map. 
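// --- Editorial usage sketch (not part of the patch) --------------------------
// The partial classes above (Configs, ContentConfigs, Validation) expose a small
// factory surface for function-map bundle artifacts. The sketch below shows how a
// caller might assemble configs and pre-filter DSSE-signed entries; the paths and
// the "payments-api" service name are illustrative only. One observation: in the
// Configs partial, CreateVerificationReportDsseConfig sets its ContentType to
// MediaTypes.FunctionMapDsse. That may be intentional if MediaTypes defines a single
// DSSE media type, but it is worth double-checking against a dedicated
// verification-report DSSE media type if one exists.
//
// using System.Collections.Generic;
// using System.Linq;
// using StellaOps.AirGap.Bundle.FunctionMap;
// using StellaOps.AirGap.Bundle.Services;
//
// var configs = new List<BundleArtifactBuildConfig>
// {
//     FunctionMapBundleIntegration.CreateFunctionMapConfig(
//         "out/payments-api.function-map.json", "payments-api"),
//     FunctionMapBundleIntegration.CreateFunctionMapDsseConfig(
//         "out/payments-api.function-map.dsse.json", "payments-api"),
//     FunctionMapBundleIntegration.CreateObservationsConfig(
//         "out/observations-2026-01-22.ndjson", "2026-01-22"),
//     FunctionMapBundleIntegration.CreateVerificationReportConfig(
//         "out/verification-report.json"),
// };
//
// // DSSE-signed entries can be routed through signature verification before packing.
// var dsseConfigs = configs
//     .Where(c => FunctionMapBundleIntegration.IsDsseArtifact(c.Type))
//     .ToList();
// ------------------------------------------------------------------------------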
- /// - /// Path to the DSSE envelope JSON file on disk. - /// Service name for the function map (used in bundle path). - /// A configured . - public static BundleArtifactBuildConfig CreateFunctionMapDsseConfig(string sourcePath, string serviceName) - { - var fileName = $"{SanitizeName(serviceName)}-function-map.dsse.json"; - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.FunctionMapDsse, - ContentType = MediaTypes.FunctionMapDsse, - SourcePath = sourcePath, - RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" - }; - } - - /// - /// Creates a bundle artifact build config for a runtime observations file. - /// - /// Path to the NDJSON observations file on disk. - /// Date label for the observations file (e.g., "2026-01-22"). - /// A configured . - public static BundleArtifactBuildConfig CreateObservationsConfig(string sourcePath, string dateLabel) - { - var fileName = $"observations-{SanitizeName(dateLabel)}.ndjson"; - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.Observations, - ContentType = MediaTypes.Observations, - SourcePath = sourcePath, - RelativePath = $"{BundlePaths.ObservationsDir}/{fileName}" - }; - } - - /// - /// Creates a bundle artifact build config for a verification report. - /// - /// Path to the verification report JSON file on disk. - /// A configured . - public static BundleArtifactBuildConfig CreateVerificationReportConfig(string sourcePath) - { - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.VerificationReport, - ContentType = MediaTypes.VerificationReport, - SourcePath = sourcePath, - RelativePath = $"{BundlePaths.VerificationDir}/verification-report.json" - }; - } - - /// - /// Creates a bundle artifact build config for a DSSE-signed verification report. - /// - /// Path to the DSSE envelope JSON file on disk. - /// A configured . - public static BundleArtifactBuildConfig CreateVerificationReportDsseConfig(string sourcePath) - { - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.VerificationReportDsse, - ContentType = MediaTypes.FunctionMapDsse, - SourcePath = sourcePath, - RelativePath = $"{BundlePaths.VerificationDir}/verification-report.dsse.json" - }; - } - - /// - /// Creates a bundle artifact build config from in-memory function map content. - /// - /// Function map predicate JSON bytes. - /// Service name for the function map. - /// A configured . - public static BundleArtifactBuildConfig CreateFunctionMapFromContent(byte[] content, string serviceName) - { - var fileName = $"{SanitizeName(serviceName)}-function-map.json"; - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.FunctionMap, - ContentType = MediaTypes.FunctionMap, - Content = content, - RelativePath = $"{BundlePaths.FunctionMapsDir}/{fileName}" - }; - } - - /// - /// Creates a bundle artifact build config from in-memory observations content. - /// - /// Observations NDJSON bytes. - /// Date label for the observations file. - /// A configured . - public static BundleArtifactBuildConfig CreateObservationsFromContent(byte[] content, string dateLabel) - { - var fileName = $"observations-{SanitizeName(dateLabel)}.ndjson"; - return new BundleArtifactBuildConfig - { - Type = ArtifactTypes.Observations, - ContentType = MediaTypes.Observations, - Content = content, - RelativePath = $"{BundlePaths.ObservationsDir}/{fileName}" - }; - } - - /// - /// Checks if the given artifact type string represents a function-map related artifact. - /// - public static bool IsFunctionMapArtifact(string? 
artifactType) - { - return artifactType is ArtifactTypes.FunctionMap - or ArtifactTypes.FunctionMapDsse - or ArtifactTypes.Observations - or ArtifactTypes.VerificationReport - or ArtifactTypes.VerificationReportDsse; - } - - /// - /// Checks if the given artifact type is a DSSE-signed artifact that should be verified. - /// - public static bool IsDsseArtifact(string? artifactType) - { - return artifactType is ArtifactTypes.FunctionMapDsse - or ArtifactTypes.VerificationReportDsse; - } - private static string SanitizeName(string value) { if (string.IsNullOrWhiteSpace(value)) diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/AdvisorySnapshotEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/AdvisorySnapshotEntry.cs new file mode 100644 index 000000000..ef2a0bcac --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/AdvisorySnapshotEntry.cs @@ -0,0 +1,14 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Entry for an advisory feed in the snapshot. +/// +public sealed class AdvisorySnapshotEntry +{ + public required string FeedId { get; init; } + public required string RelativePath { get; init; } + public required string Digest { get; init; } + public required long SizeBytes { get; init; } + public DateTimeOffset SnapshotAt { get; init; } + public int RecordCount { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifact.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifact.cs new file mode 100644 index 000000000..fd5b3a66b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifact.cs @@ -0,0 +1,17 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Artifact entry in a bundle (v2.0.0). +/// Sprint: SPRINT_20260118_018 (TASK-018-001) +/// +public sealed record BundleArtifact( + /// Relative path within the bundle. + string? Path, + /// Artifact type: sbom, vex, dsse, rekor-proof, oci-referrers, etc. + string Type, + /// Content type (MIME). + string? ContentType, + /// SHA-256 digest of the artifact. + string? Digest, + /// Size in bytes. + long? SizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactType.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactType.cs new file mode 100644 index 000000000..4c1eeb7d2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactType.cs @@ -0,0 +1,66 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Bundle artifact type. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum BundleArtifactType +{ + /// SBOM document. + [JsonPropertyName("sbom")] + Sbom, + + /// DSSE-signed SBOM statement. + [JsonPropertyName("sbom.dsse")] + SbomDsse, + + /// VEX document. + [JsonPropertyName("vex")] + Vex, + + /// DSSE-signed VEX statement. + [JsonPropertyName("vex.dsse")] + VexDsse, + + /// Rekor inclusion proof. + [JsonPropertyName("rekor.proof")] + RekorProof, + + /// OCI referrers index. + [JsonPropertyName("oci.referrers")] + OciReferrers, + + /// Policy snapshot. + [JsonPropertyName("policy")] + Policy, + + /// Feed snapshot. + [JsonPropertyName("feed")] + Feed, + + /// Rekor checkpoint. + [JsonPropertyName("rekor.checkpoint")] + RekorCheckpoint, + + /// Function map predicate (runtime->static linkage). + [JsonPropertyName("function-map")] + FunctionMap, + + /// DSSE-signed function map statement. 
+ [JsonPropertyName("function-map.dsse")] + FunctionMapDsse, + + /// Runtime observations data (NDJSON). + [JsonPropertyName("observations")] + Observations, + + /// Verification report (function map verification result). + [JsonPropertyName("verification-report")] + VerificationReport, + + /// Other/generic artifact. + [JsonPropertyName("other")] + Other +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactV2.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactV2.cs new file mode 100644 index 000000000..a77214aaf --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleArtifactV2.cs @@ -0,0 +1,29 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Bundle artifact entry. +/// +public sealed record BundleArtifactV2 +{ + /// Path within bundle. + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// Artifact type. + [JsonPropertyName("type")] + public BundleArtifactType Type { get; init; } + + /// Content digest (sha256). + [JsonPropertyName("digest")] + public string? Digest { get; init; } + + /// Media type. + [JsonPropertyName("mediaType")] + public string? MediaType { get; init; } + + /// Size in bytes. + [JsonPropertyName("size")] + public long Size { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleFormatV2.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleFormatV2.cs deleted file mode 100644 index b02230792..000000000 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleFormatV2.cs +++ /dev/null @@ -1,266 +0,0 @@ -// ----------------------------------------------------------------------------- -// BundleFormatV2.cs -// Sprint: SPRINT_20260118_018_AirGap_router_integration -// Task: TASK-018-001 - Complete Air-Gap Bundle Format -// Description: Air-gap bundle format v2.0.0 matching advisory specification -// ----------------------------------------------------------------------------- - -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.AirGap.Bundle.Models; - -/// -/// Air-gap bundle manifest v2.0.0 per advisory specification. -/// -public sealed record BundleManifestV2 -{ - /// Schema version. - [JsonPropertyName("schemaVersion")] - public string SchemaVersion { get; init; } = "2.0.0"; - - /// Canonical manifest hash (sha256 over canonical JSON). - [JsonPropertyName("canonicalManifestHash")] - public string? CanonicalManifestHash { get; init; } - - /// Subject digests for the bundle target. - [JsonPropertyName("subject")] - public BundleSubject? Subject { get; init; } - - /// Timestamp entries for offline verification. - [JsonPropertyName("timestamps")] - public ImmutableArray Timestamps { get; init; } = []; - - /// Rekor proof entries for offline verification. - [JsonPropertyName("rekorProofs")] - public ImmutableArray RekorProofs { get; init; } = []; - - /// Bundle information. - [JsonPropertyName("bundle")] - public required BundleInfoV2 Bundle { get; init; } - - /// Verification configuration. - [JsonPropertyName("verify")] - public BundleVerifySectionV2? Verify { get; init; } - - /// Bundle metadata. - [JsonPropertyName("metadata")] - public BundleMetadata? Metadata { get; init; } -} - -/// -/// Bundle information. -/// -public sealed record BundleInfoV2 -{ - /// Primary image reference. - [JsonPropertyName("image")] - public required string Image { get; init; } - - /// Image digest. 
- [JsonPropertyName("digest")] - public string? Digest { get; init; } - - /// Bundle artifacts. - [JsonPropertyName("artifacts")] - public required ImmutableArray Artifacts { get; init; } - - /// OCI referrer manifest. - [JsonPropertyName("referrers")] - public OciReferrerIndex? Referrers { get; init; } -} - -/// -/// Bundle artifact entry. -/// -public sealed record BundleArtifactV2 -{ - /// Path within bundle. - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// Artifact type. - [JsonPropertyName("type")] - public BundleArtifactType Type { get; init; } - - /// Content digest (sha256). - [JsonPropertyName("digest")] - public string? Digest { get; init; } - - /// Media type. - [JsonPropertyName("mediaType")] - public string? MediaType { get; init; } - - /// Size in bytes. - [JsonPropertyName("size")] - public long Size { get; init; } -} - -/// -/// Bundle artifact type. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum BundleArtifactType -{ - /// SBOM document. - [JsonPropertyName("sbom")] - Sbom, - - /// DSSE-signed SBOM statement. - [JsonPropertyName("sbom.dsse")] - SbomDsse, - - /// VEX document. - [JsonPropertyName("vex")] - Vex, - - /// DSSE-signed VEX statement. - [JsonPropertyName("vex.dsse")] - VexDsse, - - /// Rekor inclusion proof. - [JsonPropertyName("rekor.proof")] - RekorProof, - - /// OCI referrers index. - [JsonPropertyName("oci.referrers")] - OciReferrers, - - /// Policy snapshot. - [JsonPropertyName("policy")] - Policy, - - /// Feed snapshot. - [JsonPropertyName("feed")] - Feed, - - /// Rekor checkpoint. - [JsonPropertyName("rekor.checkpoint")] - RekorCheckpoint, - - /// Function map predicate (runtime→static linkage). - [JsonPropertyName("function-map")] - FunctionMap, - - /// DSSE-signed function map statement. - [JsonPropertyName("function-map.dsse")] - FunctionMapDsse, - - /// Runtime observations data (NDJSON). - [JsonPropertyName("observations")] - Observations, - - /// Verification report (function map verification result). - [JsonPropertyName("verification-report")] - VerificationReport, - - /// Other/generic artifact. - [JsonPropertyName("other")] - Other -} - -/// -/// Bundle verification section. -/// -public sealed record BundleVerifySectionV2 -{ - /// Trusted signing keys. - [JsonPropertyName("keys")] - public ImmutableArray Keys { get; init; } = []; - - /// Verification expectations. - [JsonPropertyName("expectations")] - public VerifyExpectations? Expectations { get; init; } - - /// Certificate roots for verification. - [JsonPropertyName("certificateRoots")] - public ImmutableArray CertificateRoots { get; init; } = []; -} - -/// -/// Verification expectations. -/// -public sealed record VerifyExpectations -{ - /// Expected payload types. - [JsonPropertyName("payloadTypes")] - public ImmutableArray PayloadTypes { get; init; } = []; - - /// Whether Rekor inclusion is required. - [JsonPropertyName("rekorRequired")] - public bool RekorRequired { get; init; } - - /// Expected issuers. - [JsonPropertyName("issuers")] - public ImmutableArray Issuers { get; init; } = []; - - /// Minimum signature count. - [JsonPropertyName("minSignatures")] - public int MinSignatures { get; init; } = 1; -} - -/// -/// OCI referrer index. -/// -public sealed record OciReferrerIndex -{ - /// Referrer descriptors. - [JsonPropertyName("manifests")] - public ImmutableArray Manifests { get; init; } = []; -} - -/// -/// OCI referrer descriptor. -/// -public sealed record OciReferrerDescriptor -{ - /// Media type. 
- [JsonPropertyName("mediaType")] - public required string MediaType { get; init; } - - /// Digest. - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// Artifact type. - [JsonPropertyName("artifactType")] - public string? ArtifactType { get; init; } - - /// Size. - [JsonPropertyName("size")] - public long Size { get; init; } - - /// Annotations. - [JsonPropertyName("annotations")] - public IReadOnlyDictionary? Annotations { get; init; } -} - -/// -/// Bundle metadata. -/// -public sealed record BundleMetadata -{ - /// When bundle was created. - [JsonPropertyName("createdAt")] - public DateTimeOffset CreatedAt { get; init; } - - /// Bundle creator. - [JsonPropertyName("createdBy")] - public string? CreatedBy { get; init; } - - /// Bundle description. - [JsonPropertyName("description")] - public string? Description { get; init; } - - /// Source environment. - [JsonPropertyName("sourceEnvironment")] - public string? SourceEnvironment { get; init; } - - /// Target environment. - [JsonPropertyName("targetEnvironment")] - public string? TargetEnvironment { get; init; } - - /// Additional labels. - [JsonPropertyName("labels")] - public IReadOnlyDictionary? Labels { get; init; } -} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleInfoV2.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleInfoV2.cs new file mode 100644 index 000000000..f93b5e348 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleInfoV2.cs @@ -0,0 +1,26 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Bundle information. +/// +public sealed record BundleInfoV2 +{ + /// Primary image reference. + [JsonPropertyName("image")] + public required string Image { get; init; } + + /// Image digest. + [JsonPropertyName("digest")] + public string? Digest { get; init; } + + /// Bundle artifacts. + [JsonPropertyName("artifacts")] + public required ImmutableArray Artifacts { get; init; } + + /// OCI referrer manifest. + [JsonPropertyName("referrers")] + public OciReferrerIndex? Referrers { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifest.cs index 0bba07d8e..f4f3d1436 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifest.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifest.cs @@ -1,4 +1,4 @@ -using System.Collections.Immutable; +using System.Collections.Immutable; namespace StellaOps.AirGap.Bundle.Models; @@ -71,200 +71,3 @@ public sealed record BundleManifest /// public ImmutableArray RekorProofs { get; init; } = []; } - -/// -/// Artifact entry in a bundle (v2.0.0). -/// Sprint: SPRINT_20260118_018 (TASK-018-001) -/// -public sealed record BundleArtifact( - /// Relative path within the bundle. - string? Path, - /// Artifact type: sbom, vex, dsse, rekor-proof, oci-referrers, etc. - string Type, - /// Content type (MIME). - string? ContentType, - /// SHA-256 digest of the artifact. - string? Digest, - /// Size in bytes. - long? SizeBytes); - -/// -/// Verification section for bundle validation (v2.0.0). -/// Sprint: SPRINT_20260118_018 (TASK-018-001) -/// -public sealed record BundleVerifySection -{ - /// - /// Trusted signing keys for verification. - /// Formats: kms://..., file://..., sigstore://... 
- /// - public ImmutableArray Keys { get; init; } = []; - - /// - /// Verification expectations. - /// - public BundleVerifyExpectations? Expectations { get; init; } - - /// - /// Optional: path to trust root certificate. - /// - public string? TrustRoot { get; init; } - - /// - /// Optional: Rekor checkpoint for offline proof verification. - /// - public string? RekorCheckpointPath { get; init; } -} - -/// -/// Verification expectations (v2.0.0). -/// Sprint: SPRINT_20260118_018 (TASK-018-001) -/// -public sealed record BundleVerifyExpectations -{ - /// - /// Expected payload types in DSSE envelopes. - /// Example: ["application/vnd.cyclonedx+json;version=1.6", "application/vnd.openvex+json"] - /// - public ImmutableArray PayloadTypes { get; init; } = []; - - /// - /// Whether Rekor proof is required for verification. - /// - public bool RekorRequired { get; init; } = true; - - /// - /// Minimum number of signatures required. - /// - public int MinSignatures { get; init; } = 1; - - /// - /// Required artifact types that must be present. - /// - public ImmutableArray RequiredArtifacts { get; init; } = []; - - /// - /// Whether all artifacts must pass checksum verification. - /// - public bool VerifyChecksums { get; init; } = true; -} - -public sealed record FeedComponent( - string FeedId, - string Name, - string Version, - string RelativePath, - string Digest, - long SizeBytes, - DateTimeOffset SnapshotAt, - FeedFormat Format); - -public enum FeedFormat -{ - StellaOpsNative, - TrivyDb, - GrypeDb, - OsvJson -} - -public sealed record PolicyComponent( - string PolicyId, - string Name, - string Version, - string RelativePath, - string Digest, - long SizeBytes, - PolicyType Type); - -public enum PolicyType -{ - OpaRego, - LatticeRules, - UnknownBudgets, - ScoringWeights, - /// - /// Local RBAC policy file for Authority offline fallback. - /// Sprint: SPRINT_20260112_018_AUTH_local_rbac_fallback Task: RBAC-010 - /// - LocalRbac -} - -public sealed record CryptoComponent( - string ComponentId, - string Name, - string RelativePath, - string Digest, - long SizeBytes, - CryptoComponentType Type, - DateTimeOffset? ExpiresAt); - -public enum CryptoComponentType -{ - TrustRoot, - IntermediateCa, - TimestampRoot, - SigningKey, - FulcioRoot -} - -public sealed record CatalogComponent( - string CatalogId, - string Ecosystem, - string Version, - string RelativePath, - string Digest, - long SizeBytes, - DateTimeOffset SnapshotAt); - -public sealed record RekorSnapshot( - string TreeId, - long TreeSize, - string RootHash, - string RelativePath, - string Digest, - DateTimeOffset SnapshotAt); - -public sealed record CryptoProviderComponent( - string ProviderId, - string Name, - string Version, - string RelativePath, - string Digest, - long SizeBytes, - ImmutableArray SupportedAlgorithms); - -/// -/// Component for a rule bundle (e.g., secrets detection rules). -/// -/// Bundle identifier (e.g., "secrets.ruleset"). -/// Bundle type (e.g., "secrets", "malware"). -/// Bundle version in YYYY.MM format. -/// Relative path to the bundle directory. -/// Combined digest of all files in the bundle. -/// Total size of the bundle in bytes. -/// Number of rules in the bundle. -/// Key ID used to sign the bundle. -/// When the bundle was signed. -/// List of files in the bundle. -public sealed record RuleBundleComponent( - string BundleId, - string BundleType, - string Version, - string RelativePath, - string Digest, - long SizeBytes, - int RuleCount, - string? SignerKeyId, - DateTimeOffset? 
SignedAt, - ImmutableArray Files); - -/// -/// A file within a rule bundle component. -/// -/// Filename (e.g., "secrets.ruleset.manifest.json"). -/// SHA256 digest of the file. -/// File size in bytes. -public sealed record RuleBundleFileComponent( - string Name, - string Digest, - long SizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifestV2.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifestV2.cs new file mode 100644 index 000000000..cd3337a68 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleManifestV2.cs @@ -0,0 +1,42 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Air-gap bundle manifest v2.0.0 per advisory specification. +/// +public sealed record BundleManifestV2 +{ + /// Schema version. + [JsonPropertyName("schemaVersion")] + public string SchemaVersion { get; init; } = "2.0.0"; + + /// Canonical manifest hash (sha256 over canonical JSON). + [JsonPropertyName("canonicalManifestHash")] + public string? CanonicalManifestHash { get; init; } + + /// Subject digests for the bundle target. + [JsonPropertyName("subject")] + public BundleSubject? Subject { get; init; } + + /// Timestamp entries for offline verification. + [JsonPropertyName("timestamps")] + public ImmutableArray Timestamps { get; init; } = []; + + /// Rekor proof entries for offline verification. + [JsonPropertyName("rekorProofs")] + public ImmutableArray RekorProofs { get; init; } = []; + + /// Bundle information. + [JsonPropertyName("bundle")] + public required BundleInfoV2 Bundle { get; init; } + + /// Verification configuration. + [JsonPropertyName("verify")] + public BundleVerifySectionV2? Verify { get; init; } + + /// Bundle metadata. + [JsonPropertyName("metadata")] + public BundleMetadata? Metadata { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleMetadata.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleMetadata.cs new file mode 100644 index 000000000..ba8298158 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleMetadata.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Bundle metadata. +/// +public sealed record BundleMetadata +{ + /// When bundle was created. + [JsonPropertyName("createdAt")] + public DateTimeOffset CreatedAt { get; init; } + + /// Bundle creator. + [JsonPropertyName("createdBy")] + public string? CreatedBy { get; init; } + + /// Bundle description. + [JsonPropertyName("description")] + public string? Description { get; init; } + + /// Source environment. + [JsonPropertyName("sourceEnvironment")] + public string? SourceEnvironment { get; init; } + + /// Target environment. + [JsonPropertyName("targetEnvironment")] + public string? TargetEnvironment { get; init; } + + /// Additional labels. + [JsonPropertyName("labels")] + public IReadOnlyDictionary? Labels { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifyExpectations.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifyExpectations.cs new file mode 100644 index 000000000..9155880de --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifyExpectations.cs @@ -0,0 +1,36 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Verification expectations (v2.0.0). 
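// --- Editorial usage sketch (not part of the patch) --------------------------
// A minimal BundleManifestV2 built from the v2 models above, assuming
// BundleInfoV2.Artifacts holds BundleArtifactV2 entries. The image reference,
// digests, sizes, creator, and environment labels are placeholders; the CycloneDX
// media type mirrors the example in the XML docs.
//
// using System.Collections.Immutable;
// using StellaOps.AirGap.Bundle.Models;
//
// var manifest = new BundleManifestV2
// {
//     Bundle = new BundleInfoV2
//     {
//         Image = "registry.example.internal/payments-api:1.4.2",
//         Digest = "sha256:<image-digest>",
//         Artifacts =
//         [
//             new BundleArtifactV2
//             {
//                 Path = "sboms/payments-api.cdx.json",
//                 Type = BundleArtifactType.Sbom,
//                 MediaType = "application/vnd.cyclonedx+json;version=1.6",
//                 Digest = "sha256:<artifact-digest>",
//                 Size = 183_220
//             }
//         ]
//     },
//     Metadata = new BundleMetadata
//     {
//         CreatedAt = DateTimeOffset.UtcNow,
//         CreatedBy = "export-center",
//         SourceEnvironment = "connected",
//         TargetEnvironment = "air-gapped"
//     }
// };
// ------------------------------------------------------------------------------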
+/// Sprint: SPRINT_20260118_018 (TASK-018-001) +/// +public sealed record BundleVerifyExpectations +{ + /// + /// Expected payload types in DSSE envelopes. + /// Example: ["application/vnd.cyclonedx+json;version=1.6", "application/vnd.openvex+json"] + /// + public ImmutableArray PayloadTypes { get; init; } = []; + + /// + /// Whether Rekor proof is required for verification. + /// + public bool RekorRequired { get; init; } = true; + + /// + /// Minimum number of signatures required. + /// + public int MinSignatures { get; init; } = 1; + + /// + /// Required artifact types that must be present. + /// + public ImmutableArray RequiredArtifacts { get; init; } = []; + + /// + /// Whether all artifacts must pass checksum verification. + /// + public bool VerifyChecksums { get; init; } = true; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySection.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySection.cs new file mode 100644 index 000000000..cb87391ff --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySection.cs @@ -0,0 +1,31 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Verification section for bundle validation (v2.0.0). +/// Sprint: SPRINT_20260118_018 (TASK-018-001) +/// +public sealed record BundleVerifySection +{ + /// + /// Trusted signing keys for verification. + /// Formats: kms://..., file://..., sigstore://... + /// + public ImmutableArray Keys { get; init; } = []; + + /// + /// Verification expectations. + /// + public BundleVerifyExpectations? Expectations { get; init; } + + /// + /// Optional: path to trust root certificate. + /// + public string? TrustRoot { get; init; } + + /// + /// Optional: Rekor checkpoint for offline proof verification. + /// + public string? RekorCheckpointPath { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySectionV2.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySectionV2.cs new file mode 100644 index 000000000..69c612b96 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/BundleVerifySectionV2.cs @@ -0,0 +1,22 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Bundle verification section. +/// +public sealed record BundleVerifySectionV2 +{ + /// Trusted signing keys. + [JsonPropertyName("keys")] + public ImmutableArray Keys { get; init; } = []; + + /// Verification expectations. + [JsonPropertyName("expectations")] + public VerifyExpectations? Expectations { get; init; } + + /// Certificate roots for verification. 
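// --- Editorial usage sketch (not part of the patch) --------------------------
// Populating the offline verification section defined above (BundleVerifySection
// with BundleVerifyExpectations). The payload types mirror the examples in the XML
// docs, and the key format follows the documented kms:// / file:// / sigstore://
// schemes; the specific key reference, trust-root path, and required artifact types
// are assumptions, and the string element type of Keys and RequiredArtifacts is
// inferred from those docs.
//
// var verify = new BundleVerifySection
// {
//     Keys = ["file:///etc/stellaops/keys/bundle-signing.pub"],
//     TrustRoot = "crypto/trust-root.pem",
//     Expectations = new BundleVerifyExpectations
//     {
//         PayloadTypes =
//         [
//             "application/vnd.cyclonedx+json;version=1.6",
//             "application/vnd.openvex+json"
//         ],
//         RekorRequired = true,
//         MinSignatures = 1,
//         RequiredArtifacts = ["sbom", "vex"],
//         VerifyChecksums = true
//     }
// };
// ------------------------------------------------------------------------------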
+ [JsonPropertyName("certificateRoots")] + public ImmutableArray CertificateRoots { get; init; } = []; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CatalogComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CatalogComponent.cs new file mode 100644 index 000000000..0b7aba578 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CatalogComponent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record CatalogComponent( + string CatalogId, + string Ecosystem, + string Version, + string RelativePath, + string Digest, + long SizeBytes, + DateTimeOffset SnapshotAt); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponent.cs new file mode 100644 index 000000000..ba40685c9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record CryptoComponent( + string ComponentId, + string Name, + string RelativePath, + string Digest, + long SizeBytes, + CryptoComponentType Type, + DateTimeOffset? ExpiresAt); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponentType.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponentType.cs new file mode 100644 index 000000000..75eda1fb2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoComponentType.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public enum CryptoComponentType +{ + TrustRoot, + IntermediateCa, + TimestampRoot, + SigningKey, + FulcioRoot +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoProviderComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoProviderComponent.cs new file mode 100644 index 000000000..eb4b35f45 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/CryptoProviderComponent.cs @@ -0,0 +1,12 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record CryptoProviderComponent( + string ProviderId, + string Name, + string Version, + string RelativePath, + string Digest, + long SizeBytes, + ImmutableArray SupportedAlgorithms); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedComponent.cs new file mode 100644 index 000000000..fb32c5ad1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedComponent.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record FeedComponent( + string FeedId, + string Name, + string Version, + string RelativePath, + string Digest, + long SizeBytes, + DateTimeOffset SnapshotAt, + FeedFormat Format); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedFormat.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedFormat.cs new file mode 100644 index 000000000..7aeb580ae --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/FeedFormat.cs @@ -0,0 +1,9 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public enum FeedFormat +{ + StellaOpsNative, + TrivyDb, + GrypeDb, + OsvJson +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/KnowledgeSnapshotManifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/KnowledgeSnapshotManifest.cs index f06f01949..fb1016d50 100644 --- 
a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/KnowledgeSnapshotManifest.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/KnowledgeSnapshotManifest.cs @@ -4,7 +4,6 @@ // Task: SEAL-001 - Define KnowledgeSnapshotManifest schema // Description: Manifest model for sealed knowledge snapshots. // ----------------------------------------------------------------------------- - namespace StellaOps.AirGap.Bundle.Models; /// @@ -28,139 +27,3 @@ public sealed class KnowledgeSnapshotManifest public List RuleBundles { get; init; } = []; public TimeAnchorEntry? TimeAnchor { get; set; } } - -/// -/// Entry for an advisory feed in the snapshot. -/// -public sealed class AdvisorySnapshotEntry -{ - public required string FeedId { get; init; } - public required string RelativePath { get; init; } - public required string Digest { get; init; } - public required long SizeBytes { get; init; } - public DateTimeOffset SnapshotAt { get; init; } - public int RecordCount { get; init; } -} - -/// -/// Entry for VEX statements in the snapshot. -/// -public sealed class VexSnapshotEntry -{ - public required string SourceId { get; init; } - public required string RelativePath { get; init; } - public required string Digest { get; init; } - public required long SizeBytes { get; init; } - public DateTimeOffset SnapshotAt { get; init; } - public int StatementCount { get; init; } -} - -/// -/// Entry for a policy in the snapshot. -/// -public sealed class PolicySnapshotEntry -{ - public required string PolicyId { get; init; } - public required string Name { get; init; } - public required string Version { get; init; } - public required string RelativePath { get; init; } - public required string Digest { get; init; } - public required long SizeBytes { get; init; } - public string Type { get; init; } = "OpaRego"; -} - -/// -/// Entry for a trust root in the snapshot. -/// -public sealed class TrustRootSnapshotEntry -{ - public required string KeyId { get; init; } - public required string RelativePath { get; init; } - public required string Digest { get; init; } - public required long SizeBytes { get; init; } - public string Algorithm { get; init; } = "ES256"; - public DateTimeOffset? ExpiresAt { get; init; } -} - -/// -/// Entry for a rule bundle in the snapshot. -/// Used for detection rule bundles (secrets, malware, etc.). -/// -public sealed class RuleBundleSnapshotEntry -{ - /// - /// Bundle identifier (e.g., "secrets.ruleset"). - /// - public required string BundleId { get; init; } - - /// - /// Bundle type (e.g., "secrets", "malware"). - /// - public required string BundleType { get; init; } - - /// - /// Bundle version in YYYY.MM format. - /// - public required string Version { get; init; } - - /// - /// Relative path to the bundle directory in the snapshot. - /// - public required string RelativePath { get; init; } - - /// - /// List of files in the bundle with their digests. - /// - public required List Files { get; init; } - - /// - /// Number of rules in the bundle. - /// - public int RuleCount { get; init; } - - /// - /// Key ID used to sign the bundle. - /// - public string? SignerKeyId { get; init; } - - /// - /// When the bundle was signed. - /// - public DateTimeOffset? SignedAt { get; init; } - - /// - /// When the bundle signature was verified during export. - /// - public DateTimeOffset? VerifiedAt { get; init; } -} - -/// -/// A file within a rule bundle. -/// -public sealed class RuleBundleFile -{ - /// - /// Filename (e.g., "secrets.ruleset.manifest.json"). 
- /// - public required string Name { get; init; } - - /// - /// SHA256 digest of the file. - /// - public required string Digest { get; init; } - - /// - /// File size in bytes. - /// - public required long SizeBytes { get; init; } -} - -/// -/// Time anchor entry in the manifest. -/// -public sealed class TimeAnchorEntry -{ - public required DateTimeOffset AnchorTime { get; init; } - public required string Source { get; init; } - public string? Digest { get; init; } -} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerDescriptor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerDescriptor.cs new file mode 100644 index 000000000..dd59ba75a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerDescriptor.cs @@ -0,0 +1,29 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// OCI referrer descriptor. +/// +public sealed record OciReferrerDescriptor +{ + /// Media type. + [JsonPropertyName("mediaType")] + public required string MediaType { get; init; } + + /// Digest. + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// Artifact type. + [JsonPropertyName("artifactType")] + public string? ArtifactType { get; init; } + + /// Size. + [JsonPropertyName("size")] + public long Size { get; init; } + + /// Annotations. + [JsonPropertyName("annotations")] + public IReadOnlyDictionary? Annotations { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerIndex.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerIndex.cs new file mode 100644 index 000000000..2ac15da37 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/OciReferrerIndex.cs @@ -0,0 +1,14 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// OCI referrer index. +/// +public sealed record OciReferrerIndex +{ + /// Referrer descriptors. + [JsonPropertyName("manifests")] + public ImmutableArray Manifests { get; init; } = []; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyComponent.cs new file mode 100644 index 000000000..8a9ff4ca4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyComponent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record PolicyComponent( + string PolicyId, + string Name, + string Version, + string RelativePath, + string Digest, + long SizeBytes, + PolicyType Type); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicySnapshotEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicySnapshotEntry.cs new file mode 100644 index 000000000..35dea675b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicySnapshotEntry.cs @@ -0,0 +1,15 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Entry for a policy in the snapshot. 
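// --- Editorial usage sketch (not part of the patch) --------------------------
// Recording a policy in the snapshot with the PolicySnapshotEntry declared just
// below. Its Type property is a plain string defaulting to "OpaRego"; this sketch
// stores the enum name of the LocalRbac policy type added for the Authority offline
// RBAC fallback. The id, name, path, digest, and size are placeholders.
//
// var rbacPolicy = new PolicySnapshotEntry
// {
//     PolicyId = "authority.local-rbac",
//     Name = "Local RBAC fallback",
//     Version = "1.0.0",
//     RelativePath = "policies/local-rbac.json",
//     Digest = "sha256:<policy-digest>",
//     SizeBytes = 4_096,
//     Type = nameof(PolicyType.LocalRbac)
// };
// ------------------------------------------------------------------------------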
+/// +public sealed class PolicySnapshotEntry +{ + public required string PolicyId { get; init; } + public required string Name { get; init; } + public required string Version { get; init; } + public required string RelativePath { get; init; } + public required string Digest { get; init; } + public required long SizeBytes { get; init; } + public string Type { get; init; } = "OpaRego"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyType.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyType.cs new file mode 100644 index 000000000..371789bae --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/PolicyType.cs @@ -0,0 +1,14 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public enum PolicyType +{ + OpaRego, + LatticeRules, + UnknownBudgets, + ScoringWeights, + /// + /// Local RBAC policy file for Authority offline fallback. + /// Sprint: SPRINT_20260112_018_AUTH_local_rbac_fallback Task: RBAC-010 + /// + LocalRbac +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RekorSnapshot.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RekorSnapshot.cs new file mode 100644 index 000000000..04f7a11c0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RekorSnapshot.cs @@ -0,0 +1,9 @@ +namespace StellaOps.AirGap.Bundle.Models; + +public sealed record RekorSnapshot( + string TreeId, + long TreeSize, + string RootHash, + string RelativePath, + string Digest, + DateTimeOffset SnapshotAt); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleComponent.cs new file mode 100644 index 000000000..356cd0cff --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleComponent.cs @@ -0,0 +1,28 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Component for a rule bundle (e.g., secrets detection rules). +/// +/// Bundle identifier (e.g., "secrets.ruleset"). +/// Bundle type (e.g., "secrets", "malware"). +/// Bundle version in YYYY.MM format. +/// Relative path to the bundle directory. +/// Combined digest of all files in the bundle. +/// Total size of the bundle in bytes. +/// Number of rules in the bundle. +/// Key ID used to sign the bundle. +/// When the bundle was signed. +/// List of files in the bundle. +public sealed record RuleBundleComponent( + string BundleId, + string BundleType, + string Version, + string RelativePath, + string Digest, + long SizeBytes, + int RuleCount, + string? SignerKeyId, + DateTimeOffset? SignedAt, + ImmutableArray Files); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFile.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFile.cs new file mode 100644 index 000000000..73974a3ac --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFile.cs @@ -0,0 +1,22 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// A file within a rule bundle. +/// +public sealed class RuleBundleFile +{ + /// + /// Filename (e.g., "secrets.ruleset.manifest.json"). + /// + public required string Name { get; init; } + + /// + /// SHA256 digest of the file. + /// + public required string Digest { get; init; } + + /// + /// File size in bytes. 
+ /// + public required long SizeBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFileComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFileComponent.cs new file mode 100644 index 000000000..d00ae0fa1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleFileComponent.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// A file within a rule bundle component. +/// +/// Filename (e.g., "secrets.ruleset.manifest.json"). +/// SHA256 digest of the file. +/// File size in bytes. +public sealed record RuleBundleFileComponent( + string Name, + string Digest, + long SizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleSnapshotEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleSnapshotEntry.cs new file mode 100644 index 000000000..764e14939 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/RuleBundleSnapshotEntry.cs @@ -0,0 +1,53 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Entry for a rule bundle in the snapshot. +/// Used for detection rule bundles (secrets, malware, etc.). +/// +public sealed class RuleBundleSnapshotEntry +{ + /// + /// Bundle identifier (e.g., "secrets.ruleset"). + /// + public required string BundleId { get; init; } + + /// + /// Bundle type (e.g., "secrets", "malware"). + /// + public required string BundleType { get; init; } + + /// + /// Bundle version in YYYY.MM format. + /// + public required string Version { get; init; } + + /// + /// Relative path to the bundle directory in the snapshot. + /// + public required string RelativePath { get; init; } + + /// + /// List of files in the bundle with their digests. + /// + public required List Files { get; init; } + + /// + /// Number of rules in the bundle. + /// + public int RuleCount { get; init; } + + /// + /// Key ID used to sign the bundle. + /// + public string? SignerKeyId { get; init; } + + /// + /// When the bundle was signed. + /// + public DateTimeOffset? SignedAt { get; init; } + + /// + /// When the bundle signature was verified during export. + /// + public DateTimeOffset? VerifiedAt { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TimeAnchorEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TimeAnchorEntry.cs new file mode 100644 index 000000000..aae1909f5 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TimeAnchorEntry.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Time anchor entry in the manifest. +/// +public sealed class TimeAnchorEntry +{ + public required DateTimeOffset AnchorTime { get; init; } + public required string Source { get; init; } + public string? Digest { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TrustRootSnapshotEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TrustRootSnapshotEntry.cs new file mode 100644 index 000000000..538f022d4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/TrustRootSnapshotEntry.cs @@ -0,0 +1,14 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Entry for a trust root in the snapshot. 
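// --- Editorial usage sketch (not part of the patch) --------------------------
// Assembling a rule-bundle entry from the RuleBundleSnapshotEntry and RuleBundleFile
// types above, assuming Files is a List<RuleBundleFile>. The bundle id, type,
// YYYY.MM version, and manifest file name follow the examples in the XML docs;
// the digests, sizes, rule count, and signer key id are placeholders.
//
// var secretsRules = new RuleBundleSnapshotEntry
// {
//     BundleId = "secrets.ruleset",
//     BundleType = "secrets",
//     Version = "2026.01",
//     RelativePath = "rules/secrets.ruleset",
//     RuleCount = 128,
//     SignerKeyId = "stellaops-rules-2026",
//     SignedAt = DateTimeOffset.UtcNow,
//     Files =
//     [
//         new RuleBundleFile
//         {
//             Name = "secrets.ruleset.manifest.json",
//             Digest = "sha256:<file-digest>",
//             SizeBytes = 2_048
//         }
//     ]
// };
// ------------------------------------------------------------------------------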
+/// +public sealed class TrustRootSnapshotEntry +{ + public required string KeyId { get; init; } + public required string RelativePath { get; init; } + public required string Digest { get; init; } + public required long SizeBytes { get; init; } + public string Algorithm { get; init; } = "ES256"; + public DateTimeOffset? ExpiresAt { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VerifyExpectations.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VerifyExpectations.cs new file mode 100644 index 000000000..4f452a2ea --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VerifyExpectations.cs @@ -0,0 +1,26 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Verification expectations. +/// +public sealed record VerifyExpectations +{ + /// Expected payload types. + [JsonPropertyName("payloadTypes")] + public ImmutableArray PayloadTypes { get; init; } = []; + + /// Whether Rekor inclusion is required. + [JsonPropertyName("rekorRequired")] + public bool RekorRequired { get; init; } + + /// Expected issuers. + [JsonPropertyName("issuers")] + public ImmutableArray Issuers { get; init; } = []; + + /// Minimum signature count. + [JsonPropertyName("minSignatures")] + public int MinSignatures { get; init; } = 1; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VexSnapshotEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VexSnapshotEntry.cs new file mode 100644 index 000000000..976b63db1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Models/VexSnapshotEntry.cs @@ -0,0 +1,14 @@ +namespace StellaOps.AirGap.Bundle.Models; + +/// +/// Entry for VEX statements in the snapshot. +/// +public sealed class VexSnapshotEntry +{ + public required string SourceId { get; init; } + public required string RelativePath { get; init; } + public required string Digest { get; init; } + public required long SizeBytes { get; init; } + public DateTimeOffset SnapshotAt { get; init; } + public int StatementCount { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Serialization/BundleManifestSerializer.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Serialization/BundleManifestSerializer.cs index f197f3afc..ee1630064 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Serialization/BundleManifestSerializer.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Serialization/BundleManifestSerializer.cs @@ -14,7 +14,7 @@ namespace StellaOps.AirGap.Bundle.Serialization; /// public static class BundleManifestSerializer { - private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + private static readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.Web) { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase, @@ -28,14 +28,14 @@ public static class BundleManifestSerializer public static string Serialize(BundleManifest manifest) { - var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions); + var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, _jsonOptions); var canonicalBytes = CanonJson.CanonicalizeParsedJson(jsonBytes); return Encoding.UTF8.GetString(canonicalBytes); } public static BundleManifest Deserialize(string json) { - return JsonSerializer.Deserialize(json, JsonOptions) + return JsonSerializer.Deserialize(json, _jsonOptions) ?? 
throw new InvalidOperationException("Failed to deserialize bundle manifest"); } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/AdvisoryContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/AdvisoryContent.cs new file mode 100644 index 000000000..89ddfad65 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/AdvisoryContent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record AdvisoryContent +{ + public required string FeedId { get; init; } + public required string FileName { get; init; } + public required byte[] Content { get; init; } + public DateTimeOffset? SnapshotAt { get; init; } + public int RecordCount { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleArtifactBuildConfig.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleArtifactBuildConfig.cs new file mode 100644 index 000000000..7933b73bb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleArtifactBuildConfig.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record BundleArtifactBuildConfig +{ + public required string Type { get; init; } + public string? ContentType { get; init; } + public string? SourcePath { get; init; } + public byte[]? Content { get; init; } + public string? RelativePath { get; init; } + public string? FileName { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuildRequest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuildRequest.cs new file mode 100644 index 000000000..e5885600b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuildRequest.cs @@ -0,0 +1,17 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record BundleBuildRequest( + string Name, + string Version, + DateTimeOffset? ExpiresAt, + IReadOnlyList Feeds, + IReadOnlyList Policies, + IReadOnlyList CryptoMaterials, + IReadOnlyList RuleBundles, + IReadOnlyList? Timestamps = null, + IReadOnlyList? Artifacts = null, + bool StrictInlineArtifacts = false, + ICollection? WarningSink = null, + BundleBuilderOptions? ExportOptions = null); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Add.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Add.cs new file mode 100644 index 000000000..6cc7b22b1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Add.cs @@ -0,0 +1,88 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Validation; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task<(BundleArtifact Artifact, long SizeBytes)> AddArtifactAsync( + BundleArtifactBuildConfig config, + string outputPath, + bool strictInlineArtifacts, + ICollection? warningSink, + CancellationToken ct) + { + ArgumentNullException.ThrowIfNull(config); + + if (string.IsNullOrWhiteSpace(config.Type)) + { + throw new ArgumentException("Artifact type is required.", nameof(config)); + } + + var hasSourcePath = !string.IsNullOrWhiteSpace(config.SourcePath); + var hasContent = config.Content is { Length: > 0 }; + if (!hasSourcePath && !hasContent) + { + throw new ArgumentException("Artifact content or source path is required.", nameof(config)); + } + + string? 
relativePath = string.IsNullOrWhiteSpace(config.RelativePath) ? null : config.RelativePath; + if (!string.IsNullOrWhiteSpace(relativePath) && !PathValidation.IsSafeRelativePath(relativePath)) + { + throw new ArgumentException($"Invalid relative path: {relativePath}", nameof(config)); + } + + string digest; + long sizeBytes; + + if (hasSourcePath) + { + var sourcePath = Path.GetFullPath(config.SourcePath!); + if (!File.Exists(sourcePath)) + { + throw new FileNotFoundException("Artifact source file not found.", sourcePath); + } + + var info = new FileInfo(sourcePath); + sizeBytes = info.Length; + digest = await ComputeSha256DigestAsync(sourcePath, ct).ConfigureAwait(false); + relativePath = ApplyInlineSizeGuard( + relativePath, + config, + digest, + sizeBytes, + strictInlineArtifacts, + warningSink); + + if (!string.IsNullOrWhiteSpace(relativePath)) + { + var targetPath = PathValidation.SafeCombine(outputPath, relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); + File.Copy(sourcePath, targetPath, overwrite: true); + } + } + else + { + var content = config.Content ?? Array.Empty(); + sizeBytes = content.Length; + digest = ComputeSha256Digest(content); + relativePath = ApplyInlineSizeGuard( + relativePath, + config, + digest, + sizeBytes, + strictInlineArtifacts, + warningSink); + + if (!string.IsNullOrWhiteSpace(relativePath)) + { + var targetPath = PathValidation.SafeCombine(outputPath, relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); + await File.WriteAllBytesAsync(targetPath, content, ct).ConfigureAwait(false); + } + } + + var artifact = new BundleArtifact(relativePath, config.Type, config.ContentType, digest, sizeBytes); + return (artifact, sizeBytes); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Guards.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Guards.cs new file mode 100644 index 000000000..a25606c9e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Guards.cs @@ -0,0 +1,57 @@ +using StellaOps.AirGap.Bundle.Validation; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static string? ApplyInlineSizeGuard( + string? relativePath, + BundleArtifactBuildConfig config, + string digest, + long sizeBytes, + bool strictInlineArtifacts, + ICollection? warningSink) + { + if (!string.IsNullOrWhiteSpace(relativePath)) + { + return relativePath; + } + + if (!BundleSizeValidator.RequiresExternalization(sizeBytes)) + { + return null; + } + + var warning = BundleSizeValidator.GetInlineSizeWarning(sizeBytes) + ?? "Inline artifact size exceeds the maximum allowed size."; + + if (strictInlineArtifacts) + { + throw new InvalidOperationException(warning); + } + + warningSink?.Add(warning); + + var fileName = string.IsNullOrWhiteSpace(config.FileName) + ? BuildInlineFallbackName(config.Type, digest) + : EnsureSafeFileName(config.FileName); + + var fallbackPath = $"artifacts/{fileName}"; + if (!PathValidation.IsSafeRelativePath(fallbackPath)) + { + throw new ArgumentException($"Invalid artifact fallback path: {fallbackPath}", nameof(config)); + } + + return fallbackPath; + } + + private static string BuildInlineFallbackName(string type, string digest) + { + var normalizedType = SanitizeFileSegment(type); + var digestValue = digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? digest[7..] 
+ : digest; + var shortDigest = digestValue.Length > 12 ? digestValue[..12] : digestValue; + return $"{normalizedType}-{shortDigest}.blob"; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Hashing.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Hashing.cs new file mode 100644 index 000000000..72d75a5a4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Hashing.cs @@ -0,0 +1,19 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task<string> ComputeSha256DigestAsync(string filePath, CancellationToken ct) + { + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static string ComputeSha256Digest(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Naming.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Naming.cs new file mode 100644 index 000000000..5d7031266 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.Naming.cs @@ -0,0 +1,46 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static string SanitizeFileSegment(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "artifact"; + } + + var buffer = new char[value.Length]; + var index = 0; + foreach (var ch in value) + { + if (char.IsLetterOrDigit(ch) || ch == '-' || ch == '_') + { + buffer[index++] = ch; + } + else + { + buffer[index++] = '-'; + } + } + + var cleaned = new string(buffer, 0, index).Trim('-'); + return string.IsNullOrWhiteSpace(cleaned) ? "artifact" : cleaned; + } + + private static string EnsureSafeFileName(string fileName) + { + if (string.IsNullOrWhiteSpace(fileName)) + { + throw new ArgumentException("Artifact file name is required."); + } + + if (fileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0 || + fileName.Contains('/') || + fileName.Contains('\\')) + { + throw new ArgumentException($"Invalid artifact file name: {fileName}"); + } + + return fileName; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.cs new file mode 100644 index 000000000..14b4dc62b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Artifacts.cs @@ -0,0 +1,29 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task<ImmutableArray<BundleArtifact>> BuildArtifactsAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var artifacts = new List<BundleArtifact>(); + var artifactConfigs = request.Artifacts ??
Array.Empty<BundleArtifactBuildConfig>(); + foreach (var artifactConfig in artifactConfigs) + { + var (artifact, _) = await AddArtifactAsync( + artifactConfig, + outputPath, + request.StrictInlineArtifacts, + request.WarningSink, + ct).ConfigureAwait(false); + artifacts.Add(artifact); + } + + return artifacts.ToImmutableArray(); + } + +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Build.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Build.cs new file mode 100644 index 000000000..a2d915f25 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Build.cs @@ -0,0 +1,51 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + public async Task<BundleManifest> BuildAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct = default) + { + Directory.CreateDirectory(outputPath); + + var feeds = await BuildFeedComponentsAsync(request, outputPath, ct).ConfigureAwait(false); + var policies = await BuildPolicyComponentsAsync(request, outputPath, ct).ConfigureAwait(false); + var cryptoMaterials = await BuildCryptoComponentsAsync(request, outputPath, ct).ConfigureAwait(false); + var ruleBundles = await BuildRuleBundlesAsync(request, outputPath, ct).ConfigureAwait(false); + var (timestamps, timestampSizeBytes) = await BuildTimestampsAsync(request, outputPath, ct).ConfigureAwait(false); + var artifacts = await BuildArtifactsAsync(request, outputPath, ct).ConfigureAwait(false); + + var artifactsSizeBytes = artifacts.Sum(a => a.SizeBytes ?? 0); + var totalSize = feeds.Sum(f => f.SizeBytes) + + policies.Sum(p => p.SizeBytes) + + cryptoMaterials.Sum(c => c.SizeBytes) + + ruleBundles.Sum(r => r.SizeBytes) + + timestampSizeBytes + + artifactsSizeBytes; + + var exportMode = request.ExportOptions?.Mode ??
BundleExportMode.Light; + var manifest = new BundleManifest + { + BundleId = _guidProvider.NewGuid().ToString(), + SchemaVersion = "1.0.0", + Name = request.Name, + Version = request.Version, + CreatedAt = _timeProvider.GetUtcNow(), + ExpiresAt = request.ExpiresAt, + Feeds = feeds, + Policies = policies, + CryptoMaterials = cryptoMaterials, + RuleBundles = ruleBundles, + Timestamps = timestamps, + Artifacts = artifacts, + ExportMode = exportMode.ToString().ToLowerInvariant(), + TotalSizeBytes = totalSize + }; + + return BundleManifestSerializer.WithDigest(manifest); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Components.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Components.cs new file mode 100644 index 000000000..fa2c7872e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Components.cs @@ -0,0 +1,83 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private async Task> BuildFeedComponentsAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var feeds = new List(); + foreach (var feedConfig in request.Feeds) + { + // Validate relative path before combining + var targetPath = PathValidation.SafeCombine(outputPath, feedConfig.RelativePath); + + var component = await CopyComponentAsync(feedConfig, outputPath, targetPath, ct).ConfigureAwait(false); + feeds.Add(new FeedComponent( + feedConfig.FeedId, + feedConfig.Name, + feedConfig.Version, + component.RelativePath, + component.Digest, + component.SizeBytes, + feedConfig.SnapshotAt, + feedConfig.Format)); + } + + return feeds.ToImmutableArray(); + } + + private async Task> BuildPolicyComponentsAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var policies = new List(); + foreach (var policyConfig in request.Policies) + { + // Validate relative path before combining + var targetPath = PathValidation.SafeCombine(outputPath, policyConfig.RelativePath); + + var component = await CopyComponentAsync(policyConfig, outputPath, targetPath, ct).ConfigureAwait(false); + policies.Add(new PolicyComponent( + policyConfig.PolicyId, + policyConfig.Name, + policyConfig.Version, + component.RelativePath, + component.Digest, + component.SizeBytes, + policyConfig.Type)); + } + + return policies.ToImmutableArray(); + } + + private async Task> BuildCryptoComponentsAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var cryptoMaterials = new List(); + foreach (var cryptoConfig in request.CryptoMaterials) + { + // Validate relative path before combining + var targetPath = PathValidation.SafeCombine(outputPath, cryptoConfig.RelativePath); + + var component = await CopyComponentAsync(cryptoConfig, outputPath, targetPath, ct).ConfigureAwait(false); + cryptoMaterials.Add(new CryptoComponent( + cryptoConfig.ComponentId, + cryptoConfig.Name, + component.RelativePath, + component.Digest, + component.SizeBytes, + cryptoConfig.Type, + cryptoConfig.ExpiresAt)); + } + + return cryptoMaterials.ToImmutableArray(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Copy.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Copy.cs new file mode 100644 index 000000000..bc078c848 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Copy.cs @@ 
-0,0 +1,30 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task CopyComponentAsync( + BundleComponentSource source, + string outputPath, + string targetPath, + CancellationToken ct) + { + Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); + + await using (var input = File.OpenRead(source.SourcePath)) + await using (var output = File.Create(targetPath)) + { + await input.CopyToAsync(output, ct).ConfigureAwait(false); + } + + await using var digestStream = File.OpenRead(targetPath); + var hash = await SHA256.HashDataAsync(digestStream, ct).ConfigureAwait(false); + var digest = Convert.ToHexString(hash).ToLowerInvariant(); + + var info = new FileInfo(targetPath); + return new CopiedComponent(source.RelativePath, digest, info.Length); + } + + private sealed record CopiedComponent(string RelativePath, string Digest, long SizeBytes); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.RuleBundles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.RuleBundles.cs new file mode 100644 index 000000000..cd8fc7a97 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.RuleBundles.cs @@ -0,0 +1,71 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task> BuildRuleBundlesAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var ruleBundles = new List(); + foreach (var ruleBundleConfig in request.RuleBundles) + { + // Validate relative path before combining + var targetDir = PathValidation.SafeCombine(outputPath, ruleBundleConfig.RelativePath); + Directory.CreateDirectory(targetDir); + + var files = new List(); + long bundleTotalSize = 0; + var digestBuilder = new StringBuilder(); + + // Copy all files from source directory + if (Directory.Exists(ruleBundleConfig.SourceDirectory)) + { + foreach (var sourceFile in Directory.GetFiles(ruleBundleConfig.SourceDirectory) + .OrderBy(f => Path.GetFileName(f), StringComparer.Ordinal)) + { + var fileName = Path.GetFileName(sourceFile); + var targetFile = Path.Combine(targetDir, fileName); + + await using (var input = File.OpenRead(sourceFile)) + await using (var output = File.Create(targetFile)) + { + await input.CopyToAsync(output, ct).ConfigureAwait(false); + } + + await using var digestStream = File.OpenRead(targetFile); + var hash = await SHA256.HashDataAsync(digestStream, ct).ConfigureAwait(false); + var fileDigest = Convert.ToHexString(hash).ToLowerInvariant(); + + var fileInfo = new FileInfo(targetFile); + files.Add(new RuleBundleFileComponent(fileName, fileDigest, fileInfo.Length)); + bundleTotalSize += fileInfo.Length; + digestBuilder.Append(fileDigest); + } + } + + // Compute combined digest from all file digests + var combinedDigest = Convert.ToHexString( + SHA256.HashData(Encoding.UTF8.GetBytes(digestBuilder.ToString()))).ToLowerInvariant(); + + ruleBundles.Add(new RuleBundleComponent( + ruleBundleConfig.BundleId, + ruleBundleConfig.BundleType, + ruleBundleConfig.Version, + ruleBundleConfig.RelativePath, + combinedDigest, + bundleTotalSize, + ruleBundleConfig.RuleCount, + ruleBundleConfig.SignerKeyId, + ruleBundleConfig.SignedAt, + files.ToImmutableArray())); + } + + return 
ruleBundles.ToImmutableArray(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Copy.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Copy.cs new file mode 100644 index 000000000..a3dd99848 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Copy.cs @@ -0,0 +1,25 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task CopyTimestampFileAsync( + string sourcePath, + string relativePath, + string outputPath, + CancellationToken ct) + { + var targetPath = PathValidation.SafeCombine(outputPath, relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); + + await using (var input = File.OpenRead(sourcePath)) + await using (var output = File.Create(targetPath)) + { + await input.CopyToAsync(output, ct).ConfigureAwait(false); + } + + var info = new FileInfo(targetPath); + return new CopiedTimestampComponent(relativePath, info.Length); + } + + private sealed record CopiedTimestampComponent(string RelativePath, long SizeBytes); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Revocation.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Revocation.cs new file mode 100644 index 000000000..4cc912506 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Revocation.cs @@ -0,0 +1,48 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private static async Task<(ImmutableArray Paths, long SizeBytes)> WriteRevocationBlobsAsync( + string baseDir, + string extension, + string prefix, + IReadOnlyList blobs, + string outputPath, + CancellationToken ct) + { + if (blobs.Count == 0) + { + return ([], 0); + } + + var paths = new List(blobs.Count); + long totalSize = 0; + + foreach (var blob in blobs + .OrderBy(b => b.CertificateIndex) + .ThenBy(b => ComputeShortHash(b.Data), StringComparer.Ordinal)) + { + var hash = ComputeShortHash(blob.Data); + var fileName = $"{prefix}-{blob.CertificateIndex:D2}-{hash}.{extension}"; + var relativePath = $"{baseDir}/{fileName}"; + var targetPath = PathValidation.SafeCombine(outputPath, relativePath); + + Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? 
outputPath); + await File.WriteAllBytesAsync(targetPath, blob.Data, ct).ConfigureAwait(false); + + totalSize += blob.Data.Length; + paths.Add(relativePath); + } + + return (paths.ToImmutableArray(), totalSize); + } + + private static string ComputeShortHash(byte[] data) + { + var hash = SHA256.HashData(data); + return Convert.ToHexString(hash).ToLowerInvariant()[..16]; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Rfc3161.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Rfc3161.cs new file mode 100644 index 000000000..e6c4f7289 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.Rfc3161.cs @@ -0,0 +1,55 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private async Task<(Rfc3161TimestampEntry Entry, long SizeBytes)> BuildRfc3161TimestampAsync( + Rfc3161TimestampBuildConfig config, + string outputPath, + CancellationToken ct) + { + if (config.TimeStampToken is not { Length: > 0 }) + { + throw new ArgumentException("RFC3161 timestamp token is required.", nameof(config)); + } + + var tokenHash = SHA256.HashData(config.TimeStampToken); + var tokenPrefix = Convert.ToHexString(tokenHash).ToLowerInvariant()[..12]; + + var chainResult = await _tsaChainBundler.BundleAsync( + config.TimeStampToken, + outputPath, + tokenPrefix, + ct).ConfigureAwait(false); + + var ocspBlobs = await _ocspFetcher.FetchAsync(chainResult.Certificates, ct).ConfigureAwait(false); + var (ocspPaths, ocspSizeBytes) = await WriteRevocationBlobsAsync( + "tsa/ocsp", + "der", + tokenPrefix, + ocspBlobs, + outputPath, + ct).ConfigureAwait(false); + + var crlBlobs = await _crlFetcher.FetchAsync(chainResult.Certificates, ct).ConfigureAwait(false); + var (crlPaths, crlSizeBytes) = await WriteRevocationBlobsAsync( + "tsa/crl", + "crl", + tokenPrefix, + crlBlobs, + outputPath, + ct).ConfigureAwait(false); + + var entry = new Rfc3161TimestampEntry + { + TsaChainPaths = chainResult.ChainPaths, + OcspBlobs = ocspPaths, + CrlBlobs = crlPaths, + TstBase64 = Convert.ToBase64String(config.TimeStampToken) + }; + + return (entry, chainResult.TotalSizeBytes + ocspSizeBytes + crlSizeBytes); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.cs new file mode 100644 index 000000000..6ee51c523 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.Timestamps.cs @@ -0,0 +1,50 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class BundleBuilder +{ + private async Task<(ImmutableArray Entries, long SizeBytes)> BuildTimestampsAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct) + { + var timestamps = new List(); + long timestampSizeBytes = 0; + var timestampConfigs = request.Timestamps ?? 
Array.Empty(); + + foreach (var timestampConfig in timestampConfigs) + { + switch (timestampConfig) + { + case Rfc3161TimestampBuildConfig rfc3161: + var (rfcEntry, rfcSizeBytes) = await BuildRfc3161TimestampAsync( + rfc3161, + outputPath, + ct).ConfigureAwait(false); + timestamps.Add(rfcEntry); + timestampSizeBytes += rfcSizeBytes; + break; + case EidasQtsTimestampBuildConfig eidas: + var qtsComponent = await CopyTimestampFileAsync( + eidas.SourcePath, + eidas.RelativePath, + outputPath, + ct).ConfigureAwait(false); + timestamps.Add(new EidasQtsTimestampEntry + { + QtsMetaPath = qtsComponent.RelativePath + }); + timestampSizeBytes += qtsComponent.SizeBytes; + break; + default: + throw new NotSupportedException( + $"Unsupported timestamp build config type '{timestampConfig.GetType().Name}'."); + } + } + + return (timestamps.ToImmutableArray(), timestampSizeBytes); + } + +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.cs index 326f5ed4c..c8563868a 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleBuilder.cs @@ -1,14 +1,8 @@ - using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Validation; -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; namespace StellaOps.AirGap.Bundle.Services; -public sealed class BundleBuilder : IBundleBuilder +public sealed partial class BundleBuilder : IBundleBuilder { private readonly TimeProvider _timeProvider; private readonly IGuidProvider _guidProvider; @@ -38,603 +32,4 @@ public sealed class BundleBuilder : IBundleBuilder _ocspFetcher = ocspFetcher ?? new OcspResponseFetcher(); _crlFetcher = crlFetcher ?? 
new CrlFetcher(); } - - public async Task BuildAsync( - BundleBuildRequest request, - string outputPath, - CancellationToken ct = default) - { - Directory.CreateDirectory(outputPath); - - var feeds = new List(); - var policies = new List(); - var cryptoMaterials = new List(); - - foreach (var feedConfig in request.Feeds) - { - // Validate relative path before combining - var targetPath = PathValidation.SafeCombine(outputPath, feedConfig.RelativePath); - - var component = await CopyComponentAsync(feedConfig, outputPath, targetPath, ct).ConfigureAwait(false); - feeds.Add(new FeedComponent( - feedConfig.FeedId, - feedConfig.Name, - feedConfig.Version, - component.RelativePath, - component.Digest, - component.SizeBytes, - feedConfig.SnapshotAt, - feedConfig.Format)); - } - - foreach (var policyConfig in request.Policies) - { - // Validate relative path before combining - var targetPath = PathValidation.SafeCombine(outputPath, policyConfig.RelativePath); - - var component = await CopyComponentAsync(policyConfig, outputPath, targetPath, ct).ConfigureAwait(false); - policies.Add(new PolicyComponent( - policyConfig.PolicyId, - policyConfig.Name, - policyConfig.Version, - component.RelativePath, - component.Digest, - component.SizeBytes, - policyConfig.Type)); - } - - foreach (var cryptoConfig in request.CryptoMaterials) - { - // Validate relative path before combining - var targetPath = PathValidation.SafeCombine(outputPath, cryptoConfig.RelativePath); - - var component = await CopyComponentAsync(cryptoConfig, outputPath, targetPath, ct).ConfigureAwait(false); - cryptoMaterials.Add(new CryptoComponent( - cryptoConfig.ComponentId, - cryptoConfig.Name, - component.RelativePath, - component.Digest, - component.SizeBytes, - cryptoConfig.Type, - cryptoConfig.ExpiresAt)); - } - - var ruleBundles = new List(); - foreach (var ruleBundleConfig in request.RuleBundles) - { - // Validate relative path before combining - var targetDir = PathValidation.SafeCombine(outputPath, ruleBundleConfig.RelativePath); - Directory.CreateDirectory(targetDir); - - var files = new List(); - long bundleTotalSize = 0; - var digestBuilder = new System.Text.StringBuilder(); - - // Copy all files from source directory - if (Directory.Exists(ruleBundleConfig.SourceDirectory)) - { - foreach (var sourceFile in Directory.GetFiles(ruleBundleConfig.SourceDirectory) - .OrderBy(f => Path.GetFileName(f), StringComparer.Ordinal)) - { - var fileName = Path.GetFileName(sourceFile); - var targetFile = Path.Combine(targetDir, fileName); - - await using (var input = File.OpenRead(sourceFile)) - await using (var output = File.Create(targetFile)) - { - await input.CopyToAsync(output, ct).ConfigureAwait(false); - } - - await using var digestStream = File.OpenRead(targetFile); - var hash = await SHA256.HashDataAsync(digestStream, ct).ConfigureAwait(false); - var fileDigest = Convert.ToHexString(hash).ToLowerInvariant(); - - var fileInfo = new FileInfo(targetFile); - files.Add(new RuleBundleFileComponent(fileName, fileDigest, fileInfo.Length)); - bundleTotalSize += fileInfo.Length; - digestBuilder.Append(fileDigest); - } - } - - // Compute combined digest from all file digests - var combinedDigest = Convert.ToHexString( - SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(digestBuilder.ToString()))).ToLowerInvariant(); - - ruleBundles.Add(new RuleBundleComponent( - ruleBundleConfig.BundleId, - ruleBundleConfig.BundleType, - ruleBundleConfig.Version, - ruleBundleConfig.RelativePath, - combinedDigest, - bundleTotalSize, - ruleBundleConfig.RuleCount, - 
ruleBundleConfig.SignerKeyId, - ruleBundleConfig.SignedAt, - files.ToImmutableArray())); - } - - var timestamps = new List(); - long timestampSizeBytes = 0; - var timestampConfigs = request.Timestamps ?? Array.Empty(); - foreach (var timestampConfig in timestampConfigs) - { - switch (timestampConfig) - { - case Rfc3161TimestampBuildConfig rfc3161: - var (rfcEntry, rfcSizeBytes) = await BuildRfc3161TimestampAsync( - rfc3161, - outputPath, - ct).ConfigureAwait(false); - timestamps.Add(rfcEntry); - timestampSizeBytes += rfcSizeBytes; - break; - case EidasQtsTimestampBuildConfig eidas: - var qtsComponent = await CopyTimestampFileAsync( - eidas.SourcePath, - eidas.RelativePath, - outputPath, - ct).ConfigureAwait(false); - timestamps.Add(new EidasQtsTimestampEntry - { - QtsMetaPath = qtsComponent.RelativePath - }); - timestampSizeBytes += qtsComponent.SizeBytes; - break; - default: - throw new NotSupportedException( - $"Unsupported timestamp build config type '{timestampConfig.GetType().Name}'."); - } - } - - var artifacts = new List(); - long artifactsSizeBytes = 0; - var artifactConfigs = request.Artifacts ?? Array.Empty(); - foreach (var artifactConfig in artifactConfigs) - { - var (artifact, sizeBytes) = await AddArtifactAsync( - artifactConfig, - outputPath, - request.StrictInlineArtifacts, - request.WarningSink, - ct).ConfigureAwait(false); - artifacts.Add(artifact); - artifactsSizeBytes += sizeBytes; - } - - var totalSize = feeds.Sum(f => f.SizeBytes) + - policies.Sum(p => p.SizeBytes) + - cryptoMaterials.Sum(c => c.SizeBytes) + - ruleBundles.Sum(r => r.SizeBytes) + - timestampSizeBytes + - artifactsSizeBytes; - - var exportMode = request.ExportOptions?.Mode ?? BundleExportMode.Light; - var manifest = new BundleManifest - { - BundleId = _guidProvider.NewGuid().ToString(), - SchemaVersion = "1.0.0", - Name = request.Name, - Version = request.Version, - CreatedAt = _timeProvider.GetUtcNow(), - ExpiresAt = request.ExpiresAt, - Feeds = feeds.ToImmutableArray(), - Policies = policies.ToImmutableArray(), - CryptoMaterials = cryptoMaterials.ToImmutableArray(), - RuleBundles = ruleBundles.ToImmutableArray(), - Timestamps = timestamps.ToImmutableArray(), - Artifacts = artifacts.ToImmutableArray(), - ExportMode = exportMode.ToString().ToLowerInvariant(), - TotalSizeBytes = totalSize - }; - - return BundleManifestSerializer.WithDigest(manifest); - } - - private static async Task<(BundleArtifact Artifact, long SizeBytes)> AddArtifactAsync( - BundleArtifactBuildConfig config, - string outputPath, - bool strictInlineArtifacts, - ICollection? warningSink, - CancellationToken ct) - { - ArgumentNullException.ThrowIfNull(config); - - if (string.IsNullOrWhiteSpace(config.Type)) - { - throw new ArgumentException("Artifact type is required.", nameof(config)); - } - - var hasSourcePath = !string.IsNullOrWhiteSpace(config.SourcePath); - var hasContent = config.Content is { Length: > 0 }; - if (!hasSourcePath && !hasContent) - { - throw new ArgumentException("Artifact content or source path is required.", nameof(config)); - } - - string? relativePath = string.IsNullOrWhiteSpace(config.RelativePath) ? 
null : config.RelativePath; - if (!string.IsNullOrWhiteSpace(relativePath) && !PathValidation.IsSafeRelativePath(relativePath)) - { - throw new ArgumentException($"Invalid relative path: {relativePath}", nameof(config)); - } - - string digest; - long sizeBytes; - - if (hasSourcePath) - { - var sourcePath = Path.GetFullPath(config.SourcePath!); - if (!File.Exists(sourcePath)) - { - throw new FileNotFoundException("Artifact source file not found.", sourcePath); - } - - var info = new FileInfo(sourcePath); - sizeBytes = info.Length; - digest = await ComputeSha256DigestAsync(sourcePath, ct).ConfigureAwait(false); - relativePath = ApplyInlineSizeGuard( - relativePath, - config, - digest, - sizeBytes, - strictInlineArtifacts, - warningSink); - - if (!string.IsNullOrWhiteSpace(relativePath)) - { - var targetPath = PathValidation.SafeCombine(outputPath, relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); - File.Copy(sourcePath, targetPath, overwrite: true); - } - } - else - { - var content = config.Content ?? Array.Empty(); - sizeBytes = content.Length; - digest = ComputeSha256Digest(content); - relativePath = ApplyInlineSizeGuard( - relativePath, - config, - digest, - sizeBytes, - strictInlineArtifacts, - warningSink); - - if (!string.IsNullOrWhiteSpace(relativePath)) - { - var targetPath = PathValidation.SafeCombine(outputPath, relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); - await File.WriteAllBytesAsync(targetPath, content, ct).ConfigureAwait(false); - } - } - - var artifact = new BundleArtifact(relativePath, config.Type, config.ContentType, digest, sizeBytes); - return (artifact, sizeBytes); - } - - private static string? ApplyInlineSizeGuard( - string? relativePath, - BundleArtifactBuildConfig config, - string digest, - long sizeBytes, - bool strictInlineArtifacts, - ICollection? warningSink) - { - if (!string.IsNullOrWhiteSpace(relativePath)) - { - return relativePath; - } - - if (!BundleSizeValidator.RequiresExternalization(sizeBytes)) - { - return null; - } - - var warning = BundleSizeValidator.GetInlineSizeWarning(sizeBytes) - ?? "Inline artifact size exceeds the maximum allowed size."; - - if (strictInlineArtifacts) - { - throw new InvalidOperationException(warning); - } - - warningSink?.Add(warning); - - var fileName = string.IsNullOrWhiteSpace(config.FileName) - ? BuildInlineFallbackName(config.Type, digest) - : EnsureSafeFileName(config.FileName); - - var fallbackPath = $"artifacts/{fileName}"; - if (!PathValidation.IsSafeRelativePath(fallbackPath)) - { - throw new ArgumentException($"Invalid artifact fallback path: {fallbackPath}", nameof(config)); - } - - return fallbackPath; - } - - private static string BuildInlineFallbackName(string type, string digest) - { - var normalizedType = SanitizeFileSegment(type); - var digestValue = digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? digest[7..] - : digest; - var shortDigest = digestValue.Length > 12 ? 
digestValue[..12] : digestValue; - return $"{normalizedType}-{shortDigest}.blob"; - } - - private static string SanitizeFileSegment(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return "artifact"; - } - - var buffer = new char[value.Length]; - var index = 0; - foreach (var ch in value) - { - if (char.IsLetterOrDigit(ch) || ch == '-' || ch == '_') - { - buffer[index++] = ch; - } - else - { - buffer[index++] = '-'; - } - } - - var cleaned = new string(buffer, 0, index).Trim('-'); - return string.IsNullOrWhiteSpace(cleaned) ? "artifact" : cleaned; - } - - private static string EnsureSafeFileName(string fileName) - { - if (string.IsNullOrWhiteSpace(fileName)) - { - throw new ArgumentException("Artifact file name is required."); - } - - if (fileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0 || - fileName.Contains('/') || - fileName.Contains('\\')) - { - throw new ArgumentException($"Invalid artifact file name: {fileName}"); - } - - return fileName; - } - - private static async Task ComputeSha256DigestAsync(string filePath, CancellationToken ct) - { - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static string ComputeSha256Digest(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static async Task CopyComponentAsync( - BundleComponentSource source, - string outputPath, - string targetPath, - CancellationToken ct) - { - Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); - - await using (var input = File.OpenRead(source.SourcePath)) - await using (var output = File.Create(targetPath)) - { - await input.CopyToAsync(output, ct).ConfigureAwait(false); - } - - await using var digestStream = File.OpenRead(targetPath); - var hash = await SHA256.HashDataAsync(digestStream, ct).ConfigureAwait(false); - var digest = Convert.ToHexString(hash).ToLowerInvariant(); - - var info = new FileInfo(targetPath); - return new CopiedComponent(source.RelativePath, digest, info.Length); - } - - private async Task<(Rfc3161TimestampEntry Entry, long SizeBytes)> BuildRfc3161TimestampAsync( - Rfc3161TimestampBuildConfig config, - string outputPath, - CancellationToken ct) - { - if (config.TimeStampToken is not { Length: > 0 }) - { - throw new ArgumentException("RFC3161 timestamp token is required.", nameof(config)); - } - - var tokenHash = SHA256.HashData(config.TimeStampToken); - var tokenPrefix = Convert.ToHexString(tokenHash).ToLowerInvariant()[..12]; - - var chainResult = await _tsaChainBundler.BundleAsync( - config.TimeStampToken, - outputPath, - tokenPrefix, - ct).ConfigureAwait(false); - - var ocspBlobs = await _ocspFetcher.FetchAsync(chainResult.Certificates, ct).ConfigureAwait(false); - var (ocspPaths, ocspSizeBytes) = await WriteRevocationBlobsAsync( - "tsa/ocsp", - "der", - tokenPrefix, - ocspBlobs, - outputPath, - ct).ConfigureAwait(false); - - var crlBlobs = await _crlFetcher.FetchAsync(chainResult.Certificates, ct).ConfigureAwait(false); - var (crlPaths, crlSizeBytes) = await WriteRevocationBlobsAsync( - "tsa/crl", - "crl", - tokenPrefix, - crlBlobs, - outputPath, - ct).ConfigureAwait(false); - - var entry = new Rfc3161TimestampEntry - { - TsaChainPaths = chainResult.ChainPaths, - OcspBlobs = ocspPaths, - CrlBlobs = crlPaths, - TstBase64 = Convert.ToBase64String(config.TimeStampToken) - }; - - return (entry, 
chainResult.TotalSizeBytes + ocspSizeBytes + crlSizeBytes); - } - - private static async Task<(ImmutableArray Paths, long SizeBytes)> WriteRevocationBlobsAsync( - string baseDir, - string extension, - string prefix, - IReadOnlyList blobs, - string outputPath, - CancellationToken ct) - { - if (blobs.Count == 0) - { - return ([], 0); - } - - var paths = new List(blobs.Count); - long totalSize = 0; - - foreach (var blob in blobs - .OrderBy(b => b.CertificateIndex) - .ThenBy(b => ComputeShortHash(b.Data), StringComparer.Ordinal)) - { - var hash = ComputeShortHash(blob.Data); - var fileName = $"{prefix}-{blob.CertificateIndex:D2}-{hash}.{extension}"; - var relativePath = $"{baseDir}/{fileName}"; - var targetPath = PathValidation.SafeCombine(outputPath, relativePath); - - Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); - await File.WriteAllBytesAsync(targetPath, blob.Data, ct).ConfigureAwait(false); - - totalSize += blob.Data.Length; - paths.Add(relativePath); - } - - return (paths.ToImmutableArray(), totalSize); - } - - private static string ComputeShortHash(byte[] data) - { - var hash = SHA256.HashData(data); - return Convert.ToHexString(hash).ToLowerInvariant()[..16]; - } - - private static async Task CopyTimestampFileAsync( - string sourcePath, - string relativePath, - string outputPath, - CancellationToken ct) - { - var targetPath = PathValidation.SafeCombine(outputPath, relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(targetPath) ?? outputPath); - - await using (var input = File.OpenRead(sourcePath)) - await using (var output = File.Create(targetPath)) - { - await input.CopyToAsync(output, ct).ConfigureAwait(false); - } - - var info = new FileInfo(targetPath); - return new CopiedTimestampComponent(relativePath, info.Length); - } - - private sealed record CopiedComponent(string RelativePath, string Digest, long SizeBytes); - private sealed record CopiedTimestampComponent(string RelativePath, long SizeBytes); } - -public interface IBundleBuilder -{ - Task BuildAsync(BundleBuildRequest request, string outputPath, CancellationToken ct = default); -} - -public sealed record BundleBuildRequest( - string Name, - string Version, - DateTimeOffset? ExpiresAt, - IReadOnlyList Feeds, - IReadOnlyList Policies, - IReadOnlyList CryptoMaterials, - IReadOnlyList RuleBundles, - IReadOnlyList? Timestamps = null, - IReadOnlyList? Artifacts = null, - bool StrictInlineArtifacts = false, - ICollection? WarningSink = null, - BundleBuilderOptions? ExportOptions = null); - -public abstract record BundleComponentSource(string SourcePath, string RelativePath); - -public sealed record FeedBuildConfig( - string FeedId, - string Name, - string Version, - string SourcePath, - string RelativePath, - DateTimeOffset SnapshotAt, - FeedFormat Format) - : BundleComponentSource(SourcePath, RelativePath); - -public sealed record PolicyBuildConfig( - string PolicyId, - string Name, - string Version, - string SourcePath, - string RelativePath, - PolicyType Type) - : BundleComponentSource(SourcePath, RelativePath); - -public sealed record CryptoBuildConfig( - string ComponentId, - string Name, - string SourcePath, - string RelativePath, - CryptoComponentType Type, - DateTimeOffset? 
ExpiresAt) - : BundleComponentSource(SourcePath, RelativePath); - -public abstract record TimestampBuildConfig; - -public sealed record Rfc3161TimestampBuildConfig(byte[] TimeStampToken) - : TimestampBuildConfig; - -public sealed record EidasQtsTimestampBuildConfig(string SourcePath, string RelativePath) - : TimestampBuildConfig; - -public sealed record BundleArtifactBuildConfig -{ - public required string Type { get; init; } - public string? ContentType { get; init; } - public string? SourcePath { get; init; } - public byte[]? Content { get; init; } - public string? RelativePath { get; init; } - public string? FileName { get; init; } -} - -/// -/// Configuration for building a rule bundle component. -/// -/// Bundle identifier (e.g., "secrets.ruleset"). -/// Bundle type (e.g., "secrets", "malware"). -/// Bundle version in YYYY.MM format. -/// Source directory containing the rule bundle files. -/// Relative path in the output bundle. -/// Number of rules in the bundle. -/// Key ID used to sign the bundle. -/// When the bundle was signed. -public sealed record RuleBundleBuildConfig( - string BundleId, - string BundleType, - string Version, - string SourceDirectory, - string RelativePath, - int RuleCount, - string? SignerKeyId, - DateTimeOffset? SignedAt); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleComponentSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleComponentSource.cs new file mode 100644 index 000000000..70597531f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleComponentSource.cs @@ -0,0 +1,3 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public abstract record BundleComponentSource(string SourcePath, string RelativePath); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleValidationOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleValidationOptions.cs new file mode 100644 index 000000000..b6e0b0682 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/BundleValidationOptions.cs @@ -0,0 +1,48 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Options for configuring bundle validation behavior. +/// +public sealed class BundleValidationOptions +{ + /// + /// Maximum age in days for feed snapshots before they are flagged as stale. + /// Default is 7 days. + /// + public int MaxFeedAgeDays { get; set; } = 7; + + /// + /// Whether to fail validation on stale feeds or just warn. + /// + public bool FailOnStaleFeed { get; set; } + + /// + /// Whether to validate policy digests. + /// + public bool ValidatePolicies { get; set; } = true; + + /// + /// Whether to validate crypto material digests. + /// + public bool ValidateCryptoMaterials { get; set; } = true; + + /// + /// Whether to validate catalog digests if present. + /// + public bool ValidateCatalogs { get; set; } = true; + + /// + /// Whether to validate Rekor snapshot entries if present. + /// + public bool ValidateRekorSnapshots { get; set; } = true; + + /// + /// Whether to validate crypto provider entries if present. + /// + public bool ValidateCryptoProviders { get; set; } = true; + + /// + /// Whether to validate artifact digests (function maps, observations, verification reports). 
+ /// + public bool ValidateArtifacts { get; set; } = true; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ComponentBuildConfigs.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ComponentBuildConfigs.cs new file mode 100644 index 000000000..cca07bb24 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ComponentBuildConfigs.cs @@ -0,0 +1,31 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record FeedBuildConfig( + string FeedId, + string Name, + string Version, + string SourcePath, + string RelativePath, + DateTimeOffset SnapshotAt, + FeedFormat Format) + : BundleComponentSource(SourcePath, RelativePath); + +public sealed record PolicyBuildConfig( + string PolicyId, + string Name, + string Version, + string SourcePath, + string RelativePath, + PolicyType Type) + : BundleComponentSource(SourcePath, RelativePath); + +public sealed record CryptoBuildConfig( + string ComponentId, + string Name, + string SourcePath, + string RelativePath, + CryptoComponentType Type, + DateTimeOffset? ExpiresAt) + : BundleComponentSource(SourcePath, RelativePath); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.Import.cs new file mode 100644 index 000000000..f1d6bce32 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.Import.cs @@ -0,0 +1,94 @@ +using StellaOps.Concelier.RawModels; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class ConcelierAdvisoryImportTarget +{ + /// + public async Task ImportAdvisoriesAsync( + AdvisoryImportData data, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(data); + + if (data.Content.Length == 0) + { + return new ModuleImportResultData + { + Failed = 1, + Error = "Empty advisory content" + }; + } + + var created = 0; + var updated = 0; + var failed = 0; + var errors = new List(); + + try + { + // Parse NDJSON content - each line is a complete AdvisoryRawDocument. + var contentString = Encoding.UTF8.GetString(data.Content); + var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries); + + foreach (var line in lines) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var document = JsonSerializer.Deserialize(line.Trim(), _jsonOptions); + if (document is null) + { + failed++; + errors.Add("Failed to parse advisory line"); + continue; + } + + var tenantedDocument = document with { Tenant = _tenant }; + var result = await _repository.UpsertAsync(tenantedDocument, cancellationToken) + .ConfigureAwait(false); + + if (result.Inserted) + { + created++; + } + else + { + updated++; + } + } + catch (JsonException ex) + { + failed++; + errors.Add($"JSON parse error: {ex.Message}"); + } + catch (Exception ex) + { + failed++; + errors.Add($"Advisory import error: {ex.Message}"); + } + } + } + catch (Exception ex) + { + return new ModuleImportResultData + { + Created = created, + Updated = updated, + Failed = failed + 1, + Error = $"Import failed: {ex.Message}" + }; + } + + return new ModuleImportResultData + { + Created = created, + Updated = updated, + Failed = failed, + Error = errors.Count > 0 ? 
string.Join("; ", errors.Take(5)) : null + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.cs index 699c003c1..e512c1b3e 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ConcelierAdvisoryImportTarget.cs @@ -1,18 +1,4 @@ -// ----------------------------------------------------------------------------- -// ConcelierAdvisoryImportTarget.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Tasks: SEAL-015 - Apply snapshot advisory content to Concelier database -// Description: Adapter implementing IAdvisoryImportTarget for Concelier module. -// ----------------------------------------------------------------------------- - - -using StellaOps.AirGap.Bundle.Models; using StellaOps.Concelier.Core.Raw; -using StellaOps.Concelier.RawModels; -using StellaOps.Determinism; -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -21,9 +7,9 @@ namespace StellaOps.AirGap.Bundle.Services; /// Implements IAdvisoryImportTarget by adapting to Concelier's IAdvisoryRawRepository. /// Parses NDJSON advisory content and upserts records to the advisory database. /// -public sealed class ConcelierAdvisoryImportTarget : IAdvisoryImportTarget +public sealed partial class ConcelierAdvisoryImportTarget : IAdvisoryImportTarget { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, PropertyNameCaseInsensitive = true @@ -40,231 +26,4 @@ public sealed class ConcelierAdvisoryImportTarget : IAdvisoryImportTarget _tenant = tenant; } - /// - public async Task ImportAdvisoriesAsync( - AdvisoryImportData data, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(data); - - if (data.Content.Length == 0) - { - return new ModuleImportResultData - { - Failed = 1, - Error = "Empty advisory content" - }; - } - - var created = 0; - var updated = 0; - var failed = 0; - var errors = new List(); - - try - { - // Parse NDJSON content - each line is a complete AdvisoryRawDocument - var contentString = Encoding.UTF8.GetString(data.Content); - var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries); - - foreach (var line in lines) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - var document = JsonSerializer.Deserialize(line.Trim(), JsonOptions); - if (document is null) - { - failed++; - errors.Add("Failed to parse advisory line"); - continue; - } - - // Ensure tenant is set correctly - var tenantedDocument = document with { Tenant = _tenant }; - - var result = await _repository.UpsertAsync(tenantedDocument, cancellationToken); - - if (result.Inserted) - { - created++; - } - else - { - updated++; - } - } - catch (JsonException ex) - { - failed++; - errors.Add($"JSON parse error: {ex.Message}"); - } - catch (Exception ex) - { - failed++; - errors.Add($"Advisory import error: {ex.Message}"); - } - } - } - catch (Exception ex) - { - return new ModuleImportResultData - { - Created = created, - Updated = updated, - Failed = failed + 1, - Error = $"Import failed: {ex.Message}" - }; - } - - return new ModuleImportResultData - { - Created = 
created, - Updated = updated, - Failed = failed, - Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null - }; - } -} - -/// -/// Lightweight in-memory implementation of IAdvisoryRawRepository for air-gap scenarios. -/// Used when direct database access is unavailable. -/// -public sealed class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository -{ - private readonly Dictionary _records = new(); - private readonly object _lock = new(); - private readonly TimeProvider _timeProvider; - private readonly IGuidProvider _guidProvider; - - public InMemoryAdvisoryRawRepository( - TimeProvider? timeProvider = null, - IGuidProvider? guidProvider = null) - { - _timeProvider = timeProvider ?? TimeProvider.System; - _guidProvider = guidProvider ?? SystemGuidProvider.Instance; - } - - public Task UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) - { - var contentHash = ComputeHash(document); - var key = $"{document.Tenant}:{contentHash}"; - var now = _timeProvider.GetUtcNow(); - - lock (_lock) - { - if (_records.TryGetValue(key, out var existing)) - { - return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: false, Record: existing)); - } - - var record = new AdvisoryRawRecord( - Id: _guidProvider.NewGuid().ToString(), - Document: document, - IngestedAt: now, - CreatedAt: now); - - _records[key] = record; - return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: true, Record: record)); - } - } - - public Task FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) - { - lock (_lock) - { - var record = _records.Values.FirstOrDefault(r => r.Document.Tenant == tenant && r.Id == id); - return Task.FromResult(record); - } - } - - public Task QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) - { - lock (_lock) - { - var query = _records.Values.Where(r => r.Document.Tenant == options.Tenant); - - if (!options.Vendors.IsEmpty) - { - query = query.Where(r => options.Vendors.Contains(r.Document.Source.Vendor)); - } - - if (options.Since.HasValue) - { - query = query.Where(r => r.IngestedAt >= options.Since.Value); - } - - var records = query.Take(options.Limit).ToList(); - return Task.FromResult(new AdvisoryRawQueryResult( - Records: records, - NextCursor: records.Count == options.Limit && records.Count > 0 ? 
records[^1].Id : null, - HasMore: records.Count == options.Limit)); - } - } - - public Task> FindByAdvisoryKeyAsync( - string tenant, - IReadOnlyCollection searchValues, - IReadOnlyCollection sourceVendors, - CancellationToken cancellationToken) - { - lock (_lock) - { - var query = _records.Values.Where(r => r.Document.Tenant == tenant); - - if (searchValues.Count > 0) - { - query = query.Where(r => - searchValues.Contains(r.Document.AdvisoryKey) || - r.Document.Identifiers.Aliases.Any(a => searchValues.Contains(a))); - } - - if (sourceVendors.Count > 0) - { - query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor)); - } - - return Task.FromResult>(query.ToList()); - } - } - - public Task> ListForVerificationAsync( - string tenant, - DateTimeOffset since, - DateTimeOffset until, - IReadOnlyCollection sourceVendors, - CancellationToken cancellationToken) - { - lock (_lock) - { - var query = _records.Values - .Where(r => r.Document.Tenant == tenant && r.IngestedAt >= since && r.IngestedAt <= until); - - if (sourceVendors.Count > 0) - { - query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor)); - } - - return Task.FromResult>(query.ToList()); - } - } - - public int Count => _records.Count; - - public IEnumerable GetAllRecords() - { - lock (_lock) - { - return _records.Values.ToList(); - } - } - - private static string ComputeHash(AdvisoryRawDocument document) - { - var json = JsonSerializer.Serialize(document); - var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json)); - return $"sha256:{Convert.ToHexStringLower(bytes)}"; - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Extraction.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Extraction.cs new file mode 100644 index 000000000..83475325b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Extraction.cs @@ -0,0 +1,83 @@ +using System.Formats.Asn1; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class CrlFetcher +{ + private static IReadOnlyList ExtractCrlUris(X509Certificate2 certificate) + { + try + { + var ext = certificate.Extensions.Cast() + .FirstOrDefault(e => e.Oid?.Value == "2.5.29.31"); + if (ext is null) + { + return Array.Empty(); + } + + var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); + var bytes = reader.ReadOctetString(); + var dpReader = new AsnReader(bytes, AsnEncodingRules.DER); + var sequence = dpReader.ReadSequence(); + + var uris = new List(); + while (sequence.HasData) + { + var distributionPoint = sequence.ReadSequence(); + if (!distributionPoint.HasData) + { + continue; + } + + var tag = distributionPoint.PeekTag(); + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) + { + var dpName = distributionPoint.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + if (dpName.HasData) + { + var nameTag = dpName.PeekTag(); + if (nameTag.TagClass == TagClass.ContextSpecific && nameTag.TagValue == 0) + { + var fullName = dpName.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + if (fullName.HasData) + { + var names = fullName.ReadSequence(); + while (names.HasData) + { + var nameTagValue = names.PeekTag(); + if (nameTagValue.TagClass == TagClass.ContextSpecific && + nameTagValue.TagValue == 6) + { + var uriValue = names.ReadCharacterString( + UniversalTagNumber.IA5String, + new Asn1Tag(TagClass.ContextSpecific, 6)); + if (Uri.TryCreate(uriValue, UriKind.Absolute, out var uri)) + { 
+ uris.Add(uri); + } + } + else + { + names.ReadEncodedValue(); + } + } + } + } + } + } + + while (distributionPoint.HasData) + { + distributionPoint.ReadEncodedValue(); + } + } + + return uris; + } + catch + { + return Array.Empty(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Networked.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Networked.cs new file mode 100644 index 000000000..c6324c19e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.Networked.cs @@ -0,0 +1,36 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class CrlFetcher +{ + public static CrlFetcher CreateNetworked(HttpClient? client = null) + { + client ??= _defaultClient; + return new CrlFetcher(async (uri, ct) => + { + using var response = await client.GetAsync(uri, ct).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + return null; + } + + return await response.Content.ReadAsByteArrayAsync(ct).ConfigureAwait(false); + }); + } + + private async Task FetchCachedAsync(Uri uri, CancellationToken ct) + { + var key = uri.ToString(); + if (_cache.TryGetValue(key, out var cached)) + { + return cached; + } + + var data = await _fetcher!(uri, ct).ConfigureAwait(false); + if (data is { Length: > 0 }) + { + _cache[key] = data; + } + + return data; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.cs index 4bdb026f6..bc86b8f15 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/CrlFetcher.cs @@ -1,20 +1,10 @@ - -using System.Formats.Asn1; -using System.Net.Http; using System.Security.Cryptography.X509Certificates; namespace StellaOps.AirGap.Bundle.Services; -public interface ICrlFetcher +public sealed partial class CrlFetcher : ICrlFetcher { - Task> FetchAsync( - IReadOnlyList certificateChain, - CancellationToken ct = default); -} - -public sealed class CrlFetcher : ICrlFetcher -{ - private static readonly HttpClient DefaultClient = new(); + private static readonly HttpClient _defaultClient = new(); private readonly Func>? _fetcher; private readonly Dictionary _cache = new(StringComparer.Ordinal); @@ -23,21 +13,6 @@ public sealed class CrlFetcher : ICrlFetcher _fetcher = fetcher; } - public static CrlFetcher CreateNetworked(HttpClient? 
client = null) - { - client ??= DefaultClient; - return new CrlFetcher(async (uri, ct) => - { - using var response = await client.GetAsync(uri, ct).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - return null; - } - - return await response.Content.ReadAsByteArrayAsync(ct).ConfigureAwait(false); - }); - } - public async Task> FetchAsync( IReadOnlyList certificateChain, CancellationToken ct = default) @@ -65,97 +40,4 @@ public sealed class CrlFetcher : ICrlFetcher return results; } - - private async Task FetchCachedAsync(Uri uri, CancellationToken ct) - { - var key = uri.ToString(); - if (_cache.TryGetValue(key, out var cached)) - { - return cached; - } - - var data = await _fetcher!(uri, ct).ConfigureAwait(false); - if (data is { Length: > 0 }) - { - _cache[key] = data; - } - - return data; - } - - private static IReadOnlyList ExtractCrlUris(X509Certificate2 certificate) - { - try - { - var ext = certificate.Extensions.Cast() - .FirstOrDefault(e => e.Oid?.Value == "2.5.29.31"); - if (ext is null) - { - return Array.Empty(); - } - - var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); - var bytes = reader.ReadOctetString(); - var dpReader = new AsnReader(bytes, AsnEncodingRules.DER); - var sequence = dpReader.ReadSequence(); - - var uris = new List(); - while (sequence.HasData) - { - var distributionPoint = sequence.ReadSequence(); - if (!distributionPoint.HasData) - { - continue; - } - - var tag = distributionPoint.PeekTag(); - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) - { - var dpName = distributionPoint.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - if (dpName.HasData) - { - var nameTag = dpName.PeekTag(); - if (nameTag.TagClass == TagClass.ContextSpecific && nameTag.TagValue == 0) - { - var fullName = dpName.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - if (fullName.HasData) - { - var names = fullName.ReadSequence(); - while (names.HasData) - { - var nameTagValue = names.PeekTag(); - if (nameTagValue.TagClass == TagClass.ContextSpecific && - nameTagValue.TagValue == 6) - { - var uriValue = names.ReadCharacterString( - UniversalTagNumber.IA5String, - new Asn1Tag(TagClass.ContextSpecific, 6)); - if (Uri.TryCreate(uriValue, UriKind.Absolute, out var uri)) - { - uris.Add(uri); - } - } - else - { - names.ReadEncodedValue(); - } - } - } - } - } - } - - while (distributionPoint.HasData) - { - distributionPoint.ReadEncodedValue(); - } - } - - return uris; - } - catch - { - return Array.Empty(); - } - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Document.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Document.cs new file mode 100644 index 000000000..0cf38faee --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Document.cs @@ -0,0 +1,26 @@ +using StellaOps.Excititor.Core; +using System.Collections.Immutable; +using System.Globalization; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class ExcititorVexImportTarget +{ + private static VexRawDocument BuildDocument(VexStatementDto statement, string line, VexImportData data) + { + var contentBytes = Encoding.UTF8.GetBytes(line); + var digest = ComputeDigest(contentBytes); + + return new VexRawDocument( + ProviderId: data.SourceId, + Format: DetectFormat(statement), + SourceUri: statement.SourceUri ?? 
new Uri($"urn:stellaops:airgap:vex:{digest}"), + RetrievedAt: data.SnapshotAt, + Digest: digest, + Content: contentBytes, + Metadata: ImmutableDictionary.Empty + .Add("importSource", "airgap-snapshot") + .Add("snapshotAt", data.SnapshotAt.ToString("O", CultureInfo.InvariantCulture))); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Helpers.cs new file mode 100644 index 000000000..21d16dc92 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Helpers.cs @@ -0,0 +1,30 @@ +using StellaOps.Excititor.Core; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class ExcititorVexImportTarget +{ + private static string ComputeDigest(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexStringLower(hash)}"; + } + + private static VexDocumentFormat DetectFormat(VexStatementDto statement) + { + // Detect format from statement structure + if (!string.IsNullOrEmpty(statement.Context)) + { + if (statement.Context.Contains("openvex", StringComparison.OrdinalIgnoreCase)) + return VexDocumentFormat.OpenVex; + if (statement.Context.Contains("csaf", StringComparison.OrdinalIgnoreCase)) + return VexDocumentFormat.Csaf; + if (statement.Context.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)) + return VexDocumentFormat.CycloneDx; + } + + // Default to OpenVEX + return VexDocumentFormat.OpenVex; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Import.cs new file mode 100644 index 000000000..5eee4dea8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.Import.cs @@ -0,0 +1,85 @@ +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class ExcititorVexImportTarget +{ + /// + public async Task ImportVexStatementsAsync( + VexImportData data, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(data); + + if (data.Content.Length == 0) + { + return new ModuleImportResultData + { + Failed = 1, + Error = "Empty VEX content" + }; + } + + var created = 0; + var updated = 0; + var failed = 0; + var errors = new List(); + + try + { + // Parse NDJSON content - each line is a VEX statement + var contentString = Encoding.UTF8.GetString(data.Content); + var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries); + + foreach (var line in lines) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var statement = JsonSerializer.Deserialize(line.Trim(), _jsonOptions); + if (statement is null) + { + failed++; + errors.Add("Failed to parse VEX statement line"); + continue; + } + + var document = BuildDocument(statement, line.Trim(), data); + + await _sink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + created++; + } + catch (JsonException ex) + { + failed++; + errors.Add($"JSON parse error: {ex.Message}"); + } + catch (Exception ex) + { + failed++; + errors.Add($"VEX import error: {ex.Message}"); + } + } + } + catch (Exception ex) + { + return new ModuleImportResultData + { + Created = created, + Updated = updated, + Failed = failed + 1, + Error = $"Import failed: {ex.Message}" + }; + } + + return new 
ModuleImportResultData + { + Created = created, + Updated = updated, + Failed = failed, + Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.cs index 011e87bbf..f41558504 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ExcititorVexImportTarget.cs @@ -1,19 +1,5 @@ -// ----------------------------------------------------------------------------- -// ExcititorVexImportTarget.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Tasks: SEAL-016 - Apply snapshot VEX content to Excititor database -// Description: Adapter implementing IVexImportTarget for Excititor module. -// ----------------------------------------------------------------------------- - - -using StellaOps.AirGap.Bundle.Models; -using StellaOps.Determinism; using StellaOps.Excititor.Core; using StellaOps.Excititor.Core.Storage; -using System.Collections.Immutable; -using System.Globalization; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -22,9 +8,9 @@ namespace StellaOps.AirGap.Bundle.Services; /// Implements IVexImportTarget by adapting to Excititor's IVexRawDocumentSink. /// Parses NDJSON VEX statement content and stores records to the VEX database. /// -public sealed class ExcititorVexImportTarget : IVexImportTarget +public sealed partial class ExcititorVexImportTarget : IVexImportTarget { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, PropertyNameCaseInsensitive = true @@ -40,227 +26,4 @@ public sealed class ExcititorVexImportTarget : IVexImportTarget _sink = sink ?? throw new ArgumentNullException(nameof(sink)); _tenant = tenant; } - - /// - public async Task ImportVexStatementsAsync( - VexImportData data, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(data); - - if (data.Content.Length == 0) - { - return new ModuleImportResultData - { - Failed = 1, - Error = "Empty VEX content" - }; - } - - var created = 0; - var updated = 0; - var failed = 0; - var errors = new List(); - - try - { - // Parse NDJSON content - each line is a VEX statement - var contentString = Encoding.UTF8.GetString(data.Content); - var lines = contentString.Split('\n', StringSplitOptions.RemoveEmptyEntries); - - foreach (var line in lines) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - var statement = JsonSerializer.Deserialize(line.Trim(), JsonOptions); - if (statement is null) - { - failed++; - errors.Add("Failed to parse VEX statement line"); - continue; - } - - // Convert to VexRawDocument - var contentBytes = Encoding.UTF8.GetBytes(line.Trim()); - var digest = ComputeDigest(contentBytes); - - var document = new VexRawDocument( - ProviderId: data.SourceId, - Format: DetectFormat(statement), - SourceUri: statement.SourceUri ?? 
new Uri($"urn:stellaops:airgap:vex:{digest}"), - RetrievedAt: data.SnapshotAt, - Digest: digest, - Content: contentBytes, - Metadata: ImmutableDictionary.Empty - .Add("importSource", "airgap-snapshot") - .Add("snapshotAt", data.SnapshotAt.ToString("O", CultureInfo.InvariantCulture))); - - await _sink.StoreAsync(document, cancellationToken); - created++; - } - catch (JsonException ex) - { - failed++; - errors.Add($"JSON parse error: {ex.Message}"); - } - catch (Exception ex) - { - failed++; - errors.Add($"VEX import error: {ex.Message}"); - } - } - } - catch (Exception ex) - { - return new ModuleImportResultData - { - Created = created, - Updated = updated, - Failed = failed + 1, - Error = $"Import failed: {ex.Message}" - }; - } - - return new ModuleImportResultData - { - Created = created, - Updated = updated, - Failed = failed, - Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null - }; - } - - private static string ComputeDigest(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexStringLower(hash)}"; - } - - private static VexDocumentFormat DetectFormat(VexStatementDto statement) - { - // Detect format from statement structure - if (!string.IsNullOrEmpty(statement.Context)) - { - if (statement.Context.Contains("openvex", StringComparison.OrdinalIgnoreCase)) - return VexDocumentFormat.OpenVex; - if (statement.Context.Contains("csaf", StringComparison.OrdinalIgnoreCase)) - return VexDocumentFormat.Csaf; - if (statement.Context.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)) - return VexDocumentFormat.CycloneDx; - } - - // Default to OpenVEX - return VexDocumentFormat.OpenVex; - } -} - -/// -/// Lightweight in-memory implementation of IVexRawDocumentSink for air-gap scenarios. -/// -public sealed class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawStore -{ - private readonly Dictionary _records = new(); - private readonly string _tenant; - private readonly object _lock = new(); - private readonly TimeProvider _timeProvider; - - public InMemoryVexRawDocumentSink( - string tenant = "default", - TimeProvider? timeProvider = null) - { - _tenant = tenant; - _timeProvider = timeProvider ?? 
TimeProvider.System; - } - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - lock (_lock) - { - if (!_records.ContainsKey(document.Digest)) - { - _records[document.Digest] = new VexRawRecord( - Digest: document.Digest, - Tenant: _tenant, - ProviderId: document.ProviderId, - Format: document.Format, - SourceUri: document.SourceUri, - RetrievedAt: document.RetrievedAt, - Metadata: document.Metadata, - Content: document.Content, - InlineContent: true, - RecordedAt: _timeProvider.GetUtcNow()); - } - } - - return ValueTask.CompletedTask; - } - - public ValueTask FindByDigestAsync(string digest, CancellationToken cancellationToken) - { - lock (_lock) - { - _records.TryGetValue(digest, out var record); - return ValueTask.FromResult(record); - } - } - - public ValueTask QueryAsync(VexRawQuery query, CancellationToken cancellationToken) - { - lock (_lock) - { - var items = _records.Values - .Where(r => r.Tenant == query.Tenant) - .Where(r => query.ProviderIds.Count == 0 || query.ProviderIds.Contains(r.ProviderId)) - .Where(r => query.Digests.Count == 0 || query.Digests.Contains(r.Digest)) - .Where(r => query.Formats.Count == 0 || query.Formats.Contains(r.Format)) - .Where(r => !query.Since.HasValue || r.RetrievedAt >= query.Since.Value) - .Where(r => !query.Until.HasValue || r.RetrievedAt <= query.Until.Value) - .Take(query.Limit) - .Select(r => new VexRawDocumentSummary( - r.Digest, - r.ProviderId, - r.Format, - r.SourceUri, - r.RetrievedAt, - r.InlineContent, - r.Metadata)) - .ToList(); - - return ValueTask.FromResult(new VexRawDocumentPage( - items, - NextCursor: items.Count == query.Limit && items.Count > 0 - ? new VexRawCursor(items[^1].RetrievedAt, items[^1].Digest) - : null, - HasMore: items.Count == query.Limit)); - } - } - - public int Count => _records.Count; - - public IEnumerable GetAllRecords() - { - lock (_lock) - { - return _records.Values.ToList(); - } - } -} - -/// -/// DTO for deserializing VEX statements from NDJSON. -/// -internal sealed record VexStatementDto -{ - public string? Context { get; init; } - public string? Id { get; init; } - public string? Vulnerability { get; init; } - public string? Status { get; init; } - public string? Justification { get; init; } - public string? Impact { get; init; } - public string? ActionStatement { get; init; } - public Uri? SourceUri { get; init; } - public DateTimeOffset? Timestamp { get; init; } - public ImmutableArray Products { get; init; } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IAdvisoryImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IAdvisoryImportTarget.cs new file mode 100644 index 000000000..e5a9f058c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IAdvisoryImportTarget.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Target interface for importing advisories (SEAL-015). +/// Implemented by Concelier module. 
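A minimal usage sketch of the VEX import pieces introduced above, assuming the ExcititorVexImportTarget constructor takes the sink plus a tenant string (its signature is outside this hunk; only the field assignments are visible) and that ImportVexStatementsAsync returns the ModuleImportResultData shape defined later in this patch:

using System.Text;
using StellaOps.AirGap.Bundle.Services;

var sink = new InMemoryVexRawDocumentSink(tenant: "default");
var target = new ExcititorVexImportTarget(sink, "default");   // constructor shape assumed, see note above

// One statement per NDJSON line; property names follow VexStatementDto (camelCase).
var ndjson = "{\"context\":\"https://openvex.dev/ns\",\"id\":\"stmt-1\",\"vulnerability\":\"CVE-2024-0001\",\"status\":\"not_affected\"}";

var result = await target.ImportVexStatementsAsync(new VexImportData
{
    SourceId = "airgap-snapshot",
    Content = Encoding.UTF8.GetBytes(ndjson),
    SnapshotAt = DateTimeOffset.UtcNow,
    StatementCount = 1
});

Console.WriteLine($"created={result.Created} failed={result.Failed} stored={sink.Count}");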
+/// +public interface IAdvisoryImportTarget +{ + Task ImportAdvisoriesAsync( + AdvisoryImportData data, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IBundleBuilder.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IBundleBuilder.cs new file mode 100644 index 000000000..05dca8d27 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IBundleBuilder.cs @@ -0,0 +1,9 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public interface IBundleBuilder +{ + Task BuildAsync( + BundleBuildRequest request, + string outputPath, + CancellationToken ct = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ICrlFetcher.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ICrlFetcher.cs new file mode 100644 index 000000000..2039fe118 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ICrlFetcher.cs @@ -0,0 +1,10 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public interface ICrlFetcher +{ + Task> FetchAsync( + IReadOnlyList certificateChain, + CancellationToken ct = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IGuidProvider.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IGuidProvider.cs new file mode 100644 index 000000000..573764563 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IGuidProvider.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Provides unique identifiers. Inject to enable deterministic testing. +/// +public interface IGuidProvider +{ + /// + /// Creates a new unique identifier. + /// + Guid NewGuid(); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IKnowledgeSnapshotImporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IKnowledgeSnapshotImporter.cs new file mode 100644 index 000000000..1ca771a41 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IKnowledgeSnapshotImporter.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Interface for knowledge snapshot importing. +/// +public interface IKnowledgeSnapshotImporter +{ + Task ImportAsync( + SnapshotImportRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IOcspResponseFetcher.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IOcspResponseFetcher.cs new file mode 100644 index 000000000..692976811 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IOcspResponseFetcher.cs @@ -0,0 +1,10 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public interface IOcspResponseFetcher +{ + Task> FetchAsync( + IReadOnlyList certificateChain, + CancellationToken ct = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyImportTarget.cs new file mode 100644 index 000000000..54245f6ff --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyImportTarget.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Target interface for importing policies (SEAL-017). +/// Implemented by Policy module. 
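The IGuidProvider seam above exists so record identifiers can be made deterministic in tests; a hypothetical provider such as the one below can be handed to the in-memory stores in this patch, while production code falls back to SystemGuidProvider.Instance:

using StellaOps.AirGap.Bundle.Services;

// Hypothetical test double: hands out a stable, incrementing sequence of GUIDs
// so ids recorded by the in-memory repositories are reproducible across runs.
public sealed class SequentialGuidProvider : IGuidProvider
{
    private int _counter;

    public Guid NewGuid()
    {
        var n = Interlocked.Increment(ref _counter);
        return new Guid(n, 0, 0, new byte[8]);   // counter encoded in the first four bytes
    }
}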
+/// +public interface IPolicyImportTarget +{ + Task ImportPolicyAsync( + PolicyImportData data, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyPackImportStore.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyPackImportStore.cs new file mode 100644 index 000000000..291d627fb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IPolicyPackImportStore.cs @@ -0,0 +1,22 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Store interface for importing policy packs from air-gap snapshots. +/// +public interface IPolicyPackImportStore +{ + /// + /// Finds an imported policy pack by content digest. + /// + Task FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken); + + /// + /// Saves an imported policy pack. + /// + Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken); + + /// + /// Lists all imported policy packs for a tenant. + /// + Task> ListAsync(string tenantId, CancellationToken cancellationToken); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleReader.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleReader.cs new file mode 100644 index 000000000..ae25fbcc3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleReader.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Interface for snapshot bundle reading. +/// +public interface ISnapshotBundleReader +{ + Task ReadAsync( + SnapshotBundleReadRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleWriter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleWriter.cs new file mode 100644 index 000000000..c590bf2e3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotBundleWriter.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Interface for snapshot bundle writing. +/// +public interface ISnapshotBundleWriter +{ + Task WriteAsync( + SnapshotBundleRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotManifestSigner.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotManifestSigner.cs new file mode 100644 index 000000000..707a96628 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ISnapshotManifestSigner.cs @@ -0,0 +1,15 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Interface for manifest signing operations. +/// +public interface ISnapshotManifestSigner +{ + Task SignAsync( + ManifestSigningRequest request, + CancellationToken cancellationToken = default); + + Task VerifyAsync( + ManifestVerificationRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITimeAnchorService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITimeAnchorService.cs new file mode 100644 index 000000000..27e5413a8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITimeAnchorService.cs @@ -0,0 +1,16 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Interface for time anchor operations. 
+/// +public interface ITimeAnchorService +{ + Task CreateAnchorAsync( + TimeAnchorRequest request, + CancellationToken cancellationToken = default); + + Task ValidateAnchorAsync( + TimeAnchorContent anchor, + TimeAnchorValidationRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITsaChainBundler.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITsaChainBundler.cs new file mode 100644 index 000000000..504b7a5bb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/ITsaChainBundler.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public interface ITsaChainBundler +{ + Task BundleAsync( + ReadOnlyMemory timeStampToken, + string outputPath, + string? filePrefix = null, + CancellationToken ct = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IVexImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IVexImportTarget.cs new file mode 100644 index 000000000..a98d66990 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/IVexImportTarget.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Target interface for importing VEX statements (SEAL-016). +/// Implemented by Excititor module. +/// +public interface IVexImportTarget +{ + Task ImportVexStatementsAsync( + VexImportData data, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Query.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Query.cs new file mode 100644 index 000000000..e76bbab39 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Query.cs @@ -0,0 +1,95 @@ +using StellaOps.Concelier.Core.Raw; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class InMemoryAdvisoryRawRepository +{ + public Task FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) + { + lock (_lock) + { + var record = _records.Values.FirstOrDefault(r => r.Document.Tenant == tenant && r.Id == id); + return Task.FromResult(record); + } + } + + public Task QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) + { + lock (_lock) + { + var query = _records.Values.Where(r => r.Document.Tenant == options.Tenant); + + if (!options.Vendors.IsEmpty) + { + query = query.Where(r => options.Vendors.Contains(r.Document.Source.Vendor)); + } + + if (options.Since.HasValue) + { + query = query.Where(r => r.IngestedAt >= options.Since.Value); + } + + var records = query.Take(options.Limit).ToList(); + return Task.FromResult(new AdvisoryRawQueryResult( + Records: records, + NextCursor: records.Count == options.Limit && records.Count > 0 ? 
records[^1].Id : null, + HasMore: records.Count == options.Limit)); + } + } + + public Task> FindByAdvisoryKeyAsync( + string tenant, + IReadOnlyCollection searchValues, + IReadOnlyCollection sourceVendors, + CancellationToken cancellationToken) + { + lock (_lock) + { + var query = _records.Values.Where(r => r.Document.Tenant == tenant); + + if (searchValues.Count > 0) + { + query = query.Where(r => + searchValues.Contains(r.Document.AdvisoryKey) || + r.Document.Identifiers.Aliases.Any(a => searchValues.Contains(a))); + } + + if (sourceVendors.Count > 0) + { + query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor)); + } + + return Task.FromResult>(query.ToList()); + } + } + + public Task> ListForVerificationAsync( + string tenant, + DateTimeOffset since, + DateTimeOffset until, + IReadOnlyCollection sourceVendors, + CancellationToken cancellationToken) + { + lock (_lock) + { + var query = _records.Values + .Where(r => r.Document.Tenant == tenant && r.IngestedAt >= since && r.IngestedAt <= until); + + if (sourceVendors.Count > 0) + { + query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor)); + } + + return Task.FromResult>(query.ToList()); + } + } + + public IEnumerable GetAllRecords() + { + lock (_lock) + { + return _records.Values.ToList(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Upsert.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Upsert.cs new file mode 100644 index 000000000..585e1beeb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.Upsert.cs @@ -0,0 +1,41 @@ +using StellaOps.Concelier.Core.Raw; +using StellaOps.Concelier.RawModels; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class InMemoryAdvisoryRawRepository +{ + public Task UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) + { + var contentHash = ComputeHash(document); + var key = $"{document.Tenant}:{contentHash}"; + var now = _timeProvider.GetUtcNow(); + + lock (_lock) + { + if (_records.TryGetValue(key, out var existing)) + { + return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: false, Record: existing)); + } + + var record = new AdvisoryRawRecord( + Id: _guidProvider.NewGuid().ToString(), + Document: document, + IngestedAt: now, + CreatedAt: now); + + _records[key] = record; + return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: true, Record: record)); + } + } + + private static string ComputeHash(AdvisoryRawDocument document) + { + var json = JsonSerializer.Serialize(document); + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json)); + return $"sha256:{Convert.ToHexStringLower(bytes)}"; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.cs new file mode 100644 index 000000000..7b8eb2c1e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryAdvisoryRawRepository.cs @@ -0,0 +1,27 @@ +using StellaOps.Concelier.Core.Raw; +using StellaOps.Concelier.RawModels; +using StellaOps.Determinism; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Lightweight in-memory implementation of IAdvisoryRawRepository for air-gap scenarios. +/// Used when direct database access is unavailable. 
+/// +public sealed partial class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository +{ + private readonly Dictionary _records = new(); + private readonly object _lock = new(); + private readonly TimeProvider _timeProvider; + private readonly IGuidProvider _guidProvider; + + public InMemoryAdvisoryRawRepository( + TimeProvider? timeProvider = null, + IGuidProvider? guidProvider = null) + { + _timeProvider = timeProvider ?? TimeProvider.System; + _guidProvider = guidProvider ?? SystemGuidProvider.Instance; + } + + public int Count => _records.Count; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.Operations.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.Operations.cs new file mode 100644 index 000000000..127a32e62 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.Operations.cs @@ -0,0 +1,39 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class InMemoryPolicyPackImportStore +{ + public Task FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken) + { + lock (_lock) + { + var pack = _packs.Values.FirstOrDefault(p => p.TenantId == tenantId && p.Digest == digest); + return Task.FromResult(pack); + } + } + + public Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken) + { + lock (_lock) + { + _packs[$"{pack.TenantId}:{pack.Id}"] = pack; + } + return Task.CompletedTask; + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + lock (_lock) + { + var packs = _packs.Values.Where(p => p.TenantId == tenantId).ToList(); + return Task.FromResult>(packs); + } + } + + public IEnumerable GetAllPacks() + { + lock (_lock) + { + return _packs.Values.ToList(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.cs new file mode 100644 index 000000000..033074e0a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryPolicyPackImportStore.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Lightweight in-memory implementation of IPolicyPackImportStore for air-gap scenarios. 
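The advisory repository above and the VEX sink defined later in this patch also accept an optional TimeProvider, so IngestedAt/RecordedAt stamps can be pinned in tests; a sketch with an illustrative fixed clock (both constructors default to TimeProvider.System):

using StellaOps.AirGap.Bundle.Services;

var clock = new FixedTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
var advisories = new InMemoryAdvisoryRawRepository(timeProvider: clock);
var vexSink = new InMemoryVexRawDocumentSink(tenant: "default", timeProvider: clock);

// Illustrative fixed clock; GetUtcNow() is the only member the stores above call.
public sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _now;
    public FixedTimeProvider(DateTimeOffset now) => _now = now;
    public override DateTimeOffset GetUtcNow() => _now;
}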
+/// +public sealed partial class InMemoryPolicyPackImportStore : IPolicyPackImportStore +{ + private readonly Dictionary _packs = new(); + private readonly object _lock = new(); + + public int Count => _packs.Count; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Query.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Query.cs new file mode 100644 index 000000000..e8a639db7 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Query.cs @@ -0,0 +1,55 @@ +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Storage; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class InMemoryVexRawDocumentSink +{ + public ValueTask FindByDigestAsync(string digest, CancellationToken cancellationToken) + { + lock (_lock) + { + _records.TryGetValue(digest, out var record); + return ValueTask.FromResult(record); + } + } + + public ValueTask QueryAsync(VexRawQuery query, CancellationToken cancellationToken) + { + lock (_lock) + { + var items = _records.Values + .Where(r => r.Tenant == query.Tenant) + .Where(r => query.ProviderIds.Count == 0 || query.ProviderIds.Contains(r.ProviderId)) + .Where(r => query.Digests.Count == 0 || query.Digests.Contains(r.Digest)) + .Where(r => query.Formats.Count == 0 || query.Formats.Contains(r.Format)) + .Where(r => !query.Since.HasValue || r.RetrievedAt >= query.Since.Value) + .Where(r => !query.Until.HasValue || r.RetrievedAt <= query.Until.Value) + .Take(query.Limit) + .Select(r => new VexRawDocumentSummary( + r.Digest, + r.ProviderId, + r.Format, + r.SourceUri, + r.RetrievedAt, + r.InlineContent, + r.Metadata)) + .ToList(); + + return ValueTask.FromResult(new VexRawDocumentPage( + items, + NextCursor: items.Count == query.Limit && items.Count > 0 + ? 
new VexRawCursor(items[^1].RetrievedAt, items[^1].Digest) + : null, + HasMore: items.Count == query.Limit)); + } + } + + public IEnumerable GetAllRecords() + { + lock (_lock) + { + return _records.Values.ToList(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Store.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Store.cs new file mode 100644 index 000000000..afe51e89a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.Store.cs @@ -0,0 +1,30 @@ +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Storage; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class InMemoryVexRawDocumentSink +{ + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + lock (_lock) + { + if (!_records.ContainsKey(document.Digest)) + { + _records[document.Digest] = new VexRawRecord( + Digest: document.Digest, + Tenant: _tenant, + ProviderId: document.ProviderId, + Format: document.Format, + SourceUri: document.SourceUri, + RetrievedAt: document.RetrievedAt, + Metadata: document.Metadata, + Content: document.Content, + InlineContent: true, + RecordedAt: _timeProvider.GetUtcNow()); + } + } + + return ValueTask.CompletedTask; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.cs new file mode 100644 index 000000000..41681755c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/InMemoryVexRawDocumentSink.cs @@ -0,0 +1,26 @@ +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Storage; +using StellaOps.Determinism; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Lightweight in-memory implementation of IVexRawDocumentSink for air-gap scenarios. +/// +public sealed partial class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawStore +{ + private readonly Dictionary _records = new(); + private readonly string _tenant; + private readonly object _lock = new(); + private readonly TimeProvider _timeProvider; + + public InMemoryVexRawDocumentSink( + string tenant = "default", + TimeProvider? timeProvider = null) + { + _tenant = tenant; + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public int Count => _records.Count; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImportModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImportModels.cs new file mode 100644 index 000000000..318a28597 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImportModels.cs @@ -0,0 +1,89 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record SnapshotImportRequest +{ + public required string BundlePath { get; init; } + public bool ImportAdvisories { get; init; } = true; + public bool ImportVex { get; init; } = true; + public bool ImportPolicies { get; init; } = true; + public bool DryRun { get; init; } + public bool FailOnAnyError { get; init; } +} + +public sealed record SnapshotImportResult +{ + public bool Success { get; init; } + public string? BundleId { get; init; } + public DateTimeOffset StartedAt { get; init; } + public DateTimeOffset CompletedAt { get; init; } + public ImportStatistics? Statistics { get; init; } + public IReadOnlyList? Errors { get; init; } + public string? 
Error { get; init; } + + public static SnapshotImportResult Failed(string error, TimeProvider? timeProvider = null) + { + var now = (timeProvider ?? TimeProvider.System).GetUtcNow(); + return new() + { + Success = false, + Error = error, + StartedAt = now, + CompletedAt = now + }; + } +} + +public sealed record ImportStatistics +{ + public int AdvisoriesProcessed { get; set; } + public int AdvisoriesCreated { get; set; } + public int AdvisoriesUpdated { get; set; } + public int AdvisoriesFailed { get; set; } + + public int VexProcessed { get; set; } + public int VexCreated { get; set; } + public int VexUpdated { get; set; } + public int VexFailed { get; set; } + + public int PoliciesProcessed { get; set; } + public int PoliciesCreated { get; set; } + public int PoliciesUpdated { get; set; } + public int PoliciesFailed { get; set; } + + public int TotalProcessed => AdvisoriesProcessed + VexProcessed + PoliciesProcessed; + public int TotalCreated => AdvisoriesCreated + VexCreated + PoliciesCreated; + public int TotalUpdated => AdvisoriesUpdated + VexUpdated + PoliciesUpdated; + public int TotalFailed => AdvisoriesFailed + VexFailed + PoliciesFailed; +} + +public sealed record AdvisoryImportData +{ + public required string FeedId { get; init; } + public required byte[] Content { get; init; } + public DateTimeOffset SnapshotAt { get; init; } + public int RecordCount { get; init; } +} + +public sealed record VexImportData +{ + public required string SourceId { get; init; } + public required byte[] Content { get; init; } + public DateTimeOffset SnapshotAt { get; init; } + public int StatementCount { get; init; } +} + +public sealed record PolicyImportData +{ + public required string PolicyId { get; init; } + public required byte[] Content { get; init; } + public string? Version { get; init; } + public DateTimeOffset SnapshotAt { get; init; } +} + +public sealed record ModuleImportResultData +{ + public int Created { get; init; } + public int Updated { get; init; } + public int Failed { get; init; } + public string? 
Error { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Advisories.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Advisories.cs new file mode 100644 index 000000000..8ad949a9c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Advisories.cs @@ -0,0 +1,69 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ImportAdvisoriesAsync( + string bundleDir, + IReadOnlyList entries, + bool dryRun, + CancellationToken ct) + { + var result = new ModuleImportResult(); + + foreach (var entry in entries) + { + try + { + // Validate path to prevent traversal attacks + if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) + { + result.Failed++; + result.Errors.Add($"Unsafe path detected: {entry.RelativePath}"); + continue; + } + + var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); + if (!File.Exists(filePath)) + { + result.Failed++; + result.Errors.Add($"File not found: {entry.RelativePath}"); + continue; + } + + var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false); + result.Processed++; + + if (!dryRun && _advisoryTarget is not null) + { + var importResult = await _advisoryTarget.ImportAdvisoriesAsync( + new AdvisoryImportData + { + FeedId = entry.FeedId, + Content = content, + SnapshotAt = entry.SnapshotAt, + RecordCount = entry.RecordCount + }, + ct).ConfigureAwait(false); + + result.Created += importResult.Created; + result.Updated += importResult.Updated; + result.Failed += importResult.Failed; + + if (importResult.Error is not null) + { + result.Errors.Add(importResult.Error); + } + } + } + catch (Exception ex) + { + result.Failed++; + result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); + } + } + + return result; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Extraction.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Extraction.cs new file mode 100644 index 000000000..73c554c13 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Extraction.cs @@ -0,0 +1,57 @@ +using System.Formats.Tar; +using System.IO.Compression; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct) + { + var normalizedTargetDir = Path.GetFullPath(targetDir); + + await using var fileStream = File.OpenRead(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); + await using var tarReader = new TarReader(gzipStream, leaveOpen: false); + + while (await tarReader.GetNextEntryAsync(copyData: true, ct).ConfigureAwait(false) is { } entry) + { + if (string.IsNullOrEmpty(entry.Name)) + { + continue; + } + + // Validate entry path to prevent traversal attacks + if (!PathValidation.IsSafeRelativePath(entry.Name)) + { + throw new InvalidOperationException($"Unsafe tar entry path detected: {entry.Name}"); + } + + var destinationPath = Path.GetFullPath(Path.Combine(normalizedTargetDir, entry.Name)); + + // Verify the path is within the target directory + if (!destinationPath.StartsWith(normalizedTargetDir, StringComparison.OrdinalIgnoreCase)) + { + throw new 
InvalidOperationException($"Tar entry path escapes target directory: {entry.Name}"); + } + + // Create directory if needed + var entryDir = Path.GetDirectoryName(destinationPath); + if (!string.IsNullOrEmpty(entryDir)) + { + Directory.CreateDirectory(entryDir); + } + + // Extract based on entry type + if (entry.EntryType == TarEntryType.Directory) + { + Directory.CreateDirectory(destinationPath); + } + else if (entry.EntryType == TarEntryType.RegularFile || + entry.EntryType == TarEntryType.V7RegularFile) + { + await entry.ExtractToFileAsync(destinationPath, overwrite: true, ct).ConfigureAwait(false); + } + // Skip symbolic links and other special entry types for security + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Import.cs new file mode 100644 index 000000000..083fd1631 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Import.cs @@ -0,0 +1,36 @@ + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + /// + /// Imports all content from a verified snapshot bundle. + /// + public async Task ImportAsync( + SnapshotImportRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath); + + if (!File.Exists(request.BundlePath)) + { + return SnapshotImportResult.Failed("Bundle file not found", _timeProvider); + } + + var tempDir = CreateTempDir(); + + try + { + return await ImportFromBundleAsync(request, tempDir, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + return SnapshotImportResult.Failed($"Import failed: {ex.Message}", _timeProvider); + } + finally + { + CleanupTempDir(tempDir); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportAdvisories.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportAdvisories.cs new file mode 100644 index 000000000..c51fcd2ef --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportAdvisories.cs @@ -0,0 +1,41 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ApplyAdvisoryImportAsync( + SnapshotImportRequest request, + string tempDir, + KnowledgeSnapshotManifest manifest, + ImportStatistics stats, + List errors, + CancellationToken cancellationToken) + { + if (request.ImportAdvisories && _advisoryTarget is not null) + { + var advisoryResult = await ImportAdvisoriesAsync( + tempDir, + manifest.Advisories, + request.DryRun, + cancellationToken) + .ConfigureAwait(false); + + stats.AdvisoriesProcessed = advisoryResult.Processed; + stats.AdvisoriesCreated = advisoryResult.Created; + stats.AdvisoriesUpdated = advisoryResult.Updated; + stats.AdvisoriesFailed = advisoryResult.Failed; + + if (advisoryResult.Errors.Count > 0) + { + errors.AddRange(advisoryResult.Errors.Select(e => $"Advisory: {e}")); + } + return; + } + + if (request.ImportAdvisories) + { + errors.Add("Advisory import target not configured"); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportFlow.cs 
b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportFlow.cs new file mode 100644 index 000000000..efc13df0f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportFlow.cs @@ -0,0 +1,45 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ImportFromBundleAsync( + SnapshotImportRequest request, + string tempDir, + CancellationToken cancellationToken) + { + await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken).ConfigureAwait(false); + + var manifest = await ReadManifestAsync(tempDir, cancellationToken).ConfigureAwait(false); + if (manifest is null) + { + return SnapshotImportResult.Failed("Failed to parse manifest", _timeProvider); + } + + var result = new SnapshotImportResult + { + Success = true, + BundleId = manifest.BundleId, + StartedAt = _timeProvider.GetUtcNow() + }; + + var errors = new List(); + var stats = new ImportStatistics(); + + await ApplyAdvisoryImportAsync(request, tempDir, manifest, stats, errors, cancellationToken) + .ConfigureAwait(false); + await ApplyVexImportAsync(request, tempDir, manifest, stats, errors, cancellationToken) + .ConfigureAwait(false); + await ApplyPolicyImportAsync(request, tempDir, manifest, stats, errors, cancellationToken) + .ConfigureAwait(false); + + return result with + { + CompletedAt = _timeProvider.GetUtcNow(), + Statistics = stats, + Errors = errors.Count > 0 ? [.. errors] : null, + Success = errors.Count == 0 || !request.FailOnAnyError + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportPolicies.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportPolicies.cs new file mode 100644 index 000000000..c7f223250 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportPolicies.cs @@ -0,0 +1,41 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ApplyPolicyImportAsync( + SnapshotImportRequest request, + string tempDir, + KnowledgeSnapshotManifest manifest, + ImportStatistics stats, + List errors, + CancellationToken cancellationToken) + { + if (request.ImportPolicies && _policyTarget is not null) + { + var policyResult = await ImportPoliciesAsync( + tempDir, + manifest.Policies, + request.DryRun, + cancellationToken) + .ConfigureAwait(false); + + stats.PoliciesProcessed = policyResult.Processed; + stats.PoliciesCreated = policyResult.Created; + stats.PoliciesUpdated = policyResult.Updated; + stats.PoliciesFailed = policyResult.Failed; + + if (policyResult.Errors.Count > 0) + { + errors.AddRange(policyResult.Errors.Select(e => $"Policy: {e}")); + } + return; + } + + if (request.ImportPolicies) + { + errors.Add("Policy import target not configured"); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportVex.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportVex.cs new file mode 100644 index 000000000..fd9ea1d7b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.ImportVex.cs @@ -0,0 +1,41 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class 
KnowledgeSnapshotImporter +{ + private async Task ApplyVexImportAsync( + SnapshotImportRequest request, + string tempDir, + KnowledgeSnapshotManifest manifest, + ImportStatistics stats, + List errors, + CancellationToken cancellationToken) + { + if (request.ImportVex && _vexTarget is not null) + { + var vexResult = await ImportVexStatementsAsync( + tempDir, + manifest.VexStatements, + request.DryRun, + cancellationToken) + .ConfigureAwait(false); + + stats.VexProcessed = vexResult.Processed; + stats.VexCreated = vexResult.Created; + stats.VexUpdated = vexResult.Updated; + stats.VexFailed = vexResult.Failed; + + if (vexResult.Errors.Count > 0) + { + errors.AddRange(vexResult.Errors.Select(e => $"VEX: {e}")); + } + return; + } + + if (request.ImportVex) + { + errors.Add("VEX import target not configured"); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Manifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Manifest.cs new file mode 100644 index 000000000..20793aa74 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Manifest.cs @@ -0,0 +1,19 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ReadManifestAsync(string bundleDir, CancellationToken ct) + { + var manifestPath = Path.Combine(bundleDir, "manifest.json"); + if (!File.Exists(manifestPath)) + { + return null; + } + + var manifestBytes = await File.ReadAllBytesAsync(manifestPath, ct).ConfigureAwait(false); + return JsonSerializer.Deserialize(manifestBytes, _jsonOptions); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Policies.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Policies.cs new file mode 100644 index 000000000..90125a5dd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Policies.cs @@ -0,0 +1,68 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ImportPoliciesAsync( + string bundleDir, + IReadOnlyList entries, + bool dryRun, + CancellationToken ct) + { + var result = new ModuleImportResult(); + + foreach (var entry in entries) + { + try + { + // Validate path to prevent traversal attacks + if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) + { + result.Failed++; + result.Errors.Add($"Unsafe path detected: {entry.RelativePath}"); + continue; + } + + var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); + if (!File.Exists(filePath)) + { + result.Failed++; + result.Errors.Add($"File not found: {entry.RelativePath}"); + continue; + } + + var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false); + result.Processed++; + + if (!dryRun && _policyTarget is not null) + { + var importResult = await _policyTarget.ImportPolicyAsync( + new PolicyImportData + { + PolicyId = entry.PolicyId, + Content = content, + Version = entry.Version + }, + ct).ConfigureAwait(false); + + result.Created += importResult.Created; + result.Updated += importResult.Updated; + result.Failed += importResult.Failed; + + if (importResult.Error is not null) + { + result.Errors.Add(importResult.Error); + } + } + } + catch (Exception ex) + { + result.Failed++; + 
result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); + } + } + + return result; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Results.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Results.cs new file mode 100644 index 000000000..f0f5f695b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Results.cs @@ -0,0 +1,13 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private sealed class ModuleImportResult + { + public int Processed { get; set; } + public int Created { get; set; } + public int Updated { get; set; } + public int Failed { get; set; } + public List Errors { get; } = []; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.TempDir.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.TempDir.cs new file mode 100644 index 000000000..b0ace5d17 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.TempDir.cs @@ -0,0 +1,26 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private string CreateTempDir() + { + var tempDir = Path.Combine(Path.GetTempPath(), $"import-{_guidProvider.NewGuid():N}"); + Directory.CreateDirectory(tempDir); + return tempDir; + } + + private static void CleanupTempDir(string tempDir) + { + try + { + if (Directory.Exists(tempDir)) + { + Directory.Delete(tempDir, recursive: true); + } + } + catch + { + // Ignore cleanup errors. + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Vex.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Vex.cs new file mode 100644 index 000000000..870e51249 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.Vex.cs @@ -0,0 +1,69 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class KnowledgeSnapshotImporter +{ + private async Task ImportVexStatementsAsync( + string bundleDir, + IReadOnlyList entries, + bool dryRun, + CancellationToken ct) + { + var result = new ModuleImportResult(); + + foreach (var entry in entries) + { + try + { + // Validate path to prevent traversal attacks + if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) + { + result.Failed++; + result.Errors.Add($"Unsafe path detected: {entry.RelativePath}"); + continue; + } + + var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); + if (!File.Exists(filePath)) + { + result.Failed++; + result.Errors.Add($"File not found: {entry.RelativePath}"); + continue; + } + + var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false); + result.Processed++; + + if (!dryRun && _vexTarget is not null) + { + var importResult = await _vexTarget.ImportVexStatementsAsync( + new VexImportData + { + SourceId = entry.SourceId, + Content = content, + SnapshotAt = entry.SnapshotAt, + StatementCount = entry.StatementCount + }, + ct).ConfigureAwait(false); + + result.Created += importResult.Created; + result.Updated += importResult.Updated; + result.Failed += importResult.Failed; + + if (importResult.Error is not null) + { + result.Errors.Add(importResult.Error); + } + } + } + catch (Exception ex) + { + result.Failed++; + 
result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); + } + } + + return result; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.cs index 0f555ab55..48b35f85c 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/KnowledgeSnapshotImporter.cs @@ -1,15 +1,4 @@ -// ----------------------------------------------------------------------------- -// KnowledgeSnapshotImporter.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Tasks: SEAL-015, SEAL-016, SEAL-017 - Apply snapshot content to databases -// Description: Imports knowledge snapshot content to Concelier, Excititor, and Policy. -// ----------------------------------------------------------------------------- - - -using StellaOps.AirGap.Bundle.Models; using StellaOps.Determinism; -using System.Formats.Tar; -using System.IO.Compression; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -17,9 +6,9 @@ namespace StellaOps.AirGap.Bundle.Services; /// /// Imports knowledge snapshot content to module databases. /// -public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter +public sealed partial class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; @@ -43,526 +32,4 @@ public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter _timeProvider = timeProvider ?? TimeProvider.System; _guidProvider = guidProvider ?? SystemGuidProvider.Instance; } - - /// - /// Imports all content from a verified snapshot bundle. 
- /// - public async Task ImportAsync( - SnapshotImportRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath); - - if (!File.Exists(request.BundlePath)) - { - return SnapshotImportResult.Failed("Bundle file not found", _timeProvider); - } - - var tempDir = Path.Combine(Path.GetTempPath(), $"import-{_guidProvider.NewGuid():N}"); - Directory.CreateDirectory(tempDir); - - try - { - // Extract bundle - await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken); - - // Read manifest - var manifestPath = Path.Combine(tempDir, "manifest.json"); - if (!File.Exists(manifestPath)) - { - return SnapshotImportResult.Failed("Manifest not found in bundle", _timeProvider); - } - - var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken); - var manifest = JsonSerializer.Deserialize(manifestBytes, JsonOptions); - if (manifest is null) - { - return SnapshotImportResult.Failed("Failed to parse manifest", _timeProvider); - } - - var result = new SnapshotImportResult - { - Success = true, - BundleId = manifest.BundleId, - StartedAt = _timeProvider.GetUtcNow() - }; - - var errors = new List(); - var stats = new ImportStatistics(); - - // Import advisories (SEAL-015) - if (request.ImportAdvisories && _advisoryTarget is not null) - { - var advisoryResult = await ImportAdvisoriesAsync( - tempDir, manifest.Advisories, request.DryRun, cancellationToken); - - stats.AdvisoriesProcessed = advisoryResult.Processed; - stats.AdvisoriesCreated = advisoryResult.Created; - stats.AdvisoriesUpdated = advisoryResult.Updated; - stats.AdvisoriesFailed = advisoryResult.Failed; - - if (advisoryResult.Errors.Count > 0) - { - errors.AddRange(advisoryResult.Errors.Select(e => $"Advisory: {e}")); - } - } - else if (request.ImportAdvisories) - { - errors.Add("Advisory import target not configured"); - } - - // Import VEX statements (SEAL-016) - if (request.ImportVex && _vexTarget is not null) - { - var vexResult = await ImportVexStatementsAsync( - tempDir, manifest.VexStatements, request.DryRun, cancellationToken); - - stats.VexProcessed = vexResult.Processed; - stats.VexCreated = vexResult.Created; - stats.VexUpdated = vexResult.Updated; - stats.VexFailed = vexResult.Failed; - - if (vexResult.Errors.Count > 0) - { - errors.AddRange(vexResult.Errors.Select(e => $"VEX: {e}")); - } - } - else if (request.ImportVex) - { - errors.Add("VEX import target not configured"); - } - - // Import policies (SEAL-017) - if (request.ImportPolicies && _policyTarget is not null) - { - var policyResult = await ImportPoliciesAsync( - tempDir, manifest.Policies, request.DryRun, cancellationToken); - - stats.PoliciesProcessed = policyResult.Processed; - stats.PoliciesCreated = policyResult.Created; - stats.PoliciesUpdated = policyResult.Updated; - stats.PoliciesFailed = policyResult.Failed; - - if (policyResult.Errors.Count > 0) - { - errors.AddRange(policyResult.Errors.Select(e => $"Policy: {e}")); - } - } - else if (request.ImportPolicies) - { - errors.Add("Policy import target not configured"); - } - - result = result with - { - CompletedAt = _timeProvider.GetUtcNow(), - Statistics = stats, - Errors = errors.Count > 0 ? [.. 
errors] : null, - Success = errors.Count == 0 || !request.FailOnAnyError - }; - - return result; - } - catch (Exception ex) - { - return SnapshotImportResult.Failed($"Import failed: {ex.Message}", _timeProvider); - } - finally - { - try - { - if (Directory.Exists(tempDir)) - { - Directory.Delete(tempDir, recursive: true); - } - } - catch - { - // Ignore cleanup errors - } - } - } - - private async Task ImportAdvisoriesAsync( - string bundleDir, - IReadOnlyList entries, - bool dryRun, - CancellationToken ct) - { - var result = new ModuleImportResult(); - - foreach (var entry in entries) - { - try - { - // Validate path to prevent traversal attacks - if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) - { - result.Failed++; - result.Errors.Add($"Unsafe path detected: {entry.RelativePath}"); - continue; - } - - var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); - if (!File.Exists(filePath)) - { - result.Failed++; - result.Errors.Add($"File not found: {entry.RelativePath}"); - continue; - } - - var content = await File.ReadAllBytesAsync(filePath, ct); - result.Processed++; - - if (!dryRun && _advisoryTarget is not null) - { - var importResult = await _advisoryTarget.ImportAdvisoriesAsync( - new AdvisoryImportData - { - FeedId = entry.FeedId, - Content = content, - SnapshotAt = entry.SnapshotAt, - RecordCount = entry.RecordCount - }, - ct); - - result.Created += importResult.Created; - result.Updated += importResult.Updated; - result.Failed += importResult.Failed; - - if (importResult.Error is not null) - { - result.Errors.Add(importResult.Error); - } - } - } - catch (Exception ex) - { - result.Failed++; - result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); - } - } - - return result; - } - - private async Task ImportVexStatementsAsync( - string bundleDir, - IReadOnlyList entries, - bool dryRun, - CancellationToken ct) - { - var result = new ModuleImportResult(); - - foreach (var entry in entries) - { - try - { - // Validate path to prevent traversal attacks - if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) - { - result.Failed++; - result.Errors.Add($"Unsafe path detected: {entry.RelativePath}"); - continue; - } - - var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); - if (!File.Exists(filePath)) - { - result.Failed++; - result.Errors.Add($"File not found: {entry.RelativePath}"); - continue; - } - - var content = await File.ReadAllBytesAsync(filePath, ct); - result.Processed++; - - if (!dryRun && _vexTarget is not null) - { - var importResult = await _vexTarget.ImportVexStatementsAsync( - new VexImportData - { - SourceId = entry.SourceId, - Content = content, - SnapshotAt = entry.SnapshotAt, - StatementCount = entry.StatementCount - }, - ct); - - result.Created += importResult.Created; - result.Updated += importResult.Updated; - result.Failed += importResult.Failed; - - if (importResult.Error is not null) - { - result.Errors.Add(importResult.Error); - } - } - } - catch (Exception ex) - { - result.Failed++; - result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); - } - } - - return result; - } - - private async Task ImportPoliciesAsync( - string bundleDir, - IReadOnlyList entries, - bool dryRun, - CancellationToken ct) - { - var result = new ModuleImportResult(); - - foreach (var entry in entries) - { - try - { - // Validate path to prevent traversal attacks - if (!PathValidation.IsSafeRelativePath(entry.RelativePath)) - { - result.Failed++; - result.Errors.Add($"Unsafe path detected: 
{entry.RelativePath}"); - continue; - } - - var filePath = PathValidation.SafeCombine(bundleDir, entry.RelativePath); - if (!File.Exists(filePath)) - { - result.Failed++; - result.Errors.Add($"File not found: {entry.RelativePath}"); - continue; - } - - var content = await File.ReadAllBytesAsync(filePath, ct); - result.Processed++; - - if (!dryRun && _policyTarget is not null) - { - var importResult = await _policyTarget.ImportPolicyAsync( - new PolicyImportData - { - PolicyId = entry.PolicyId, - Content = content, - Version = entry.Version - }, - ct); - - result.Created += importResult.Created; - result.Updated += importResult.Updated; - result.Failed += importResult.Failed; - - if (importResult.Error is not null) - { - result.Errors.Add(importResult.Error); - } - } - } - catch (Exception ex) - { - result.Failed++; - result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}"); - } - } - - return result; - } - - private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct) - { - var normalizedTargetDir = Path.GetFullPath(targetDir); - - await using var fileStream = File.OpenRead(bundlePath); - await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); - await using var tarReader = new TarReader(gzipStream, leaveOpen: false); - - while (await tarReader.GetNextEntryAsync(copyData: true, ct) is { } entry) - { - if (string.IsNullOrEmpty(entry.Name)) - { - continue; - } - - // Validate entry path to prevent traversal attacks - if (!PathValidation.IsSafeRelativePath(entry.Name)) - { - throw new InvalidOperationException($"Unsafe tar entry path detected: {entry.Name}"); - } - - var destinationPath = Path.GetFullPath(Path.Combine(normalizedTargetDir, entry.Name)); - - // Verify the path is within the target directory - if (!destinationPath.StartsWith(normalizedTargetDir, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Tar entry path escapes target directory: {entry.Name}"); - } - - // Create directory if needed - var entryDir = Path.GetDirectoryName(destinationPath); - if (!string.IsNullOrEmpty(entryDir)) - { - Directory.CreateDirectory(entryDir); - } - - // Extract based on entry type - if (entry.EntryType == TarEntryType.Directory) - { - Directory.CreateDirectory(destinationPath); - } - else if (entry.EntryType == TarEntryType.RegularFile || - entry.EntryType == TarEntryType.V7RegularFile) - { - await entry.ExtractToFileAsync(destinationPath, overwrite: true, ct); - } - // Skip symbolic links and other special entry types for security - } - } - - private sealed class ModuleImportResult - { - public int Processed { get; set; } - public int Created { get; set; } - public int Updated { get; set; } - public int Failed { get; set; } - public List Errors { get; } = []; - } } - -/// -/// Interface for knowledge snapshot importing. -/// -public interface IKnowledgeSnapshotImporter -{ - Task ImportAsync( - SnapshotImportRequest request, - CancellationToken cancellationToken = default); -} - -/// -/// Target interface for importing advisories (SEAL-015). -/// Implemented by Concelier module. -/// -public interface IAdvisoryImportTarget -{ - Task ImportAdvisoriesAsync( - AdvisoryImportData data, - CancellationToken cancellationToken = default); -} - -/// -/// Target interface for importing VEX statements (SEAL-016). -/// Implemented by Excititor module. 
-/// -public interface IVexImportTarget -{ - Task ImportVexStatementsAsync( - VexImportData data, - CancellationToken cancellationToken = default); -} - -/// -/// Target interface for importing policies (SEAL-017). -/// Implemented by Policy module. -/// -public interface IPolicyImportTarget -{ - Task ImportPolicyAsync( - PolicyImportData data, - CancellationToken cancellationToken = default); -} - -#region Request and Result Models - -public sealed record SnapshotImportRequest -{ - public required string BundlePath { get; init; } - public bool ImportAdvisories { get; init; } = true; - public bool ImportVex { get; init; } = true; - public bool ImportPolicies { get; init; } = true; - public bool DryRun { get; init; } = false; - public bool FailOnAnyError { get; init; } = false; -} - -public sealed record SnapshotImportResult -{ - public bool Success { get; init; } - public string? BundleId { get; init; } - public DateTimeOffset StartedAt { get; init; } - public DateTimeOffset CompletedAt { get; init; } - public ImportStatistics? Statistics { get; init; } - public IReadOnlyList? Errors { get; init; } - public string? Error { get; init; } - - public static SnapshotImportResult Failed(string error, TimeProvider? timeProvider = null) - { - var now = (timeProvider ?? TimeProvider.System).GetUtcNow(); - return new() - { - Success = false, - Error = error, - StartedAt = now, - CompletedAt = now - }; - } -} - -public sealed record ImportStatistics -{ - public int AdvisoriesProcessed { get; set; } - public int AdvisoriesCreated { get; set; } - public int AdvisoriesUpdated { get; set; } - public int AdvisoriesFailed { get; set; } - - public int VexProcessed { get; set; } - public int VexCreated { get; set; } - public int VexUpdated { get; set; } - public int VexFailed { get; set; } - - public int PoliciesProcessed { get; set; } - public int PoliciesCreated { get; set; } - public int PoliciesUpdated { get; set; } - public int PoliciesFailed { get; set; } - - public int TotalProcessed => AdvisoriesProcessed + VexProcessed + PoliciesProcessed; - public int TotalCreated => AdvisoriesCreated + VexCreated + PoliciesCreated; - public int TotalUpdated => AdvisoriesUpdated + VexUpdated + PoliciesUpdated; - public int TotalFailed => AdvisoriesFailed + VexFailed + PoliciesFailed; -} - -public sealed record AdvisoryImportData -{ - public required string FeedId { get; init; } - public required byte[] Content { get; init; } - public DateTimeOffset SnapshotAt { get; init; } - public int RecordCount { get; init; } -} - -public sealed record VexImportData -{ - public required string SourceId { get; init; } - public required byte[] Content { get; init; } - public DateTimeOffset SnapshotAt { get; init; } - public int StatementCount { get; init; } -} - -public sealed record PolicyImportData -{ - public required string PolicyId { get; init; } - public required byte[] Content { get; init; } - public string? Version { get; init; } - public DateTimeOffset SnapshotAt { get; init; } -} - -public sealed record ModuleImportResultData -{ - public int Created { get; init; } - public int Updated { get; init; } - public int Failed { get; init; } - public string? 
Error { get; init; } -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Install.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Install.cs new file mode 100644 index 000000000..f80c18e6c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Install.cs @@ -0,0 +1,51 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public static partial class LocalRbacBundleExtensions +{ + /// + /// Extracts and installs local RBAC policy from a bundle to the target path. + /// + /// Path to the extracted bundle. + /// Bundle manifest. + /// Target path to install the policy file. + /// Cancellation token. + /// True if the policy was installed successfully. + public static async Task InstallLocalRbacPolicyAsync( + string bundlePath, + BundleManifest manifest, + string targetPolicyPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath); + ArgumentNullException.ThrowIfNull(manifest); + ArgumentException.ThrowIfNullOrWhiteSpace(targetPolicyPath); + + var policy = manifest.GetLocalRbacPolicy(); + if (policy is null) + { + return false; + } + + var sourcePath = Path.Combine(bundlePath, policy.RelativePath); + if (!File.Exists(sourcePath)) + { + throw new FileNotFoundException( + $"Local RBAC policy not found in bundle at expected path: {policy.RelativePath}", + sourcePath); + } + + var targetDir = Path.GetDirectoryName(targetPolicyPath); + if (!string.IsNullOrEmpty(targetDir)) + { + Directory.CreateDirectory(targetDir); + } + + await using var sourceStream = File.OpenRead(sourcePath); + await using var targetStream = File.Create(targetPolicyPath); + await sourceStream.CopyToAsync(targetStream, cancellationToken).ConfigureAwait(false); + + return true; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Manifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Manifest.cs new file mode 100644 index 000000000..bdac073f6 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.Manifest.cs @@ -0,0 +1,46 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public static partial class LocalRbacBundleExtensions +{ + /// + /// Checks if a bundle manifest contains local RBAC policy. + /// + /// Bundle manifest to check. + /// True if the manifest contains local RBAC policy. + public static bool HasLocalRbacPolicy(this BundleManifest manifest) + { + ArgumentNullException.ThrowIfNull(manifest); + + foreach (var policy in manifest.Policies) + { + if (policy.Type == PolicyType.LocalRbac) + { + return true; + } + } + + return false; + } + + /// + /// Gets the local RBAC policy component from a bundle manifest. + /// + /// Bundle manifest to search. + /// The local RBAC policy component, or null if not found. + public static PolicyComponent? 
GetLocalRbacPolicy(this BundleManifest manifest) + { + ArgumentNullException.ThrowIfNull(manifest); + + foreach (var policy in manifest.Policies) + { + if (policy.Type == PolicyType.LocalRbac) + { + return policy; + } + } + + return null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.PolicyConfig.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.PolicyConfig.cs new file mode 100644 index 000000000..cc3715057 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.PolicyConfig.cs @@ -0,0 +1,57 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public static partial class LocalRbacBundleExtensions +{ + /// + /// Creates a policy build config for including local RBAC policy in an offline kit bundle. + /// + /// Source path to the local RBAC policy file (YAML or JSON). + /// Policy version string. + /// Optional relative path in the bundle (defaults to "authority/local-policy.yaml"). + /// PolicyBuildConfig ready for inclusion in BundleBuildRequest. + public static PolicyBuildConfig CreateLocalRbacPolicyConfig( + string sourcePolicyPath, + string version, + string? relativePath = null) + { + ArgumentNullException.ThrowIfNull(sourcePolicyPath); + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + if (!File.Exists(sourcePolicyPath)) + { + throw new FileNotFoundException( + "Local RBAC policy file not found. Ensure the policy file exists before building the offline kit.", + sourcePolicyPath); + } + + var fileName = Path.GetFileName(sourcePolicyPath); + var targetPath = relativePath ?? Path.Combine(DefaultPolicyDirectory, fileName); + + return new PolicyBuildConfig( + PolicyId: "local-rbac-policy", + Name: "Local RBAC Policy", + Version: version, + SourcePath: sourcePolicyPath, + RelativePath: targetPath, + Type: PolicyType.LocalRbac); + } + + /// + /// Adds local RBAC policies to a list of policy build configs. + /// + /// Existing list of policy build configs. + /// Source path to the local RBAC policy file. + /// Policy version string. + /// New list with the local RBAC policy added. + public static IReadOnlyList WithLocalRbacPolicy( + this IReadOnlyList policies, + string sourcePolicyPath, + string version) + { + var list = new List(policies); + list.Add(CreateLocalRbacPolicyConfig(sourcePolicyPath, version)); + return list; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.cs index 2fbd878f5..8c6020769 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacBundleExtensions.cs @@ -6,19 +6,13 @@ // ----------------------------------------------------------------------------- -using StellaOps.AirGap.Bundle.Models; -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading; -using System.Threading.Tasks; namespace StellaOps.AirGap.Bundle.Services; /// /// Extension methods for including local RBAC policy in offline kit bundles. /// -public static class LocalRbacBundleExtensions +public static partial class LocalRbacBundleExtensions { /// /// Default policy file name in the offline kit. 
@@ -30,151 +24,4 @@ public static class LocalRbacBundleExtensions /// public const string DefaultPolicyDirectory = "authority"; - /// - /// Creates a policy build config for including local RBAC policy in an offline kit bundle. - /// - /// Source path to the local RBAC policy file (YAML or JSON). - /// Policy version string. - /// Optional relative path in the bundle (defaults to "authority/local-policy.yaml"). - /// PolicyBuildConfig ready for inclusion in BundleBuildRequest. - public static PolicyBuildConfig CreateLocalRbacPolicyConfig( - string sourcePolicyPath, - string version, - string? relativePath = null) - { - ArgumentNullException.ThrowIfNull(sourcePolicyPath); - ArgumentException.ThrowIfNullOrWhiteSpace(version); - - if (!File.Exists(sourcePolicyPath)) - { - throw new FileNotFoundException( - "Local RBAC policy file not found. Ensure the policy file exists before building the offline kit.", - sourcePolicyPath); - } - - var fileName = Path.GetFileName(sourcePolicyPath); - var targetPath = relativePath ?? Path.Combine(DefaultPolicyDirectory, fileName); - - return new PolicyBuildConfig( - PolicyId: "local-rbac-policy", - Name: "Local RBAC Policy", - Version: version, - SourcePath: sourcePolicyPath, - RelativePath: targetPath, - Type: PolicyType.LocalRbac); - } - - /// - /// Adds local RBAC policies to a list of policy build configs. - /// - /// Existing list of policy build configs. - /// Source path to the local RBAC policy file. - /// Policy version string. - /// New list with the local RBAC policy added. - public static IReadOnlyList WithLocalRbacPolicy( - this IReadOnlyList policies, - string sourcePolicyPath, - string version) - { - var list = new List(policies); - list.Add(CreateLocalRbacPolicyConfig(sourcePolicyPath, version)); - return list; - } - - /// - /// Checks if a bundle manifest contains local RBAC policy. - /// - /// Bundle manifest to check. - /// True if the manifest contains local RBAC policy. - public static bool HasLocalRbacPolicy(this BundleManifest manifest) - { - ArgumentNullException.ThrowIfNull(manifest); - - foreach (var policy in manifest.Policies) - { - if (policy.Type == PolicyType.LocalRbac) - { - return true; - } - } - - return false; - } - - /// - /// Gets the local RBAC policy component from a bundle manifest. - /// - /// Bundle manifest to search. - /// The local RBAC policy component, or null if not found. - public static PolicyComponent? GetLocalRbacPolicy(this BundleManifest manifest) - { - ArgumentNullException.ThrowIfNull(manifest); - - foreach (var policy in manifest.Policies) - { - if (policy.Type == PolicyType.LocalRbac) - { - return policy; - } - } - - return null; - } - - /// - /// Extracts and installs local RBAC policy from a bundle to the target path. - /// - /// Path to the extracted bundle. - /// Bundle manifest. - /// Target path to install the policy file. - /// Cancellation token. - /// True if the policy was installed successfully. 
- public static async Task InstallLocalRbacPolicyAsync( - string bundlePath, - BundleManifest manifest, - string targetPolicyPath, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath); - ArgumentNullException.ThrowIfNull(manifest); - ArgumentException.ThrowIfNullOrWhiteSpace(targetPolicyPath); - - var policy = manifest.GetLocalRbacPolicy(); - if (policy is null) - { - return false; - } - - var sourcePath = Path.Combine(bundlePath, policy.RelativePath); - if (!File.Exists(sourcePath)) - { - throw new FileNotFoundException( - $"Local RBAC policy not found in bundle at expected path: {policy.RelativePath}", - sourcePath); - } - - // Ensure target directory exists - var targetDir = Path.GetDirectoryName(targetPolicyPath); - if (!string.IsNullOrEmpty(targetDir)) - { - Directory.CreateDirectory(targetDir); - } - - // Copy with verification - await using var sourceStream = File.OpenRead(sourcePath); - await using var targetStream = File.Create(targetPolicyPath); - await sourceStream.CopyToAsync(targetStream, cancellationToken).ConfigureAwait(false); - - return true; - } } - -/// -/// Result of local RBAC policy installation from an offline kit. -/// -public sealed record LocalRbacInstallResult( - bool Success, - string? InstalledPath, - string? PolicyVersion, - string? PolicyDigest, - string? Error); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacInstallResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacInstallResult.cs new file mode 100644 index 000000000..a7dd3cdd3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/LocalRbacInstallResult.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Result of local RBAC policy installation from an offline kit. +/// +public sealed record LocalRbacInstallResult( + bool Success, + string? InstalledPath, + string? PolicyVersion, + string? PolicyDigest, + string? 
Error); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Extraction.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Extraction.cs new file mode 100644 index 000000000..11e6a33fd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Extraction.cs @@ -0,0 +1,61 @@ +using System.Formats.Asn1; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class OcspResponseFetcher +{ + private static IReadOnlyList<Uri> ExtractOcspUris(X509Certificate2 certificate) + { + try + { + var ext = certificate.Extensions.Cast<X509Extension>() + .FirstOrDefault(e => e.Oid?.Value == "1.3.6.1.5.5.7.1.1"); + if (ext is null) + { + return Array.Empty<Uri>(); + } + + var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); + var bytes = reader.ReadOctetString(); + var aiaReader = new AsnReader(bytes, AsnEncodingRules.DER); + var sequence = aiaReader.ReadSequence(); + + var uris = new List<Uri>(); + while (sequence.HasData) + { + var accessDescription = sequence.ReadSequence(); + var accessMethod = accessDescription.ReadObjectIdentifier(); + if (!accessDescription.HasData) + { + continue; + } + + var tag = accessDescription.PeekTag(); + if (accessMethod == "1.3.6.1.5.5.7.48.1" && + tag.TagClass == TagClass.ContextSpecific && + tag.TagValue == 6) + { + var uriValue = accessDescription.ReadCharacterString( + UniversalTagNumber.IA5String, + new Asn1Tag(TagClass.ContextSpecific, 6)); + + if (Uri.TryCreate(uriValue, UriKind.Absolute, out var uri)) + { + uris.Add(uri); + } + } + else + { + accessDescription.ReadEncodedValue(); + } + } + + return uris; + } + catch + { + return Array.Empty<Uri>(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Networked.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Networked.cs new file mode 100644 index 000000000..8627700de --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.Networked.cs @@ -0,0 +1,36 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class OcspResponseFetcher +{ + public static OcspResponseFetcher CreateNetworked(HttpClient? 
client = null) + { + client ??= _defaultClient; + return new OcspResponseFetcher(async (uri, ct) => + { + using var response = await client.GetAsync(uri, ct).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + return null; + } + + return await response.Content.ReadAsByteArrayAsync(ct).ConfigureAwait(false); + }); + } + + private async Task FetchCachedAsync(Uri uri, CancellationToken ct) + { + var key = uri.ToString(); + if (_cache.TryGetValue(key, out var cached)) + { + return cached; + } + + var data = await _fetcher!(uri, ct).ConfigureAwait(false); + if (data is { Length: > 0 }) + { + _cache[key] = data; + } + + return data; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.cs index 01c6f1aae..33cfe9fb8 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/OcspResponseFetcher.cs @@ -1,20 +1,10 @@ - -using System.Formats.Asn1; -using System.Net.Http; using System.Security.Cryptography.X509Certificates; namespace StellaOps.AirGap.Bundle.Services; -public interface IOcspResponseFetcher +public sealed partial class OcspResponseFetcher : IOcspResponseFetcher { - Task> FetchAsync( - IReadOnlyList certificateChain, - CancellationToken ct = default); -} - -public sealed class OcspResponseFetcher : IOcspResponseFetcher -{ - private static readonly HttpClient DefaultClient = new(); + private static readonly HttpClient _defaultClient = new(); private readonly Func>? _fetcher; private readonly Dictionary _cache = new(StringComparer.Ordinal); @@ -23,21 +13,6 @@ public sealed class OcspResponseFetcher : IOcspResponseFetcher _fetcher = fetcher; } - public static OcspResponseFetcher CreateNetworked(HttpClient? 
client = null) - { - client ??= DefaultClient; - return new OcspResponseFetcher(async (uri, ct) => - { - using var response = await client.GetAsync(uri, ct).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - return null; - } - - return await response.Content.ReadAsByteArrayAsync(ct).ConfigureAwait(false); - }); - } - public async Task> FetchAsync( IReadOnlyList certificateChain, CancellationToken ct = default) @@ -65,75 +40,4 @@ public sealed class OcspResponseFetcher : IOcspResponseFetcher return results; } - - private async Task FetchCachedAsync(Uri uri, CancellationToken ct) - { - var key = uri.ToString(); - if (_cache.TryGetValue(key, out var cached)) - { - return cached; - } - - var data = await _fetcher!(uri, ct).ConfigureAwait(false); - if (data is { Length: > 0 }) - { - _cache[key] = data; - } - - return data; - } - - private static IReadOnlyList ExtractOcspUris(X509Certificate2 certificate) - { - try - { - var ext = certificate.Extensions.Cast() - .FirstOrDefault(e => e.Oid?.Value == "1.3.6.1.5.5.7.1.1"); - if (ext is null) - { - return Array.Empty(); - } - - var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); - var bytes = reader.ReadOctetString(); - var aiaReader = new AsnReader(bytes, AsnEncodingRules.DER); - var sequence = aiaReader.ReadSequence(); - - var uris = new List(); - while (sequence.HasData) - { - var accessDescription = sequence.ReadSequence(); - var accessMethod = accessDescription.ReadObjectIdentifier(); - if (!accessDescription.HasData) - { - continue; - } - - var tag = accessDescription.PeekTag(); - if (accessMethod == "1.3.6.1.5.5.7.48.1" && - tag.TagClass == TagClass.ContextSpecific && - tag.TagValue == 6) - { - var uriValue = accessDescription.ReadCharacterString( - UniversalTagNumber.IA5String, - new Asn1Tag(TagClass.ContextSpecific, 6)); - - if (Uri.TryCreate(uriValue, UriKind.Absolute, out var uri)) - { - uris.Add(uri); - } - } - else - { - accessDescription.ReadEncodedValue(); - } - } - - return uris; - } - catch - { - return Array.Empty(); - } - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/Abstractions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PathValidation.cs similarity index 53% rename from src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/Abstractions.cs rename to src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PathValidation.cs index ddc245d00..23d978375 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/Abstractions.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PathValidation.cs @@ -1,82 +1,5 @@ -// ----------------------------------------------------------------------------- -// Abstractions.cs -// Description: Abstractions for deterministic/testable time and ID generation. -// ----------------------------------------------------------------------------- - namespace StellaOps.AirGap.Bundle.Services; -/// -/// Provides unique identifiers. Inject to enable deterministic testing. -/// -public interface IGuidProvider -{ - /// - /// Creates a new unique identifier. - /// - Guid NewGuid(); -} - -/// -/// Default GUID provider using system random GUIDs. -/// -public sealed class SystemGuidProvider : IGuidProvider -{ - /// - /// Singleton instance of the system GUID provider. - /// - public static SystemGuidProvider Instance { get; } = new(); - - /// - public Guid NewGuid() => Guid.NewGuid(); -} - -/// -/// Options for configuring bundle validation behavior. 
-/// -public sealed class BundleValidationOptions -{ - /// - /// Maximum age in days for feed snapshots before they are flagged as stale. - /// Default is 7 days. - /// - public int MaxFeedAgeDays { get; set; } = 7; - - /// - /// Whether to fail validation on stale feeds or just warn. - /// - public bool FailOnStaleFeed { get; set; } - - /// - /// Whether to validate policy digests. - /// - public bool ValidatePolicies { get; set; } = true; - - /// - /// Whether to validate crypto material digests. - /// - public bool ValidateCryptoMaterials { get; set; } = true; - - /// - /// Whether to validate catalog digests if present. - /// - public bool ValidateCatalogs { get; set; } = true; - - /// - /// Whether to validate Rekor snapshot entries if present. - /// - public bool ValidateRekorSnapshots { get; set; } = true; - - /// - /// Whether to validate crypto provider entries if present. - /// - public bool ValidateCryptoProviders { get; set; } = true; - - /// - /// Whether to validate artifact digests (function maps, observations, verification reports). - /// - public bool ValidateArtifacts { get; set; } = true; -} - /// /// Utility methods for path validation and security. /// @@ -153,7 +76,7 @@ public static class PathValidation if (!combined.StartsWith(normalizedRoot, StringComparison.OrdinalIgnoreCase)) { throw new ArgumentException( - $"Path traversal detected: combined path escapes root directory", + "Path traversal detected: combined path escapes root directory", nameof(relativePath)); } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyContent.cs new file mode 100644 index 000000000..abf036abb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyContent.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record PolicyContent +{ + public required string PolicyId { get; init; } + public required string Name { get; init; } + public required string Version { get; init; } + public required string FileName { get; init; } + public required byte[] Content { get; init; } + public string Type { get; init; } = "OpaRego"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportModels.cs new file mode 100644 index 000000000..6f8f353c9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportModels.cs @@ -0,0 +1,51 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Imported policy pack record. +/// +public sealed record ImportedPolicyPack( + string Id, + string TenantId, + string Digest, + string Version, + byte[] Content, + PolicyPackMetadata? Metadata, + DateTimeOffset ImportedAt); + +/// +/// DTO for deserializing policy bundle. +/// +internal sealed record PolicyBundleDto +{ + public int SchemaVersion { get; init; } = 1; + public string? DomainId { get; init; } + public string? Name { get; init; } + public string? Description { get; init; } + public string? Version { get; init; } + public PolicyPackMetadata? Metadata { get; init; } + public IReadOnlyList? Rules { get; init; } +} + +/// +/// Policy pack metadata. +/// +public sealed record PolicyPackMetadata +{ + public string? Author { get; init; } + public string? License { get; init; } + public string? Homepage { get; init; } + public DateTimeOffset? CreatedAt { get; init; } + public IReadOnlyList? 
Tags { get; init; } +} + +/// +/// Policy rule within a bundle. +/// +internal sealed record PolicyRuleDto +{ + public string? Id { get; init; } + public string? Name { get; init; } + public string? Description { get; init; } + public string? Severity { get; init; } + public string? Expression { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Helpers.cs new file mode 100644 index 000000000..e11eb5f7d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Helpers.cs @@ -0,0 +1,21 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class PolicyRegistryImportTarget +{ + private static string ComputeDigest(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexStringLower(hash)}"; + } + + private static PolicyBundleDto ParsePolicyBundle(byte[] content) + { + var json = Encoding.UTF8.GetString(content); + return JsonSerializer.Deserialize<PolicyBundleDto>(json, _jsonOptions) + ?? throw new InvalidDataException("Failed to parse policy bundle"); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Import.cs new file mode 100644 index 000000000..ad2675c14 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.Import.cs @@ -0,0 +1,87 @@ +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class PolicyRegistryImportTarget +{ + /// + public async Task<ModuleImportResultData> ImportPolicyAsync( + PolicyImportData data, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(data); + + if (data.Content.Length == 0) + { + return new ModuleImportResultData + { + Failed = 1, + Error = "Empty policy content" + }; + } + + var created = 0; + var updated = 0; + var failed = 0; + + try + { + // Compute content digest for deduplication + var digest = ComputeDigest(data.Content); + + // Check if already exists + var existing = await _store.FindByDigestAsync(_tenantId, digest, cancellationToken) + .ConfigureAwait(false); + if (existing is not null) + { + updated++; + return new ModuleImportResultData + { + Updated = updated, + Error = null + }; + } + + // Parse policy bundle to validate + var bundle = ParsePolicyBundle(data.Content); + + // Store the policy pack + var pack = new ImportedPolicyPack( + Id: data.PolicyId, + TenantId: _tenantId, + Digest: digest, + Version: data.Version ?? 
"1.0.0", + Content: data.Content, + Metadata: bundle.Metadata, + ImportedAt: _timeProvider.GetUtcNow()); + + await _store.SaveAsync(pack, cancellationToken).ConfigureAwait(false); + created++; + + return new ModuleImportResultData + { + Created = created, + Updated = updated, + Error = null + }; + } + catch (JsonException ex) + { + return new ModuleImportResultData + { + Failed = 1, + Error = $"JSON parse error: {ex.Message}" + }; + } + catch (Exception ex) + { + return new ModuleImportResultData + { + Created = created, + Updated = updated, + Failed = failed + 1, + Error = $"Policy import error: {ex.Message}" + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.cs index a59fd2961..20220d1f2 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/PolicyRegistryImportTarget.cs @@ -1,15 +1,3 @@ -// ----------------------------------------------------------------------------- -// PolicyRegistryImportTarget.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Tasks: SEAL-017 - Apply snapshot policy content to Policy registry -// Description: Adapter implementing IPolicyImportTarget for Policy module. -// ----------------------------------------------------------------------------- - - -using StellaOps.AirGap.Bundle.Models; -using StellaOps.Determinism; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -18,9 +6,9 @@ namespace StellaOps.AirGap.Bundle.Services; /// Implements IPolicyImportTarget for importing policy packs from snapshots. /// Parses policy bundle content and stores to the policy registry. /// -public sealed class PolicyRegistryImportTarget : IPolicyImportTarget +public sealed partial class PolicyRegistryImportTarget : IPolicyImportTarget { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, PropertyNameCaseInsensitive = true @@ -39,214 +27,4 @@ public sealed class PolicyRegistryImportTarget : IPolicyImportTarget _tenantId = tenantId; _timeProvider = timeProvider ?? TimeProvider.System; } - - /// - public async Task ImportPolicyAsync( - PolicyImportData data, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(data); - - if (data.Content.Length == 0) - { - return new ModuleImportResultData - { - Failed = 1, - Error = "Empty policy content" - }; - } - - var created = 0; - var updated = 0; - var failed = 0; - - try - { - // Compute content digest for deduplication - var digest = ComputeDigest(data.Content); - - // Check if already exists - var existing = await _store.FindByDigestAsync(_tenantId, digest, cancellationToken); - if (existing is not null) - { - updated++; - return new ModuleImportResultData - { - Updated = updated, - Error = null - }; - } - - // Parse policy bundle to validate - var bundle = ParsePolicyBundle(data.Content); - - // Store the policy pack - var pack = new ImportedPolicyPack( - Id: data.PolicyId, - TenantId: _tenantId, - Digest: digest, - Version: data.Version ?? 
"1.0.0", - Content: data.Content, - Metadata: bundle.Metadata, - ImportedAt: _timeProvider.GetUtcNow()); - - await _store.SaveAsync(pack, cancellationToken); - created++; - - return new ModuleImportResultData - { - Created = created, - Updated = updated, - Error = null - }; - } - catch (JsonException ex) - { - return new ModuleImportResultData - { - Failed = 1, - Error = $"JSON parse error: {ex.Message}" - }; - } - catch (Exception ex) - { - return new ModuleImportResultData - { - Created = created, - Updated = updated, - Failed = failed + 1, - Error = $"Policy import error: {ex.Message}" - }; - } - } - - private static string ComputeDigest(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexStringLower(hash)}"; - } - - private static PolicyBundleDto ParsePolicyBundle(byte[] content) - { - var json = Encoding.UTF8.GetString(content); - return JsonSerializer.Deserialize(json, JsonOptions) - ?? throw new InvalidDataException("Failed to parse policy bundle"); - } -} - -/// -/// Store interface for importing policy packs from air-gap snapshots. -/// -public interface IPolicyPackImportStore -{ - /// - /// Finds an imported policy pack by content digest. - /// - Task FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken); - - /// - /// Saves an imported policy pack. - /// - Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken); - - /// - /// Lists all imported policy packs for a tenant. - /// - Task> ListAsync(string tenantId, CancellationToken cancellationToken); -} - -/// -/// Lightweight in-memory implementation of IPolicyPackImportStore for air-gap scenarios. -/// -public sealed class InMemoryPolicyPackImportStore : IPolicyPackImportStore -{ - private readonly Dictionary _packs = new(); - private readonly object _lock = new(); - - public Task FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken) - { - lock (_lock) - { - var pack = _packs.Values.FirstOrDefault(p => p.TenantId == tenantId && p.Digest == digest); - return Task.FromResult(pack); - } - } - - public Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken) - { - lock (_lock) - { - _packs[$"{pack.TenantId}:{pack.Id}"] = pack; - } - return Task.CompletedTask; - } - - public Task> ListAsync(string tenantId, CancellationToken cancellationToken) - { - lock (_lock) - { - var packs = _packs.Values.Where(p => p.TenantId == tenantId).ToList(); - return Task.FromResult>(packs); - } - } - - public int Count => _packs.Count; - - public IEnumerable GetAllPacks() - { - lock (_lock) - { - return _packs.Values.ToList(); - } - } -} - -/// -/// Imported policy pack record. -/// -public sealed record ImportedPolicyPack( - string Id, - string TenantId, - string Digest, - string Version, - byte[] Content, - PolicyPackMetadata? Metadata, - DateTimeOffset ImportedAt); - -/// -/// DTO for deserializing policy bundle. -/// -internal sealed record PolicyBundleDto -{ - public int SchemaVersion { get; init; } = 1; - public string? DomainId { get; init; } - public string? Name { get; init; } - public string? Description { get; init; } - public string? Version { get; init; } - public PolicyPackMetadata? Metadata { get; init; } - public IReadOnlyList? Rules { get; init; } -} - -/// -/// Policy pack metadata. -/// -public sealed record PolicyPackMetadata -{ - public string? Author { get; init; } - public string? License { get; init; } - public string? Homepage { get; init; } - public DateTimeOffset? 
CreatedAt { get; init; } - public IReadOnlyList? Tags { get; init; } -} - -/// -/// Policy rule within a bundle. -/// -internal sealed record PolicyRuleDto -{ - public string? Id { get; init; } - public string? Name { get; init; } - public string? Description { get; init; } - public string? Severity { get; init; } - public string? Expression { get; init; } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleBuildConfig.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleBuildConfig.cs new file mode 100644 index 000000000..ec44878da --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleBuildConfig.cs @@ -0,0 +1,22 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Configuration for building a rule bundle component. +/// +/// Bundle identifier (e.g., "secrets.ruleset"). +/// Bundle type (e.g., "secrets", "malware"). +/// Bundle version in YYYY.MM format. +/// Source directory containing the rule bundle files. +/// Relative path in the output bundle. +/// Number of rules in the bundle. +/// Key ID used to sign the bundle. +/// When the bundle was signed. +public sealed record RuleBundleBuildConfig( + string BundleId, + string BundleType, + string Version, + string SourceDirectory, + string RelativePath, + int RuleCount, + string? SignerKeyId, + DateTimeOffset? SignedAt); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleContent.cs new file mode 100644 index 000000000..b8387f250 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleContent.cs @@ -0,0 +1,47 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Content for a rule bundle (e.g., secrets detection rules). +/// +public sealed record RuleBundleContent +{ + /// + /// Bundle identifier (e.g., "secrets.ruleset"). + /// + public required string BundleId { get; init; } + + /// + /// Bundle type (e.g., "secrets", "malware"). + /// + public required string BundleType { get; init; } + + /// + /// Bundle version in YYYY.MM format. + /// + public required string Version { get; init; } + + /// + /// Files in the bundle. + /// + public required List Files { get; init; } + + /// + /// Number of rules in the bundle. + /// + public int RuleCount { get; init; } + + /// + /// Key ID used to sign the bundle. + /// + public string? SignerKeyId { get; init; } + + /// + /// When the bundle was signed. + /// + public DateTimeOffset? SignedAt { get; init; } + + /// + /// When the bundle signature was verified during export. + /// + public DateTimeOffset? VerifiedAt { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleFileContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleFileContent.cs new file mode 100644 index 000000000..5cd1aad0b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/RuleBundleFileContent.cs @@ -0,0 +1,17 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// A file within a rule bundle. +/// +public sealed record RuleBundleFileContent +{ + /// + /// Filename (e.g., "secrets.ruleset.manifest.json"). + /// + public required string Name { get; init; } + + /// + /// File content. 
+ /// + public required byte[] Content { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadRequest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadRequest.cs new file mode 100644 index 000000000..dd98cb9ce --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadRequest.cs @@ -0,0 +1,56 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Request for reading a snapshot bundle. +/// +public sealed record SnapshotBundleReadRequest +{ + public required string BundlePath { get; init; } + + /// + /// Verify the manifest signature. + /// + public bool VerifySignature { get; init; } = true; + + /// + /// Fail if signature is invalid. + /// + public bool RequireValidSignature { get; init; } + + /// + /// Verify the merkle root. + /// + public bool VerifyMerkleRoot { get; init; } = true; + + /// + /// Fail if merkle root is invalid. + /// + public bool RequireValidMerkleRoot { get; init; } = true; + + /// + /// Verify time anchor freshness. + /// + public bool VerifyTimeAnchor { get; init; } = true; + + /// + /// Fail if time anchor is invalid. + /// + public bool RequireValidTimeAnchor { get; init; } + + /// + /// Maximum age in hours for time anchor validation. + /// + public int? MaxAgeHours { get; init; } + + /// + /// Maximum clock drift in seconds for time anchor validation. + /// + public int? MaxClockDriftSeconds { get; init; } + + /// + /// Public key for signature verification. + /// + public AsymmetricAlgorithm? PublicKey { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadResult.cs new file mode 100644 index 000000000..6778cbe9c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReadResult.cs @@ -0,0 +1,34 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Result of reading a snapshot bundle. +/// +public sealed record SnapshotBundleReadResult +{ + public bool Success { get; init; } + public KnowledgeSnapshotManifest? Manifest { get; init; } + public string? BundleDigest { get; init; } + public string? Error { get; init; } + + // Signature verification + public bool? SignatureVerified { get; init; } + public string? SignatureKeyId { get; init; } + public string? SignatureError { get; init; } + + // Merkle root verification + public bool? MerkleRootVerified { get; init; } + public string? MerkleRootError { get; init; } + + // Time anchor verification + public bool? TimeAnchorValid { get; init; } + public double? TimeAnchorAgeHours { get; init; } + public string? 
TimeAnchorError { get; init; } + + public static SnapshotBundleReadResult Failed(string error) => new() + { + Success = false, + Error = error + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Extract.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Extract.cs new file mode 100644 index 000000000..86209df12 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Extract.cs @@ -0,0 +1,39 @@ +using System.Formats.Tar; +using System.IO.Compression; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct) + { + await using var fileStream = File.OpenRead(bundlePath); + await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); + await using var tarReader = new TarReader(gzipStream); + + TarEntry? entry; + while ((entry = await tarReader.GetNextEntryAsync(copyData: false, ct).ConfigureAwait(false)) is not null) + { + ct.ThrowIfCancellationRequested(); + + if (!PathValidation.IsSafeRelativePath(entry.Name)) + { + throw new InvalidOperationException( + $"Unsafe path detected in bundle: '{entry.Name}'. Path traversal or absolute paths are not allowed."); + } + + var targetPath = PathValidation.SafeCombine(targetDir, entry.Name); + var targetEntryDir = Path.GetDirectoryName(targetPath); + if (!string.IsNullOrEmpty(targetEntryDir) && !Directory.Exists(targetEntryDir)) + { + Directory.CreateDirectory(targetEntryDir); + } + + if (entry.EntryType == TarEntryType.RegularFile && entry.DataStream is not null) + { + await using var outputStream = File.Create(targetPath); + await entry.DataStream.CopyToAsync(outputStream, ct).ConfigureAwait(false); + } + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Hashing.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Hashing.cs new file mode 100644 index 000000000..f814c9e10 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Hashing.cs @@ -0,0 +1,57 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct) + { + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static string ComputeSha256(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static string ComputeMerkleRoot(List<BundleEntry> entries) + { + if (entries.Count == 0) + { + return string.Empty; + } + + var leaves = entries + .OrderBy(e => e.Path, StringComparer.Ordinal) + .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}"))) + .ToArray(); + + while (leaves.Length > 1) + { + leaves = PairwiseHash(leaves).ToArray(); + } + + return Convert.ToHexString(leaves[0]).ToLowerInvariant(); + } + + private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes) + { + for (var i = 0; i < nodes.Length; i += 2) + { + if (i + 1 >= nodes.Length) + { + yield return SHA256.HashData(nodes[i]); + continue; + } + + var combined = new byte[nodes[i].Length + nodes[i + 
1].Length]; + Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length); + Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length); + yield return SHA256.HashData(combined); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Manifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Manifest.cs new file mode 100644 index 000000000..1c431e794 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Manifest.cs @@ -0,0 +1,27 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task ReadManifestAsync( + string tempDir, + CancellationToken cancellationToken) + { + var manifestPath = Path.Combine(tempDir, "manifest.json"); + if (!File.Exists(manifestPath)) + { + return ManifestReadResult.Failed("Manifest not found in bundle"); + } + + var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken).ConfigureAwait(false); + var manifest = JsonSerializer.Deserialize(manifestBytes, _jsonOptions); + if (manifest is null) + { + return ManifestReadResult.Failed("Failed to parse manifest"); + } + + return ManifestReadResult.FromManifest(manifest, manifestBytes); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Add.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Add.cs new file mode 100644 index 000000000..83870b6a9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Add.cs @@ -0,0 +1,55 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task AddRuleBundleEntriesAsync( + List entries, + string bundleDir, + RuleBundleSnapshotEntry ruleBundle, + CancellationToken cancellationToken) + { + foreach (var file in ruleBundle.Files) + { + var relativePath = $"{ruleBundle.RelativePath}/{file.Name}"; + var error = await AddEntryAsync( + entries, + bundleDir, + relativePath, + file.Digest, + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + } + + return null; + } + + private static async Task AddEntryAsync( + List entries, + string bundleDir, + string relativePath, + string expectedDigest, + CancellationToken cancellationToken) + { + var filePath = Path.Combine(bundleDir, relativePath.Replace('/', Path.DirectorySeparatorChar)); + if (!File.Exists(filePath)) + { + return $"Missing file: {relativePath}"; + } + + var content = await File.ReadAllBytesAsync(filePath, cancellationToken).ConfigureAwait(false); + var digest = ComputeSha256(content); + if (digest != expectedDigest) + { + return $"Digest mismatch for {relativePath}"; + } + + entries.Add(new BundleEntry(relativePath, digest, content.Length)); + return null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Entries.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Entries.cs new file mode 100644 index 000000000..552359a41 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.Entries.cs @@ -0,0 +1,32 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace 
StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task AddEntriesAsync( + List entries, + string bundleDir, + IEnumerable source, + Func pathSelector, + Func digestSelector, + CancellationToken cancellationToken) + { + foreach (var entry in source) + { + var error = await AddEntryAsync( + entries, + bundleDir, + pathSelector(entry), + digestSelector(entry), + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + } + + return null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.cs new file mode 100644 index 000000000..ff40739cd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.MerkleEntries.cs @@ -0,0 +1,77 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task BuildMerkleEntriesAsync( + List entries, + string bundleDir, + KnowledgeSnapshotManifest manifest, + CancellationToken cancellationToken) + { + var error = await AddEntriesAsync( + entries, + bundleDir, + manifest.Advisories, + entry => entry.RelativePath, + entry => entry.Digest, + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + + error = await AddEntriesAsync( + entries, + bundleDir, + manifest.VexStatements, + entry => entry.RelativePath, + entry => entry.Digest, + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + + error = await AddEntriesAsync( + entries, + bundleDir, + manifest.Policies, + entry => entry.RelativePath, + entry => entry.Digest, + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + + error = await AddEntriesAsync( + entries, + bundleDir, + manifest.TrustRoots, + entry => entry.RelativePath, + entry => entry.Digest, + cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + + foreach (var ruleBundle in manifest.RuleBundles) + { + error = await AddRuleBundleEntriesAsync(entries, bundleDir, ruleBundle, cancellationToken) + .ConfigureAwait(false); + if (error is not null) + { + return error; + } + } + + return null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs new file mode 100644 index 000000000..6ae537f5b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs @@ -0,0 +1,42 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private sealed record BundleEntry(string Path, string Digest, long SizeBytes); + + private sealed record SignatureVerificationResult + { + public bool Verified { get; init; } + public string? KeyId { get; init; } + public string? Error { get; init; } + } + + private sealed record MerkleVerificationResult + { + public bool Verified { get; init; } + public string? Error { get; init; } + } + + private sealed record ManifestReadResult + { + public bool Success { get; init; } + public KnowledgeSnapshotManifest? Manifest { get; init; } + public byte[]? ManifestBytes { get; init; } + public string? 
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs
new file mode 100644
index 000000000..6ae537f5b
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Models.cs
@@ -0,0 +1,42 @@
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleReader
+{
+    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
+
+    private sealed record SignatureVerificationResult
+    {
+        public bool Verified { get; init; }
+        public string? KeyId { get; init; }
+        public string? Error { get; init; }
+    }
+
+    private sealed record MerkleVerificationResult
+    {
+        public bool Verified { get; init; }
+        public string? Error { get; init; }
+    }
+
+    private sealed record ManifestReadResult
+    {
+        public bool Success { get; init; }
+        public KnowledgeSnapshotManifest? Manifest { get; init; }
+        public byte[]? ManifestBytes { get; init; }
+        public string? Error { get; init; }
+
+        public static ManifestReadResult Failed(string error) => new()
+        {
+            Success = false,
+            Error = error
+        };
+
+        public static ManifestReadResult FromManifest(KnowledgeSnapshotManifest manifest, byte[] manifestBytes) => new()
+        {
+            Success = true,
+            Manifest = manifest,
+            ManifestBytes = manifestBytes
+        };
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Read.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Read.cs
new file mode 100644
index 000000000..de0a6c57c
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Read.cs
@@ -0,0 +1,34 @@
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleReader
+{
+    /// <summary>
+    /// Reads and verifies a snapshot bundle.
+    /// </summary>
+    public async Task<SnapshotBundleReadResult> ReadAsync(
+        SnapshotBundleReadRequest request,
+        CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(request);
+        ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
+
+        if (!File.Exists(request.BundlePath))
+        {
+            return SnapshotBundleReadResult.Failed("Bundle file not found");
+        }
+
+        var tempDir = CreateTempDir();
+        try
+        {
+            return await ReadBundleAsync(request, tempDir, cancellationToken).ConfigureAwait(false);
+        }
+        catch (Exception ex)
+        {
+            return SnapshotBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
+        }
+        finally
+        {
+            CleanupTempDir(tempDir);
+        }
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.ReadBundle.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.ReadBundle.cs
new file mode 100644
index 000000000..b649e1d28
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.ReadBundle.cs
@@ -0,0 +1,56 @@
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleReader
+{
+    private async Task<SnapshotBundleReadResult> ReadBundleAsync(
+        SnapshotBundleReadRequest request,
+        string tempDir,
+        CancellationToken cancellationToken)
+    {
+        await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken).ConfigureAwait(false);
+
+        var manifestResult = await ReadManifestAsync(tempDir, cancellationToken).ConfigureAwait(false);
+        if (!manifestResult.Success || manifestResult.Manifest is null)
+        {
+            return SnapshotBundleReadResult.Failed(manifestResult.Error ??
"Failed to parse manifest"); + } + + var result = new SnapshotBundleReadResult + { + Success = true, + Manifest = manifestResult.Manifest, + BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken).ConfigureAwait(false) + }; + + result = await ApplySignatureVerificationAsync( + request, + tempDir, + manifestResult, + result, + cancellationToken) + .ConfigureAwait(false); + if (!result.Success) + { + return result; + } + + result = await ApplyMerkleVerificationAsync( + request, + tempDir, + manifestResult.Manifest, + result, + cancellationToken) + .ConfigureAwait(false); + if (!result.Success) + { + return result; + } + + return await ApplyTimeAnchorVerificationAsync( + request, + manifestResult.Manifest, + result, + cancellationToken) + .ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.TempDir.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.TempDir.cs new file mode 100644 index 000000000..01260600a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.TempDir.cs @@ -0,0 +1,26 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private string CreateTempDir() + { + var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{_guidProvider.NewGuid():N}"); + Directory.CreateDirectory(tempDir); + return tempDir; + } + + private static void CleanupTempDir(string tempDir) + { + try + { + if (Directory.Exists(tempDir)) + { + Directory.Delete(tempDir, recursive: true); + } + } + catch + { + // Ignore cleanup errors. + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Merkle.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Merkle.cs new file mode 100644 index 000000000..bea635d8e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Merkle.cs @@ -0,0 +1,77 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task ApplyMerkleVerificationAsync( + SnapshotBundleReadRequest request, + string tempDir, + KnowledgeSnapshotManifest manifest, + SnapshotBundleReadResult result, + CancellationToken cancellationToken) + { + if (!request.VerifyMerkleRoot) + { + return result; + } + + var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken).ConfigureAwait(false); + result = result with + { + MerkleRootVerified = merkleResult.Verified, + MerkleRootError = merkleResult.Error + }; + + if (!merkleResult.Verified && request.RequireValidMerkleRoot) + { + return result with + { + Success = false, + Error = $"Merkle root verification failed: {merkleResult.Error}" + }; + } + + return result; + } + + private static async Task VerifyMerkleRootAsync( + string bundleDir, + KnowledgeSnapshotManifest manifest, + CancellationToken cancellationToken) + { + try + { + var entries = new List(); + var error = await BuildMerkleEntriesAsync(entries, bundleDir, manifest, cancellationToken).ConfigureAwait(false); + if (error is not null) + { + return new MerkleVerificationResult + { + Verified = false, + Error = error + }; + } + + var computedRoot = ComputeMerkleRoot(entries); + if (computedRoot != manifest.MerkleRoot) + { + return new MerkleVerificationResult + { + Verified = false, + Error = $"Merkle root mismatch: expected 
{manifest.MerkleRoot}, got {computedRoot}" + }; + } + + return new MerkleVerificationResult { Verified = true }; + } + catch (Exception ex) + { + return new MerkleVerificationResult + { + Verified = false, + Error = ex.Message + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.Verify.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.Verify.cs new file mode 100644 index 000000000..620d70ded --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.Verify.cs @@ -0,0 +1,62 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private static async Task VerifySignatureAsync( + byte[] manifestBytes, + byte[] signatureEnvelopeBytes, + AsymmetricAlgorithm? publicKey, + CancellationToken cancellationToken) + { + try + { + var signer = new SnapshotManifestSigner(); + var result = await signer.VerifyAsync( + new ManifestVerificationRequest + { + EnvelopeBytes = signatureEnvelopeBytes, + PublicKey = publicKey + }, + cancellationToken) + .ConfigureAwait(false); + + if (!result.Success) + { + return new SignatureVerificationResult + { + Verified = false, + Error = result.Error + }; + } + + var manifestDigest = ComputeSha256(manifestBytes); + if (result.PayloadDigest != manifestDigest) + { + return new SignatureVerificationResult + { + Verified = false, + Error = "Manifest digest does not match signed payload" + }; + } + + var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId; + + return new SignatureVerificationResult + { + Verified = publicKey is null + || (result.VerifiedSignatures?.Any(s => s.Verified == true) ?? 
false), + KeyId = keyId + }; + } + catch (Exception ex) + { + return new SignatureVerificationResult + { + Verified = false, + Error = ex.Message + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.cs new file mode 100644 index 000000000..e3b16e0c0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.Signature.cs @@ -0,0 +1,67 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private async Task ApplySignatureVerificationAsync( + SnapshotBundleReadRequest request, + string tempDir, + ManifestReadResult manifestResult, + SnapshotBundleReadResult result, + CancellationToken cancellationToken) + { + if (!request.VerifySignature) + { + return result; + } + + var signaturePath = Path.Combine(tempDir, "manifest.sig"); + if (!File.Exists(signaturePath)) + { + if (request.RequireValidSignature) + { + return result with + { + Success = false, + Error = "Signature file not found but signature is required" + }; + } + + return result; + } + + if (manifestResult.ManifestBytes is null) + { + return result with + { + Success = false, + Error = "Manifest payload missing for signature verification" + }; + } + + var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken).ConfigureAwait(false); + var signatureResult = await VerifySignatureAsync( + manifestResult.ManifestBytes, + signatureBytes, + request.PublicKey, + cancellationToken) + .ConfigureAwait(false); + + result = result with + { + SignatureVerified = signatureResult.Verified, + SignatureKeyId = signatureResult.KeyId, + SignatureError = signatureResult.Error + }; + + if (!signatureResult.Verified && request.RequireValidSignature) + { + return result with + { + Success = false, + Error = $"Signature verification failed: {signatureResult.Error}" + }; + } + + return result; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.TimeAnchor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.TimeAnchor.cs new file mode 100644 index 000000000..eaf52df9d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.Verify.TimeAnchor.cs @@ -0,0 +1,54 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleReader +{ + private async Task ApplyTimeAnchorVerificationAsync( + SnapshotBundleReadRequest request, + KnowledgeSnapshotManifest manifest, + SnapshotBundleReadResult result, + CancellationToken cancellationToken) + { + if (!request.VerifyTimeAnchor || manifest.TimeAnchor is null) + { + return result; + } + + var timeAnchorService = new TimeAnchorService(_timeProvider, _guidProvider); + var timeAnchorContent = new TimeAnchorContent + { + AnchorTime = manifest.TimeAnchor.AnchorTime, + Source = manifest.TimeAnchor.Source, + TokenDigest = manifest.TimeAnchor.Digest + }; + + var timeAnchorResult = await timeAnchorService.ValidateAnchorAsync( + timeAnchorContent, + new TimeAnchorValidationRequest + { + MaxAgeHours = request.MaxAgeHours, + MaxClockDriftSeconds = request.MaxClockDriftSeconds + }, + cancellationToken) + .ConfigureAwait(false); + + result = result with + { + TimeAnchorValid = timeAnchorResult.IsValid, + TimeAnchorAgeHours = timeAnchorResult.AgeHours, + 
TimeAnchorError = timeAnchorResult.Error + }; + + if (!timeAnchorResult.IsValid && request.RequireValidTimeAnchor) + { + return result with + { + Success = false, + Error = $"Time anchor validation failed: {timeAnchorResult.Error}" + }; + } + + return result; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.cs index ad6c102f9..9659c603f 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleReader.cs @@ -1,27 +1,10 @@ -// ----------------------------------------------------------------------------- -// SnapshotBundleReader.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Tasks: SEAL-012, SEAL-013 - Implement signature verification and merkle root validation -// Description: Reads and verifies sealed knowledge snapshot bundles. -// ----------------------------------------------------------------------------- - - -using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry; -using StellaOps.AirGap.Bundle.Models; -using System.Formats.Tar; -using System.IO.Compression; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; -/// -/// Reads and verifies sealed knowledge snapshot bundles. -/// -public sealed class SnapshotBundleReader : ISnapshotBundleReader +public sealed partial class SnapshotBundleReader : ISnapshotBundleReader { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; @@ -38,584 +21,4 @@ public sealed class SnapshotBundleReader : ISnapshotBundleReader _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider)); } - - /// - /// Reads and verifies a snapshot bundle. 
- /// - public async Task ReadAsync( - SnapshotBundleReadRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath); - - if (!File.Exists(request.BundlePath)) - { - return SnapshotBundleReadResult.Failed("Bundle file not found"); - } - - var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{_guidProvider.NewGuid():N}"); - Directory.CreateDirectory(tempDir); - - try - { - // Extract the bundle with path validation - await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken); - - // Read manifest - var manifestPath = Path.Combine(tempDir, "manifest.json"); - if (!File.Exists(manifestPath)) - { - return SnapshotBundleReadResult.Failed("Manifest not found in bundle"); - } - - var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken); - var manifest = JsonSerializer.Deserialize(manifestBytes, JsonOptions); - if (manifest is null) - { - return SnapshotBundleReadResult.Failed("Failed to parse manifest"); - } - - var result = new SnapshotBundleReadResult - { - Success = true, - Manifest = manifest, - BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken) - }; - - // Verify signature if requested - if (request.VerifySignature) - { - var signaturePath = Path.Combine(tempDir, "manifest.sig"); - if (File.Exists(signaturePath)) - { - var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken); - var signatureResult = await VerifySignatureAsync( - manifestBytes, signatureBytes, request.PublicKey, cancellationToken); - - result = result with - { - SignatureVerified = signatureResult.Verified, - SignatureKeyId = signatureResult.KeyId, - SignatureError = signatureResult.Error - }; - - if (!signatureResult.Verified && request.RequireValidSignature) - { - return result with - { - Success = false, - Error = $"Signature verification failed: {signatureResult.Error}" - }; - } - } - else if (request.RequireValidSignature) - { - return SnapshotBundleReadResult.Failed("Signature file not found but signature is required"); - } - } - - // Verify merkle root if requested - if (request.VerifyMerkleRoot) - { - var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken); - result = result with - { - MerkleRootVerified = merkleResult.Verified, - MerkleRootError = merkleResult.Error - }; - - if (!merkleResult.Verified && request.RequireValidMerkleRoot) - { - return result with - { - Success = false, - Error = $"Merkle root verification failed: {merkleResult.Error}" - }; - } - } - - // Verify time anchor if present - if (request.VerifyTimeAnchor && manifest.TimeAnchor is not null) - { - var timeAnchorService = new TimeAnchorService(_timeProvider, _guidProvider); - var timeAnchorContent = new TimeAnchorContent - { - AnchorTime = manifest.TimeAnchor.AnchorTime, - Source = manifest.TimeAnchor.Source, - TokenDigest = manifest.TimeAnchor.Digest - }; - - var timeAnchorResult = await timeAnchorService.ValidateAnchorAsync( - timeAnchorContent, - new TimeAnchorValidationRequest - { - MaxAgeHours = request.MaxAgeHours, - MaxClockDriftSeconds = request.MaxClockDriftSeconds - }, - cancellationToken); - - result = result with - { - TimeAnchorValid = timeAnchorResult.IsValid, - TimeAnchorAgeHours = timeAnchorResult.AgeHours, - TimeAnchorError = timeAnchorResult.Error - }; - - if (!timeAnchorResult.IsValid && request.RequireValidTimeAnchor) - { - return result with - { - Success = false, - Error = 
$"Time anchor validation failed: {timeAnchorResult.Error}" - }; - } - } - - return result; - } - catch (Exception ex) - { - return SnapshotBundleReadResult.Failed($"Failed to read bundle: {ex.Message}"); - } - finally - { - // Clean up temp directory - try - { - if (Directory.Exists(tempDir)) - { - Directory.Delete(tempDir, recursive: true); - } - } - catch - { - // Ignore cleanup errors - } - } - } - - private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct) - { - await using var fileStream = File.OpenRead(bundlePath); - await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress); - await using var tarReader = new TarReader(gzipStream); - - TarEntry? entry; - while ((entry = await tarReader.GetNextEntryAsync(copyData: false, ct)) is not null) - { - ct.ThrowIfCancellationRequested(); - - // Validate entry name to prevent path traversal - if (!PathValidation.IsSafeRelativePath(entry.Name)) - { - throw new InvalidOperationException( - $"Unsafe path detected in bundle: '{entry.Name}'. Path traversal or absolute paths are not allowed."); - } - - // Calculate safe target path - var targetPath = PathValidation.SafeCombine(targetDir, entry.Name); - var targetEntryDir = Path.GetDirectoryName(targetPath); - if (!string.IsNullOrEmpty(targetEntryDir) && !Directory.Exists(targetEntryDir)) - { - Directory.CreateDirectory(targetEntryDir); - } - - if (entry.EntryType == TarEntryType.RegularFile && entry.DataStream is not null) - { - await using var outputStream = File.Create(targetPath); - await entry.DataStream.CopyToAsync(outputStream, ct); - } - } - } - - private static async Task ComputeFileDigestAsync(string filePath, CancellationToken ct) - { - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, ct); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static async Task VerifySignatureAsync( - byte[] manifestBytes, - byte[] signatureEnvelopeBytes, - AsymmetricAlgorithm? publicKey, - CancellationToken cancellationToken) - { - try - { - var signer = new SnapshotManifestSigner(); - var result = await signer.VerifyAsync( - new ManifestVerificationRequest - { - EnvelopeBytes = signatureEnvelopeBytes, - PublicKey = publicKey - }, - cancellationToken); - - if (!result.Success) - { - return new SignatureVerificationResult - { - Verified = false, - Error = result.Error - }; - } - - // Verify the payload digest matches the manifest - var manifestDigest = ComputeSha256(manifestBytes); - if (result.PayloadDigest != manifestDigest) - { - return new SignatureVerificationResult - { - Verified = false, - Error = "Manifest digest does not match signed payload" - }; - } - - var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId; - - return new SignatureVerificationResult - { - Verified = publicKey is null || (result.VerifiedSignatures?.Any(s => s.Verified == true) ?? 
false), - KeyId = keyId - }; - } - catch (Exception ex) - { - return new SignatureVerificationResult - { - Verified = false, - Error = ex.Message - }; - } - } - - private static async Task VerifyMerkleRootAsync( - string bundleDir, - KnowledgeSnapshotManifest manifest, - CancellationToken cancellationToken) - { - try - { - var entries = new List(); - - // Collect all entries from manifest - foreach (var advisory in manifest.Advisories) - { - var filePath = Path.Combine(bundleDir, advisory.RelativePath.Replace('/', Path.DirectorySeparatorChar)); - if (!File.Exists(filePath)) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Missing file: {advisory.RelativePath}" - }; - } - - var content = await File.ReadAllBytesAsync(filePath, cancellationToken); - var digest = ComputeSha256(content); - - if (digest != advisory.Digest) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Digest mismatch for {advisory.RelativePath}" - }; - } - - entries.Add(new BundleEntry(advisory.RelativePath, digest, content.Length)); - } - - foreach (var vex in manifest.VexStatements) - { - var filePath = Path.Combine(bundleDir, vex.RelativePath.Replace('/', Path.DirectorySeparatorChar)); - if (!File.Exists(filePath)) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Missing file: {vex.RelativePath}" - }; - } - - var content = await File.ReadAllBytesAsync(filePath, cancellationToken); - var digest = ComputeSha256(content); - - if (digest != vex.Digest) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Digest mismatch for {vex.RelativePath}" - }; - } - - entries.Add(new BundleEntry(vex.RelativePath, digest, content.Length)); - } - - foreach (var policy in manifest.Policies) - { - var filePath = Path.Combine(bundleDir, policy.RelativePath.Replace('/', Path.DirectorySeparatorChar)); - if (!File.Exists(filePath)) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Missing file: {policy.RelativePath}" - }; - } - - var content = await File.ReadAllBytesAsync(filePath, cancellationToken); - var digest = ComputeSha256(content); - - if (digest != policy.Digest) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Digest mismatch for {policy.RelativePath}" - }; - } - - entries.Add(new BundleEntry(policy.RelativePath, digest, content.Length)); - } - - foreach (var trust in manifest.TrustRoots) - { - var filePath = Path.Combine(bundleDir, trust.RelativePath.Replace('/', Path.DirectorySeparatorChar)); - if (!File.Exists(filePath)) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Missing file: {trust.RelativePath}" - }; - } - - var content = await File.ReadAllBytesAsync(filePath, cancellationToken); - var digest = ComputeSha256(content); - - if (digest != trust.Digest) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Digest mismatch for {trust.RelativePath}" - }; - } - - entries.Add(new BundleEntry(trust.RelativePath, digest, content.Length)); - } - - foreach (var ruleBundle in manifest.RuleBundles) - { - // Verify each file in the rule bundle - foreach (var file in ruleBundle.Files) - { - var relativePath = $"{ruleBundle.RelativePath}/{file.Name}"; - var filePath = Path.Combine(bundleDir, relativePath.Replace('/', Path.DirectorySeparatorChar)); - if (!File.Exists(filePath)) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Missing rule bundle file: {relativePath}" - }; - } - - var content = 
await File.ReadAllBytesAsync(filePath, cancellationToken); - var digest = ComputeSha256(content); - - if (digest != file.Digest) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Digest mismatch for rule bundle file {relativePath}" - }; - } - - entries.Add(new BundleEntry(relativePath, digest, content.Length)); - } - } - - // Compute merkle root - var computedRoot = ComputeMerkleRoot(entries); - - if (computedRoot != manifest.MerkleRoot) - { - return new MerkleVerificationResult - { - Verified = false, - Error = $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {computedRoot}" - }; - } - - return new MerkleVerificationResult { Verified = true }; - } - catch (Exception ex) - { - return new MerkleVerificationResult - { - Verified = false, - Error = ex.Message - }; - } - } - - private static string ComputeSha256(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static string ComputeMerkleRoot(List entries) - { - if (entries.Count == 0) - { - return string.Empty; - } - - var leaves = entries - .OrderBy(e => e.Path, StringComparer.Ordinal) - .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}"))) - .ToArray(); - - while (leaves.Length > 1) - { - leaves = PairwiseHash(leaves).ToArray(); - } - - return Convert.ToHexString(leaves[0]).ToLowerInvariant(); - } - - private static IEnumerable PairwiseHash(byte[][] nodes) - { - for (var i = 0; i < nodes.Length; i += 2) - { - if (i + 1 >= nodes.Length) - { - yield return SHA256.HashData(nodes[i]); - continue; - } - - var combined = new byte[nodes[i].Length + nodes[i + 1].Length]; - Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length); - Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length); - yield return SHA256.HashData(combined); - } - } - - private sealed record BundleEntry(string Path, string Digest, long SizeBytes); - private sealed record SignatureVerificationResult - { - public bool Verified { get; init; } - public string? KeyId { get; init; } - public string? Error { get; init; } - } - private sealed record MerkleVerificationResult - { - public bool Verified { get; init; } - public string? Error { get; init; } - } } - -/// -/// Interface for snapshot bundle reading. -/// -public interface ISnapshotBundleReader -{ - Task ReadAsync( - SnapshotBundleReadRequest request, - CancellationToken cancellationToken = default); -} - -#region Request and Result Models - -/// -/// Request for reading a snapshot bundle. -/// -public sealed record SnapshotBundleReadRequest -{ - public required string BundlePath { get; init; } - - /// - /// Verify the manifest signature. - /// - public bool VerifySignature { get; init; } = true; - - /// - /// Fail if signature is invalid. - /// - public bool RequireValidSignature { get; init; } - - /// - /// Verify the merkle root. - /// - public bool VerifyMerkleRoot { get; init; } = true; - - /// - /// Fail if merkle root is invalid. - /// - public bool RequireValidMerkleRoot { get; init; } = true; - - /// - /// Verify time anchor freshness. - /// - public bool VerifyTimeAnchor { get; init; } = true; - - /// - /// Fail if time anchor is invalid. - /// - public bool RequireValidTimeAnchor { get; init; } - - /// - /// Maximum age in hours for time anchor validation. - /// - public int? MaxAgeHours { get; init; } - - /// - /// Maximum clock drift in seconds for time anchor validation. - /// - public int? 
MaxClockDriftSeconds { get; init; } - - /// - /// Public key for signature verification. - /// - public AsymmetricAlgorithm? PublicKey { get; init; } -} - -/// -/// Result of reading a snapshot bundle. -/// -public sealed record SnapshotBundleReadResult -{ - public bool Success { get; init; } - public KnowledgeSnapshotManifest? Manifest { get; init; } - public string? BundleDigest { get; init; } - public string? Error { get; init; } - - // Signature verification - public bool? SignatureVerified { get; init; } - public string? SignatureKeyId { get; init; } - public string? SignatureError { get; init; } - - // Merkle root verification - public bool? MerkleRootVerified { get; init; } - public string? MerkleRootError { get; init; } - - // Time anchor verification - public bool? TimeAnchorValid { get; init; } - public double? TimeAnchorAgeHours { get; init; } - public string? TimeAnchorError { get; init; } - - public static SnapshotBundleReadResult Failed(string error) => new() - { - Success = false, - Error = error - }; -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleRequest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleRequest.cs new file mode 100644 index 000000000..33e75cb01 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleRequest.cs @@ -0,0 +1,34 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Request for creating a knowledge snapshot bundle. +/// +public sealed record SnapshotBundleRequest +{ + public required string OutputPath { get; init; } + public string? BundleId { get; init; } + public string? Name { get; init; } + public string? Version { get; init; } + public List Advisories { get; init; } = []; + public List VexStatements { get; init; } = []; + public List Policies { get; init; } = []; + public List TrustRoots { get; init; } = []; + public List RuleBundles { get; init; } = []; + public TimeAnchorContent? TimeAnchor { get; init; } + + /// + /// Whether to sign the manifest. + /// + public bool Sign { get; init; } = true; + + /// + /// Path to signing key file (PEM format). + /// If null and Sign is true, an ephemeral key will be used. + /// + public string? SigningKeyPath { get; init; } + + /// + /// Password for encrypted signing key. + /// + public string? SigningKeyPassword { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleResult.cs new file mode 100644 index 000000000..f1f11d664 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleResult.cs @@ -0,0 +1,38 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Result of creating a knowledge snapshot bundle. +/// +public sealed record SnapshotBundleResult +{ + public bool Success { get; init; } + public string? OutputPath { get; init; } + public string? BundleId { get; init; } + public string? MerkleRoot { get; init; } + public string? BundleDigest { get; init; } + public long TotalSizeBytes { get; init; } + public int EntryCount { get; init; } + public DateTimeOffset CreatedAt { get; init; } + public string? Error { get; init; } + + /// + /// Whether the manifest was signed. + /// + public bool Signed { get; init; } + + /// + /// Key ID used for signing. + /// + public string? SigningKeyId { get; init; } + + /// + /// Algorithm used for signing. + /// + public string? 
SigningAlgorithm { get; init; } + + public static SnapshotBundleResult Failed(string error) => new() + { + Success = false, + Error = error + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Entries.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Entries.cs new file mode 100644 index 000000000..84c21a1fd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Entries.cs @@ -0,0 +1,11 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static string AddEntry(List entries, string relativePath, byte[] content) + { + var digest = ComputeSha256(content); + entries.Add(new BundleEntry(relativePath, digest, content.Length)); + return digest; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Hashing.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Hashing.cs new file mode 100644 index 000000000..ea081be13 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Hashing.cs @@ -0,0 +1,57 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static string ComputeSha256(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static async Task ComputeFileDigestAsync(string filePath, CancellationToken ct) + { + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static string ComputeMerkleRoot(List entries) + { + if (entries.Count == 0) + { + return string.Empty; + } + + var leaves = entries + .OrderBy(e => e.Path, StringComparer.Ordinal) + .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}"))) + .ToArray(); + + while (leaves.Length > 1) + { + leaves = PairwiseHash(leaves).ToArray(); + } + + return Convert.ToHexString(leaves[0]).ToLowerInvariant(); + } + + private static IEnumerable PairwiseHash(byte[][] nodes) + { + for (var i = 0; i < nodes.Length; i += 2) + { + if (i + 1 >= nodes.Length) + { + yield return SHA256.HashData(nodes[i]); + continue; + } + + var combined = new byte[nodes[i].Length + nodes[i + 1].Length]; + Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length); + Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length); + yield return SHA256.HashData(combined); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Manifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Manifest.cs new file mode 100644 index 000000000..3ea037eae --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Manifest.cs @@ -0,0 +1,38 @@ +using StellaOps.AirGap.Bundle.Models; +using System.Globalization; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private KnowledgeSnapshotManifest CreateManifest(SnapshotBundleRequest request, DateTimeOffset createdAt) + { + return new KnowledgeSnapshotManifest + { + BundleId = request.BundleId ?? _guidProvider.NewGuid().ToString("N"), + Name = request.Name ?? 
$"knowledge-{createdAt.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture)}", + Version = request.Version ?? "1.0.0", + CreatedAt = createdAt, + SchemaVersion = "1.0.0" + }; + } + + private static void FinalizeManifest(KnowledgeSnapshotManifest manifest, List entries) + { + manifest.MerkleRoot = ComputeMerkleRoot(entries); + manifest.TotalSizeBytes = entries.Sum(e => e.SizeBytes); + manifest.EntryCount = entries.Count; + } + + private static async Task WriteManifestAsync( + string tempDir, + KnowledgeSnapshotManifest manifest, + CancellationToken cancellationToken) + { + var manifestJson = JsonSerializer.SerializeToUtf8Bytes(manifest, _jsonOptions); + var manifestPath = Path.Combine(tempDir, "manifest.json"); + await File.WriteAllBytesAsync(manifestPath, manifestJson, cancellationToken).ConfigureAwait(false); + return manifestJson; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Models.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Models.cs new file mode 100644 index 000000000..98ec21985 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Models.cs @@ -0,0 +1,13 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private sealed record BundleEntry(string Path, string Digest, long SizeBytes); + + private sealed record ManifestSigningOutcome + { + public bool Signed { get; init; } + public string? SigningKeyId { get; init; } + public string? SigningAlgorithm { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Output.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Output.cs new file mode 100644 index 000000000..4c50e5c2b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Output.cs @@ -0,0 +1,36 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static string NormalizeOutputPath(string outputPath) + { + return outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) + ? 
outputPath + : $"{outputPath}.tar.gz"; + } + + private static SnapshotBundleResult BuildResult( + KnowledgeSnapshotManifest manifest, + int entryCount, + string outputPath, + string bundleDigest, + ManifestSigningOutcome signingOutcome) + { + return new SnapshotBundleResult + { + Success = true, + OutputPath = outputPath, + BundleId = manifest.BundleId, + MerkleRoot = manifest.MerkleRoot, + BundleDigest = bundleDigest, + TotalSizeBytes = new FileInfo(outputPath).Length, + EntryCount = entryCount, + CreatedAt = manifest.CreatedAt, + Signed = signingOutcome.Signed, + SigningKeyId = signingOutcome.SigningKeyId, + SigningAlgorithm = signingOutcome.SigningAlgorithm + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Advisories.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Advisories.cs new file mode 100644 index 000000000..a059225dd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Advisories.cs @@ -0,0 +1,45 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static async Task WriteAdvisoriesAsync( + SnapshotBundleRequest request, + string tempDir, + List entries, + KnowledgeSnapshotManifest manifest, + DateTimeOffset createdAt, + CancellationToken cancellationToken) + { + if (request.Advisories is not { Count: > 0 }) + { + return; + } + + var advisoriesDir = Path.Combine(tempDir, "advisories"); + Directory.CreateDirectory(advisoriesDir); + + foreach (var advisory in request.Advisories) + { + var feedDir = Path.Combine(advisoriesDir, advisory.FeedId); + Directory.CreateDirectory(feedDir); + + var filePath = Path.Combine(feedDir, advisory.FileName); + await File.WriteAllBytesAsync(filePath, advisory.Content, cancellationToken).ConfigureAwait(false); + + var relativePath = $"advisories/{advisory.FeedId}/{advisory.FileName}"; + var digest = AddEntry(entries, relativePath, advisory.Content); + + manifest.Advisories.Add(new AdvisorySnapshotEntry + { + FeedId = advisory.FeedId, + RelativePath = relativePath, + Digest = digest, + SizeBytes = advisory.Content.Length, + SnapshotAt = advisory.SnapshotAt ?? 
createdAt,
+                RecordCount = advisory.RecordCount
+            });
+        }
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Policies.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Policies.cs
new file mode 100644
index 000000000..c872e393d
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Policies.cs
@@ -0,0 +1,42 @@
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleWriter
+{
+    private static async Task WritePoliciesAsync(
+        SnapshotBundleRequest request,
+        string tempDir,
+        List<BundleEntry> entries,
+        KnowledgeSnapshotManifest manifest,
+        CancellationToken cancellationToken)
+    {
+        if (request.Policies is not { Count: > 0 })
+        {
+            return;
+        }
+
+        var policiesDir = Path.Combine(tempDir, "policies");
+        Directory.CreateDirectory(policiesDir);
+
+        foreach (var policy in request.Policies)
+        {
+            var filePath = Path.Combine(policiesDir, policy.FileName);
+            await File.WriteAllBytesAsync(filePath, policy.Content, cancellationToken).ConfigureAwait(false);
+
+            var relativePath = $"policies/{policy.FileName}";
+            var digest = AddEntry(entries, relativePath, policy.Content);
+
+            manifest.Policies.Add(new PolicySnapshotEntry
+            {
+                PolicyId = policy.PolicyId,
+                Name = policy.Name,
+                Version = policy.Version,
+                RelativePath = relativePath,
+                Digest = digest,
+                SizeBytes = policy.Content.Length,
+                Type = policy.Type
+            });
+        }
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.RuleBundles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.RuleBundles.cs
new file mode 100644
index 000000000..9f3a9d056
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.RuleBundles.cs
@@ -0,0 +1,60 @@
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleWriter
+{
+    private static async Task WriteRuleBundlesAsync(
+        SnapshotBundleRequest request,
+        string tempDir,
+        List<BundleEntry> entries,
+        KnowledgeSnapshotManifest manifest,
+        CancellationToken cancellationToken)
+    {
+        if (request.RuleBundles is not { Count: > 0 })
+        {
+            return;
+        }
+
+        var rulesDir = Path.Combine(tempDir, "rules");
+        Directory.CreateDirectory(rulesDir);
+
+        foreach (var ruleBundle in request.RuleBundles)
+        {
+            var bundleDir = Path.Combine(rulesDir, ruleBundle.BundleId);
+            Directory.CreateDirectory(bundleDir);
+
+            var bundleFiles = new List<RuleBundleFile>();
+            var bundleRelativePath = $"rules/{ruleBundle.BundleId}";
+
+            foreach (var file in ruleBundle.Files)
+            {
+                var filePath = Path.Combine(bundleDir, file.Name);
+                await File.WriteAllBytesAsync(filePath, file.Content, cancellationToken).ConfigureAwait(false);
+
+                var relativePath = $"{bundleRelativePath}/{file.Name}";
+                var digest = AddEntry(entries, relativePath, file.Content);
+
+                bundleFiles.Add(new RuleBundleFile
+                {
+                    Name = file.Name,
+                    Digest = digest,
+                    SizeBytes = file.Content.Length
+                });
+            }
+
+            manifest.RuleBundles.Add(new RuleBundleSnapshotEntry
+            {
+                BundleId = ruleBundle.BundleId,
+                BundleType = ruleBundle.BundleType,
+                Version = ruleBundle.Version,
+                RelativePath = bundleRelativePath,
+                Files = bundleFiles,
+                RuleCount = ruleBundle.RuleCount,
+                SignerKeyId = ruleBundle.SignerKeyId,
+                SignedAt = ruleBundle.SignedAt,
+                VerifiedAt = ruleBundle.VerifiedAt
+            });
+        }
+    }
+}
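Reviewer note: every section writer funnels through AddEntry, so even an empty request yields a well-formed (if trivial) bundle, which makes a cheap round-trip check possible. A minimal sketch, assuming the ISnapshotBundleWriter / ISnapshotBundleReader interfaces mirror the public WriteAsync/ReadAsync shown in this diff and that signing is disabled so no key material is needed; only members visible in the diff are used:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AirGap.Bundle.Services;

// Hypothetical smoke check, not part of the change set.
static async Task RoundTripAsync(
    ISnapshotBundleWriter writer,
    ISnapshotBundleReader reader,
    CancellationToken ct = default)
{
    var written = await writer.WriteAsync(new SnapshotBundleRequest
    {
        OutputPath = Path.Combine(Path.GetTempPath(), "snapshot-smoke"),
        Sign = false // no manifest.sig is produced, so the reader must not require one
    }, ct);

    var read = await reader.ReadAsync(new SnapshotBundleReadRequest
    {
        BundlePath = written.OutputPath!,   // the writer appends ".tar.gz" when missing
        VerifySignature = false,
        VerifyMerkleRoot = true,
        RequireValidMerkleRoot = true
    }, ct);

    Console.WriteLine($"success={read.Success} merkleVerified={read.MerkleRootVerified}");
}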
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TimeAnchor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TimeAnchor.cs
new file mode 100644
index 000000000..f53c13e11
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TimeAnchor.cs
@@ -0,0 +1,32 @@
+using StellaOps.AirGap.Bundle.Models;
+using System.Text.Json;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleWriter
+{
+    private static async Task WriteTimeAnchorAsync(
+        SnapshotBundleRequest request,
+        string tempDir,
+        List<BundleEntry> entries,
+        KnowledgeSnapshotManifest manifest,
+        CancellationToken cancellationToken)
+    {
+        if (request.TimeAnchor is null)
+        {
+            return;
+        }
+
+        var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json");
+        var timeAnchorJson = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, _jsonOptions);
+        await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorJson, cancellationToken).ConfigureAwait(false);
+
+        var digest = AddEntry(entries, "time-anchor.json", timeAnchorJson);
+        manifest.TimeAnchor = new TimeAnchorEntry
+        {
+            AnchorTime = request.TimeAnchor.AnchorTime,
+            Source = request.TimeAnchor.Source,
+            Digest = digest
+        };
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TrustRoots.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TrustRoots.cs
new file mode 100644
index 000000000..4c4f1941a
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.TrustRoots.cs
@@ -0,0 +1,41 @@
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleWriter
+{
+    private static async Task WriteTrustRootsAsync(
+        SnapshotBundleRequest request,
+        string tempDir,
+        List<BundleEntry> entries,
+        KnowledgeSnapshotManifest manifest,
+        CancellationToken cancellationToken)
+    {
+        if (request.TrustRoots is not { Count: > 0 })
+        {
+            return;
+        }
+
+        var trustDir = Path.Combine(tempDir, "trust");
+        Directory.CreateDirectory(trustDir);
+
+        foreach (var trustRoot in request.TrustRoots)
+        {
+            var filePath = Path.Combine(trustDir, trustRoot.FileName);
+            await File.WriteAllBytesAsync(filePath, trustRoot.Content, cancellationToken).ConfigureAwait(false);
+
+            var relativePath = $"trust/{trustRoot.FileName}";
+            var digest = AddEntry(entries, relativePath, trustRoot.Content);
+
+            manifest.TrustRoots.Add(new TrustRootSnapshotEntry
+            {
+                KeyId = trustRoot.KeyId,
+                RelativePath = relativePath,
+                Digest = digest,
+                SizeBytes = trustRoot.Content.Length,
+                Algorithm = trustRoot.Algorithm,
+                ExpiresAt = trustRoot.ExpiresAt
+            });
+        }
+    }
+}
diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Vex.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Vex.cs
new file mode 100644
index 000000000..4f7c4adb1
--- /dev/null
+++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Sections.Vex.cs
@@ -0,0 +1,45 @@
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Services;
+
+public sealed partial class SnapshotBundleWriter
+{
+    private static async Task WriteVexStatementsAsync(
+        SnapshotBundleRequest request,
+        string tempDir,
+        List<BundleEntry> entries,
+        KnowledgeSnapshotManifest manifest,
+        DateTimeOffset createdAt,
+        CancellationToken cancellationToken)
+    {
+
if (request.VexStatements is not { Count: > 0 }) + { + return; + } + + var vexDir = Path.Combine(tempDir, "vex"); + Directory.CreateDirectory(vexDir); + + foreach (var vex in request.VexStatements) + { + var sourceDir = Path.Combine(vexDir, vex.SourceId); + Directory.CreateDirectory(sourceDir); + + var filePath = Path.Combine(sourceDir, vex.FileName); + await File.WriteAllBytesAsync(filePath, vex.Content, cancellationToken).ConfigureAwait(false); + + var relativePath = $"vex/{vex.SourceId}/{vex.FileName}"; + var digest = AddEntry(entries, relativePath, vex.Content); + + manifest.VexStatements.Add(new VexSnapshotEntry + { + SourceId = vex.SourceId, + RelativePath = relativePath, + Digest = digest, + SizeBytes = vex.Content.Length, + SnapshotAt = vex.SnapshotAt ?? createdAt, + StatementCount = vex.StatementCount + }); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Signing.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Signing.cs new file mode 100644 index 000000000..be838de9e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Signing.cs @@ -0,0 +1,42 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static async Task SignManifestAsync( + SnapshotBundleRequest request, + string tempDir, + byte[] manifestBytes, + CancellationToken cancellationToken) + { + if (!request.Sign) + { + return new ManifestSigningOutcome(); + } + + var signer = new SnapshotManifestSigner(); + var signResult = await signer.SignAsync( + new ManifestSigningRequest + { + ManifestBytes = manifestBytes, + KeyFilePath = request.SigningKeyPath, + KeyPassword = request.SigningKeyPassword + }, + cancellationToken) + .ConfigureAwait(false); + + if (!signResult.Success || signResult.Envelope is null) + { + return new ManifestSigningOutcome(); + } + + var signaturePath = Path.Combine(tempDir, "manifest.sig"); + await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken).ConfigureAwait(false); + + return new ManifestSigningOutcome + { + Signed = true, + SigningKeyId = signResult.KeyId, + SigningAlgorithm = signResult.Algorithm + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Tar.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Tar.cs new file mode 100644 index 000000000..48b00fac3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Tar.cs @@ -0,0 +1,36 @@ +using System.Formats.Tar; +using System.IO.Compression; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct) + { + var outputDir = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir)) + { + Directory.CreateDirectory(outputDir); + } + + await using var fileStream = File.Create(outputPath); + await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal); + await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); + + var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories) + .Select(f => (FullPath: f, RelativePath: Path.GetRelativePath(sourceDir, f).Replace('\\', '/'))) + .OrderBy(f => f.RelativePath, StringComparer.Ordinal) + .ToList(); + + foreach (var (fullPath, 
relativePath) in files) + { + var entry = new PaxTarEntry(TarEntryType.RegularFile, relativePath) + { + DataStream = File.OpenRead(fullPath), + ModificationTime = DeterministicMtime, + Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead + }; + await tarWriter.WriteEntryAsync(entry, ct).ConfigureAwait(false); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.TempDir.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.TempDir.cs new file mode 100644 index 000000000..4983bacd4 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.TempDir.cs @@ -0,0 +1,26 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private string CreateTempDir() + { + var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{_guidProvider.NewGuid():N}"); + Directory.CreateDirectory(tempDir); + return tempDir; + } + + private static void CleanupTempDir(string tempDir) + { + try + { + if (Directory.Exists(tempDir)) + { + Directory.Delete(tempDir, recursive: true); + } + } + catch + { + // Ignore cleanup errors. + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Write.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Write.cs new file mode 100644 index 000000000..fa9d803a3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.Write.cs @@ -0,0 +1,22 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + public async Task WriteAsync( + SnapshotBundleRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath); + + var tempDir = CreateTempDir(); + try + { + return await WriteBundleAsync(request, tempDir, cancellationToken).ConfigureAwait(false); + } + finally + { + CleanupTempDir(tempDir); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.WriteBundle.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.WriteBundle.cs new file mode 100644 index 000000000..09f891e52 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.WriteBundle.cs @@ -0,0 +1,35 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotBundleWriter +{ + private async Task WriteBundleAsync( + SnapshotBundleRequest request, + string tempDir, + CancellationToken cancellationToken) + { + var entries = new List(); + var createdAt = _timeProvider.GetUtcNow(); + var manifest = CreateManifest(request, createdAt); + + await WriteAdvisoriesAsync(request, tempDir, entries, manifest, createdAt, cancellationToken).ConfigureAwait(false); + await WriteVexStatementsAsync(request, tempDir, entries, manifest, createdAt, cancellationToken).ConfigureAwait(false); + await WritePoliciesAsync(request, tempDir, entries, manifest, cancellationToken).ConfigureAwait(false); + await WriteTrustRootsAsync(request, tempDir, entries, manifest, cancellationToken).ConfigureAwait(false); + await WriteRuleBundlesAsync(request, tempDir, entries, manifest, cancellationToken).ConfigureAwait(false); + await WriteTimeAnchorAsync(request, tempDir, entries, manifest, 
cancellationToken).ConfigureAwait(false); + + FinalizeManifest(manifest, entries); + + var manifestBytes = await WriteManifestAsync(tempDir, manifest, cancellationToken).ConfigureAwait(false); + var signingOutcome = await SignManifestAsync(request, tempDir, manifestBytes, cancellationToken).ConfigureAwait(false); + + var outputPath = NormalizeOutputPath(request.OutputPath); + await CreateTarGzAsync(tempDir, outputPath, cancellationToken).ConfigureAwait(false); + + var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken).ConfigureAwait(false); + + return BuildResult(manifest, entries.Count, outputPath, bundleDigest, signingOutcome); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.cs index 8fb911681..d4184aceb 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotBundleWriter.cs @@ -1,28 +1,10 @@ -// ----------------------------------------------------------------------------- -// SnapshotBundleWriter.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Task: SEAL-003 - Create SnapshotBundleWriter -// Description: Writes sealed knowledge snapshots to tar.gz bundles. -// ----------------------------------------------------------------------------- - - -using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry; -using StellaOps.AirGap.Bundle.Models; -using System.Formats.Tar; -using System.Globalization; -using System.IO.Compression; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; -/// -/// Writes sealed knowledge snapshots to tar.gz bundles with manifest and merkle root. -/// -public sealed class SnapshotBundleWriter : ISnapshotBundleWriter +public sealed partial class SnapshotBundleWriter : ISnapshotBundleWriter { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase @@ -45,558 +27,4 @@ public sealed class SnapshotBundleWriter : ISnapshotBundleWriter _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider)); } - - /// - /// Creates a knowledge snapshot bundle from the specified contents. - /// - public async Task WriteAsync( - SnapshotBundleRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath); - - var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{_guidProvider.NewGuid():N}"); - Directory.CreateDirectory(tempDir); - - try - { - var entries = new List(); - var createdAt = _timeProvider.GetUtcNow(); - var manifest = new KnowledgeSnapshotManifest - { - BundleId = request.BundleId ?? _guidProvider.NewGuid().ToString("N"), - Name = request.Name ?? $"knowledge-{createdAt.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture)}", - Version = request.Version ?? 
"1.0.0", - CreatedAt = createdAt, - SchemaVersion = "1.0.0" - }; - - // Write advisories - if (request.Advisories is { Count: > 0 }) - { - var advisoriesDir = Path.Combine(tempDir, "advisories"); - Directory.CreateDirectory(advisoriesDir); - - foreach (var advisory in request.Advisories) - { - var feedDir = Path.Combine(advisoriesDir, advisory.FeedId); - Directory.CreateDirectory(feedDir); - - var filePath = Path.Combine(feedDir, advisory.FileName); - await File.WriteAllBytesAsync(filePath, advisory.Content, cancellationToken); - - var relativePath = $"advisories/{advisory.FeedId}/{advisory.FileName}"; - var digest = ComputeSha256(advisory.Content); - - entries.Add(new BundleEntry(relativePath, digest, advisory.Content.Length)); - manifest.Advisories.Add(new AdvisorySnapshotEntry - { - FeedId = advisory.FeedId, - RelativePath = relativePath, - Digest = digest, - SizeBytes = advisory.Content.Length, - SnapshotAt = advisory.SnapshotAt ?? createdAt, - RecordCount = advisory.RecordCount - }); - } - } - - // Write VEX statements - if (request.VexStatements is { Count: > 0 }) - { - var vexDir = Path.Combine(tempDir, "vex"); - Directory.CreateDirectory(vexDir); - - foreach (var vex in request.VexStatements) - { - var sourceDir = Path.Combine(vexDir, vex.SourceId); - Directory.CreateDirectory(sourceDir); - - var filePath = Path.Combine(sourceDir, vex.FileName); - await File.WriteAllBytesAsync(filePath, vex.Content, cancellationToken); - - var relativePath = $"vex/{vex.SourceId}/{vex.FileName}"; - var digest = ComputeSha256(vex.Content); - - entries.Add(new BundleEntry(relativePath, digest, vex.Content.Length)); - manifest.VexStatements.Add(new VexSnapshotEntry - { - SourceId = vex.SourceId, - RelativePath = relativePath, - Digest = digest, - SizeBytes = vex.Content.Length, - SnapshotAt = vex.SnapshotAt ?? 
createdAt, - StatementCount = vex.StatementCount - }); - } - } - - // Write policies - if (request.Policies is { Count: > 0 }) - { - var policiesDir = Path.Combine(tempDir, "policies"); - Directory.CreateDirectory(policiesDir); - - foreach (var policy in request.Policies) - { - var filePath = Path.Combine(policiesDir, policy.FileName); - await File.WriteAllBytesAsync(filePath, policy.Content, cancellationToken); - - var relativePath = $"policies/{policy.FileName}"; - var digest = ComputeSha256(policy.Content); - - entries.Add(new BundleEntry(relativePath, digest, policy.Content.Length)); - manifest.Policies.Add(new PolicySnapshotEntry - { - PolicyId = policy.PolicyId, - Name = policy.Name, - Version = policy.Version, - RelativePath = relativePath, - Digest = digest, - SizeBytes = policy.Content.Length, - Type = policy.Type - }); - } - } - - // Write trust roots - if (request.TrustRoots is { Count: > 0 }) - { - var trustDir = Path.Combine(tempDir, "trust"); - Directory.CreateDirectory(trustDir); - - foreach (var trustRoot in request.TrustRoots) - { - var filePath = Path.Combine(trustDir, trustRoot.FileName); - await File.WriteAllBytesAsync(filePath, trustRoot.Content, cancellationToken); - - var relativePath = $"trust/{trustRoot.FileName}"; - var digest = ComputeSha256(trustRoot.Content); - - entries.Add(new BundleEntry(relativePath, digest, trustRoot.Content.Length)); - manifest.TrustRoots.Add(new TrustRootSnapshotEntry - { - KeyId = trustRoot.KeyId, - RelativePath = relativePath, - Digest = digest, - SizeBytes = trustRoot.Content.Length, - Algorithm = trustRoot.Algorithm, - ExpiresAt = trustRoot.ExpiresAt - }); - } - } - - // Write rule bundles - if (request.RuleBundles is { Count: > 0 }) - { - var rulesDir = Path.Combine(tempDir, "rules"); - Directory.CreateDirectory(rulesDir); - - foreach (var ruleBundle in request.RuleBundles) - { - var bundleDir = Path.Combine(rulesDir, ruleBundle.BundleId); - Directory.CreateDirectory(bundleDir); - - var bundleFiles = new List(); - var bundleRelativePath = $"rules/{ruleBundle.BundleId}"; - - foreach (var file in ruleBundle.Files) - { - var filePath = Path.Combine(bundleDir, file.Name); - await File.WriteAllBytesAsync(filePath, file.Content, cancellationToken); - - var relativePath = $"{bundleRelativePath}/{file.Name}"; - var digest = ComputeSha256(file.Content); - - entries.Add(new BundleEntry(relativePath, digest, file.Content.Length)); - bundleFiles.Add(new RuleBundleFile - { - Name = file.Name, - Digest = digest, - SizeBytes = file.Content.Length - }); - } - - manifest.RuleBundles.Add(new RuleBundleSnapshotEntry - { - BundleId = ruleBundle.BundleId, - BundleType = ruleBundle.BundleType, - Version = ruleBundle.Version, - RelativePath = bundleRelativePath, - Files = bundleFiles, - RuleCount = ruleBundle.RuleCount, - SignerKeyId = ruleBundle.SignerKeyId, - SignedAt = ruleBundle.SignedAt, - VerifiedAt = ruleBundle.VerifiedAt - }); - } - } - - // Write time anchor - if (request.TimeAnchor is not null) - { - var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json"); - var timeAnchorJson = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, JsonOptions); - await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorJson, cancellationToken); - - var digest = ComputeSha256(timeAnchorJson); - entries.Add(new BundleEntry("time-anchor.json", digest, timeAnchorJson.Length)); - manifest.TimeAnchor = new TimeAnchorEntry - { - AnchorTime = request.TimeAnchor.AnchorTime, - Source = request.TimeAnchor.Source, - Digest = digest - }; - } - - // Compute merkle 
root - manifest.MerkleRoot = ComputeMerkleRoot(entries); - manifest.TotalSizeBytes = entries.Sum(e => e.SizeBytes); - manifest.EntryCount = entries.Count; - - // Write manifest - var manifestJson = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions); - var manifestPath = Path.Combine(tempDir, "manifest.json"); - await File.WriteAllBytesAsync(manifestPath, manifestJson, cancellationToken); - - // Sign manifest if requested - string? signingKeyId = null; - string? signingAlgorithm = null; - var signed = false; - - if (request.Sign) - { - var signer = new SnapshotManifestSigner(); - var signResult = await signer.SignAsync(new ManifestSigningRequest - { - ManifestBytes = manifestJson, - KeyFilePath = request.SigningKeyPath, - KeyPassword = request.SigningKeyPassword - }, cancellationToken); - - if (signResult.Success && signResult.Envelope is not null) - { - var signaturePath = Path.Combine(tempDir, "manifest.sig"); - await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken); - signingKeyId = signResult.KeyId; - signingAlgorithm = signResult.Algorithm; - signed = true; - } - } - - // Create tar.gz bundle - var outputPath = request.OutputPath; - if (!outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)) - { - outputPath = $"{outputPath}.tar.gz"; - } - - await CreateTarGzAsync(tempDir, outputPath, cancellationToken); - - var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken); - - return new SnapshotBundleResult - { - Success = true, - OutputPath = outputPath, - BundleId = manifest.BundleId, - MerkleRoot = manifest.MerkleRoot, - BundleDigest = bundleDigest, - TotalSizeBytes = new FileInfo(outputPath).Length, - EntryCount = entries.Count, - CreatedAt = manifest.CreatedAt, - Signed = signed, - SigningKeyId = signingKeyId, - SigningAlgorithm = signingAlgorithm - }; - } - finally - { - // Clean up temp directory - try - { - if (Directory.Exists(tempDir)) - { - Directory.Delete(tempDir, recursive: true); - } - } - catch - { - // Ignore cleanup errors - } - } - } - - private static string ComputeSha256(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static async Task ComputeFileDigestAsync(string filePath, CancellationToken ct) - { - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, ct); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static string ComputeMerkleRoot(List entries) - { - if (entries.Count == 0) - { - return string.Empty; - } - - var leaves = entries - .OrderBy(e => e.Path, StringComparer.Ordinal) - .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}"))) - .ToArray(); - - while (leaves.Length > 1) - { - leaves = PairwiseHash(leaves).ToArray(); - } - - return Convert.ToHexString(leaves[0]).ToLowerInvariant(); - } - - private static IEnumerable PairwiseHash(byte[][] nodes) - { - for (var i = 0; i < nodes.Length; i += 2) - { - if (i + 1 >= nodes.Length) - { - yield return SHA256.HashData(nodes[i]); - continue; - } - - var combined = new byte[nodes[i].Length + nodes[i + 1].Length]; - Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length); - Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length); - yield return SHA256.HashData(combined); - } - } - - private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct) - { - var outputDir = 
Path.GetDirectoryName(outputPath); - if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir)) - { - Directory.CreateDirectory(outputDir); - } - - await using var fileStream = File.Create(outputPath); - await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal); - await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax); - - // Collect all files and sort for deterministic ordering - var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories) - .Select(f => (FullPath: f, RelativePath: Path.GetRelativePath(sourceDir, f).Replace('\\', '/'))) - .OrderBy(f => f.RelativePath, StringComparer.Ordinal) - .ToList(); - - foreach (var (fullPath, relativePath) in files) - { - var entry = new PaxTarEntry(TarEntryType.RegularFile, relativePath) - { - DataStream = File.OpenRead(fullPath), - ModificationTime = DeterministicMtime, - Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead - }; - await tarWriter.WriteEntryAsync(entry, ct); - } - } - - private sealed record BundleEntry(string Path, string Digest, long SizeBytes); } - -/// -/// Interface for snapshot bundle writing. -/// -public interface ISnapshotBundleWriter -{ - Task WriteAsync( - SnapshotBundleRequest request, - CancellationToken cancellationToken = default); -} - -#region Request and Result Models - -/// -/// Request for creating a knowledge snapshot bundle. -/// -public sealed record SnapshotBundleRequest -{ - public required string OutputPath { get; init; } - public string? BundleId { get; init; } - public string? Name { get; init; } - public string? Version { get; init; } - public List Advisories { get; init; } = []; - public List VexStatements { get; init; } = []; - public List Policies { get; init; } = []; - public List TrustRoots { get; init; } = []; - public List RuleBundles { get; init; } = []; - public TimeAnchorContent? TimeAnchor { get; init; } - - /// - /// Whether to sign the manifest. - /// - public bool Sign { get; init; } = true; - - /// - /// Path to signing key file (PEM format). - /// If null and Sign is true, an ephemeral key will be used. - /// - public string? SigningKeyPath { get; init; } - - /// - /// Password for encrypted signing key. - /// - public string? SigningKeyPassword { get; init; } -} - -public sealed record AdvisoryContent -{ - public required string FeedId { get; init; } - public required string FileName { get; init; } - public required byte[] Content { get; init; } - public DateTimeOffset? SnapshotAt { get; init; } - public int RecordCount { get; init; } -} - -public sealed record VexContent -{ - public required string SourceId { get; init; } - public required string FileName { get; init; } - public required byte[] Content { get; init; } - public DateTimeOffset? SnapshotAt { get; init; } - public int StatementCount { get; init; } -} - -public sealed record PolicyContent -{ - public required string PolicyId { get; init; } - public required string Name { get; init; } - public required string Version { get; init; } - public required string FileName { get; init; } - public required byte[] Content { get; init; } - public string Type { get; init; } = "OpaRego"; -} - -public sealed record TrustRootContent -{ - public required string KeyId { get; init; } - public required string FileName { get; init; } - public required byte[] Content { get; init; } - public string Algorithm { get; init; } = "ES256"; - public DateTimeOffset? 
ExpiresAt { get; init; } -} - -/// -/// Content for a rule bundle (e.g., secrets detection rules). -/// -public sealed record RuleBundleContent -{ - /// - /// Bundle identifier (e.g., "secrets.ruleset"). - /// - public required string BundleId { get; init; } - - /// - /// Bundle type (e.g., "secrets", "malware"). - /// - public required string BundleType { get; init; } - - /// - /// Bundle version in YYYY.MM format. - /// - public required string Version { get; init; } - - /// - /// Files in the bundle. - /// - public required List Files { get; init; } - - /// - /// Number of rules in the bundle. - /// - public int RuleCount { get; init; } - - /// - /// Key ID used to sign the bundle. - /// - public string? SignerKeyId { get; init; } - - /// - /// When the bundle was signed. - /// - public DateTimeOffset? SignedAt { get; init; } - - /// - /// When the bundle signature was verified during export. - /// - public DateTimeOffset? VerifiedAt { get; init; } -} - -/// -/// A file within a rule bundle. -/// -public sealed record RuleBundleFileContent -{ - /// - /// Filename (e.g., "secrets.ruleset.manifest.json"). - /// - public required string Name { get; init; } - - /// - /// File content. - /// - public required byte[] Content { get; init; } -} - -public sealed record TimeAnchorContent -{ - public required DateTimeOffset AnchorTime { get; init; } - public required string Source { get; init; } - public string? TokenDigest { get; init; } -} - -/// -/// Result of creating a knowledge snapshot bundle. -/// -public sealed record SnapshotBundleResult -{ - public bool Success { get; init; } - public string? OutputPath { get; init; } - public string? BundleId { get; init; } - public string? MerkleRoot { get; init; } - public string? BundleDigest { get; init; } - public long TotalSizeBytes { get; init; } - public int EntryCount { get; init; } - public DateTimeOffset CreatedAt { get; init; } - public string? Error { get; init; } - - /// - /// Whether the manifest was signed. - /// - public bool Signed { get; init; } - - /// - /// Key ID used for signing. - /// - public string? SigningKeyId { get; init; } - - /// - /// Algorithm used for signing. - /// - public string? SigningAlgorithm { get; init; } - - public static SnapshotBundleResult Failed(string error) => new() - { - Success = false, - Error = error - }; -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyFile.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyFile.cs new file mode 100644 index 000000000..749b71e03 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyFile.cs @@ -0,0 +1,54 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyFileAsync( + string keyFilePath, + string? password, + byte[] data, + CancellationToken cancellationToken) + { + var keyBytes = await File.ReadAllBytesAsync(keyFilePath, cancellationToken).ConfigureAwait(false); + var keyPem = Encoding.UTF8.GetString(keyBytes); + + // Try to load as ECDSA first. 
+ try + { + using var ecdsa = ECDsa.Create(); + if (string.IsNullOrEmpty(password)) + { + ecdsa.ImportFromPem(keyPem); + } + else + { + ecdsa.ImportFromEncryptedPem(keyPem, password); + } + return SignWithEcdsa(ecdsa, data); + } + catch (CryptographicException) + { + // Try RSA. + } + + try + { + using var rsa = RSA.Create(); + if (string.IsNullOrEmpty(password)) + { + rsa.ImportFromPem(keyPem); + } + else + { + rsa.ImportFromEncryptedPem(keyPem, password); + } + return SignWithRsa(rsa, data); + } + catch (CryptographicException ex) + { + throw new InvalidOperationException($"Failed to load signing key from {keyFilePath}", ex); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyId.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyId.cs new file mode 100644 index 000000000..780615b68 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.KeyId.cs @@ -0,0 +1,26 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static string ComputeKeyId(AsymmetricAlgorithm key) + { + byte[] publicKeyBytes; + + switch (key) + { + case ECDsa ecdsa: + publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo(); + break; + case RSA rsa: + publicKeyBytes = rsa.ExportSubjectPublicKeyInfo(); + break; + default: + return "unknown"; + } + + var hash = SHA256.HashData(publicKeyBytes); + return Convert.ToHexString(hash[..8]).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Sign.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Sign.cs new file mode 100644 index 000000000..fe42eab30 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Sign.cs @@ -0,0 +1,53 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyAsync( + AsymmetricAlgorithm key, + byte[] data, + CancellationToken cancellationToken) + { + await Task.CompletedTask.ConfigureAwait(false); // Signature operations are synchronous. 
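Reviewer note on the PEM key loading above: a short sketch of how a compatible test key could be produced with standard .NET APIs. The file names are illustrative and not part of this change; ExportPkcs8PrivateKeyPem and ExportSubjectPublicKeyInfoPem are assumed to be available (.NET 7+).

    // Reviewer sketch (not part of this change): generate an ES256 test key that
    // SignWithKeyFileAsync should be able to import via ImportFromPem. Paths are illustrative.
    using System.Security.Cryptography;

    using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    File.WriteAllText("snapshot-signing-key.pem", ecdsa.ExportPkcs8PrivateKeyPem());
    File.WriteAllText("snapshot-signing-key.pub.pem", ecdsa.ExportSubjectPublicKeyInfoPem());
    // For the KeyPassword path, an encrypted key produced with
    // ExportEncryptedPkcs8PrivateKeyPem pairs with ImportFromEncryptedPem above.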
+ return key switch + { + ECDsa ecdsa => SignWithEcdsa(ecdsa, data), + RSA rsa => SignWithRsa(rsa, data), + _ => throw new NotSupportedException($"Unsupported key type: {key.GetType().Name}") + }; + } + + private static (byte[] Signature, string KeyId, string Algorithm) SignWithEcdsa(ECDsa ecdsa, byte[] data) + { + var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256); + var keyId = ComputeKeyId(ecdsa); + var algorithm = ecdsa.KeySize switch + { + 256 => "ES256", + 384 => "ES384", + 521 => "ES512", + _ => "ECDSA" + }; + return (signature, keyId, algorithm); + } + + private static (byte[] Signature, string KeyId, string Algorithm) SignWithRsa(RSA rsa, byte[] data) + { + var signature = rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var keyId = ComputeKeyId(rsa); + return (signature, keyId, "RS256"); + } + + private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignEphemeralAsync( + byte[] data, + CancellationToken cancellationToken) + { + await Task.CompletedTask.ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256); + var keyId = $"ephemeral:{ComputeKeyId(ecdsa)}"; + return (signature, keyId, "ES256"); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Verify.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Verify.cs new file mode 100644 index 000000000..748a903bd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Crypto.Verify.cs @@ -0,0 +1,22 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static async Task VerifySignatureAsync( + AsymmetricAlgorithm key, + byte[] data, + byte[] signature, + CancellationToken cancellationToken) + { + await Task.CompletedTask.ConfigureAwait(false); + + return key switch + { + ECDsa ecdsa => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256), + RSA rsa => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1), + _ => false + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Dsse.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Dsse.cs new file mode 100644 index 000000000..95343d942 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Dsse.cs @@ -0,0 +1,95 @@ +using System.Globalization; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static byte[] BuildPae(string payloadType, byte[] payload) + { + var typeBytes = Encoding.UTF8.GetBytes(payloadType); + var prefixBytes = Encoding.UTF8.GetBytes(PreAuthenticationEncodingPrefix); + // Use InvariantCulture to ensure ASCII decimal digits per DSSE spec + var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture); + var payloadLenStr = payload.Length.ToString(CultureInfo.InvariantCulture); + + var totalLen = prefixBytes.Length + 1 + + typeLenStr.Length + 1 + + typeBytes.Length + 1 + + payloadLenStr.Length + 1 + + payload.Length; + + var pae = new byte[totalLen]; + var offset = 0; + + // DSSEv1 + Buffer.BlockCopy(prefixBytes, 0, pae, offset, prefixBytes.Length); + offset += 
prefixBytes.Length; + pae[offset++] = 0x20; + + // LEN(type) + var typeLenBytes = Encoding.UTF8.GetBytes(typeLenStr); + Buffer.BlockCopy(typeLenBytes, 0, pae, offset, typeLenBytes.Length); + offset += typeLenBytes.Length; + pae[offset++] = 0x20; + + // type + Buffer.BlockCopy(typeBytes, 0, pae, offset, typeBytes.Length); + offset += typeBytes.Length; + pae[offset++] = 0x20; + + // LEN(payload) + var payloadLenBytes = Encoding.UTF8.GetBytes(payloadLenStr); + Buffer.BlockCopy(payloadLenBytes, 0, pae, offset, payloadLenBytes.Length); + offset += payloadLenBytes.Length; + pae[offset++] = 0x20; + + // payload + Buffer.BlockCopy(payload, 0, pae, offset, payload.Length); + + return pae; + } + + private static byte[] BuildDsseEnvelope(byte[] payload, byte[] signature, string keyId) + { + var payloadBase64 = Convert.ToBase64String(payload); + var signatureBase64 = Convert.ToBase64String(signature); + + var envelope = new DsseEnvelopeDto + { + PayloadType = DssePayloadType, + Payload = payloadBase64, + Signatures = + [ + new DsseSignatureDto + { + KeyId = keyId, + Sig = signatureBase64 + } + ] + }; + + return JsonSerializer.SerializeToUtf8Bytes(envelope, _jsonOptions); + } + + private static string ComputeSha256(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private sealed class DsseEnvelopeDto + { + public required string PayloadType { get; init; } + public required string Payload { get; init; } + public required List Signatures { get; init; } + } + + private sealed class DsseSignatureDto + { + public string? KeyId { get; init; } + public required string Sig { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Sign.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Sign.cs new file mode 100644 index 000000000..e7609a9b2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Sign.cs @@ -0,0 +1,54 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + /// + /// Signs a manifest using the provided signing key. 
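Reviewer note on BuildPae above: DSSE v1 pre-authentication encoding is the space-separated sequence "DSSEv1", LEN(type), type, LEN(payload), payload. A minimal string-based sketch, valid only for UTF-8 text payloads, that should produce the same bytes:

    // Reviewer sketch: the same DSSE v1 PAE built as a string, for readability.
    // Matches BuildPae only for payloads that are valid UTF-8 text.
    using System.Text;

    static byte[] PaeForText(string payloadType, string payloadText)
    {
        var type = Encoding.UTF8.GetBytes(payloadType);
        var payload = Encoding.UTF8.GetBytes(payloadText);
        var header = $"DSSEv1 {type.Length} {payloadType} {payload.Length} ";
        return Encoding.UTF8.GetBytes(header).Concat(payload).ToArray();
    }

    // Prints: DSSEv1 49 application/vnd.stellaops.knowledge-snapshot+json 2 {}
    var pae = PaeForText("application/vnd.stellaops.knowledge-snapshot+json", "{}");
    Console.WriteLine(Encoding.UTF8.GetString(pae));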
+ /// + public async Task SignAsync( + ManifestSigningRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(request.ManifestBytes); + + // Build PAE (Pre-Authentication Encoding) for DSSE signing + var paeBytes = BuildPae(DssePayloadType, request.ManifestBytes); + + // Sign the PAE + byte[] signatureBytes; + string keyId; + string algorithm; + + if (request.SigningKey is not null) + { + // Use provided signing key + (signatureBytes, keyId, algorithm) = await SignWithKeyAsync( + request.SigningKey, paeBytes, cancellationToken).ConfigureAwait(false); + } + else if (!string.IsNullOrWhiteSpace(request.KeyFilePath)) + { + // Load key from file and sign + (signatureBytes, keyId, algorithm) = await SignWithKeyFileAsync( + request.KeyFilePath, request.KeyPassword, paeBytes, cancellationToken).ConfigureAwait(false); + } + else + { + // Generate ephemeral key for signing (keyless mode) + (signatureBytes, keyId, algorithm) = await SignEphemeralAsync(paeBytes, cancellationToken) + .ConfigureAwait(false); + } + + // Build DSSE envelope + var envelope = BuildDsseEnvelope(request.ManifestBytes, signatureBytes, keyId); + + return new ManifestSignatureResult + { + Success = true, + Envelope = envelope, + KeyId = keyId, + Algorithm = algorithm, + SignatureDigest = ComputeSha256(signatureBytes) + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Envelope.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Envelope.cs new file mode 100644 index 000000000..1653eebf2 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Envelope.cs @@ -0,0 +1,62 @@ +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private static bool TryReadEnvelope( + JsonElement root, + out EnvelopeParts parts, + out string? error) + { + if (!root.TryGetProperty("payloadType", out var payloadTypeElement) || + !root.TryGetProperty("payload", out var payloadElement) || + !root.TryGetProperty("signatures", out var signaturesElement)) + { + error = "Invalid DSSE envelope structure"; + parts = new EnvelopeParts(null, Array.Empty(), default, Array.Empty(), 0); + return false; + } + + var payloadBase64 = payloadElement.GetString(); + if (string.IsNullOrEmpty(payloadBase64)) + { + error = "Missing payload in envelope"; + parts = new EnvelopeParts(null, Array.Empty(), default, Array.Empty(), 0); + return false; + } + + byte[] payloadBytes; + try + { + payloadBytes = Convert.FromBase64String(payloadBase64); + } + catch (FormatException ex) + { + error = $"Invalid base64 encoding: {ex.Message}"; + parts = new EnvelopeParts(null, Array.Empty(), default, Array.Empty(), 0); + return false; + } + + var signatureCount = signaturesElement.GetArrayLength(); + if (signatureCount == 0) + { + error = "No signatures present in envelope"; + parts = new EnvelopeParts(null, Array.Empty(), default, Array.Empty(), 0); + return false; + } + + var payloadType = payloadTypeElement.GetString(); + var paeBytes = BuildPae(payloadType ?? DssePayloadType, payloadBytes); + parts = new EnvelopeParts(payloadType, payloadBytes, signaturesElement, paeBytes, signatureCount); + error = null; + return true; + } + + private sealed record EnvelopeParts( + string? 
PayloadType, + byte[] PayloadBytes, + JsonElement SignaturesElement, + byte[] PaeBytes, + int SignatureCount); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Signatures.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Signatures.cs new file mode 100644 index 000000000..299bd9351 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.Signatures.cs @@ -0,0 +1,48 @@ +using System.Security.Cryptography; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + private async Task<IReadOnlyList<VerifiedSignature>> VerifySignaturesAsync( + JsonElement signaturesElement, + AsymmetricAlgorithm? publicKey, + byte[] paeBytes, + CancellationToken cancellationToken) + { + var verifiedSignatures = new List<VerifiedSignature>(); + foreach (var sig in signaturesElement.EnumerateArray()) + { + var keyId = sig.TryGetProperty("keyid", out var keyIdElement) + ? keyIdElement.GetString() + : null; + + if (!sig.TryGetProperty("sig", out var sigElement)) + { + continue; + } + + var signatureBase64 = sigElement.GetString(); + if (string.IsNullOrEmpty(signatureBase64)) + { + continue; + } + + if (publicKey is not null) + { + var signatureBytes = Convert.FromBase64String(signatureBase64); + var isValid = await VerifySignatureAsync( + publicKey, paeBytes, signatureBytes, cancellationToken) + .ConfigureAwait(false); + + verifiedSignatures.Add(new VerifiedSignature(keyId, isValid)); + continue; + } + + verifiedSignatures.Add(new VerifiedSignature(keyId, null)); + } + + return verifiedSignatures; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.cs new file mode 100644 index 000000000..364c12590 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.Verify.cs @@ -0,0 +1,57 @@ +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class SnapshotManifestSigner +{ + /// + /// Verifies a DSSE envelope signature. 
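Reviewer note: a sign-then-verify round trip across SignAsync (above) and VerifyAsync (below) might look as follows. The parameterless SnapshotManifestSigner constructor is assumed from the existing call site in SnapshotBundleWriter, and the manifest bytes are illustrative.

    // Reviewer sketch: sign a manifest and verify the resulting DSSE envelope
    // with the same ECDSA key. Parameterless signer constructor assumed.
    using System.Security.Cryptography;
    using System.Text;
    using StellaOps.AirGap.Bundle.Services;

    var signer = new SnapshotManifestSigner();
    using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);

    var signed = await signer.SignAsync(new ManifestSigningRequest
    {
        ManifestBytes = Encoding.UTF8.GetBytes("{\"bundleId\":\"demo\"}"),
        SigningKey = key
    });

    var verified = await signer.VerifyAsync(new ManifestVerificationRequest
    {
        EnvelopeBytes = signed.Envelope!,
        PublicKey = key
    });

    Console.WriteLine($"{signed.Algorithm} keyId={signed.KeyId} " +
        $"valid={verified.VerifiedSignatures?[0].Verified}");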
+ /// + public async Task VerifyAsync( + ManifestVerificationRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(request.EnvelopeBytes); + + try + { + using var envelope = JsonDocument.Parse(request.EnvelopeBytes); + var root = envelope.RootElement; + + if (!TryReadEnvelope(root, out var parts, out var error)) + { + return new ManifestVerificationResult + { + Success = false, + Error = error + }; + } + + var payloadDigest = ComputeSha256(parts.PayloadBytes); + var verifiedSignatures = await VerifySignaturesAsync( + parts.SignaturesElement, + request.PublicKey, + parts.PaeBytes, + cancellationToken) + .ConfigureAwait(false); + + return new ManifestVerificationResult + { + Success = true, + PayloadDigest = payloadDigest, + SignatureCount = parts.SignatureCount, + VerifiedSignatures = verifiedSignatures, + PayloadType = parts.PayloadType + }; + } + catch (JsonException ex) + { + return new ManifestVerificationResult + { + Success = false, + Error = $"Failed to parse envelope: {ex.Message}" + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.cs index 87604ee94..88542ef5a 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSigner.cs @@ -1,14 +1,3 @@ -// ----------------------------------------------------------------------------- -// SnapshotManifestSigner.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Task: SEAL-004 - Add DSSE signing for manifest -// Description: Signs snapshot manifests using DSSE format for integrity verification. -// ----------------------------------------------------------------------------- - - -using System.Globalization; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -17,473 +6,14 @@ namespace StellaOps.AirGap.Bundle.Services; /// Signs snapshot manifests using DSSE (Dead Simple Signing Envelope) format. /// Produces signatures compatible with in-toto/Sigstore verification. /// -public sealed class SnapshotManifestSigner : ISnapshotManifestSigner +public sealed partial class SnapshotManifestSigner : ISnapshotManifestSigner { private const string DssePayloadType = "application/vnd.stellaops.knowledge-snapshot+json"; private const string PreAuthenticationEncodingPrefix = "DSSEv1"; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; - - /// - /// Signs a manifest using the provided signing key. 
- /// - public async Task SignAsync( - ManifestSigningRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(request.ManifestBytes); - - // Build PAE (Pre-Authentication Encoding) for DSSE signing - var paeBytes = BuildPae(DssePayloadType, request.ManifestBytes); - - // Sign the PAE - byte[] signatureBytes; - string keyId; - string algorithm; - - if (request.SigningKey is not null) - { - // Use provided signing key - (signatureBytes, keyId, algorithm) = await SignWithKeyAsync( - request.SigningKey, paeBytes, cancellationToken); - } - else if (!string.IsNullOrWhiteSpace(request.KeyFilePath)) - { - // Load key from file and sign - (signatureBytes, keyId, algorithm) = await SignWithKeyFileAsync( - request.KeyFilePath, request.KeyPassword, paeBytes, cancellationToken); - } - else - { - // Generate ephemeral key for signing (keyless mode) - (signatureBytes, keyId, algorithm) = await SignEphemeralAsync(paeBytes, cancellationToken); - } - - // Build DSSE envelope - var envelope = BuildDsseEnvelope(request.ManifestBytes, signatureBytes, keyId); - - return new ManifestSignatureResult - { - Success = true, - Envelope = envelope, - KeyId = keyId, - Algorithm = algorithm, - SignatureDigest = ComputeSha256(signatureBytes) - }; - } - - /// - /// Verifies a DSSE envelope signature. - /// - public async Task VerifyAsync( - ManifestVerificationRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(request.EnvelopeBytes); - - try - { - // Parse the envelope - using var envelope = JsonDocument.Parse(request.EnvelopeBytes); - var root = envelope.RootElement; - - if (!root.TryGetProperty("payloadType", out var payloadTypeElement) || - !root.TryGetProperty("payload", out var payloadElement) || - !root.TryGetProperty("signatures", out var signaturesElement)) - { - return new ManifestVerificationResult - { - Success = false, - Error = "Invalid DSSE envelope structure" - }; - } - - var payloadType = payloadTypeElement.GetString(); - var payloadBase64 = payloadElement.GetString(); - - if (string.IsNullOrEmpty(payloadBase64)) - { - return new ManifestVerificationResult - { - Success = false, - Error = "Missing payload in envelope" - }; - } - - // Decode payload - var payloadBytes = Convert.FromBase64String(payloadBase64); - - // Compute expected digest - var payloadDigest = ComputeSha256(payloadBytes); - - // Verify at least one signature - var signatureCount = signaturesElement.GetArrayLength(); - if (signatureCount == 0) - { - return new ManifestVerificationResult - { - Success = false, - Error = "No signatures present in envelope" - }; - } - - // Build PAE for verification - var paeBytes = BuildPae(payloadType ?? DssePayloadType, payloadBytes); - - // Verify signatures if public key is provided - var verifiedSignatures = new List(); - foreach (var sig in signaturesElement.EnumerateArray()) - { - var keyId = sig.TryGetProperty("keyid", out var keyIdElement) - ? 
keyIdElement.GetString() - : null; - - if (sig.TryGetProperty("sig", out var sigElement)) - { - var signatureBase64 = sigElement.GetString(); - if (!string.IsNullOrEmpty(signatureBase64)) - { - // If public key is provided, verify the signature - if (request.PublicKey is not null) - { - var signatureBytes = Convert.FromBase64String(signatureBase64); - var isValid = await VerifySignatureAsync( - request.PublicKey, paeBytes, signatureBytes, cancellationToken); - - verifiedSignatures.Add(new VerifiedSignature(keyId, isValid)); - } - else - { - // Without public key, we can only confirm presence - verifiedSignatures.Add(new VerifiedSignature(keyId, null)); - } - } - } - } - - return new ManifestVerificationResult - { - Success = true, - PayloadDigest = payloadDigest, - SignatureCount = signatureCount, - VerifiedSignatures = verifiedSignatures, - PayloadType = payloadType - }; - } - catch (JsonException ex) - { - return new ManifestVerificationResult - { - Success = false, - Error = $"Failed to parse envelope: {ex.Message}" - }; - } - catch (FormatException ex) - { - return new ManifestVerificationResult - { - Success = false, - Error = $"Invalid base64 encoding: {ex.Message}" - }; - } - } - - private static byte[] BuildPae(string payloadType, byte[] payload) - { - var typeBytes = Encoding.UTF8.GetBytes(payloadType); - var prefixBytes = Encoding.UTF8.GetBytes(PreAuthenticationEncodingPrefix); - // Use InvariantCulture to ensure ASCII decimal digits per DSSE spec - var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture); - var payloadLenStr = payload.Length.ToString(CultureInfo.InvariantCulture); - - var totalLen = prefixBytes.Length + 1 + - typeLenStr.Length + 1 + - typeBytes.Length + 1 + - payloadLenStr.Length + 1 + - payload.Length; - - var pae = new byte[totalLen]; - var offset = 0; - - // DSSEv1 - Buffer.BlockCopy(prefixBytes, 0, pae, offset, prefixBytes.Length); - offset += prefixBytes.Length; - pae[offset++] = 0x20; - - // LEN(type) - var typeLenBytes = Encoding.UTF8.GetBytes(typeLenStr); - Buffer.BlockCopy(typeLenBytes, 0, pae, offset, typeLenBytes.Length); - offset += typeLenBytes.Length; - pae[offset++] = 0x20; - - // type - Buffer.BlockCopy(typeBytes, 0, pae, offset, typeBytes.Length); - offset += typeBytes.Length; - pae[offset++] = 0x20; - - // LEN(payload) - var payloadLenBytes = Encoding.UTF8.GetBytes(payloadLenStr); - Buffer.BlockCopy(payloadLenBytes, 0, pae, offset, payloadLenBytes.Length); - offset += payloadLenBytes.Length; - pae[offset++] = 0x20; - - // payload - Buffer.BlockCopy(payload, 0, pae, offset, payload.Length); - - return pae; - } - - private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyAsync( - AsymmetricAlgorithm key, - byte[] data, - CancellationToken cancellationToken) - { - await Task.CompletedTask; // Signature operations are synchronous - - return key switch - { - ECDsa ecdsa => SignWithEcdsa(ecdsa, data), - RSA rsa => SignWithRsa(rsa, data), - _ => throw new NotSupportedException($"Unsupported key type: {key.GetType().Name}") - }; - } - - private static (byte[] Signature, string KeyId, string Algorithm) SignWithEcdsa(ECDsa ecdsa, byte[] data) - { - var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256); - var keyId = ComputeKeyId(ecdsa); - var algorithm = ecdsa.KeySize switch - { - 256 => "ES256", - 384 => "ES384", - 521 => "ES512", - _ => "ECDSA" - }; - return (signature, keyId, algorithm); - } - - private static (byte[] Signature, string KeyId, string Algorithm) SignWithRsa(RSA rsa, byte[] data) - 
{ - var signature = rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var keyId = ComputeKeyId(rsa); - return (signature, keyId, "RS256"); - } - - private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyFileAsync( - string keyFilePath, - string? password, - byte[] data, - CancellationToken cancellationToken) - { - var keyBytes = await File.ReadAllBytesAsync(keyFilePath, cancellationToken); - var keyPem = Encoding.UTF8.GetString(keyBytes); - - // Try to load as ECDSA first - try - { - using var ecdsa = ECDsa.Create(); - if (string.IsNullOrEmpty(password)) - { - ecdsa.ImportFromPem(keyPem); - } - else - { - ecdsa.ImportFromEncryptedPem(keyPem, password); - } - return SignWithEcdsa(ecdsa, data); - } - catch (CryptographicException) - { - // Try RSA - } - - try - { - using var rsa = RSA.Create(); - if (string.IsNullOrEmpty(password)) - { - rsa.ImportFromPem(keyPem); - } - else - { - rsa.ImportFromEncryptedPem(keyPem, password); - } - return SignWithRsa(rsa, data); - } - catch (CryptographicException ex) - { - throw new InvalidOperationException($"Failed to load signing key from {keyFilePath}", ex); - } - } - - private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignEphemeralAsync( - byte[] data, - CancellationToken cancellationToken) - { - await Task.CompletedTask; - - // Generate ephemeral ECDSA P-256 key - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256); - var keyId = $"ephemeral:{ComputeKeyId(ecdsa)}"; - return (signature, keyId, "ES256"); - } - - private static async Task VerifySignatureAsync( - AsymmetricAlgorithm key, - byte[] data, - byte[] signature, - CancellationToken cancellationToken) - { - await Task.CompletedTask; - - return key switch - { - ECDsa ecdsa => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256), - RSA rsa => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1), - _ => false - }; - } - - private static string ComputeKeyId(AsymmetricAlgorithm key) - { - byte[] publicKeyBytes; - - switch (key) - { - case ECDsa ecdsa: - publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo(); - break; - case RSA rsa: - publicKeyBytes = rsa.ExportSubjectPublicKeyInfo(); - break; - default: - return "unknown"; - } - - var hash = SHA256.HashData(publicKeyBytes); - return Convert.ToHexString(hash[..8]).ToLowerInvariant(); - } - - private static byte[] BuildDsseEnvelope(byte[] payload, byte[] signature, string keyId) - { - var payloadBase64 = Convert.ToBase64String(payload); - var signatureBase64 = Convert.ToBase64String(signature); - - var envelope = new DsseEnvelopeDto - { - PayloadType = DssePayloadType, - Payload = payloadBase64, - Signatures = - [ - new DsseSignatureDto - { - KeyId = keyId, - Sig = signatureBase64 - } - ] - }; - - return JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions); - } - - private static string ComputeSha256(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private sealed class DsseEnvelopeDto - { - public required string PayloadType { get; init; } - public required string Payload { get; init; } - public required List Signatures { get; init; } - } - - private sealed class DsseSignatureDto - { - public string? KeyId { get; init; } - public required string Sig { get; init; } - } } - -/// -/// Interface for manifest signing operations. 
-/// -public interface ISnapshotManifestSigner -{ - Task SignAsync( - ManifestSigningRequest request, - CancellationToken cancellationToken = default); - - Task VerifyAsync( - ManifestVerificationRequest request, - CancellationToken cancellationToken = default); -} - -#region Request and Result Models - -/// -/// Request for signing a manifest. -/// -public sealed record ManifestSigningRequest -{ - public required byte[] ManifestBytes { get; init; } - public AsymmetricAlgorithm? SigningKey { get; init; } - public string? KeyFilePath { get; init; } - public string? KeyPassword { get; init; } -} - -/// -/// Result of signing a manifest. -/// -public sealed record ManifestSignatureResult -{ - public bool Success { get; init; } - public byte[]? Envelope { get; init; } - public string? KeyId { get; init; } - public string? Algorithm { get; init; } - public string? SignatureDigest { get; init; } - public string? Error { get; init; } - - public static ManifestSignatureResult Failed(string error) => new() - { - Success = false, - Error = error - }; -} - -/// -/// Request for verifying a manifest signature. -/// -public sealed record ManifestVerificationRequest -{ - public required byte[] EnvelopeBytes { get; init; } - public AsymmetricAlgorithm? PublicKey { get; init; } -} - -/// -/// Result of verifying a manifest signature. -/// -public sealed record ManifestVerificationResult -{ - public bool Success { get; init; } - public string? PayloadDigest { get; init; } - public string? PayloadType { get; init; } - public int SignatureCount { get; init; } - public IReadOnlyList? VerifiedSignatures { get; init; } - public string? Error { get; init; } -} - -/// -/// A verified signature with optional verification status. -/// -public sealed record VerifiedSignature(string? KeyId, bool? Verified); - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSignerModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSignerModels.cs new file mode 100644 index 000000000..94e3adb46 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SnapshotManifestSignerModels.cs @@ -0,0 +1,60 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Request for signing a manifest. +/// +public sealed record ManifestSigningRequest +{ + public required byte[] ManifestBytes { get; init; } + public AsymmetricAlgorithm? SigningKey { get; init; } + public string? KeyFilePath { get; init; } + public string? KeyPassword { get; init; } +} + +/// +/// Result of signing a manifest. +/// +public sealed record ManifestSignatureResult +{ + public bool Success { get; init; } + public byte[]? Envelope { get; init; } + public string? KeyId { get; init; } + public string? Algorithm { get; init; } + public string? SignatureDigest { get; init; } + public string? Error { get; init; } + + public static ManifestSignatureResult Failed(string error) => new() + { + Success = false, + Error = error + }; +} + +/// +/// Request for verifying a manifest signature. +/// +public sealed record ManifestVerificationRequest +{ + public required byte[] EnvelopeBytes { get; init; } + public AsymmetricAlgorithm? PublicKey { get; init; } +} + +/// +/// Result of verifying a manifest signature. +/// +public sealed record ManifestVerificationResult +{ + public bool Success { get; init; } + public string? PayloadDigest { get; init; } + public string? 
PayloadType { get; init; } + public int SignatureCount { get; init; } + public IReadOnlyList? VerifiedSignatures { get; init; } + public string? Error { get; init; } +} + +/// +/// A verified signature with optional verification status. +/// +public sealed record VerifiedSignature(string? KeyId, bool? Verified); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SystemGuidProvider.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SystemGuidProvider.cs new file mode 100644 index 000000000..c74876ad3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/SystemGuidProvider.cs @@ -0,0 +1,15 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Default GUID provider using system random GUIDs. +/// +public sealed class SystemGuidProvider : IGuidProvider +{ + /// + /// Singleton instance of the system GUID provider. + /// + public static SystemGuidProvider Instance { get; } = new(); + + /// + public Guid NewGuid() => Guid.NewGuid(); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorContent.cs new file mode 100644 index 000000000..d29259185 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorContent.cs @@ -0,0 +1,8 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record TimeAnchorContent +{ + public required DateTimeOffset AnchorTime { get; init; } + public required string Source { get; init; } + public string? TokenDigest { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorModels.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorModels.cs new file mode 100644 index 000000000..92191c508 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorModels.cs @@ -0,0 +1,68 @@ +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// Request for creating a time anchor. +/// +public sealed record TimeAnchorRequest +{ + /// + /// Time anchor source: "local", "roughtime:", or "rfc3161:" + /// + public string? Source { get; init; } + + /// + /// Merkle root to bind to the time anchor (optional). + /// + public string? MerkleRoot { get; init; } +} + +/// +/// Result of creating a time anchor. +/// +public sealed record TimeAnchorResult +{ + public bool Success { get; init; } + public TimeAnchorContent? Content { get; init; } + public byte[]? TokenBytes { get; init; } + public string? Warning { get; init; } + public string? Error { get; init; } + + public static TimeAnchorResult Failed(string error) => new() + { + Success = false, + Error = error + }; +} + +/// +/// Request for validating a time anchor. +/// +public sealed record TimeAnchorValidationRequest +{ + /// + /// Maximum age in hours. + /// + public int? MaxAgeHours { get; init; } + + /// + /// Maximum clock drift in seconds. + /// + public int? MaxClockDriftSeconds { get; init; } + + /// + /// Expected token digest for validation. + /// + public string? ExpectedTokenDigest { get; init; } +} + +/// +/// Result of validating a time anchor. +/// +public sealed record TimeAnchorValidationResult +{ + public bool IsValid { get; init; } + public DateTimeOffset? AnchorTime { get; init; } + public string? Source { get; init; } + public double? AgeHours { get; init; } + public string? 
Error { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Create.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Create.cs new file mode 100644 index 000000000..293f4605e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Create.cs @@ -0,0 +1,33 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TimeAnchorService +{ + /// + /// Creates a time anchor token for a snapshot. + /// + public async Task CreateAnchorAsync( + TimeAnchorRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + try + { + var source = request.Source?.ToLowerInvariant() ?? "local"; + + return source switch + { + "local" => await CreateLocalAnchorAsync(request, cancellationToken).ConfigureAwait(false), + var s when s.StartsWith("roughtime:", StringComparison.Ordinal) => + await CreateRoughtimeAnchorAsync(request, cancellationToken).ConfigureAwait(false), + var s when s.StartsWith("rfc3161:", StringComparison.Ordinal) => + await CreateRfc3161AnchorAsync(request, cancellationToken).ConfigureAwait(false), + _ => await CreateLocalAnchorAsync(request, cancellationToken).ConfigureAwait(false) + }; + } + catch (Exception ex) + { + return TimeAnchorResult.Failed($"Failed to create time anchor: {ex.Message}"); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Local.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Local.cs new file mode 100644 index 000000000..a937c15ea --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Local.cs @@ -0,0 +1,48 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TimeAnchorService +{ + private async Task CreateLocalAnchorAsync( + TimeAnchorRequest request, + CancellationToken cancellationToken) + { + await Task.CompletedTask.ConfigureAwait(false); + + var anchorTime = _timeProvider.GetUtcNow(); + + // Create a local anchor with a signed timestamp + var anchorData = new LocalAnchorData + { + Timestamp = anchorTime, + Nonce = _guidProvider.NewGuid().ToString("N"), + MerkleRoot = request.MerkleRoot + }; + + var anchorJson = JsonSerializer.Serialize(anchorData, _jsonOptions); + var anchorBytes = Encoding.UTF8.GetBytes(anchorJson); + var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}"; + + return new TimeAnchorResult + { + Success = true, + Content = new TimeAnchorContent + { + AnchorTime = anchorTime, + Source = "local", + TokenDigest = tokenDigest + }, + TokenBytes = anchorBytes + }; + } + + private sealed record LocalAnchorData + { + public required DateTimeOffset Timestamp { get; init; } + public required string Nonce { get; init; } + public string? 
MerkleRoot { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Rfc3161.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Rfc3161.cs new file mode 100644 index 000000000..e092d589b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Rfc3161.cs @@ -0,0 +1,29 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TimeAnchorService +{ + private async Task CreateRfc3161AnchorAsync( + TimeAnchorRequest request, + CancellationToken cancellationToken) + { + // RFC 3161 is the Internet X.509 PKI Time-Stamp Protocol (TSP) + // Full implementation requires a TSA client library + var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com"; + + await Task.CompletedTask.ConfigureAwait(false); + + // Per no-silent-stubs rule: unimplemented paths must fail explicitly + return TimeAnchorResult.Failed( + $"RFC 3161 time anchor source '{tsaUrl}' is not implemented. " + + "Use 'local' source or implement RFC 3161 TSA client integration."); + } + + private sealed record Rfc3161AnchorData + { + public required DateTimeOffset Timestamp { get; init; } + public required string TsaUrl { get; init; } + public required string SerialNumber { get; init; } + public required string PolicyOid { get; init; } + public string? MerkleRoot { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Roughtime.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Roughtime.cs new file mode 100644 index 000000000..b0e6f5f25 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Roughtime.cs @@ -0,0 +1,30 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TimeAnchorService +{ + private async Task CreateRoughtimeAnchorAsync( + TimeAnchorRequest request, + CancellationToken cancellationToken) + { + // Roughtime is a cryptographic time synchronization protocol + // Full implementation requires a Roughtime client library + var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003"; + + await Task.CompletedTask.ConfigureAwait(false); + + // Per no-silent-stubs rule: unimplemented paths must fail explicitly + return TimeAnchorResult.Failed( + $"Roughtime time anchor source '{serverUrl}' is not implemented. " + + "Use 'local' source or implement Roughtime client integration."); + } + + private sealed record RoughtimeAnchorData + { + public required DateTimeOffset Timestamp { get; init; } + public required string Server { get; init; } + public required long Midpoint { get; init; } + public required long Radius { get; init; } + public required string Nonce { get; init; } + public string? MerkleRoot { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Validate.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Validate.cs new file mode 100644 index 000000000..a27e6816e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.Validate.cs @@ -0,0 +1,81 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TimeAnchorService +{ + /// + /// Validates a time anchor token. 
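Reviewer note: a local-anchor round trip across CreateAnchorAsync (above) and ValidateAnchorAsync (below). The constructor argument order (TimeProvider, IGuidProvider) is an assumption based on the field assignments in TimeAnchorService, and the merkle root value is a placeholder.

    // Reviewer sketch: create a local time anchor, then validate it.
    // Constructor argument order is assumed; MerkleRoot is an illustrative value.
    using StellaOps.AirGap.Bundle.Services;

    var anchors = new TimeAnchorService(TimeProvider.System, SystemGuidProvider.Instance);

    var created = await anchors.CreateAnchorAsync(new TimeAnchorRequest
    {
        Source = "local",
        MerkleRoot = "sha256:placeholder" // illustrative value only
    });

    var validation = await anchors.ValidateAnchorAsync(created.Content!, new TimeAnchorValidationRequest
    {
        MaxAgeHours = 72,
        MaxClockDriftSeconds = 60,
        ExpectedTokenDigest = created.Content!.TokenDigest
    });

    Console.WriteLine($"valid={validation.IsValid} ageHours={validation.AgeHours:F2}");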
+ /// + public async Task ValidateAnchorAsync( + TimeAnchorContent anchor, + TimeAnchorValidationRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(anchor); + ArgumentNullException.ThrowIfNull(request); + + try + { + // Validate timestamp is within acceptable range + var now = _timeProvider.GetUtcNow(); + var anchorAge = now - anchor.AnchorTime; + + if (request.MaxAgeHours.HasValue && anchorAge.TotalHours > request.MaxAgeHours.Value) + { + return new TimeAnchorValidationResult + { + IsValid = false, + AnchorTime = anchor.AnchorTime, + Source = anchor.Source, + AgeHours = anchorAge.TotalHours, + Error = $"Time anchor is too old: {anchorAge.TotalHours:F1} hours (max: {request.MaxAgeHours.Value})" + }; + } + + // Validate anchor is not in the future (with drift tolerance) + var maxDrift = TimeSpan.FromSeconds(request.MaxClockDriftSeconds ?? 60); + if (anchor.AnchorTime > now + maxDrift) + { + return new TimeAnchorValidationResult + { + IsValid = false, + AnchorTime = anchor.AnchorTime, + Source = anchor.Source, + Error = "Time anchor is in the future" + }; + } + + // Validate token digest if provided + if (!string.IsNullOrEmpty(anchor.TokenDigest) && !string.IsNullOrEmpty(request.ExpectedTokenDigest)) + { + if (!string.Equals(anchor.TokenDigest, request.ExpectedTokenDigest, StringComparison.OrdinalIgnoreCase)) + { + return new TimeAnchorValidationResult + { + IsValid = false, + AnchorTime = anchor.AnchorTime, + Source = anchor.Source, + Error = "Token digest mismatch" + }; + } + } + + await Task.CompletedTask.ConfigureAwait(false); + + return new TimeAnchorValidationResult + { + IsValid = true, + AnchorTime = anchor.AnchorTime, + Source = anchor.Source, + AgeHours = anchorAge.TotalHours + }; + } + catch (Exception ex) + { + return new TimeAnchorValidationResult + { + IsValid = false, + Error = $"Validation failed: {ex.Message}" + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.cs index 6d1fc4be4..e042a17a4 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimeAnchorService.cs @@ -1,12 +1,3 @@ -// ----------------------------------------------------------------------------- -// TimeAnchorService.cs -// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import) -// Task: SEAL-009 - Add time anchor token generation -// Description: Generates time anchor tokens for knowledge snapshot bundles. -// ----------------------------------------------------------------------------- - -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.Services; @@ -15,9 +6,9 @@ namespace StellaOps.AirGap.Bundle.Services; /// Generates time anchor tokens for snapshot bundles. /// Time anchors provide cryptographic proof of the time when a snapshot was created. /// -public sealed class TimeAnchorService : ITimeAnchorService +public sealed partial class TimeAnchorService : ITimeAnchorService { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase @@ -35,285 +26,4 @@ public sealed class TimeAnchorService : ITimeAnchorService _timeProvider = timeProvider ?? 
throw new ArgumentNullException(nameof(timeProvider)); _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider)); } - - /// - /// Creates a time anchor token for a snapshot. - /// - public async Task CreateAnchorAsync( - TimeAnchorRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - try - { - var source = request.Source?.ToLowerInvariant() ?? "local"; - - return source switch - { - "local" => await CreateLocalAnchorAsync(request, cancellationToken), - var s when s.StartsWith("roughtime:", StringComparison.Ordinal) => await CreateRoughtimeAnchorAsync(request, cancellationToken), - var s when s.StartsWith("rfc3161:", StringComparison.Ordinal) => await CreateRfc3161AnchorAsync(request, cancellationToken), - _ => await CreateLocalAnchorAsync(request, cancellationToken) - }; - } - catch (Exception ex) - { - return TimeAnchorResult.Failed($"Failed to create time anchor: {ex.Message}"); - } - } - - /// - /// Validates a time anchor token. - /// - public async Task ValidateAnchorAsync( - TimeAnchorContent anchor, - TimeAnchorValidationRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(anchor); - ArgumentNullException.ThrowIfNull(request); - - try - { - // Validate timestamp is within acceptable range - var now = _timeProvider.GetUtcNow(); - var anchorAge = now - anchor.AnchorTime; - - if (request.MaxAgeHours.HasValue && anchorAge.TotalHours > request.MaxAgeHours.Value) - { - return new TimeAnchorValidationResult - { - IsValid = false, - AnchorTime = anchor.AnchorTime, - Source = anchor.Source, - AgeHours = anchorAge.TotalHours, - Error = $"Time anchor is too old: {anchorAge.TotalHours:F1} hours (max: {request.MaxAgeHours.Value})" - }; - } - - // Validate anchor is not in the future (with drift tolerance) - var maxDrift = TimeSpan.FromSeconds(request.MaxClockDriftSeconds ?? 
60); - if (anchor.AnchorTime > now + maxDrift) - { - return new TimeAnchorValidationResult - { - IsValid = false, - AnchorTime = anchor.AnchorTime, - Source = anchor.Source, - Error = "Time anchor is in the future" - }; - } - - // Validate token digest if provided - if (!string.IsNullOrEmpty(anchor.TokenDigest) && !string.IsNullOrEmpty(request.ExpectedTokenDigest)) - { - if (!string.Equals(anchor.TokenDigest, request.ExpectedTokenDigest, StringComparison.OrdinalIgnoreCase)) - { - return new TimeAnchorValidationResult - { - IsValid = false, - AnchorTime = anchor.AnchorTime, - Source = anchor.Source, - Error = "Token digest mismatch" - }; - } - } - - await Task.CompletedTask; - - return new TimeAnchorValidationResult - { - IsValid = true, - AnchorTime = anchor.AnchorTime, - Source = anchor.Source, - AgeHours = anchorAge.TotalHours - }; - } - catch (Exception ex) - { - return new TimeAnchorValidationResult - { - IsValid = false, - Error = $"Validation failed: {ex.Message}" - }; - } - } - - private async Task CreateLocalAnchorAsync( - TimeAnchorRequest request, - CancellationToken cancellationToken) - { - await Task.CompletedTask; - - var anchorTime = _timeProvider.GetUtcNow(); - - // Create a local anchor with a signed timestamp - var anchorData = new LocalAnchorData - { - Timestamp = anchorTime, - Nonce = _guidProvider.NewGuid().ToString("N"), - MerkleRoot = request.MerkleRoot - }; - - var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions); - var anchorBytes = Encoding.UTF8.GetBytes(anchorJson); - var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}"; - - return new TimeAnchorResult - { - Success = true, - Content = new TimeAnchorContent - { - AnchorTime = anchorTime, - Source = "local", - TokenDigest = tokenDigest - }, - TokenBytes = anchorBytes - }; - } - - private async Task CreateRoughtimeAnchorAsync( - TimeAnchorRequest request, - CancellationToken cancellationToken) - { - // Roughtime is a cryptographic time synchronization protocol - // Full implementation requires a Roughtime client library - var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003"; - - await Task.CompletedTask; - - // Per no-silent-stubs rule: unimplemented paths must fail explicitly - return TimeAnchorResult.Failed( - $"Roughtime time anchor source '{serverUrl}' is not implemented. " + - "Use 'local' source or implement Roughtime client integration."); - } - - private async Task CreateRfc3161AnchorAsync( - TimeAnchorRequest request, - CancellationToken cancellationToken) - { - // RFC 3161 is the Internet X.509 PKI Time-Stamp Protocol (TSP) - // Full implementation requires a TSA client library - var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com"; - - await Task.CompletedTask; - - // Per no-silent-stubs rule: unimplemented paths must fail explicitly - return TimeAnchorResult.Failed( - $"RFC 3161 time anchor source '{tsaUrl}' is not implemented. " + - "Use 'local' source or implement RFC 3161 TSA client integration."); - } - - private sealed record LocalAnchorData - { - public required DateTimeOffset Timestamp { get; init; } - public required string Nonce { get; init; } - public string? 
MerkleRoot { get; init; } - } - - private sealed record RoughtimeAnchorData - { - public required DateTimeOffset Timestamp { get; init; } - public required string Server { get; init; } - public required long Midpoint { get; init; } - public required long Radius { get; init; } - public required string Nonce { get; init; } - public string? MerkleRoot { get; init; } - } - - private sealed record Rfc3161AnchorData - { - public required DateTimeOffset Timestamp { get; init; } - public required string TsaUrl { get; init; } - public required string SerialNumber { get; init; } - public required string PolicyOid { get; init; } - public string? MerkleRoot { get; init; } - } } - -/// -/// Interface for time anchor operations. -/// -public interface ITimeAnchorService -{ - Task CreateAnchorAsync( - TimeAnchorRequest request, - CancellationToken cancellationToken = default); - - Task ValidateAnchorAsync( - TimeAnchorContent anchor, - TimeAnchorValidationRequest request, - CancellationToken cancellationToken = default); -} - -#region Request and Result Models - -/// -/// Request for creating a time anchor. -/// -public sealed record TimeAnchorRequest -{ - /// - /// Time anchor source: "local", "roughtime:", or "rfc3161:" - /// - public string? Source { get; init; } - - /// - /// Merkle root to bind to the time anchor (optional). - /// - public string? MerkleRoot { get; init; } -} - -/// -/// Result of creating a time anchor. -/// -public sealed record TimeAnchorResult -{ - public bool Success { get; init; } - public TimeAnchorContent? Content { get; init; } - public byte[]? TokenBytes { get; init; } - public string? Warning { get; init; } - public string? Error { get; init; } - - public static TimeAnchorResult Failed(string error) => new() - { - Success = false, - Error = error - }; -} - -/// -/// Request for validating a time anchor. -/// -public sealed record TimeAnchorValidationRequest -{ - /// - /// Maximum age in hours. - /// - public int? MaxAgeHours { get; init; } - - /// - /// Maximum clock drift in seconds. - /// - public int? MaxClockDriftSeconds { get; init; } - - /// - /// Expected token digest for validation. - /// - public string? ExpectedTokenDigest { get; init; } -} - -/// -/// Result of validating a time anchor. -/// -public sealed record TimeAnchorValidationResult -{ - public bool IsValid { get; init; } - public DateTimeOffset? AnchorTime { get; init; } - public string? Source { get; init; } - public double? AgeHours { get; init; } - public string? 
Error { get; init; } -} - -#endregion diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimestampBuildConfigs.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimestampBuildConfigs.cs new file mode 100644 index 000000000..b235e9f61 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TimestampBuildConfigs.cs @@ -0,0 +1,9 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public abstract record TimestampBuildConfig; + +public sealed record Rfc3161TimestampBuildConfig(byte[] TimeStampToken) + : TimestampBuildConfig; + +public sealed record EidasQtsTimestampBuildConfig(string SourcePath, string RelativePath) + : TimestampBuildConfig; diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Normalize.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Normalize.cs new file mode 100644 index 000000000..ae28d3271 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Normalize.cs @@ -0,0 +1,37 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TrustProfileLoader +{ + private static TrustProfile NormalizeProfile(TrustProfile profile, string sourcePath) + { + var profileId = string.IsNullOrWhiteSpace(profile.ProfileId) + ? InferProfileId(sourcePath) + : profile.ProfileId; + + var name = string.IsNullOrWhiteSpace(profile.Name) ? profileId : profile.Name; + + return profile with + { + ProfileId = profileId, + Name = name, + TrustRoots = profile.TrustRoots.IsDefault ? [] : profile.TrustRoots, + RekorKeys = profile.RekorKeys.IsDefault ? [] : profile.RekorKeys, + TsaRoots = profile.TsaRoots.IsDefault ? [] : profile.TsaRoots, + SourcePath = sourcePath + }; + } + + private static string InferProfileId(string profilePath) + { + var fileName = Path.GetFileName(profilePath); + const string suffix = ".trustprofile.json"; + if (fileName.EndsWith(suffix, StringComparison.OrdinalIgnoreCase)) + { + return fileName[..^suffix.Length]; + } + + return Path.GetFileNameWithoutExtension(fileName); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Paths.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Paths.cs new file mode 100644 index 000000000..a912b78d8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.Paths.cs @@ -0,0 +1,32 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TrustProfileLoader +{ + public string ResolveEntryPath(TrustProfile profile, TrustProfileEntry entry) + { + if (string.IsNullOrWhiteSpace(entry.Path)) + { + throw new ArgumentException("Entry path is required.", nameof(entry)); + } + + if (Path.IsPathRooted(entry.Path)) + { + return entry.Path; + } + + if (string.IsNullOrWhiteSpace(profile.SourcePath)) + { + throw new InvalidOperationException("Profile source path is missing."); + } + + var baseDir = Path.GetDirectoryName(profile.SourcePath); + if (string.IsNullOrWhiteSpace(baseDir)) + { + throw new InvalidOperationException("Profile base directory is missing."); + } + + return PathValidation.SafeCombine(baseDir, entry.Path); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.cs index 6d5a90242..dfc83086f 100644 --- 
a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustProfileLoader.cs @@ -6,9 +6,9 @@ using System.Text.Json.Serialization; namespace StellaOps.AirGap.Bundle.Services; -public sealed class TrustProfileLoader +public sealed partial class TrustProfileLoader { - private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + private static readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.Web) { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull @@ -47,66 +47,10 @@ public sealed class TrustProfileLoader } var json = File.ReadAllText(profilePath); - var profile = JsonSerializer.Deserialize(json, JsonOptions) + var profile = JsonSerializer.Deserialize(json, _jsonOptions) ?? throw new InvalidOperationException("Failed to deserialize trust profile."); return NormalizeProfile(profile, profilePath); } - public string ResolveEntryPath(TrustProfile profile, TrustProfileEntry entry) - { - if (string.IsNullOrWhiteSpace(entry.Path)) - { - throw new ArgumentException("Entry path is required.", nameof(entry)); - } - - if (Path.IsPathRooted(entry.Path)) - { - return entry.Path; - } - - if (string.IsNullOrWhiteSpace(profile.SourcePath)) - { - throw new InvalidOperationException("Profile source path is missing."); - } - - var baseDir = Path.GetDirectoryName(profile.SourcePath); - if (string.IsNullOrWhiteSpace(baseDir)) - { - throw new InvalidOperationException("Profile base directory is missing."); - } - - return PathValidation.SafeCombine(baseDir, entry.Path); - } - - private static TrustProfile NormalizeProfile(TrustProfile profile, string sourcePath) - { - var profileId = string.IsNullOrWhiteSpace(profile.ProfileId) - ? InferProfileId(sourcePath) - : profile.ProfileId; - - var name = string.IsNullOrWhiteSpace(profile.Name) ? profileId : profile.Name; - - return profile with - { - ProfileId = profileId, - Name = name, - TrustRoots = profile.TrustRoots.IsDefault ? [] : profile.TrustRoots, - RekorKeys = profile.RekorKeys.IsDefault ? [] : profile.RekorKeys, - TsaRoots = profile.TsaRoots.IsDefault ? [] : profile.TsaRoots, - SourcePath = sourcePath - }; - } - - private static string InferProfileId(string profilePath) - { - var fileName = Path.GetFileName(profilePath); - const string suffix = ".trustprofile.json"; - if (fileName.EndsWith(suffix, StringComparison.OrdinalIgnoreCase)) - { - return fileName[..^suffix.Length]; - } - - return Path.GetFileNameWithoutExtension(fileName); - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustRootContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustRootContent.cs new file mode 100644 index 000000000..bedd8c0d1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TrustRootContent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record TrustRootContent +{ + public required string KeyId { get; init; } + public required string FileName { get; init; } + public required byte[] Content { get; init; } + public string Algorithm { get; init; } = "ES256"; + public DateTimeOffset? 
ExpiresAt { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundleResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundleResult.cs new file mode 100644 index 000000000..137102790 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundleResult.cs @@ -0,0 +1,9 @@ +using System.Collections.Immutable; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record TsaChainBundleResult( + ImmutableArray ChainPaths, + ImmutableArray Certificates, + long TotalSizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Chain.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Chain.cs new file mode 100644 index 000000000..e314a13b5 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Chain.cs @@ -0,0 +1,88 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TsaChainBundler +{ + private static List BuildChain( + X509Certificate2 leaf, + IReadOnlyList pool) + { + var byThumbprint = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var cert in pool) + { + if (!string.IsNullOrWhiteSpace(cert.Thumbprint) && !byThumbprint.ContainsKey(cert.Thumbprint)) + { + byThumbprint[cert.Thumbprint] = cert; + } + } + + var chain = new List(); + var visited = new HashSet(StringComparer.OrdinalIgnoreCase); + var current = leaf; + + while (current is not null && !string.IsNullOrWhiteSpace(current.Thumbprint)) + { + if (!visited.Add(current.Thumbprint)) + { + break; + } + + chain.Add(current); + + if (IsSelfSigned(current)) + { + break; + } + + var issuer = FindIssuer(current, byThumbprint.Values); + if (issuer is null) + { + break; + } + + current = issuer; + } + + return chain; + } + + private static X509Certificate2? FindIssuer( + X509Certificate2 certificate, + IEnumerable candidates) + { + var issuerName = certificate.Issuer; + var issuerCandidates = candidates + .Where(c => string.Equals(c.Subject, issuerName, StringComparison.OrdinalIgnoreCase)) + .OrderBy(c => c.Thumbprint, StringComparer.OrdinalIgnoreCase) + .ToList(); + + if (issuerCandidates.Count == 0) + { + return null; + } + + if (issuerCandidates.Count == 1) + { + return issuerCandidates[0]; + } + + var authorityKeyId = TryGetAuthorityKeyIdentifier(certificate); + if (authorityKeyId is null) + { + return issuerCandidates[0]; + } + + foreach (var candidate in issuerCandidates) + { + var subjectKeyId = TryGetSubjectKeyIdentifier(candidate); + if (subjectKeyId is not null && subjectKeyId.SequenceEqual(authorityKeyId)) + { + return candidate; + } + } + + return issuerCandidates[0]; + } + +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Extensions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Extensions.cs new file mode 100644 index 000000000..50ab9c63d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Extensions.cs @@ -0,0 +1,68 @@ +using System.Formats.Asn1; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TsaChainBundler +{ + private static byte[]? 
TryGetSubjectKeyIdentifier(X509Certificate2 certificate) + { + var ext = certificate.Extensions.Cast() + .FirstOrDefault(e => e.Oid?.Value == "2.5.29.14"); + if (ext is null) + { + return null; + } + + try + { + var ski = new X509SubjectKeyIdentifierExtension(ext, ext.Critical); + var keyId = ski.SubjectKeyIdentifier; + if (string.IsNullOrWhiteSpace(keyId)) + { + return null; + } + + return Convert.FromHexString(keyId); + } + catch + { + return null; + } + } + + private static byte[]? TryGetAuthorityKeyIdentifier(X509Certificate2 certificate) + { + var ext = certificate.Extensions.Cast() + .FirstOrDefault(e => e.Oid?.Value == "2.5.29.35"); + if (ext is null) + { + return null; + } + + try + { + var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); + var akiBytes = reader.ReadOctetString(); + var akiReader = new AsnReader(akiBytes, AsnEncodingRules.DER); + var sequence = akiReader.ReadSequence(); + + while (sequence.HasData) + { + var tag = sequence.PeekTag(); + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) + { + return sequence.ReadOctetString(new Asn1Tag(TagClass.ContextSpecific, 0)); + } + + sequence.ReadEncodedValue(); + } + } + catch + { + return null; + } + + return null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Helpers.cs new file mode 100644 index 000000000..a8fc2474e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.Helpers.cs @@ -0,0 +1,29 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TsaChainBundler +{ + private static string ComputePrefix(ReadOnlySpan tokenBytes) + { + var hash = SHA256.HashData(tokenBytes); + return Convert.ToHexString(hash).ToLowerInvariant()[..12]; + } + + private static string ComputeShortHash(byte[] data) + { + var hash = SHA256.HashData(data); + return Convert.ToHexString(hash).ToLowerInvariant()[..16]; + } + + private static string EncodePem(byte[] raw) + { + var base64 = Convert.ToBase64String(raw, Base64FormattingOptions.InsertLineBreaks); + var builder = new StringBuilder(); + builder.Append("-----BEGIN CERTIFICATE-----\n"); + builder.Append(base64); + builder.Append("\n-----END CERTIFICATE-----\n"); + return builder.ToString(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.SelfSigned.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.SelfSigned.cs new file mode 100644 index 000000000..696a65a21 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.SelfSigned.cs @@ -0,0 +1,24 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +public sealed partial class TsaChainBundler +{ + private static bool IsSelfSigned(X509Certificate2 certificate) + { + if (!string.Equals(certificate.Subject, certificate.Issuer, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + var authorityKeyId = TryGetAuthorityKeyIdentifier(certificate); + var subjectKeyId = TryGetSubjectKeyIdentifier(certificate); + + if (authorityKeyId is null || subjectKeyId is null) + { + return true; + } + + return authorityKeyId.SequenceEqual(subjectKeyId); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.cs index 
122f20dd5..3fe9a6270 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainBundler.cs @@ -1,22 +1,11 @@ using System.Collections.Immutable; -using System.Formats.Asn1; -using System.Security.Cryptography; using System.Security.Cryptography.Pkcs; using System.Security.Cryptography.X509Certificates; using System.Text; namespace StellaOps.AirGap.Bundle.Services; -public interface ITsaChainBundler -{ - Task BundleAsync( - ReadOnlyMemory timeStampToken, - string outputPath, - string? filePrefix = null, - CancellationToken ct = default); -} - -public sealed class TsaChainBundler : ITsaChainBundler +public sealed partial class TsaChainBundler : ITsaChainBundler { public async Task BundleAsync( ReadOnlyMemory timeStampToken, @@ -79,193 +68,4 @@ public sealed class TsaChainBundler : ITsaChainBundler entries.Select(e => e.Certificate).ToImmutableArray(), entries.Sum(e => e.SizeBytes)); } - - private static List BuildChain( - X509Certificate2 leaf, - IReadOnlyList pool) - { - var byThumbprint = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var cert in pool) - { - if (!string.IsNullOrWhiteSpace(cert.Thumbprint) && !byThumbprint.ContainsKey(cert.Thumbprint)) - { - byThumbprint[cert.Thumbprint] = cert; - } - } - - var chain = new List(); - var visited = new HashSet(StringComparer.OrdinalIgnoreCase); - var current = leaf; - - while (current is not null && !string.IsNullOrWhiteSpace(current.Thumbprint)) - { - if (!visited.Add(current.Thumbprint)) - { - break; - } - - chain.Add(current); - - if (IsSelfSigned(current)) - { - break; - } - - var issuer = FindIssuer(current, byThumbprint.Values); - if (issuer is null) - { - break; - } - - current = issuer; - } - - return chain; - } - - private static X509Certificate2? FindIssuer( - X509Certificate2 certificate, - IEnumerable candidates) - { - var issuerName = certificate.Issuer; - var issuerCandidates = candidates - .Where(c => string.Equals(c.Subject, issuerName, StringComparison.OrdinalIgnoreCase)) - .OrderBy(c => c.Thumbprint, StringComparer.OrdinalIgnoreCase) - .ToList(); - - if (issuerCandidates.Count == 0) - { - return null; - } - - if (issuerCandidates.Count == 1) - { - return issuerCandidates[0]; - } - - var authorityKeyId = TryGetAuthorityKeyIdentifier(certificate); - if (authorityKeyId is null) - { - return issuerCandidates[0]; - } - - foreach (var candidate in issuerCandidates) - { - var subjectKeyId = TryGetSubjectKeyIdentifier(candidate); - if (subjectKeyId is not null && subjectKeyId.SequenceEqual(authorityKeyId)) - { - return candidate; - } - } - - return issuerCandidates[0]; - } - - private static bool IsSelfSigned(X509Certificate2 certificate) - { - if (!string.Equals(certificate.Subject, certificate.Issuer, StringComparison.OrdinalIgnoreCase)) - { - return false; - } - - var authorityKeyId = TryGetAuthorityKeyIdentifier(certificate); - var subjectKeyId = TryGetSubjectKeyIdentifier(certificate); - - if (authorityKeyId is null || subjectKeyId is null) - { - return true; - } - - return authorityKeyId.SequenceEqual(subjectKeyId); - } - - private static byte[]? 
TryGetSubjectKeyIdentifier(X509Certificate2 certificate) - { - var ext = certificate.Extensions.Cast() - .FirstOrDefault(e => e.Oid?.Value == "2.5.29.14"); - if (ext is null) - { - return null; - } - - try - { - var ski = new X509SubjectKeyIdentifierExtension(ext, ext.Critical); - var keyId = ski.SubjectKeyIdentifier; - if (string.IsNullOrWhiteSpace(keyId)) - { - return null; - } - - return Convert.FromHexString(keyId); - } - catch - { - return null; - } - } - - private static byte[]? TryGetAuthorityKeyIdentifier(X509Certificate2 certificate) - { - var ext = certificate.Extensions.Cast() - .FirstOrDefault(e => e.Oid?.Value == "2.5.29.35"); - if (ext is null) - { - return null; - } - - try - { - var reader = new AsnReader(ext.RawData, AsnEncodingRules.DER); - var akiBytes = reader.ReadOctetString(); - var akiReader = new AsnReader(akiBytes, AsnEncodingRules.DER); - var sequence = akiReader.ReadSequence(); - - while (sequence.HasData) - { - var tag = sequence.PeekTag(); - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) - { - return sequence.ReadOctetString(new Asn1Tag(TagClass.ContextSpecific, 0)); - } - - sequence.ReadEncodedValue(); - } - } - catch - { - return null; - } - - return null; - } - - private static string ComputePrefix(ReadOnlySpan tokenBytes) - { - var hash = SHA256.HashData(tokenBytes); - return Convert.ToHexString(hash).ToLowerInvariant()[..12]; - } - - private static string ComputeShortHash(byte[] data) - { - var hash = SHA256.HashData(data); - return Convert.ToHexString(hash).ToLowerInvariant()[..16]; - } - - private static string EncodePem(byte[] raw) - { - var base64 = Convert.ToBase64String(raw, Base64FormattingOptions.InsertLineBreaks); - var builder = new StringBuilder(); - builder.Append("-----BEGIN CERTIFICATE-----\n"); - builder.Append(base64); - builder.Append("\n-----END CERTIFICATE-----\n"); - return builder.ToString(); - } } - -public sealed record TsaChainBundleResult( - ImmutableArray ChainPaths, - ImmutableArray Certificates, - long TotalSizeBytes); - -internal sealed record TsaChainEntry(X509Certificate2 Certificate, string RelativePath, long SizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainEntry.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainEntry.cs new file mode 100644 index 000000000..8b4db8dc8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/TsaChainEntry.cs @@ -0,0 +1,5 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.AirGap.Bundle.Services; + +internal sealed record TsaChainEntry(X509Certificate2 Certificate, string RelativePath, long SizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexContent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexContent.cs new file mode 100644 index 000000000..deceb5a4a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexContent.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.Services; + +public sealed record VexContent +{ + public required string SourceId { get; init; } + public required string FileName { get; init; } + public required byte[] Content { get; init; } + public DateTimeOffset? 
SnapshotAt { get; init; } + public int StatementCount { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexStatementDto.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexStatementDto.cs new file mode 100644 index 000000000..4aa33fbd5 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Services/VexStatementDto.cs @@ -0,0 +1,20 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.Services; + +/// +/// DTO for deserializing VEX statements from NDJSON. +/// +internal sealed record VexStatementDto +{ + public string? Context { get; init; } + public string? Id { get; init; } + public string? Vulnerability { get; init; } + public string? Status { get; init; } + public string? Justification { get; init; } + public string? Impact { get; init; } + public string? ActionStatement { get; init; } + public Uri? SourceUri { get; init; } + public DateTimeOffset? Timestamp { get; init; } + public ImmutableArray Products { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointComponent.cs new file mode 100644 index 000000000..076c2a0b9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointComponent.cs @@ -0,0 +1,27 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Checkpoint component. +/// +public sealed record CheckpointComponent +{ + /// + /// Relative path to the checkpoint file. + /// + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// + /// SHA-256 digest. + /// + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// + /// Signed checkpoint note (raw). + /// + [JsonPropertyName("signed_note")] + public string? SignedNote { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointSource.cs new file mode 100644 index 000000000..b8b01e259 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/CheckpointSource.cs @@ -0,0 +1,12 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Checkpoint source. +/// +public sealed record CheckpointSource +{ + /// + /// Signed checkpoint note. + /// + public required string SignedNote { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesComponent.cs new file mode 100644 index 000000000..fce09ccf8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesComponent.cs @@ -0,0 +1,39 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Optional entries component (for offline verification). +/// +public sealed record EntriesComponent +{ + /// + /// Relative path to the entries file. + /// + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// + /// SHA-256 digest. + /// + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// + /// File size in bytes. + /// + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } + + /// + /// Number of entries included. 
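+ /// The count is carried over from the entries source supplied at build time and is not recomputed from the packaged file.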
+ /// + [JsonPropertyName("entry_count")] + public required int EntryCount { get; init; } + + /// + /// Format of the entries file. + /// + [JsonPropertyName("format")] + public string Format { get; init; } = "ndjson.zst"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesSource.cs new file mode 100644 index 000000000..3dac2dfdd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntriesSource.cs @@ -0,0 +1,17 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Entries source. +/// +public sealed record EntriesSource +{ + /// + /// Path to the entries file. + /// + public required string SourcePath { get; init; } + + /// + /// Number of entries in the file. + /// + public required int EntryCount { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntryRange.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntryRange.cs new file mode 100644 index 000000000..ee35fbfa0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/EntryRange.cs @@ -0,0 +1,21 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Entry range specification. +/// +public sealed record EntryRange +{ + /// + /// Start index (inclusive). + /// + [JsonPropertyName("start")] + public required long Start { get; init; } + + /// + /// End index (exclusive). + /// + [JsonPropertyName("end")] + public required long End { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileFileComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileFileComponent.cs new file mode 100644 index 000000000..8f15fe015 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileFileComponent.cs @@ -0,0 +1,45 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Individual tile file. +/// +public sealed record TileFileComponent +{ + /// + /// Tile level. + /// + [JsonPropertyName("level")] + public required int Level { get; init; } + + /// + /// Tile index. + /// + [JsonPropertyName("index")] + public required long Index { get; init; } + + /// + /// Relative path within the bundle. + /// + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// + /// SHA-256 digest. + /// + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// + /// File size in bytes. + /// + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } + + /// + /// Whether this is a partial tile. + /// + [JsonPropertyName("is_partial")] + public bool IsPartial { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileImportResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileImportResult.cs new file mode 100644 index 000000000..3ffe427d5 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileImportResult.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Result of importing tiles. 
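+ /// ImportedCount is the number of tile files imported and ImportedBytes their combined size in bytes.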
+/// +public sealed record TileImportResult +{ + public int ImportedCount { get; init; } + public long ImportedBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetComponent.cs new file mode 100644 index 000000000..8f7a0c72f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetComponent.cs @@ -0,0 +1,40 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Tile set component. +/// +public sealed record TileSetComponent +{ + /// + /// Base path for tiles within the bundle. + /// + [JsonPropertyName("base_path")] + public required string BasePath { get; init; } + + /// + /// Number of tiles included. + /// + [JsonPropertyName("tile_count")] + public required int TileCount { get; init; } + + /// + /// Total size of tiles in bytes. + /// + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } + + /// + /// Range of entries covered by tiles. + /// + [JsonPropertyName("entry_range")] + public required EntryRange EntryRange { get; init; } + + /// + /// Individual tile files (for verification). + /// + [JsonPropertyName("tiles")] + public ImmutableArray Tiles { get; init; } = []; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetSource.cs new file mode 100644 index 000000000..454240316 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSetSource.cs @@ -0,0 +1,22 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Tile set source. +/// +public sealed record TileSetSource +{ + /// + /// Tiles to include. + /// + public required IReadOnlyList Tiles { get; init; } + + /// + /// Start of entry range covered. + /// + public required long EntryRangeStart { get; init; } + + /// + /// End of entry range covered. + /// + public required long EntryRangeEnd { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSource.cs new file mode 100644 index 000000000..d826b9deb --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TileSource.cs @@ -0,0 +1,27 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Individual tile source. +/// +public sealed record TileSource +{ + /// + /// Tile level. + /// + public required int Level { get; init; } + + /// + /// Tile index. + /// + public required long Index { get; init; } + + /// + /// Tile content (raw hashes). + /// + public required byte[] Content { get; init; } + + /// + /// Whether this is a partial tile. + /// + public bool IsPartial { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuildRequest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuildRequest.cs new file mode 100644 index 000000000..a0d36d016 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuildRequest.cs @@ -0,0 +1,42 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Request to build a trust snapshot. +/// +public sealed record TrustSnapshotBuildRequest +{ + /// + /// Log origin identifier. 
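+ /// Typically the origin line of the log's signed checkpoint (a hostname plus log identifier); the value is copied into the snapshot manifest unchanged.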
+ /// + public required string Origin { get; init; } + + /// + /// Tree size at snapshot time. + /// + public required long TreeSize { get; init; } + + /// + /// Root hash at snapshot time. + /// + public required string RootHash { get; init; } + + /// + /// Checkpoint source. + /// + public required CheckpointSource Checkpoint { get; init; } + + /// + /// Tiles to include. + /// + public required TileSetSource Tiles { get; init; } + + /// + /// TUF metadata (optional). + /// + public TufMetadataSource? TufMetadata { get; init; } + + /// + /// Entries to include (optional). + /// + public EntriesSource? Entries { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Build.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Build.cs new file mode 100644 index 000000000..a9b237536 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Build.cs @@ -0,0 +1,81 @@ +using System.Text; +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + /// + /// Builds a trust snapshot bundle. + /// + public async Task BuildAsync( + TrustSnapshotBuildRequest request, + string outputPath, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(outputPath); + + Directory.CreateDirectory(outputPath); + + var bundleId = _guidProvider.NewGuid().ToString(); + var createdAt = _timeProvider.GetUtcNow(); + + TufMetadataComponent? tufComponent = null; + DateTimeOffset? expiresAt = null; + if (request.TufMetadata != null) + { + tufComponent = await CopyTufMetadataAsync( + request.TufMetadata, + outputPath, + cancellationToken); + expiresAt = request.TufMetadata.TimestampExpires; + } + + var checkpointComponent = await CopyCheckpointAsync( + request.Checkpoint, + outputPath, + cancellationToken); + + var tilesComponent = await CopyTilesAsync( + request.Tiles, + outputPath, + cancellationToken); + + EntriesComponent? entriesComponent = null; + if (request.Entries != null) + { + entriesComponent = await CopyEntriesAsync( + request.Entries, + outputPath, + cancellationToken); + } + + var totalSize = (tufComponent != null ? GetTufComponentSize(tufComponent) : 0) + + (checkpointComponent.SignedNote?.Length ?? 0) + + tilesComponent.SizeBytes + + (entriesComponent?.SizeBytes ?? 
0); + + var manifest = new TrustSnapshotManifest + { + BundleId = bundleId, + CreatedAt = createdAt, + ExpiresAt = expiresAt, + Origin = request.Origin, + TreeSize = request.TreeSize, + RootHash = request.RootHash, + Tuf = tufComponent, + Checkpoint = checkpointComponent, + Tiles = tilesComponent, + Entries = entriesComponent, + TotalSizeBytes = totalSize + }; + + var manifestPath = Path.Combine(outputPath, "index.json"); + var manifestJson = JsonSerializer.Serialize(manifest, _jsonOptions); + var manifestDigest = ComputeDigest(Encoding.UTF8.GetBytes(manifestJson)); + await File.WriteAllTextAsync(manifestPath, manifestJson, cancellationToken); + + return manifest with { Digest = manifestDigest }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Checkpoint.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Checkpoint.cs new file mode 100644 index 000000000..112e407a9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Checkpoint.cs @@ -0,0 +1,24 @@ +using System.Text; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private async Task CopyCheckpointAsync( + CheckpointSource source, + string outputPath, + CancellationToken cancellationToken) + { + var checkpointPath = Path.Combine(outputPath, "checkpoint.sig"); + await File.WriteAllTextAsync(checkpointPath, source.SignedNote, cancellationToken); + + var digest = ComputeDigest(Encoding.UTF8.GetBytes(source.SignedNote)); + + return new CheckpointComponent + { + Path = "checkpoint.sig", + Digest = digest, + SignedNote = source.SignedNote + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Compress.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Compress.cs new file mode 100644 index 000000000..5a9f23fc6 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Compress.cs @@ -0,0 +1,18 @@ +using System.IO.Compression; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private static async Task CompressAsync( + string sourcePath, + string destPath, + CancellationToken cancellationToken) + { + // Use GZip compression (zstd would require external library). 
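+ // As a result, the .zst-named archives produced by PackAsync currently contain gzip data. A real zstd
+ // encoder (for example the ZstdSharp package, mentioned here only as a possibility, not a project
+ // dependency) could be dropped in by replacing the GZipStream below, since PackAsync delegates all
+ // compression to this helper.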
+ await using var sourceStream = File.OpenRead(sourcePath); + await using var destStream = File.Create(destPath); + await using var gzipStream = new GZipStream(destStream, CompressionLevel.Optimal); + await sourceStream.CopyToAsync(gzipStream, cancellationToken); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Entries.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Entries.cs new file mode 100644 index 000000000..6d857290d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Entries.cs @@ -0,0 +1,25 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private async Task CopyEntriesAsync( + EntriesSource source, + string outputPath, + CancellationToken cancellationToken) + { + var entriesDir = Path.Combine(outputPath, "entries"); + Directory.CreateDirectory(entriesDir); + + var entriesPath = Path.Combine(entriesDir, "entries.ndjson.zst"); + var component = await CopyFileAsync(source.SourcePath, entriesPath, cancellationToken); + + return new EntriesComponent + { + Path = "entries/entries.ndjson.zst", + Digest = component.Digest, + SizeBytes = component.SizeBytes, + EntryCount = source.EntryCount, + Format = "ndjson.zst" + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Files.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Files.cs new file mode 100644 index 000000000..f3cbf2b69 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Files.cs @@ -0,0 +1,28 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private static async Task<(string Digest, long SizeBytes)> CopyFileAsync( + string sourcePath, + string destPath, + CancellationToken cancellationToken) + { + await using var sourceStream = File.OpenRead(sourcePath); + await using var destStream = File.Create(destPath); + await sourceStream.CopyToAsync(destStream, cancellationToken); + + destStream.Position = 0; + var hash = await SHA256.HashDataAsync(destStream, cancellationToken); + var digest = $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + + return (digest, destStream.Length); + } + + private static string ComputeDigest(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Pack.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Pack.cs new file mode 100644 index 000000000..cbafde364 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Pack.cs @@ -0,0 +1,29 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + /// + /// Creates a compressed tar.zst archive from a snapshot directory. 
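+ /// The archive is assembled with minimal v7-style tar headers by CreateTarAsync and then compressed by CompressAsync (currently gzip, despite the .zst extension); the intermediate .tar file is removed in a finally block even if compression fails.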
+ /// + public async Task PackAsync( + string sourceDirectory, + string outputFilePath, + CancellationToken cancellationToken = default) + { + var tempTarPath = outputFilePath + ".tar"; + + try + { + await CreateTarAsync(sourceDirectory, tempTarPath, cancellationToken); + await CompressAsync(tempTarPath, outputFilePath, cancellationToken); + return outputFilePath; + } + finally + { + if (File.Exists(tempTarPath)) + { + File.Delete(tempTarPath); + } + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tar.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tar.cs new file mode 100644 index 000000000..ea467fec8 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tar.cs @@ -0,0 +1,63 @@ +using System.Text; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private static async Task CreateTarAsync( + string sourceDirectory, + string tarPath, + CancellationToken cancellationToken) + { + await using var tarStream = File.Create(tarPath); + + foreach (var file in Directory.GetFiles(sourceDirectory, "*", SearchOption.AllDirectories)) + { + var relativePath = Path.GetRelativePath(sourceDirectory, file); + var content = await File.ReadAllBytesAsync(file, cancellationToken); + + await WriteTarHeaderAsync(tarStream, relativePath, content.Length, cancellationToken); + await tarStream.WriteAsync(content, cancellationToken); + + var padding = 512 - (content.Length % 512); + if (padding < 512) + { + await tarStream.WriteAsync(new byte[padding], cancellationToken); + } + } + + await tarStream.WriteAsync(new byte[1024], cancellationToken); + } + + private static async Task WriteTarHeaderAsync( + Stream stream, + string path, + long size, + CancellationToken cancellationToken) + { + var header = new byte[512]; + + var nameBytes = Encoding.ASCII.GetBytes(path.Replace('\\', '/')); + Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100)); + + Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100); + Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108); + Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116); + + var sizeOctal = Convert.ToString(size, 8).PadLeft(11, '0') + "\0"; + Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124); + + var mtime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(); + var mtimeOctal = Convert.ToString(mtime, 8).PadLeft(11, '0') + "\0"; + Encoding.ASCII.GetBytes(mtimeOctal).CopyTo(header, 136); + + Encoding.ASCII.GetBytes(" ").CopyTo(header, 148); + header[156] = (byte)'0'; + + var checksum = header.Sum(b => (int)b); + var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 "; + Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148); + + await stream.WriteAsync(header, cancellationToken); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tiles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tiles.cs new file mode 100644 index 000000000..1c755f480 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tiles.cs @@ -0,0 +1,54 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private async Task CopyTilesAsync( + TileSetSource source, + string outputPath, + CancellationToken cancellationToken) + { + var tilesDir = 
Path.Combine(outputPath, "tiles"); + Directory.CreateDirectory(tilesDir); + + var tileFiles = new List(); + long totalSize = 0; + + foreach (var tile in source.Tiles) + { + var levelDir = Path.Combine(tilesDir, tile.Level.ToString()); + Directory.CreateDirectory(levelDir); + + var tilePath = Path.Combine(levelDir, $"{tile.Index}.tile"); + await File.WriteAllBytesAsync(tilePath, tile.Content, cancellationToken); + + var digest = ComputeDigest(tile.Content); + var size = tile.Content.Length; + totalSize += size; + + tileFiles.Add(new TileFileComponent + { + Level = tile.Level, + Index = tile.Index, + Path = $"tiles/{tile.Level}/{tile.Index}.tile", + Digest = digest, + SizeBytes = size, + IsPartial = tile.IsPartial + }); + } + + return new TileSetComponent + { + BasePath = "tiles", + TileCount = tileFiles.Count, + SizeBytes = totalSize, + EntryRange = new EntryRange + { + Start = source.EntryRangeStart, + End = source.EntryRangeEnd + }, + Tiles = tileFiles.ToImmutableArray() + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tuf.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tuf.cs new file mode 100644 index 000000000..d7040fcab --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.Tuf.cs @@ -0,0 +1,76 @@ +using System.Collections.Immutable; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotBuilder +{ + private async Task CopyTufMetadataAsync( + TufMetadataSource source, + string outputPath, + CancellationToken cancellationToken) + { + var tufDir = Path.Combine(outputPath, "tuf"); + var targetsDir = Path.Combine(tufDir, "targets"); + Directory.CreateDirectory(targetsDir); + + var rootComponent = await CopyFileAsync(source.RootPath, Path.Combine(tufDir, "root.json"), cancellationToken); + var snapshotComponent = await CopyFileAsync(source.SnapshotPath, Path.Combine(tufDir, "snapshot.json"), cancellationToken); + var timestampComponent = await CopyFileAsync(source.TimestampPath, Path.Combine(tufDir, "timestamp.json"), cancellationToken); + var targetsComponent = await CopyFileAsync(source.TargetsPath, Path.Combine(tufDir, "targets.json"), cancellationToken); + + var targetFiles = new List(); + foreach (var target in source.TargetFiles) + { + var targetPath = Path.Combine(targetsDir, target.Name); + var component = await CopyFileAsync(target.SourcePath, targetPath, cancellationToken); + targetFiles.Add(new TufTargetFileComponent + { + Name = target.Name, + Path = $"tuf/targets/{target.Name}", + Digest = component.Digest, + SizeBytes = component.SizeBytes + }); + } + + return new TufMetadataComponent + { + Root = new TufFileComponent + { + Path = "tuf/root.json", + Digest = rootComponent.Digest, + SizeBytes = rootComponent.SizeBytes, + Version = source.RootVersion + }, + Snapshot = new TufFileComponent + { + Path = "tuf/snapshot.json", + Digest = snapshotComponent.Digest, + SizeBytes = snapshotComponent.SizeBytes + }, + Timestamp = new TufFileComponent + { + Path = "tuf/timestamp.json", + Digest = timestampComponent.Digest, + SizeBytes = timestampComponent.SizeBytes + }, + Targets = new TufFileComponent + { + Path = "tuf/targets.json", + Digest = targetsComponent.Digest, + SizeBytes = targetsComponent.SizeBytes + }, + TargetFiles = targetFiles.ToImmutableArray(), + RepositoryUrl = source.RepositoryUrl, + RootVersion = source.RootVersion + }; + } + + private static long GetTufComponentSize(TufMetadataComponent 
tuf) + { + return tuf.Root.SizeBytes + + tuf.Snapshot.SizeBytes + + tuf.Timestamp.SizeBytes + + tuf.Targets.SizeBytes + + tuf.TargetFiles.Sum(t => t.SizeBytes); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.cs index 856474ae9..9c6b721c2 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotBuilder.cs @@ -5,10 +5,6 @@ // Description: Builder for creating trust snapshot bundles // ----------------------------------------------------------------------------- -using System.Collections.Immutable; -using System.IO.Compression; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.TrustSnapshot; @@ -16,12 +12,12 @@ namespace StellaOps.AirGap.Bundle.TrustSnapshot; /// /// Builds trust snapshot bundles containing TUF metadata and tiles for offline verification. /// -public sealed class TrustSnapshotBuilder +public sealed partial class TrustSnapshotBuilder { private readonly TimeProvider _timeProvider; private readonly IGuidProvider _guidProvider; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower @@ -37,559 +33,4 @@ public sealed class TrustSnapshotBuilder _guidProvider = guidProvider; } - /// - /// Builds a trust snapshot bundle. - /// - public async Task BuildAsync( - TrustSnapshotBuildRequest request, - string outputPath, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentException.ThrowIfNullOrWhiteSpace(outputPath); - - Directory.CreateDirectory(outputPath); - - var bundleId = _guidProvider.NewGuid().ToString(); - var createdAt = _timeProvider.GetUtcNow(); - - // Copy TUF metadata - TufMetadataComponent? tufComponent = null; - DateTimeOffset? expiresAt = null; - if (request.TufMetadata != null) - { - tufComponent = await CopyTufMetadataAsync( - request.TufMetadata, - outputPath, - cancellationToken); - expiresAt = request.TufMetadata.TimestampExpires; - } - - // Copy checkpoint - var checkpointComponent = await CopyCheckpointAsync( - request.Checkpoint, - outputPath, - cancellationToken); - - // Copy tiles - var tilesComponent = await CopyTilesAsync( - request.Tiles, - outputPath, - cancellationToken); - - // Copy entries (optional) - EntriesComponent? entriesComponent = null; - if (request.Entries != null) - { - entriesComponent = await CopyEntriesAsync( - request.Entries, - outputPath, - cancellationToken); - } - - // Calculate total size - var totalSize = (tufComponent != null ? GetTufComponentSize(tufComponent) : 0) - + (checkpointComponent.SignedNote?.Length ?? 0) - + tilesComponent.SizeBytes - + (entriesComponent?.SizeBytes ?? 
0); - - // Build manifest - var manifest = new TrustSnapshotManifest - { - BundleId = bundleId, - CreatedAt = createdAt, - ExpiresAt = expiresAt, - Origin = request.Origin, - TreeSize = request.TreeSize, - RootHash = request.RootHash, - Tuf = tufComponent, - Checkpoint = checkpointComponent, - Tiles = tilesComponent, - Entries = entriesComponent, - TotalSizeBytes = totalSize - }; - - // Write manifest - var manifestPath = Path.Combine(outputPath, "index.json"); - var manifestJson = JsonSerializer.Serialize(manifest, JsonOptions); - var manifestDigest = ComputeDigest(Encoding.UTF8.GetBytes(manifestJson)); - await File.WriteAllTextAsync(manifestPath, manifestJson, cancellationToken); - - // Return manifest with digest - return manifest with { Digest = manifestDigest }; - } - - /// - /// Creates a compressed tar.zst archive from a snapshot directory. - /// - public async Task PackAsync( - string sourceDirectory, - string outputFilePath, - CancellationToken cancellationToken = default) - { - var tempTarPath = outputFilePath + ".tar"; - - try - { - // Create tar archive - await CreateTarAsync(sourceDirectory, tempTarPath, cancellationToken); - - // Compress with zstd (using GZip as fallback if zstd not available) - await CompressAsync(tempTarPath, outputFilePath, cancellationToken); - - return outputFilePath; - } - finally - { - if (File.Exists(tempTarPath)) - { - File.Delete(tempTarPath); - } - } - } - - private async Task CopyTufMetadataAsync( - TufMetadataSource source, - string outputPath, - CancellationToken cancellationToken) - { - var tufDir = Path.Combine(outputPath, "tuf"); - var targetsDir = Path.Combine(tufDir, "targets"); - Directory.CreateDirectory(targetsDir); - - // Copy role metadata - var rootComponent = await CopyFileAsync(source.RootPath, Path.Combine(tufDir, "root.json"), cancellationToken); - var snapshotComponent = await CopyFileAsync(source.SnapshotPath, Path.Combine(tufDir, "snapshot.json"), cancellationToken); - var timestampComponent = await CopyFileAsync(source.TimestampPath, Path.Combine(tufDir, "timestamp.json"), cancellationToken); - var targetsComponent = await CopyFileAsync(source.TargetsPath, Path.Combine(tufDir, "targets.json"), cancellationToken); - - // Copy target files - var targetFiles = new List(); - foreach (var target in source.TargetFiles) - { - var targetPath = Path.Combine(targetsDir, target.Name); - var component = await CopyFileAsync(target.SourcePath, targetPath, cancellationToken); - targetFiles.Add(new TufTargetFileComponent - { - Name = target.Name, - Path = $"tuf/targets/{target.Name}", - Digest = component.Digest, - SizeBytes = component.SizeBytes - }); - } - - return new TufMetadataComponent - { - Root = new TufFileComponent - { - Path = "tuf/root.json", - Digest = rootComponent.Digest, - SizeBytes = rootComponent.SizeBytes, - Version = source.RootVersion - }, - Snapshot = new TufFileComponent - { - Path = "tuf/snapshot.json", - Digest = snapshotComponent.Digest, - SizeBytes = snapshotComponent.SizeBytes - }, - Timestamp = new TufFileComponent - { - Path = "tuf/timestamp.json", - Digest = timestampComponent.Digest, - SizeBytes = timestampComponent.SizeBytes - }, - Targets = new TufFileComponent - { - Path = "tuf/targets.json", - Digest = targetsComponent.Digest, - SizeBytes = targetsComponent.SizeBytes - }, - TargetFiles = targetFiles.ToImmutableArray(), - RepositoryUrl = source.RepositoryUrl, - RootVersion = source.RootVersion - }; - } - - private async Task CopyCheckpointAsync( - CheckpointSource source, - string outputPath, - 
CancellationToken cancellationToken) - { - var checkpointPath = Path.Combine(outputPath, "checkpoint.sig"); - await File.WriteAllTextAsync(checkpointPath, source.SignedNote, cancellationToken); - - var digest = ComputeDigest(Encoding.UTF8.GetBytes(source.SignedNote)); - - return new CheckpointComponent - { - Path = "checkpoint.sig", - Digest = digest, - SignedNote = source.SignedNote - }; - } - - private async Task CopyTilesAsync( - TileSetSource source, - string outputPath, - CancellationToken cancellationToken) - { - var tilesDir = Path.Combine(outputPath, "tiles"); - Directory.CreateDirectory(tilesDir); - - var tileFiles = new List(); - long totalSize = 0; - - foreach (var tile in source.Tiles) - { - var levelDir = Path.Combine(tilesDir, tile.Level.ToString()); - Directory.CreateDirectory(levelDir); - - var tilePath = Path.Combine(levelDir, $"{tile.Index}.tile"); - await File.WriteAllBytesAsync(tilePath, tile.Content, cancellationToken); - - var digest = ComputeDigest(tile.Content); - var size = tile.Content.Length; - totalSize += size; - - tileFiles.Add(new TileFileComponent - { - Level = tile.Level, - Index = tile.Index, - Path = $"tiles/{tile.Level}/{tile.Index}.tile", - Digest = digest, - SizeBytes = size, - IsPartial = tile.IsPartial - }); - } - - return new TileSetComponent - { - BasePath = "tiles", - TileCount = tileFiles.Count, - SizeBytes = totalSize, - EntryRange = new EntryRange - { - Start = source.EntryRangeStart, - End = source.EntryRangeEnd - }, - Tiles = tileFiles.ToImmutableArray() - }; - } - - private async Task CopyEntriesAsync( - EntriesSource source, - string outputPath, - CancellationToken cancellationToken) - { - var entriesDir = Path.Combine(outputPath, "entries"); - Directory.CreateDirectory(entriesDir); - - var entriesPath = Path.Combine(entriesDir, "entries.ndjson.zst"); - var component = await CopyFileAsync(source.SourcePath, entriesPath, cancellationToken); - - return new EntriesComponent - { - Path = "entries/entries.ndjson.zst", - Digest = component.Digest, - SizeBytes = component.SizeBytes, - EntryCount = source.EntryCount, - Format = "ndjson.zst" - }; - } - - private static async Task<(string Digest, long SizeBytes)> CopyFileAsync( - string sourcePath, - string destPath, - CancellationToken cancellationToken) - { - await using var sourceStream = File.OpenRead(sourcePath); - await using var destStream = File.Create(destPath); - await sourceStream.CopyToAsync(destStream, cancellationToken); - - destStream.Position = 0; - var hash = await SHA256.HashDataAsync(destStream, cancellationToken); - var digest = $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - - return (digest, destStream.Length); - } - - private static string ComputeDigest(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private static long GetTufComponentSize(TufMetadataComponent tuf) - { - return tuf.Root.SizeBytes + - tuf.Snapshot.SizeBytes + - tuf.Timestamp.SizeBytes + - tuf.Targets.SizeBytes + - tuf.TargetFiles.Sum(t => t.SizeBytes); - } - - private static async Task CreateTarAsync( - string sourceDirectory, - string tarPath, - CancellationToken cancellationToken) - { - // Simple tar creation (directory structure only) - await using var tarStream = File.Create(tarPath); - - foreach (var file in Directory.GetFiles(sourceDirectory, "*", SearchOption.AllDirectories)) - { - var relativePath = Path.GetRelativePath(sourceDirectory, file); - var content = await File.ReadAllBytesAsync(file, 
cancellationToken); - - // Write TAR header - await WriteTarHeaderAsync(tarStream, relativePath, content.Length, cancellationToken); - - // Write content - await tarStream.WriteAsync(content, cancellationToken); - - // Pad to 512-byte boundary - var padding = 512 - (content.Length % 512); - if (padding < 512) - { - await tarStream.WriteAsync(new byte[padding], cancellationToken); - } - } - - // Write end-of-archive marker (two 512-byte blocks of zeros) - await tarStream.WriteAsync(new byte[1024], cancellationToken); - } - - private static async Task WriteTarHeaderAsync( - Stream stream, - string path, - long size, - CancellationToken cancellationToken) - { - var header = new byte[512]; - - // Name (100 bytes) - var nameBytes = Encoding.ASCII.GetBytes(path.Replace('\\', '/')); - Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100)); - - // Mode (8 bytes) - 0644 - Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100); - - // UID (8 bytes) - 0 - Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108); - - // GID (8 bytes) - 0 - Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116); - - // Size (12 bytes) - octal - var sizeOctal = Convert.ToString(size, 8).PadLeft(11, '0') + "\0"; - Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124); - - // Mtime (12 bytes) - current time - var mtime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(); - var mtimeOctal = Convert.ToString(mtime, 8).PadLeft(11, '0') + "\0"; - Encoding.ASCII.GetBytes(mtimeOctal).CopyTo(header, 136); - - // Checksum placeholder (8 bytes of spaces) - Encoding.ASCII.GetBytes(" ").CopyTo(header, 148); - - // Type flag - regular file - header[156] = (byte)'0'; - - // Calculate checksum - var checksum = header.Sum(b => (int)b); - var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 "; - Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148); - - await stream.WriteAsync(header, cancellationToken); - } - - private static async Task CompressAsync( - string sourcePath, - string destPath, - CancellationToken cancellationToken) - { - // Use GZip compression (zstd would require external library) - await using var sourceStream = File.OpenRead(sourcePath); - await using var destStream = File.Create(destPath); - await using var gzipStream = new GZipStream(destStream, CompressionLevel.Optimal); - await sourceStream.CopyToAsync(gzipStream, cancellationToken); - } -} - -/// -/// Request to build a trust snapshot. -/// -public sealed record TrustSnapshotBuildRequest -{ - /// - /// Log origin identifier. - /// - public required string Origin { get; init; } - - /// - /// Tree size at snapshot time. - /// - public required long TreeSize { get; init; } - - /// - /// Root hash at snapshot time. - /// - public required string RootHash { get; init; } - - /// - /// Checkpoint source. - /// - public required CheckpointSource Checkpoint { get; init; } - - /// - /// Tiles to include. - /// - public required TileSetSource Tiles { get; init; } - - /// - /// TUF metadata (optional). - /// - public TufMetadataSource? TufMetadata { get; init; } - - /// - /// Entries to include (optional). - /// - public EntriesSource? Entries { get; init; } -} - -/// -/// Checkpoint source. -/// -public sealed record CheckpointSource -{ - /// - /// Signed checkpoint note. - /// - public required string SignedNote { get; init; } -} - -/// -/// Tile set source. -/// -public sealed record TileSetSource -{ - /// - /// Tiles to include. - /// - public required IReadOnlyList Tiles { get; init; } - - /// - /// Start of entry range covered. 
- /// - public required long EntryRangeStart { get; init; } - - /// - /// End of entry range covered. - /// - public required long EntryRangeEnd { get; init; } -} - -/// -/// Individual tile source. -/// -public sealed record TileSource -{ - /// - /// Tile level. - /// - public required int Level { get; init; } - - /// - /// Tile index. - /// - public required long Index { get; init; } - - /// - /// Tile content (raw hashes). - /// - public required byte[] Content { get; init; } - - /// - /// Whether this is a partial tile. - /// - public bool IsPartial { get; init; } -} - -/// -/// TUF metadata source. -/// -public sealed record TufMetadataSource -{ - /// - /// Path to root.json. - /// - public required string RootPath { get; init; } - - /// - /// Path to snapshot.json. - /// - public required string SnapshotPath { get; init; } - - /// - /// Path to timestamp.json. - /// - public required string TimestampPath { get; init; } - - /// - /// Path to targets.json. - /// - public required string TargetsPath { get; init; } - - /// - /// Target files to include. - /// - public IReadOnlyList TargetFiles { get; init; } = []; - - /// - /// TUF repository URL. - /// - public string? RepositoryUrl { get; init; } - - /// - /// Root version. - /// - public int RootVersion { get; init; } - - /// - /// When the timestamp expires. - /// - public DateTimeOffset? TimestampExpires { get; init; } -} - -/// -/// TUF target file source. -/// -public sealed record TufTargetSource -{ - /// - /// Target name. - /// - public required string Name { get; init; } - - /// - /// Source path. - /// - public required string SourcePath { get; init; } -} - -/// -/// Entries source. -/// -public sealed record EntriesSource -{ - /// - /// Path to the entries file. - /// - public required string SourcePath { get; init; } - - /// - /// Number of entries in the file. - /// - public required int EntryCount { get; init; } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportOptions.cs new file mode 100644 index 000000000..deafdb8f7 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportOptions.cs @@ -0,0 +1,32 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Options for importing a trust snapshot. +/// +public sealed record TrustSnapshotImportOptions +{ + /// + /// Whether to verify manifest checksums. + /// + public bool VerifyManifest { get; init; } = true; + + /// + /// Reject if snapshot is older than this threshold. + /// + public TimeSpan? RejectIfStale { get; init; } + + /// + /// Force import even if validation fails. + /// + public bool Force { get; init; } + + /// + /// Path to TUF cache directory. + /// + public string? TufCachePath { get; init; } + + /// + /// Path to tile cache directory. + /// + public string? TileCachePath { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportResult.cs new file mode 100644 index 000000000..70c229708 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImportResult.cs @@ -0,0 +1,33 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Result of importing a trust snapshot. +/// +public sealed record TrustSnapshotImportResult +{ + public bool IsSuccess { get; init; } + public string? 
Error { get; init; } + public TrustSnapshotManifest? Manifest { get; init; } + public TufImportResult? TufResult { get; init; } + public TileImportResult? TileResult { get; init; } + public string? CheckpointContent { get; init; } + + public static TrustSnapshotImportResult Success( + TrustSnapshotManifest manifest, + TufImportResult? tufResult, + TileImportResult? tileResult, + string? checkpointContent) => new() + { + IsSuccess = true, + Manifest = manifest, + TufResult = tufResult, + TileResult = tileResult, + CheckpointContent = checkpointContent + }; + + public static TrustSnapshotImportResult Failure(string error) => new() + { + IsSuccess = false, + Error = error + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.Tar.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.Tar.cs new file mode 100644 index 000000000..0c3c4ab7e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.Tar.cs @@ -0,0 +1,74 @@ +using System.Text; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ExtractTarAsync( + string tarPath, + string destDir, + CancellationToken cancellationToken) + { + await using var tarStream = File.OpenRead(tarPath); + var buffer = new byte[512]; + + while (true) + { + var bytesRead = await tarStream.ReadAsync(buffer.AsMemory(0, 512), cancellationToken).ConfigureAwait(false); + if (bytesRead < 512 || buffer.All(b => b == 0)) + { + break; + } + + var name = Encoding.ASCII.GetString(buffer, 0, 100).TrimEnd('\0'); + if (string.IsNullOrEmpty(name)) + { + break; + } + + var sizeOctal = Encoding.ASCII.GetString(buffer, 124, 12).TrimEnd('\0', ' '); + var size = Convert.ToInt64(sizeOctal, 8); + var typeFlag = (char)buffer[156]; + + if (typeFlag == '5' || name.EndsWith('/')) + { + var dirPath = Path.Combine(destDir, name); + Directory.CreateDirectory(dirPath); + continue; + } + + var filePath = Path.Combine(destDir, name); + var fileDir = Path.GetDirectoryName(filePath); + if (!string.IsNullOrEmpty(fileDir)) + { + Directory.CreateDirectory(fileDir); + } + + await using (var fileStream = File.Create(filePath)) + { + var remaining = size; + var fileBuffer = new byte[8192]; + while (remaining > 0) + { + var toRead = (int)Math.Min(remaining, fileBuffer.Length); + bytesRead = await tarStream.ReadAsync(fileBuffer.AsMemory(0, toRead), cancellationToken) + .ConfigureAwait(false); + if (bytesRead == 0) + { + break; + } + + await fileStream.WriteAsync(fileBuffer.AsMemory(0, bytesRead), cancellationToken) + .ConfigureAwait(false); + remaining -= bytesRead; + } + } + + var padding = 512 - (size % 512); + if (padding < 512) + { + tarStream.Seek(padding, SeekOrigin.Current); + } + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.cs new file mode 100644 index 000000000..208b2d48b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Archive.cs @@ -0,0 +1,58 @@ +using System.IO.Compression; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ExtractArchiveAsync( + string archivePath, + string destDir, + CancellationToken cancellationToken) + { + if (archivePath.EndsWith(".tar.gz", 
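+ // Note (assumption based on the builder shown in this patch): ".tar.zst" snapshots produced by
+ // TrustSnapshotBuilder.PackAsync are currently gzip-compressed (zstd is not wired in yet), so the
+ // GZipStream path below is expected to handle all three compressed extensions.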
StringComparison.OrdinalIgnoreCase) + || archivePath.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase) + || archivePath.EndsWith(".tar.zst", StringComparison.OrdinalIgnoreCase)) + { + var tarPath = Path.Combine(destDir, "archive.tar"); + await using var compressedStream = File.OpenRead(archivePath); + await using var gzipStream = new GZipStream(compressedStream, CompressionMode.Decompress); + await using var tarStream = File.Create(tarPath); + await gzipStream.CopyToAsync(tarStream, cancellationToken).ConfigureAwait(false); + + await ExtractTarAsync(tarPath, destDir, cancellationToken).ConfigureAwait(false); + File.Delete(tarPath); + return; + } + + if (archivePath.EndsWith(".zip", StringComparison.OrdinalIgnoreCase)) + { + ZipFile.ExtractToDirectory(archivePath, destDir); + return; + } + + if (Directory.Exists(archivePath)) + { + CopyDirectory(archivePath, destDir); + return; + } + + throw new InvalidOperationException($"Unknown archive format: {archivePath}"); + } + + private static void CopyDirectory(string sourceDir, string destDir) + { + Directory.CreateDirectory(destDir); + + foreach (var file in Directory.GetFiles(sourceDir)) + { + var destFile = Path.Combine(destDir, Path.GetFileName(file)); + File.Copy(file, destFile); + } + + foreach (var dir in Directory.GetDirectories(sourceDir)) + { + var destSubDir = Path.Combine(destDir, Path.GetFileName(dir)); + CopyDirectory(dir, destSubDir); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Hashing.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Hashing.cs new file mode 100644 index 000000000..1dad80264 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Hashing.cs @@ -0,0 +1,12 @@ +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static string ComputeDigest(byte[] content) + { + var hash = SHA256.HashData(content); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tiles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tiles.cs new file mode 100644 index 000000000..c2d3d5ee0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tiles.cs @@ -0,0 +1,45 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ImportTilesAsync( + TrustSnapshotManifest manifest, + string sourceDir, + string destDir, + CancellationToken cancellationToken) + { + Directory.CreateDirectory(destDir); + + var importedCount = 0; + long importedBytes = 0; + + if (manifest.Tiles?.Tiles == null) + { + return new TileImportResult { ImportedCount = 0, ImportedBytes = 0 }; + } + + foreach (var tile in manifest.Tiles.Tiles) + { + var src = Path.Combine(sourceDir, tile.Path); + if (!File.Exists(src)) + { + continue; + } + + var levelDir = Path.Combine(destDir, manifest.Origin ?? 
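+ // A manifest with no origin falls back to "default"; the resulting layout mirrors the
+ // FileSystemRekorTileCache structure: {origin}/{level}/{index}.tile under the tile cache root.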
"default", tile.Level.ToString()); + Directory.CreateDirectory(levelDir); + + var dest = Path.Combine(levelDir, $"{tile.Index}.tile"); + await CopyFileAsync(src, dest, cancellationToken).ConfigureAwait(false); + + importedCount++; + importedBytes += tile.SizeBytes; + } + + return new TileImportResult + { + ImportedCount = importedCount, + ImportedBytes = importedBytes + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tuf.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tuf.cs new file mode 100644 index 000000000..bb39cb1ec --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.Tuf.cs @@ -0,0 +1,60 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ImportTufMetadataAsync( + TufMetadataComponent tuf, + string sourceDir, + string destDir, + CancellationToken cancellationToken) + { + Directory.CreateDirectory(destDir); + var targetsDir = Path.Combine(destDir, "targets"); + Directory.CreateDirectory(targetsDir); + + var importedFiles = new List(); + + var roleFiles = new[] + { + (tuf.Root.Path, "root.json"), + (tuf.Snapshot.Path, "snapshot.json"), + (tuf.Timestamp.Path, "timestamp.json"), + (tuf.Targets.Path, "targets.json") + }; + + foreach (var (sourcePath, destName) in roleFiles) + { + var src = Path.Combine(sourceDir, sourcePath); + var dest = Path.Combine(destDir, destName); + if (File.Exists(src)) + { + await CopyFileAsync(src, dest, cancellationToken).ConfigureAwait(false); + importedFiles.Add(destName); + } + } + + foreach (var target in tuf.TargetFiles) + { + var src = Path.Combine(sourceDir, target.Path); + var dest = Path.Combine(targetsDir, target.Name); + if (File.Exists(src)) + { + await CopyFileAsync(src, dest, cancellationToken).ConfigureAwait(false); + importedFiles.Add($"targets/{target.Name}"); + } + } + + return new TufImportResult + { + ImportedFiles = importedFiles, + RootVersion = tuf.RootVersion + }; + } + + private static async Task CopyFileAsync(string src, string dest, CancellationToken cancellationToken) + { + await using var srcStream = File.OpenRead(src); + await using var destStream = File.Create(dest); + await srcStream.CopyToAsync(destStream, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.cs new file mode 100644 index 000000000..f4f7230e0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Import.cs @@ -0,0 +1,91 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + /// + /// Imports a trust snapshot from a compressed archive. 
+ /// + public async Task ImportAsync( + string archivePath, + TrustSnapshotImportOptions options, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(archivePath); + ArgumentNullException.ThrowIfNull(options); + + if (!File.Exists(archivePath)) + { + return TrustSnapshotImportResult.Failure($"Archive not found: {archivePath}"); + } + + var tempDir = CreateTempDir("trust-snapshot"); + try + { + await ExtractArchiveAsync(archivePath, tempDir, cancellationToken).ConfigureAwait(false); + + var manifestResult = await ReadManifestAsync( + tempDir, + "Manifest (index.json) not found in archive", + cancellationToken) + .ConfigureAwait(false); + if (!manifestResult.Success || manifestResult.Manifest is null) + { + return TrustSnapshotImportResult.Failure(manifestResult.Error ?? "Failed to parse manifest"); + } + + var manifest = manifestResult.Manifest; + + if (options.VerifyManifest) + { + var validationResult = await ValidateManifestAsync(manifest, tempDir, cancellationToken).ConfigureAwait(false); + if (!validationResult.Success && !options.Force) + { + return TrustSnapshotImportResult.Failure( + $"Manifest validation failed: {validationResult.Error}"); + } + } + + if (options.RejectIfStale.HasValue) + { + var age = _timeProvider.GetUtcNow() - manifest.CreatedAt; + if (age > options.RejectIfStale.Value && !options.Force) + { + return TrustSnapshotImportResult.Failure( + $"Snapshot is stale (age: {age.TotalDays:F1} days, threshold: {options.RejectIfStale.Value.TotalDays:F1} days)"); + } + } + + if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < _timeProvider.GetUtcNow() && !options.Force) + { + return TrustSnapshotImportResult.Failure( + $"Snapshot has expired (expired at: {manifest.ExpiresAt.Value:u})"); + } + + TufImportResult? tufResult = null; + if (manifest.Tuf is not null && !string.IsNullOrEmpty(options.TufCachePath)) + { + tufResult = await ImportTufMetadataAsync(manifest.Tuf, tempDir, options.TufCachePath, cancellationToken) + .ConfigureAwait(false); + } + + TileImportResult? 
tileResult = null; + if (!string.IsNullOrEmpty(options.TileCachePath)) + { + tileResult = await ImportTilesAsync(manifest, tempDir, options.TileCachePath, cancellationToken) + .ConfigureAwait(false); + } + + var checkpointContent = await ReadCheckpointAsync(manifest, tempDir, cancellationToken).ConfigureAwait(false); + + return TrustSnapshotImportResult.Success( + manifest, + tufResult, + tileResult, + checkpointContent); + } + finally + { + CleanupTempDir(tempDir); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Manifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Manifest.cs new file mode 100644 index 000000000..f17768bd9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Manifest.cs @@ -0,0 +1,46 @@ +using System.Text.Json; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ReadManifestAsync( + string tempDir, + string missingMessage, + CancellationToken cancellationToken) + { + var manifestPath = Path.Combine(tempDir, "index.json"); + if (!File.Exists(manifestPath)) + { + return ManifestReadResult.Failed(missingMessage); + } + + var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken).ConfigureAwait(false); + var manifest = JsonSerializer.Deserialize(manifestJson, _jsonOptions); + if (manifest is null) + { + return ManifestReadResult.Failed("Failed to parse manifest"); + } + + return ManifestReadResult.FromManifest(manifest); + } + + private static async Task ReadCheckpointAsync( + TrustSnapshotManifest manifest, + string tempDir, + CancellationToken cancellationToken) + { + if (manifest.Checkpoint is null) + { + return null; + } + + var checkpointPath = Path.Combine(tempDir, manifest.Checkpoint.Path); + if (!File.Exists(checkpointPath)) + { + return null; + } + + return await File.ReadAllTextAsync(checkpointPath, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Models.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Models.cs new file mode 100644 index 000000000..9885a9b5c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Models.cs @@ -0,0 +1,38 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private sealed record ManifestReadResult + { + public bool Success { get; init; } + public TrustSnapshotManifest? Manifest { get; init; } + public string? Error { get; init; } + + public static ManifestReadResult Failed(string error) => new() + { + Success = false, + Error = error + }; + + public static ManifestReadResult FromManifest(TrustSnapshotManifest manifest) => new() + { + Success = true, + Manifest = manifest + }; + } + + private sealed record ManifestValidationResult + { + public bool Success { get; init; } + public string? 
Error { get; init; } + public int FileCount { get; init; } + public long TotalBytes { get; init; } + } + + private sealed class ManifestValidationState + { + public List Errors { get; } = new(); + public int FileCount { get; set; } + public long TotalBytes { get; set; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.TempDir.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.TempDir.cs new file mode 100644 index 000000000..c7005ce5b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.TempDir.cs @@ -0,0 +1,26 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static string CreateTempDir(string prefix) + { + var tempDir = Path.Combine(Path.GetTempPath(), $"{prefix}-{Guid.NewGuid():N}"); + Directory.CreateDirectory(tempDir); + return tempDir; + } + + private static void CleanupTempDir(string tempDir) + { + try + { + if (Directory.Exists(tempDir)) + { + Directory.Delete(tempDir, recursive: true); + } + } + catch + { + // Ignore cleanup errors. + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validate.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validate.cs new file mode 100644 index 000000000..4f6fec8ab --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validate.cs @@ -0,0 +1,59 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + /// + /// Validates a trust snapshot without importing it. + /// + public async Task ValidateAsync( + string archivePath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(archivePath); + + if (!File.Exists(archivePath)) + { + return new TrustSnapshotValidationResult + { + IsValid = false, + Error = $"Archive not found: {archivePath}" + }; + } + + var tempDir = CreateTempDir("trust-snapshot-validate"); + try + { + await ExtractArchiveAsync(archivePath, tempDir, cancellationToken).ConfigureAwait(false); + + var manifestResult = await ReadManifestAsync( + tempDir, + "Manifest (index.json) not found", + cancellationToken) + .ConfigureAwait(false); + if (!manifestResult.Success || manifestResult.Manifest is null) + { + return new TrustSnapshotValidationResult + { + IsValid = false, + Error = manifestResult.Error ?? 
"Failed to parse manifest" + }; + } + + var validationResult = await ValidateManifestAsync(manifestResult.Manifest, tempDir, cancellationToken) + .ConfigureAwait(false); + + return new TrustSnapshotValidationResult + { + IsValid = validationResult.Success, + Error = validationResult.Error, + Manifest = manifestResult.Manifest, + FileCount = validationResult.FileCount, + TotalBytes = validationResult.TotalBytes + }; + } + finally + { + CleanupTempDir(tempDir); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Checkpoint.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Checkpoint.cs new file mode 100644 index 000000000..34a606d87 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Checkpoint.cs @@ -0,0 +1,34 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ValidateCheckpointAsync( + TrustSnapshotManifest manifest, + string extractDir, + ManifestValidationState state, + CancellationToken cancellationToken) + { + if (manifest.Checkpoint is null) + { + return; + } + + var checkpointPath = Path.Combine(extractDir, manifest.Checkpoint.Path); + if (!File.Exists(checkpointPath)) + { + state.Errors.Add($"Checkpoint file missing: {manifest.Checkpoint.Path}"); + return; + } + + var content = await File.ReadAllBytesAsync(checkpointPath, cancellationToken).ConfigureAwait(false); + var digest = ComputeDigest(content); + if (digest != manifest.Checkpoint.Digest) + { + state.Errors.Add( + $"Checkpoint digest mismatch: expected {manifest.Checkpoint.Digest}, got {digest}"); + } + + state.FileCount++; + state.TotalBytes += content.Length; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tiles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tiles.cs new file mode 100644 index 000000000..2db983a5d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tiles.cs @@ -0,0 +1,41 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ValidateTilesAsync( + TrustSnapshotManifest manifest, + string extractDir, + ManifestValidationState state, + CancellationToken cancellationToken) + { + if (manifest.Tiles is null || manifest.Tiles.Tiles.Length == 0) + { + return; + } + + var tilesToCheck = manifest.Tiles.Tiles.Length > 10 + ? 
manifest.Tiles.Tiles.Take(5).Concat(manifest.Tiles.Tiles.TakeLast(5)).ToArray() + : manifest.Tiles.Tiles.ToArray(); + + foreach (var tile in tilesToCheck) + { + var tilePath = Path.Combine(extractDir, tile.Path); + if (!File.Exists(tilePath)) + { + state.Errors.Add($"Tile file missing: {tile.Path}"); + continue; + } + + var content = await File.ReadAllBytesAsync(tilePath, cancellationToken).ConfigureAwait(false); + var digest = ComputeDigest(content); + if (digest != tile.Digest) + { + state.Errors.Add( + $"Tile digest mismatch ({tile.Level}/{tile.Index}): expected {tile.Digest}, got {digest}"); + } + } + + state.FileCount += manifest.Tiles.TileCount; + state.TotalBytes += manifest.Tiles.SizeBytes; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tuf.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tuf.cs new file mode 100644 index 000000000..c0b67828d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.Tuf.cs @@ -0,0 +1,83 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ValidateTufAsync( + TrustSnapshotManifest manifest, + string extractDir, + ManifestValidationState state, + CancellationToken cancellationToken) + { + if (manifest.Tuf is null) + { + return; + } + + await ValidateTufRolesAsync(manifest.Tuf, extractDir, state, cancellationToken).ConfigureAwait(false); + await ValidateTufTargetsAsync(manifest.Tuf, extractDir, state, cancellationToken).ConfigureAwait(false); + } + + private static async Task ValidateTufRolesAsync( + TufMetadataComponent tuf, + string extractDir, + ManifestValidationState state, + CancellationToken cancellationToken) + { + var tufFiles = new[] + { + (tuf.Root.Path, tuf.Root.Digest), + (tuf.Snapshot.Path, tuf.Snapshot.Digest), + (tuf.Timestamp.Path, tuf.Timestamp.Digest), + (tuf.Targets.Path, tuf.Targets.Digest) + }; + + foreach (var (path, expectedDigest) in tufFiles) + { + var fullPath = Path.Combine(extractDir, path); + if (!File.Exists(fullPath)) + { + state.Errors.Add($"TUF file missing: {path}"); + continue; + } + + var content = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); + var digest = ComputeDigest(content); + if (digest != expectedDigest) + { + state.Errors.Add( + $"TUF file digest mismatch ({path}): expected {expectedDigest}, got {digest}"); + } + + state.FileCount++; + state.TotalBytes += content.Length; + } + } + + private static async Task ValidateTufTargetsAsync( + TufMetadataComponent tuf, + string extractDir, + ManifestValidationState state, + CancellationToken cancellationToken) + { + foreach (var target in tuf.TargetFiles) + { + var targetPath = Path.Combine(extractDir, target.Path); + if (!File.Exists(targetPath)) + { + state.Errors.Add($"TUF target file missing: {target.Path}"); + continue; + } + + var content = await File.ReadAllBytesAsync(targetPath, cancellationToken).ConfigureAwait(false); + var digest = ComputeDigest(content); + if (digest != target.Digest) + { + state.Errors.Add( + $"TUF target digest mismatch ({target.Name}): expected {target.Digest}, got {digest}"); + } + + state.FileCount++; + state.TotalBytes += content.Length; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.cs 
b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.cs new file mode 100644 index 000000000..c84656a4e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.Validation.cs @@ -0,0 +1,24 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +public sealed partial class TrustSnapshotImporter +{ + private static async Task ValidateManifestAsync( + TrustSnapshotManifest manifest, + string extractDir, + CancellationToken cancellationToken) + { + var state = new ManifestValidationState(); + + await ValidateCheckpointAsync(manifest, extractDir, state, cancellationToken).ConfigureAwait(false); + await ValidateTufAsync(manifest, extractDir, state, cancellationToken).ConfigureAwait(false); + await ValidateTilesAsync(manifest, extractDir, state, cancellationToken).ConfigureAwait(false); + + return new ManifestValidationResult + { + Success = state.Errors.Count == 0, + Error = state.Errors.Count > 0 ? string.Join("; ", state.Errors) : null, + FileCount = state.FileCount, + TotalBytes = state.TotalBytes + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.cs index cef566895..44550c119 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotImporter.cs @@ -1,13 +1,3 @@ -// ----------------------------------------------------------------------------- -// TrustSnapshotImporter.cs -// Sprint: SPRINT_20260125_002_Attestor_trust_automation -// Task: PROXY-005 - Add snapshot import command -// Description: Importer for trust snapshot bundles -// ----------------------------------------------------------------------------- - -using System.IO.Compression; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Bundle.TrustSnapshot; @@ -15,11 +5,11 @@ namespace StellaOps.AirGap.Bundle.TrustSnapshot; /// /// Imports trust snapshot bundles into the local cache for offline verification. /// -public sealed class TrustSnapshotImporter +public sealed partial class TrustSnapshotImporter { private readonly TimeProvider _timeProvider; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, PropertyNameCaseInsensitive = true @@ -33,654 +23,4 @@ public sealed class TrustSnapshotImporter { _timeProvider = timeProvider; } - - /// - /// Imports a trust snapshot from a compressed archive. 
- /// - public async Task ImportAsync( - string archivePath, - TrustSnapshotImportOptions options, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(archivePath); - ArgumentNullException.ThrowIfNull(options); - - if (!File.Exists(archivePath)) - { - return TrustSnapshotImportResult.Failure($"Archive not found: {archivePath}"); - } - - // Create temp directory for extraction - var tempDir = Path.Combine(Path.GetTempPath(), $"trust-snapshot-{Guid.NewGuid():N}"); - Directory.CreateDirectory(tempDir); - - try - { - // Extract archive - await ExtractArchiveAsync(archivePath, tempDir, cancellationToken); - - // Read and validate manifest - var manifestPath = Path.Combine(tempDir, "index.json"); - if (!File.Exists(manifestPath)) - { - return TrustSnapshotImportResult.Failure("Manifest (index.json) not found in archive"); - } - - var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken); - var manifest = JsonSerializer.Deserialize(manifestJson, JsonOptions); - - if (manifest == null) - { - return TrustSnapshotImportResult.Failure("Failed to parse manifest"); - } - - // Validate manifest integrity - if (options.VerifyManifest) - { - var validationResult = await ValidateManifestAsync(manifest, tempDir, cancellationToken); - if (!validationResult.Success) - { - if (!options.Force) - { - return TrustSnapshotImportResult.Failure($"Manifest validation failed: {validationResult.Error}"); - } - // Log warning but continue if force is set - } - } - - // Check staleness - if (options.RejectIfStale.HasValue) - { - var age = _timeProvider.GetUtcNow() - manifest.CreatedAt; - if (age > options.RejectIfStale.Value) - { - if (!options.Force) - { - return TrustSnapshotImportResult.Failure( - $"Snapshot is stale (age: {age.TotalDays:F1} days, threshold: {options.RejectIfStale.Value.TotalDays:F1} days)"); - } - } - } - - // Check expiration - if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < _timeProvider.GetUtcNow()) - { - if (!options.Force) - { - return TrustSnapshotImportResult.Failure( - $"Snapshot has expired (expired at: {manifest.ExpiresAt.Value:u})"); - } - } - - // Import TUF metadata - TufImportResult? tufResult = null; - if (manifest.Tuf != null && !string.IsNullOrEmpty(options.TufCachePath)) - { - tufResult = await ImportTufMetadataAsync(manifest.Tuf, tempDir, options.TufCachePath, cancellationToken); - } - - // Import tiles - TileImportResult? tileResult = null; - if (!string.IsNullOrEmpty(options.TileCachePath)) - { - tileResult = await ImportTilesAsync(manifest, tempDir, options.TileCachePath, cancellationToken); - } - - // Import checkpoint - string? checkpointContent = null; - if (manifest.Checkpoint != null) - { - var checkpointPath = Path.Combine(tempDir, manifest.Checkpoint.Path); - if (File.Exists(checkpointPath)) - { - checkpointContent = await File.ReadAllTextAsync(checkpointPath, cancellationToken); - } - } - - return TrustSnapshotImportResult.Success( - manifest, - tufResult, - tileResult, - checkpointContent); - } - finally - { - // Cleanup temp directory - try - { - if (Directory.Exists(tempDir)) - { - Directory.Delete(tempDir, recursive: true); - } - } - catch - { - // Ignore cleanup errors - } - } - } - - /// - /// Validates a trust snapshot without importing it. 
- /// - public async Task ValidateAsync( - string archivePath, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(archivePath); - - if (!File.Exists(archivePath)) - { - return new TrustSnapshotValidationResult - { - IsValid = false, - Error = $"Archive not found: {archivePath}" - }; - } - - var tempDir = Path.Combine(Path.GetTempPath(), $"trust-snapshot-validate-{Guid.NewGuid():N}"); - Directory.CreateDirectory(tempDir); - - try - { - await ExtractArchiveAsync(archivePath, tempDir, cancellationToken); - - var manifestPath = Path.Combine(tempDir, "index.json"); - if (!File.Exists(manifestPath)) - { - return new TrustSnapshotValidationResult - { - IsValid = false, - Error = "Manifest (index.json) not found" - }; - } - - var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken); - var manifest = JsonSerializer.Deserialize(manifestJson, JsonOptions); - - if (manifest == null) - { - return new TrustSnapshotValidationResult - { - IsValid = false, - Error = "Failed to parse manifest" - }; - } - - var validationResult = await ValidateManifestAsync(manifest, tempDir, cancellationToken); - - return new TrustSnapshotValidationResult - { - IsValid = validationResult.Success, - Error = validationResult.Error, - Manifest = manifest, - FileCount = validationResult.FileCount, - TotalBytes = validationResult.TotalBytes - }; - } - finally - { - try - { - if (Directory.Exists(tempDir)) - { - Directory.Delete(tempDir, recursive: true); - } - } - catch - { - // Ignore cleanup errors - } - } - } - - private static async Task ExtractArchiveAsync( - string archivePath, - string destDir, - CancellationToken cancellationToken) - { - // Detect archive type by extension - if (archivePath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) || - archivePath.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase) || - archivePath.EndsWith(".tar.zst", StringComparison.OrdinalIgnoreCase)) - { - // Decompress to tar first - var tarPath = Path.Combine(destDir, "archive.tar"); - await using (var compressedStream = File.OpenRead(archivePath)) - await using (var gzipStream = new GZipStream(compressedStream, CompressionMode.Decompress)) - await using (var tarStream = File.Create(tarPath)) - { - await gzipStream.CopyToAsync(tarStream, cancellationToken); - } - - // Extract tar - await ExtractTarAsync(tarPath, destDir, cancellationToken); - File.Delete(tarPath); - } - else if (archivePath.EndsWith(".zip", StringComparison.OrdinalIgnoreCase)) - { - ZipFile.ExtractToDirectory(archivePath, destDir); - } - else - { - // Assume it's a directory - if (Directory.Exists(archivePath)) - { - CopyDirectory(archivePath, destDir); - } - else - { - throw new InvalidOperationException($"Unknown archive format: {archivePath}"); - } - } - } - - private static async Task ExtractTarAsync( - string tarPath, - string destDir, - CancellationToken cancellationToken) - { - await using var tarStream = File.OpenRead(tarPath); - var buffer = new byte[512]; - - while (true) - { - // Read header - var bytesRead = await tarStream.ReadAsync(buffer.AsMemory(0, 512), cancellationToken); - if (bytesRead < 512 || buffer.All(b => b == 0)) - { - break; // End of archive - } - - // Parse header - var name = Encoding.ASCII.GetString(buffer, 0, 100).TrimEnd('\0'); - if (string.IsNullOrEmpty(name)) - { - break; - } - - var sizeOctal = Encoding.ASCII.GetString(buffer, 124, 12).TrimEnd('\0', ' '); - var size = Convert.ToInt64(sizeOctal, 8); - var typeFlag = (char)buffer[156]; - - // Skip directories - if 
(typeFlag == '5' || name.EndsWith('/')) - { - var dirPath = Path.Combine(destDir, name); - Directory.CreateDirectory(dirPath); - continue; - } - - // Extract file - var filePath = Path.Combine(destDir, name); - var fileDir = Path.GetDirectoryName(filePath); - if (!string.IsNullOrEmpty(fileDir)) - { - Directory.CreateDirectory(fileDir); - } - - await using (var fileStream = File.Create(filePath)) - { - var remaining = size; - var fileBuffer = new byte[8192]; - while (remaining > 0) - { - var toRead = (int)Math.Min(remaining, fileBuffer.Length); - bytesRead = await tarStream.ReadAsync(fileBuffer.AsMemory(0, toRead), cancellationToken); - if (bytesRead == 0) break; - await fileStream.WriteAsync(fileBuffer.AsMemory(0, bytesRead), cancellationToken); - remaining -= bytesRead; - } - } - - // Skip padding - var padding = 512 - (size % 512); - if (padding < 512) - { - tarStream.Seek(padding, SeekOrigin.Current); - } - } - } - - private static void CopyDirectory(string sourceDir, string destDir) - { - Directory.CreateDirectory(destDir); - - foreach (var file in Directory.GetFiles(sourceDir)) - { - var destFile = Path.Combine(destDir, Path.GetFileName(file)); - File.Copy(file, destFile); - } - - foreach (var dir in Directory.GetDirectories(sourceDir)) - { - var destSubDir = Path.Combine(destDir, Path.GetFileName(dir)); - CopyDirectory(dir, destSubDir); - } - } - - private static async Task ValidateManifestAsync( - TrustSnapshotManifest manifest, - string extractDir, - CancellationToken cancellationToken) - { - var errors = new List(); - var fileCount = 0; - long totalBytes = 0; - - // Validate checkpoint - if (manifest.Checkpoint != null) - { - var checkpointPath = Path.Combine(extractDir, manifest.Checkpoint.Path); - if (!File.Exists(checkpointPath)) - { - errors.Add($"Checkpoint file missing: {manifest.Checkpoint.Path}"); - } - else - { - var content = await File.ReadAllBytesAsync(checkpointPath, cancellationToken); - var digest = ComputeDigest(content); - if (digest != manifest.Checkpoint.Digest) - { - errors.Add($"Checkpoint digest mismatch: expected {manifest.Checkpoint.Digest}, got {digest}"); - } - fileCount++; - totalBytes += content.Length; - } - } - - // Validate TUF metadata - if (manifest.Tuf != null) - { - var tufFiles = new[] - { - (manifest.Tuf.Root.Path, manifest.Tuf.Root.Digest), - (manifest.Tuf.Snapshot.Path, manifest.Tuf.Snapshot.Digest), - (manifest.Tuf.Timestamp.Path, manifest.Tuf.Timestamp.Digest), - (manifest.Tuf.Targets.Path, manifest.Tuf.Targets.Digest) - }; - - foreach (var (path, expectedDigest) in tufFiles) - { - var fullPath = Path.Combine(extractDir, path); - if (!File.Exists(fullPath)) - { - errors.Add($"TUF file missing: {path}"); - continue; - } - - var content = await File.ReadAllBytesAsync(fullPath, cancellationToken); - var digest = ComputeDigest(content); - if (digest != expectedDigest) - { - errors.Add($"TUF file digest mismatch ({path}): expected {expectedDigest}, got {digest}"); - } - fileCount++; - totalBytes += content.Length; - } - - // Validate target files - foreach (var target in manifest.Tuf.TargetFiles) - { - var targetPath = Path.Combine(extractDir, target.Path); - if (!File.Exists(targetPath)) - { - errors.Add($"TUF target file missing: {target.Path}"); - continue; - } - - var content = await File.ReadAllBytesAsync(targetPath, cancellationToken); - var digest = ComputeDigest(content); - if (digest != target.Digest) - { - errors.Add($"TUF target digest mismatch ({target.Name}): expected {target.Digest}, got {digest}"); - } - fileCount++; - totalBytes 
+= content.Length; - } - } - - // Validate tiles (sample check - not all tiles to avoid performance issues) - if (manifest.Tiles != null && manifest.Tiles.Tiles.Length > 0) - { - var tilesToCheck = manifest.Tiles.Tiles.Length > 10 - ? manifest.Tiles.Tiles.Take(5).Concat(manifest.Tiles.Tiles.TakeLast(5)).ToArray() - : manifest.Tiles.Tiles.ToArray(); - - foreach (var tile in tilesToCheck) - { - var tilePath = Path.Combine(extractDir, tile.Path); - if (!File.Exists(tilePath)) - { - errors.Add($"Tile file missing: {tile.Path}"); - continue; - } - - var content = await File.ReadAllBytesAsync(tilePath, cancellationToken); - var digest = ComputeDigest(content); - if (digest != tile.Digest) - { - errors.Add($"Tile digest mismatch ({tile.Level}/{tile.Index}): expected {tile.Digest}, got {digest}"); - } - } - - fileCount += manifest.Tiles.TileCount; - totalBytes += manifest.Tiles.SizeBytes; - } - - return new ManifestValidationResult - { - Success = errors.Count == 0, - Error = errors.Count > 0 ? string.Join("; ", errors) : null, - FileCount = fileCount, - TotalBytes = totalBytes - }; - } - - private static async Task ImportTufMetadataAsync( - TufMetadataComponent tuf, - string sourceDir, - string destDir, - CancellationToken cancellationToken) - { - Directory.CreateDirectory(destDir); - var targetsDir = Path.Combine(destDir, "targets"); - Directory.CreateDirectory(targetsDir); - - var importedFiles = new List(); - - // Copy role metadata - var roleFiles = new[] - { - (tuf.Root.Path, "root.json"), - (tuf.Snapshot.Path, "snapshot.json"), - (tuf.Timestamp.Path, "timestamp.json"), - (tuf.Targets.Path, "targets.json") - }; - - foreach (var (sourcePath, destName) in roleFiles) - { - var src = Path.Combine(sourceDir, sourcePath); - var dest = Path.Combine(destDir, destName); - if (File.Exists(src)) - { - await CopyFileAsync(src, dest, cancellationToken); - importedFiles.Add(destName); - } - } - - // Copy target files - foreach (var target in tuf.TargetFiles) - { - var src = Path.Combine(sourceDir, target.Path); - var dest = Path.Combine(targetsDir, target.Name); - if (File.Exists(src)) - { - await CopyFileAsync(src, dest, cancellationToken); - importedFiles.Add($"targets/{target.Name}"); - } - } - - return new TufImportResult - { - ImportedFiles = importedFiles, - RootVersion = tuf.RootVersion - }; - } - - private static async Task ImportTilesAsync( - TrustSnapshotManifest manifest, - string sourceDir, - string destDir, - CancellationToken cancellationToken) - { - Directory.CreateDirectory(destDir); - - var importedCount = 0; - long importedBytes = 0; - - if (manifest.Tiles?.Tiles == null) - { - return new TileImportResult { ImportedCount = 0, ImportedBytes = 0 }; - } - - foreach (var tile in manifest.Tiles.Tiles) - { - var src = Path.Combine(sourceDir, tile.Path); - if (!File.Exists(src)) - { - continue; - } - - // Create destination path matching FileSystemRekorTileCache structure - var levelDir = Path.Combine(destDir, manifest.Origin ?? 
"default", tile.Level.ToString()); - Directory.CreateDirectory(levelDir); - - var dest = Path.Combine(levelDir, $"{tile.Index}.tile"); - await CopyFileAsync(src, dest, cancellationToken); - - importedCount++; - importedBytes += tile.SizeBytes; - } - - return new TileImportResult - { - ImportedCount = importedCount, - ImportedBytes = importedBytes - }; - } - - private static async Task CopyFileAsync(string src, string dest, CancellationToken cancellationToken) - { - await using var srcStream = File.OpenRead(src); - await using var destStream = File.Create(dest); - await srcStream.CopyToAsync(destStream, cancellationToken); - } - - private static string ComputeDigest(byte[] content) - { - var hash = SHA256.HashData(content); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - private sealed record ManifestValidationResult - { - public bool Success { get; init; } - public string? Error { get; init; } - public int FileCount { get; init; } - public long TotalBytes { get; init; } - } -} - -/// -/// Options for importing a trust snapshot. -/// -public sealed record TrustSnapshotImportOptions -{ - /// - /// Whether to verify manifest checksums. - /// - public bool VerifyManifest { get; init; } = true; - - /// - /// Reject if snapshot is older than this threshold. - /// - public TimeSpan? RejectIfStale { get; init; } - - /// - /// Force import even if validation fails. - /// - public bool Force { get; init; } - - /// - /// Path to TUF cache directory. - /// - public string? TufCachePath { get; init; } - - /// - /// Path to tile cache directory. - /// - public string? TileCachePath { get; init; } -} - -/// -/// Result of importing a trust snapshot. -/// -public sealed record TrustSnapshotImportResult -{ - public bool IsSuccess { get; init; } - public string? Error { get; init; } - public TrustSnapshotManifest? Manifest { get; init; } - public TufImportResult? TufResult { get; init; } - public TileImportResult? TileResult { get; init; } - public string? CheckpointContent { get; init; } - - public static TrustSnapshotImportResult Success( - TrustSnapshotManifest manifest, - TufImportResult? tufResult, - TileImportResult? tileResult, - string? checkpointContent) => new() - { - IsSuccess = true, - Manifest = manifest, - TufResult = tufResult, - TileResult = tileResult, - CheckpointContent = checkpointContent - }; - - public static TrustSnapshotImportResult Failure(string error) => new() - { - IsSuccess = false, - Error = error - }; -} - -/// -/// Result of importing TUF metadata. -/// -public sealed record TufImportResult -{ - public List ImportedFiles { get; init; } = []; - public int RootVersion { get; init; } -} - -/// -/// Result of importing tiles. -/// -public sealed record TileImportResult -{ - public int ImportedCount { get; init; } - public long ImportedBytes { get; init; } -} - -/// -/// Result of validating a trust snapshot. -/// -public sealed record TrustSnapshotValidationResult -{ - public bool IsValid { get; init; } - public string? Error { get; init; } - public TrustSnapshotManifest? 
Manifest { get; init; } - public int FileCount { get; init; } - public long TotalBytes { get; init; } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotManifest.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotManifest.cs index 954d3ddbe..4fef28664 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotManifest.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotManifest.cs @@ -1,11 +1,3 @@ -// ----------------------------------------------------------------------------- -// TrustSnapshotManifest.cs -// Sprint: SPRINT_20260125_002_Attestor_trust_automation -// Task: PROXY-004 - Add snapshot export command -// Description: Manifest model for trust snapshots -// ----------------------------------------------------------------------------- - -using System.Collections.Immutable; using System.Text.Json.Serialization; namespace StellaOps.AirGap.Bundle.TrustSnapshot; @@ -93,267 +85,3 @@ public sealed record TrustSnapshotManifest [JsonPropertyName("digest")] public string? Digest { get; init; } } - -/// -/// TUF metadata component. -/// -public sealed record TufMetadataComponent -{ - /// - /// Path to root.json. - /// - [JsonPropertyName("root")] - public required TufFileComponent Root { get; init; } - - /// - /// Path to snapshot.json. - /// - [JsonPropertyName("snapshot")] - public required TufFileComponent Snapshot { get; init; } - - /// - /// Path to timestamp.json. - /// - [JsonPropertyName("timestamp")] - public required TufFileComponent Timestamp { get; init; } - - /// - /// Path to targets.json. - /// - [JsonPropertyName("targets")] - public required TufFileComponent Targets { get; init; } - - /// - /// Target files (Rekor keys, service map, etc.). - /// - [JsonPropertyName("target_files")] - public ImmutableArray TargetFiles { get; init; } = []; - - /// - /// TUF repository URL. - /// - [JsonPropertyName("repository_url")] - public string? RepositoryUrl { get; init; } - - /// - /// TUF root version. - /// - [JsonPropertyName("root_version")] - public int RootVersion { get; init; } -} - -/// -/// Individual TUF metadata file. -/// -public sealed record TufFileComponent -{ - /// - /// Relative path within the bundle. - /// - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// - /// SHA-256 digest. - /// - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// - /// File size in bytes. - /// - [JsonPropertyName("size_bytes")] - public required long SizeBytes { get; init; } - - /// - /// Version number (if applicable). - /// - [JsonPropertyName("version")] - public int? Version { get; init; } -} - -/// -/// TUF target file component. -/// -public sealed record TufTargetFileComponent -{ - /// - /// Target name. - /// - [JsonPropertyName("name")] - public required string Name { get; init; } - - /// - /// Relative path within the bundle. - /// - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// - /// SHA-256 digest. - /// - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// - /// File size in bytes. - /// - [JsonPropertyName("size_bytes")] - public required long SizeBytes { get; init; } -} - -/// -/// Checkpoint component. -/// -public sealed record CheckpointComponent -{ - /// - /// Relative path to the checkpoint file. - /// - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// - /// SHA-256 digest. 
- /// - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// - /// Signed checkpoint note (raw). - /// - [JsonPropertyName("signed_note")] - public string? SignedNote { get; init; } -} - -/// -/// Tile set component. -/// -public sealed record TileSetComponent -{ - /// - /// Base path for tiles within the bundle. - /// - [JsonPropertyName("base_path")] - public required string BasePath { get; init; } - - /// - /// Number of tiles included. - /// - [JsonPropertyName("tile_count")] - public required int TileCount { get; init; } - - /// - /// Total size of tiles in bytes. - /// - [JsonPropertyName("size_bytes")] - public required long SizeBytes { get; init; } - - /// - /// Range of entries covered by tiles. - /// - [JsonPropertyName("entry_range")] - public required EntryRange EntryRange { get; init; } - - /// - /// Individual tile files (for verification). - /// - [JsonPropertyName("tiles")] - public ImmutableArray Tiles { get; init; } = []; -} - -/// -/// Entry range specification. -/// -public sealed record EntryRange -{ - /// - /// Start index (inclusive). - /// - [JsonPropertyName("start")] - public required long Start { get; init; } - - /// - /// End index (exclusive). - /// - [JsonPropertyName("end")] - public required long End { get; init; } -} - -/// -/// Individual tile file. -/// -public sealed record TileFileComponent -{ - /// - /// Tile level. - /// - [JsonPropertyName("level")] - public required int Level { get; init; } - - /// - /// Tile index. - /// - [JsonPropertyName("index")] - public required long Index { get; init; } - - /// - /// Relative path within the bundle. - /// - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// - /// SHA-256 digest. - /// - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// - /// File size in bytes. - /// - [JsonPropertyName("size_bytes")] - public required long SizeBytes { get; init; } - - /// - /// Whether this is a partial tile. - /// - [JsonPropertyName("is_partial")] - public bool IsPartial { get; init; } -} - -/// -/// Optional entries component (for offline verification). -/// -public sealed record EntriesComponent -{ - /// - /// Relative path to the entries file. - /// - [JsonPropertyName("path")] - public required string Path { get; init; } - - /// - /// SHA-256 digest. - /// - [JsonPropertyName("digest")] - public required string Digest { get; init; } - - /// - /// File size in bytes. - /// - [JsonPropertyName("size_bytes")] - public required long SizeBytes { get; init; } - - /// - /// Number of entries included. - /// - [JsonPropertyName("entry_count")] - public required int EntryCount { get; init; } - - /// - /// Format of the entries file. - /// - [JsonPropertyName("format")] - public string Format { get; init; } = "ndjson.zst"; -} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotValidationResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotValidationResult.cs new file mode 100644 index 000000000..9864c0784 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TrustSnapshotValidationResult.cs @@ -0,0 +1,13 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Result of validating a trust snapshot. +/// +public sealed record TrustSnapshotValidationResult +{ + public bool IsValid { get; init; } + public string? Error { get; init; } + public TrustSnapshotManifest? 
Manifest { get; init; } + public int FileCount { get; init; } + public long TotalBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufFileComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufFileComponent.cs new file mode 100644 index 000000000..6e74b6b22 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufFileComponent.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Individual TUF metadata file. +/// +public sealed record TufFileComponent +{ + /// + /// Relative path within the bundle. + /// + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// + /// SHA-256 digest. + /// + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// + /// File size in bytes. + /// + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } + + /// + /// Version number (if applicable). + /// + [JsonPropertyName("version")] + public int? Version { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufImportResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufImportResult.cs new file mode 100644 index 000000000..ef96123aa --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufImportResult.cs @@ -0,0 +1,10 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// Result of importing TUF metadata. +/// +public sealed record TufImportResult +{ + public List ImportedFiles { get; init; } = []; + public int RootVersion { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataComponent.cs new file mode 100644 index 000000000..38e5ca079 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataComponent.cs @@ -0,0 +1,52 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// TUF metadata component. +/// +public sealed record TufMetadataComponent +{ + /// + /// Path to root.json. + /// + [JsonPropertyName("root")] + public required TufFileComponent Root { get; init; } + + /// + /// Path to snapshot.json. + /// + [JsonPropertyName("snapshot")] + public required TufFileComponent Snapshot { get; init; } + + /// + /// Path to timestamp.json. + /// + [JsonPropertyName("timestamp")] + public required TufFileComponent Timestamp { get; init; } + + /// + /// Path to targets.json. + /// + [JsonPropertyName("targets")] + public required TufFileComponent Targets { get; init; } + + /// + /// Target files (Rekor keys, service map, etc.). + /// + [JsonPropertyName("target_files")] + public ImmutableArray TargetFiles { get; init; } = []; + + /// + /// TUF repository URL. + /// + [JsonPropertyName("repository_url")] + public string? RepositoryUrl { get; init; } + + /// + /// TUF root version. 
+ /// + [JsonPropertyName("root_version")] + public int RootVersion { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataSource.cs new file mode 100644 index 000000000..07539587f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufMetadataSource.cs @@ -0,0 +1,47 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// TUF metadata source. +/// +public sealed record TufMetadataSource +{ + /// + /// Path to root.json. + /// + public required string RootPath { get; init; } + + /// + /// Path to snapshot.json. + /// + public required string SnapshotPath { get; init; } + + /// + /// Path to timestamp.json. + /// + public required string TimestampPath { get; init; } + + /// + /// Path to targets.json. + /// + public required string TargetsPath { get; init; } + + /// + /// Target files to include. + /// + public IReadOnlyList TargetFiles { get; init; } = []; + + /// + /// TUF repository URL. + /// + public string? RepositoryUrl { get; init; } + + /// + /// Root version. + /// + public int RootVersion { get; init; } + + /// + /// When the timestamp expires. + /// + public DateTimeOffset? TimestampExpires { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetFileComponent.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetFileComponent.cs new file mode 100644 index 000000000..70a6da89c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetFileComponent.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// TUF target file component. +/// +public sealed record TufTargetFileComponent +{ + /// + /// Target name. + /// + [JsonPropertyName("name")] + public required string Name { get; init; } + + /// + /// Relative path within the bundle. + /// + [JsonPropertyName("path")] + public required string Path { get; init; } + + /// + /// SHA-256 digest. + /// + [JsonPropertyName("digest")] + public required string Digest { get; init; } + + /// + /// File size in bytes. + /// + [JsonPropertyName("size_bytes")] + public required long SizeBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetSource.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetSource.cs new file mode 100644 index 000000000..2a5302d5f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/TrustSnapshot/TufTargetSource.cs @@ -0,0 +1,17 @@ +namespace StellaOps.AirGap.Bundle.TrustSnapshot; + +/// +/// TUF target file source. +/// +public sealed record TufTargetSource +{ + /// + /// Target name. + /// + public required string Name { get; init; } + + /// + /// Source path. 
+ /// + public required string SourcePath { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationError.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationError.cs new file mode 100644 index 000000000..48d04e3e0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationError.cs @@ -0,0 +1,3 @@ +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed record BundleValidationError(string Component, string Message); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationResult.cs new file mode 100644 index 000000000..e1e5cf4ae --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationResult.cs @@ -0,0 +1,7 @@ +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed record BundleValidationResult( + bool IsValid, + IReadOnlyList<BundleValidationError> Errors, + IReadOnlyList<BundleValidationWarning> Warnings, + long TotalSizeBytes); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationWarning.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationWarning.cs new file mode 100644 index 000000000..c50c81543 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidationWarning.cs @@ -0,0 +1,3 @@ +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed record BundleValidationWarning(string Component, string Message); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Helpers.cs new file mode 100644 index 000000000..78e357a54 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Helpers.cs @@ -0,0 +1,39 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed partial class BundleValidator +{ + private static async Task<(bool IsValid, string ActualDigest)> VerifyFileDigestAsync( + string filePath, + string expectedDigest, + CancellationToken ct) + { + if (!File.Exists(filePath)) + { + return (false, "FILE_NOT_FOUND"); + } + + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); + var actualDigest = Convert.ToHexString(hash).ToLowerInvariant(); + return (string.Equals(actualDigest, expectedDigest, StringComparison.OrdinalIgnoreCase), actualDigest); + } + + private static string NormalizeDigest(string digest) + { + return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? digest[7..]
+ : digest; + } + + private static string ComputeBundleDigest(BundleManifest manifest) + { + var withoutDigest = manifest with { BundleDigest = null }; + var json = BundleManifestSerializer.Serialize(withoutDigest); + return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json))).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validate.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validate.cs new file mode 100644 index 000000000..7e3f9eaac --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validate.cs @@ -0,0 +1,53 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed partial class BundleValidator +{ + public async Task<BundleValidationResult> ValidateAsync( + BundleManifest manifest, + string bundlePath, + CancellationToken ct = default) + { + var errors = new List<BundleValidationError>(); + var warnings = new List<BundleValidationWarning>(); + var now = _timeProvider.GetUtcNow(); + + if (manifest.Feeds.Length == 0) + { + errors.Add(new BundleValidationError("Feeds", "At least one feed required")); + } + + if (manifest.CryptoMaterials.Length == 0) + { + errors.Add(new BundleValidationError("CryptoMaterials", "Trust roots required")); + } + + await ValidateFeedsAsync(manifest, bundlePath, errors, ct).ConfigureAwait(false); + + if (_options.ValidatePolicies) + { + await ValidatePoliciesAsync(manifest, bundlePath, errors, ct).ConfigureAwait(false); + } + + if (_options.ValidateCryptoMaterials) + { + await ValidateCryptoMaterialsAsync(manifest, bundlePath, errors, ct).ConfigureAwait(false); + } + + if (_options.ValidateArtifacts && manifest.Artifacts.Length > 0) + { + await ValidateArtifactsAsync(manifest, bundlePath, errors, warnings, ct).ConfigureAwait(false); + } + + ValidateExpiration(manifest, now, warnings); + ValidateStaleness(manifest, now, errors, warnings); + ValidateBundleDigest(manifest, errors); + + return new BundleValidationResult( + errors.Count == 0, + errors, + warnings, + manifest.TotalSizeBytes); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Artifacts.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Artifacts.cs new file mode 100644 index 000000000..47a246d8f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Artifacts.cs @@ -0,0 +1,94 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed partial class BundleValidator +{ + private async Task ValidateArtifactsAsync( + BundleManifest manifest, + string bundlePath, + List<BundleValidationError> errors, + List<BundleValidationWarning> warnings, + CancellationToken ct) + { + foreach (var artifact in manifest.Artifacts) + { + if (string.IsNullOrWhiteSpace(artifact.Path)) + { + continue; + } + + if (!PathValidation.IsSafeRelativePath(artifact.Path)) + { + errors.Add(new BundleValidationError("Artifacts", + $"Artifact '{artifact.Type}' has unsafe relative path: {artifact.Path}")); + continue; + } + + if (string.IsNullOrWhiteSpace(artifact.Digest)) + { + warnings.Add(new BundleValidationWarning("Artifacts", + $"Artifact '{artifact.Type}' at '{artifact.Path}' has no digest")); + continue; + } + + var filePath = PathValidation.SafeCombine(bundlePath, artifact.Path); + var result = await VerifyFileDigestAsync(filePath, NormalizeDigest(artifact.Digest), ct).ConfigureAwait(false); + if (!result.IsValid) +
{ + errors.Add(new BundleValidationError("Artifacts", + $"Artifact '{artifact.Type}' at '{artifact.Path}' digest mismatch: expected {artifact.Digest}, got {result.ActualDigest}")); + } + } + } + + private static void ValidateExpiration( + BundleManifest manifest, + DateTimeOffset now, + List<BundleValidationWarning> warnings) + { + if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < now) + { + warnings.Add(new BundleValidationWarning("ExpiresAt", "Bundle has expired")); + } + } + + private void ValidateStaleness( + BundleManifest manifest, + DateTimeOffset now, + List<BundleValidationError> errors, + List<BundleValidationWarning> warnings) + { + foreach (var feed in manifest.Feeds) + { + var age = now - feed.SnapshotAt; + if (age.TotalDays > _options.MaxFeedAgeDays) + { + var message = $"Feed {feed.FeedId} is {age.TotalDays:F0} days old (threshold: {_options.MaxFeedAgeDays} days)"; + if (_options.FailOnStaleFeed) + { + errors.Add(new BundleValidationError("Feeds", message)); + } + else + { + warnings.Add(new BundleValidationWarning("Feeds", message)); + } + } + } + } + + private static void ValidateBundleDigest(BundleManifest manifest, List<BundleValidationError> errors) + { + if (manifest.BundleDigest is null) + { + return; + } + + var computed = ComputeBundleDigest(manifest); + if (!string.Equals(computed, manifest.BundleDigest, StringComparison.OrdinalIgnoreCase)) + { + errors.Add(new BundleValidationError("BundleDigest", "Bundle digest mismatch")); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Files.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Files.cs new file mode 100644 index 000000000..f8f8ac2d9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.Validation.Files.cs @@ -0,0 +1,82 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Validation; + +public sealed partial class BundleValidator +{ + private async Task ValidateFeedsAsync( + BundleManifest manifest, + string bundlePath, + List<BundleValidationError> errors, + CancellationToken ct) + { + foreach (var feed in manifest.Feeds) + { + if (!PathValidation.IsSafeRelativePath(feed.RelativePath)) + { + errors.Add(new BundleValidationError("Feeds", + $"Feed {feed.FeedId} has unsafe relative path: {feed.RelativePath}")); + continue; + } + + var filePath = PathValidation.SafeCombine(bundlePath, feed.RelativePath); + var result = await VerifyFileDigestAsync(filePath, feed.Digest, ct).ConfigureAwait(false); + if (!result.IsValid) + { + errors.Add(new BundleValidationError("Feeds", + $"Feed {feed.FeedId} digest mismatch: expected {feed.Digest}, got {result.ActualDigest}")); + } + } + } + + private async Task ValidatePoliciesAsync( + BundleManifest manifest, + string bundlePath, + List<BundleValidationError> errors, + CancellationToken ct) + { + foreach (var policy in manifest.Policies) + { + if (!PathValidation.IsSafeRelativePath(policy.RelativePath)) + { + errors.Add(new BundleValidationError("Policies", + $"Policy {policy.PolicyId} has unsafe relative path: {policy.RelativePath}")); + continue; + } + + var filePath = PathValidation.SafeCombine(bundlePath, policy.RelativePath); + var result = await VerifyFileDigestAsync(filePath, policy.Digest, ct).ConfigureAwait(false); + if (!result.IsValid) + { + errors.Add(new BundleValidationError("Policies", + $"Policy {policy.PolicyId} digest mismatch: expected {policy.Digest}, got {result.ActualDigest}")); + } + } + } + + private async Task ValidateCryptoMaterialsAsync( + BundleManifest manifest, + string
bundlePath, + List errors, + CancellationToken ct) + { + foreach (var crypto in manifest.CryptoMaterials) + { + if (!PathValidation.IsSafeRelativePath(crypto.RelativePath)) + { + errors.Add(new BundleValidationError("CryptoMaterials", + $"Crypto material {crypto.ComponentId} has unsafe relative path: {crypto.RelativePath}")); + continue; + } + + var filePath = PathValidation.SafeCombine(bundlePath, crypto.RelativePath); + var result = await VerifyFileDigestAsync(filePath, crypto.Digest, ct).ConfigureAwait(false); + if (!result.IsValid) + { + errors.Add(new BundleValidationError("CryptoMaterials", + $"Crypto material {crypto.ComponentId} digest mismatch: expected {crypto.Digest}, got {result.ActualDigest}")); + } + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.cs index 548718887..702741da7 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/BundleValidator.cs @@ -1,13 +1,10 @@ using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; using StellaOps.AirGap.Bundle.Services; -using System.Security.Cryptography; -using System.Text; namespace StellaOps.AirGap.Bundle.Validation; -public sealed class BundleValidator : IBundleValidator +public sealed partial class BundleValidator : IBundleValidator { private readonly TimeProvider _timeProvider; private readonly BundleValidationOptions _options; @@ -22,204 +19,4 @@ public sealed class BundleValidator : IBundleValidator _options = options ?? throw new ArgumentNullException(nameof(options)); } - public async Task ValidateAsync( - BundleManifest manifest, - string bundlePath, - CancellationToken ct = default) - { - var errors = new List(); - var warnings = new List(); - var now = _timeProvider.GetUtcNow(); - - if (manifest.Feeds.Length == 0) - { - errors.Add(new BundleValidationError("Feeds", "At least one feed required")); - } - - if (manifest.CryptoMaterials.Length == 0) - { - errors.Add(new BundleValidationError("CryptoMaterials", "Trust roots required")); - } - - // Validate feed digests and paths - foreach (var feed in manifest.Feeds) - { - // Validate path safety - if (!PathValidation.IsSafeRelativePath(feed.RelativePath)) - { - errors.Add(new BundleValidationError("Feeds", - $"Feed {feed.FeedId} has unsafe relative path: {feed.RelativePath}")); - continue; - } - - var filePath = PathValidation.SafeCombine(bundlePath, feed.RelativePath); - var result = await VerifyFileDigestAsync(filePath, feed.Digest, ct).ConfigureAwait(false); - if (!result.IsValid) - { - errors.Add(new BundleValidationError("Feeds", - $"Feed {feed.FeedId} digest mismatch: expected {feed.Digest}, got {result.ActualDigest}")); - } - } - - // Validate policy digests if enabled - if (_options.ValidatePolicies) - { - foreach (var policy in manifest.Policies) - { - if (!PathValidation.IsSafeRelativePath(policy.RelativePath)) - { - errors.Add(new BundleValidationError("Policies", - $"Policy {policy.PolicyId} has unsafe relative path: {policy.RelativePath}")); - continue; - } - - var filePath = PathValidation.SafeCombine(bundlePath, policy.RelativePath); - var result = await VerifyFileDigestAsync(filePath, policy.Digest, ct).ConfigureAwait(false); - if (!result.IsValid) - { - errors.Add(new BundleValidationError("Policies", - $"Policy {policy.PolicyId} digest mismatch: expected {policy.Digest}, got {result.ActualDigest}")); - } - } 
- } - - // Validate crypto material digests if enabled - if (_options.ValidateCryptoMaterials) - { - foreach (var crypto in manifest.CryptoMaterials) - { - if (!PathValidation.IsSafeRelativePath(crypto.RelativePath)) - { - errors.Add(new BundleValidationError("CryptoMaterials", - $"Crypto material {crypto.ComponentId} has unsafe relative path: {crypto.RelativePath}")); - continue; - } - - var filePath = PathValidation.SafeCombine(bundlePath, crypto.RelativePath); - var result = await VerifyFileDigestAsync(filePath, crypto.Digest, ct).ConfigureAwait(false); - if (!result.IsValid) - { - errors.Add(new BundleValidationError("CryptoMaterials", - $"Crypto material {crypto.ComponentId} digest mismatch: expected {crypto.Digest}, got {result.ActualDigest}")); - } - } - } - - // Validate artifact digests (function maps, observations, verification reports) - if (_options.ValidateArtifacts && manifest.Artifacts.Length > 0) - { - foreach (var artifact in manifest.Artifacts) - { - if (string.IsNullOrWhiteSpace(artifact.Path)) - { - continue; // Inline artifact without path - } - - if (!PathValidation.IsSafeRelativePath(artifact.Path)) - { - errors.Add(new BundleValidationError("Artifacts", - $"Artifact '{artifact.Type}' has unsafe relative path: {artifact.Path}")); - continue; - } - - if (string.IsNullOrWhiteSpace(artifact.Digest)) - { - warnings.Add(new BundleValidationWarning("Artifacts", - $"Artifact '{artifact.Type}' at '{artifact.Path}' has no digest")); - continue; - } - - var filePath = PathValidation.SafeCombine(bundlePath, artifact.Path); - var result = await VerifyFileDigestAsync(filePath, NormalizeDigest(artifact.Digest), ct).ConfigureAwait(false); - if (!result.IsValid) - { - errors.Add(new BundleValidationError("Artifacts", - $"Artifact '{artifact.Type}' at '{artifact.Path}' digest mismatch: expected {artifact.Digest}, got {result.ActualDigest}")); - } - } - } - - // Check bundle expiration - if (manifest.ExpiresAt.HasValue && manifest.ExpiresAt.Value < now) - { - warnings.Add(new BundleValidationWarning("ExpiresAt", "Bundle has expired")); - } - - // Check feed staleness using configurable threshold - foreach (var feed in manifest.Feeds) - { - var age = now - feed.SnapshotAt; - if (age.TotalDays > _options.MaxFeedAgeDays) - { - var message = $"Feed {feed.FeedId} is {age.TotalDays:F0} days old (threshold: {_options.MaxFeedAgeDays} days)"; - if (_options.FailOnStaleFeed) - { - errors.Add(new BundleValidationError("Feeds", message)); - } - else - { - warnings.Add(new BundleValidationWarning("Feeds", message)); - } - } - } - - // Verify bundle digest if present - if (manifest.BundleDigest is not null) - { - var computed = ComputeBundleDigest(manifest); - if (!string.Equals(computed, manifest.BundleDigest, StringComparison.OrdinalIgnoreCase)) - { - errors.Add(new BundleValidationError("BundleDigest", "Bundle digest mismatch")); - } - } - - return new BundleValidationResult( - errors.Count == 0, - errors, - warnings, - manifest.TotalSizeBytes); - } - - private static async Task<(bool IsValid, string ActualDigest)> VerifyFileDigestAsync( - string filePath, string expectedDigest, CancellationToken ct) - { - if (!File.Exists(filePath)) - { - return (false, "FILE_NOT_FOUND"); - } - - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false); - var actualDigest = Convert.ToHexString(hash).ToLowerInvariant(); - return (string.Equals(actualDigest, expectedDigest, StringComparison.OrdinalIgnoreCase), actualDigest); - } - - private static 
string NormalizeDigest(string digest) - { - // Strip "sha256:" prefix if present for comparison with raw hex - return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? digest[7..] - : digest; - } - - private static string ComputeBundleDigest(BundleManifest manifest) - { - var withoutDigest = manifest with { BundleDigest = null }; - var json = BundleManifestSerializer.Serialize(withoutDigest); - return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json))).ToLowerInvariant(); - } } - -public interface IBundleValidator -{ - Task ValidateAsync(BundleManifest manifest, string bundlePath, CancellationToken ct = default); -} - -public sealed record BundleValidationResult( - bool IsValid, - IReadOnlyList Errors, - IReadOnlyList Warnings, - long TotalSizeBytes); - -public sealed record BundleValidationError(string Component, string Message); -public sealed record BundleValidationWarning(string Component, string Message); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/IBundleValidator.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/IBundleValidator.cs new file mode 100644 index 000000000..4d6f6ed3b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Bundle/Validation/IBundleValidator.cs @@ -0,0 +1,11 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Validation; + +public interface IBundleValidator +{ + Task ValidateAsync( + BundleManifest manifest, + string bundlePath, + CancellationToken ct = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Extensions/AirGapPersistenceExtensions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Extensions/AirGapPersistenceExtensions.cs index e72ede16c..00c07bb6a 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Extensions/AirGapPersistenceExtensions.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Extensions/AirGapPersistenceExtensions.cs @@ -1,9 +1,6 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Npgsql; using StellaOps.AirGap.Controller.Stores; using StellaOps.AirGap.Importer.Versioning; using StellaOps.AirGap.Persistence.Postgres; @@ -27,7 +24,7 @@ public static class AirGapPersistenceExtensions { services.Configure(sectionName, configuration.GetSection(sectionName)); services.AddSingleton(); - services.AddHostedService(sp => CreateMigrationHost(sp)); + services.AddHostedService(); services.AddScoped(); services.AddScoped(); @@ -43,46 +40,10 @@ public static class AirGapPersistenceExtensions { services.Configure(configureOptions); services.AddSingleton(); - services.AddHostedService(sp => CreateMigrationHost(sp)); + services.AddHostedService(); services.AddScoped(); services.AddScoped(); return services; } - - private static IHostedService CreateMigrationHost(IServiceProvider serviceProvider) - { - var options = serviceProvider.GetRequiredService>().Value; - var schemaName = string.IsNullOrWhiteSpace(options.SchemaName) - ? 
AirGapDataSource.DefaultSchemaName - : options.SchemaName!; - - var connectionString = BuildMigrationConnectionString(options, schemaName); - var logger = serviceProvider.GetRequiredService() - .CreateLogger("Migration.AirGap.Persistence"); - var lifetime = serviceProvider.GetRequiredService(); - - return new AirGapStartupMigrationHost( - connectionString, - schemaName, - "AirGap.Persistence", - typeof(AirGapDataSource).Assembly, - logger, - lifetime); - } - - private static string BuildMigrationConnectionString(PostgresOptions options, string schemaName) - { - var builder = new NpgsqlConnectionStringBuilder(options.ConnectionString) - { - CommandTimeout = options.CommandTimeoutSeconds - }; - - if (!string.IsNullOrWhiteSpace(schemaName)) - { - builder.SearchPath = $"{schemaName}, public"; - } - - return builder.ConnectionString; - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/AirGapStartupMigrationHost.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/AirGapStartupMigrationHost.cs index f256a9428..ca35ad6f6 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/AirGapStartupMigrationHost.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/AirGapStartupMigrationHost.cs @@ -1,22 +1,52 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Npgsql; using StellaOps.Infrastructure.Postgres.Migrations; -using System.Reflection; +using StellaOps.Infrastructure.Postgres.Options; namespace StellaOps.AirGap.Persistence.Postgres; internal sealed class AirGapStartupMigrationHost : StartupMigrationHost { + private const string ModuleName = "AirGap.Persistence"; + public AirGapStartupMigrationHost( - string connectionString, - string schemaName, - string moduleName, - Assembly migrationsAssembly, - ILogger logger, - IHostApplicationLifetime lifetime, - StartupMigrationOptions? options = null) - : base(connectionString, schemaName, moduleName, migrationsAssembly, logger, lifetime, options) + IOptions options, + ILoggerFactory loggerFactory, + IHostApplicationLifetime lifetime) + : base( + BuildMigrationConnectionString(options.Value, out var schemaName), + schemaName, + ModuleName, + typeof(AirGapDataSource).Assembly, + loggerFactory.CreateLogger($"Migration.{ModuleName}"), + lifetime) { } + + private static string BuildMigrationConnectionString(PostgresOptions options, out string schemaName) + { + schemaName = ResolveSchemaName(options); + + var builder = new NpgsqlConnectionStringBuilder(options.ConnectionString) + { + CommandTimeout = options.CommandTimeoutSeconds + }; + + if (!string.IsNullOrWhiteSpace(schemaName)) + { + builder.SearchPath = $"{schemaName}, public"; + } + + return builder.ConnectionString; + } + + private static string ResolveSchemaName(PostgresOptions options) + { + return string.IsNullOrWhiteSpace(options.SchemaName) + ? 
AirGapDataSource.DefaultSchemaName + : options.SchemaName!; + } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Map.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Map.cs new file mode 100644 index 000000000..5ad4025ae --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Map.cs @@ -0,0 +1,37 @@ +using Npgsql; +using StellaOps.AirGap.Controller.Domain; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + private AirGapState Map(NpgsqlDataReader reader) + { + var id = reader.GetString(0); + var tenantId = reader.GetString(1); + var sealed_ = reader.GetBoolean(2); + var policyHash = reader.IsDBNull(3) ? null : reader.GetString(3); + var timeAnchorJson = reader.GetFieldValue<string>(4); + var lastTransitionAt = reader.GetFieldValue<DateTimeOffset>(5); + var stalenessBudgetJson = reader.GetFieldValue<string>(6); + var driftBaselineSeconds = reader.GetInt64(7); + var contentBudgetsJson = reader.IsDBNull(8) ? null : reader.GetFieldValue<string>(8); + + var timeAnchor = DeserializeTimeAnchor(timeAnchorJson); + var stalenessBudget = DeserializeStalenessBudget(stalenessBudgetJson); + var contentBudgets = DeserializeContentBudgets(contentBudgetsJson); + + return new AirGapState + { + Id = id, + TenantId = tenantId, + Sealed = sealed_, + PolicyHash = policyHash, + TimeAnchor = timeAnchor, + LastTransitionAt = lastTransitionAt, + StalenessBudget = stalenessBudget, + DriftBaselineSeconds = driftBaselineSeconds, + ContentBudgets = contentBudgets + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Read.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Read.cs new file mode 100644 index 000000000..5e7702a0a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Read.cs @@ -0,0 +1,58 @@ +using System; +using System.Threading; +using Npgsql; +using StellaOps.AirGap.Controller.Domain; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + public async Task<AirGapState> GetAsync(string tenantId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + await EnsureTableAsync(cancellationToken).ConfigureAwait(false); + + var tenantKey = NormalizeTenantId(tenantId); + var stateTable = GetQualifiedTableName("state"); + + await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", cancellationToken) + .ConfigureAwait(false); + var sql = $$""" + SELECT id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, + staleness_budget, drift_baseline_seconds, content_budgets + FROM {{stateTable}} + WHERE tenant_id = @tenant_id; + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "tenant_id", tenantKey); + + await using (var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false)) + { + if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return Map(reader); + } + } + + // Fallback for legacy rows stored without normalization.
+ await using var fallbackCommand = CreateCommand($$""" + SELECT id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, + staleness_budget, drift_baseline_seconds, content_budgets + FROM {{stateTable}} + WHERE LOWER(tenant_id) = LOWER(@tenant_id) + ORDER BY updated_at DESC, id DESC + LIMIT 1; + """, connection); + AddParameter(fallbackCommand, "tenant_id", tenantId); + + await using var fallbackReader = await fallbackCommand.ExecuteReaderAsync(cancellationToken) + .ConfigureAwait(false); + if (await fallbackReader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return Map(fallbackReader); + } + + return new AirGapState { TenantId = tenantId }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Schema.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Schema.cs new file mode 100644 index 000000000..096aad894 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Schema.cs @@ -0,0 +1,85 @@ +using System; +using System.Threading; +using Npgsql; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + private async ValueTask EnsureTableAsync(CancellationToken cancellationToken) + { + if (_initialized) + { + return; + } + + await _initLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_initialized) + { + return; + } + + await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken) + .ConfigureAwait(false); + var schemaName = GetSchemaName(); + if (!await TableExistsAsync(connection, schemaName, "state", cancellationToken).ConfigureAwait(false)) + { + throw new InvalidOperationException( + $"AirGap state table missing in schema '{schemaName}'. 
Run AirGap migrations before using the store."); + } + _initialized = true; + } + finally + { + _initLock.Release(); + } + } + + private async Task TableExistsAsync( + NpgsqlConnection connection, + string schemaName, + string tableName, + CancellationToken cancellationToken) + { + const string sql = """ + SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = @schema AND table_name = @table + ); + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "schema", schemaName); + AddParameter(command, "table", tableName); + + var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false); + return result is true; + } + + private string GetQualifiedTableName(string tableName) + { + var schema = GetSchemaName(); + return $"{QuoteIdentifier(schema)}.{QuoteIdentifier(tableName)}"; + } + + private string GetSchemaName() + { + if (!string.IsNullOrWhiteSpace(DataSource.SchemaName)) + { + return DataSource.SchemaName!; + } + + return AirGapDataSource.DefaultSchemaName; + } + + private static string NormalizeTenantId(string tenantId) => tenantId.Trim().ToLowerInvariant(); + + private static string QuoteIdentifier(string identifier) + { + var escaped = identifier.Replace("\"", "\"\"", StringComparison.Ordinal); + return $"\"{escaped}\""; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.ContentBudgets.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.ContentBudgets.cs new file mode 100644 index 000000000..88472d06b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.ContentBudgets.cs @@ -0,0 +1,59 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Time.Models; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + private static string SerializeContentBudgets(IReadOnlyDictionary budgets) + { + if (budgets.Count == 0) + { + return "{}"; + } + + var dict = new SortedDictionary(StringComparer.Ordinal); + foreach (var kv in budgets.OrderBy(kv => kv.Key, StringComparer.Ordinal)) + { + dict[kv.Key] = new + { + warningSeconds = kv.Value.WarningSeconds, + breachSeconds = kv.Value.BreachSeconds + }; + } + + return JsonSerializer.Serialize(dict); + } + + private IReadOnlyDictionary DeserializeContentBudgets(string? 
json) + { + if (string.IsNullOrWhiteSpace(json)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + try + { + using var doc = JsonDocument.Parse(json); + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var property in doc.RootElement.EnumerateObject()) + { + var warningSeconds = property.Value.GetProperty("warningSeconds").GetInt64(); + var breachSeconds = property.Value.GetProperty("breachSeconds").GetInt64(); + result[property.Name] = new StalenessBudget(warningSeconds, breachSeconds); + } + + return result; + } + catch (Exception ex) + { + Logger.LogWarning(ex, "AirGap state: Failed to parse content budgets JSON; using defaults."); + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.StalenessBudget.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.StalenessBudget.cs new file mode 100644 index 000000000..190a664fa --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.StalenessBudget.cs @@ -0,0 +1,38 @@ +using System; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Time.Models; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + private static string SerializeStalenessBudget(StalenessBudget budget) + { + var obj = new + { + warningSeconds = budget.WarningSeconds, + breachSeconds = budget.BreachSeconds + }; + return JsonSerializer.Serialize(obj); + } + + private StalenessBudget DeserializeStalenessBudget(string json) + { + try + { + using var doc = JsonDocument.Parse(json); + var root = doc.RootElement; + + var warningSeconds = root.GetProperty("warningSeconds").GetInt64(); + var breachSeconds = root.GetProperty("breachSeconds").GetInt64(); + + return new StalenessBudget(warningSeconds, breachSeconds); + } + catch (Exception ex) + { + Logger.LogWarning(ex, "AirGap state: Failed to parse staleness budget JSON; using default."); + return StalenessBudget.Default; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.TimeAnchor.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.TimeAnchor.cs new file mode 100644 index 000000000..81b994d32 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Serialization.TimeAnchor.cs @@ -0,0 +1,49 @@ +using System; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Time.Models; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + private static string SerializeTimeAnchor(TimeAnchor anchor) + { + var obj = new + { + anchorTime = anchor.AnchorTime, + source = anchor.Source, + format = anchor.Format, + signatureFingerprint = anchor.SignatureFingerprint, + tokenDigest = anchor.TokenDigest + }; + return JsonSerializer.Serialize(obj); + } + + private TimeAnchor DeserializeTimeAnchor(string json) + { + try + { + using var doc = JsonDocument.Parse(json); + var root = doc.RootElement; + + var anchorTime = root.GetProperty("anchorTime").GetDateTimeOffset(); + var source = 
root.GetProperty("source").GetString() ?? "unknown"; + var format = root.GetProperty("format").GetString() ?? "unknown"; + var signatureFingerprint = root.TryGetProperty("signatureFingerprint", out var sf) && + sf.ValueKind == JsonValueKind.String + ? sf.GetString() ?? "" + : ""; + var tokenDigest = root.TryGetProperty("tokenDigest", out var td) && td.ValueKind == JsonValueKind.String + ? td.GetString() ?? "" + : ""; + + return new TimeAnchor(anchorTime, source, format, signatureFingerprint, tokenDigest); + } + catch (Exception ex) + { + Logger.LogWarning(ex, "AirGap state: Failed to parse time anchor JSON; using default."); + return TimeAnchor.Unknown; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Write.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Write.cs new file mode 100644 index 000000000..cb94f27d0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.Write.cs @@ -0,0 +1,53 @@ +using System; +using System.Threading; +using StellaOps.AirGap.Controller.Domain; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresAirGapStateStore +{ + public async Task SetAsync(AirGapState state, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(state); + await EnsureTableAsync(cancellationToken).ConfigureAwait(false); + + var tenantKey = NormalizeTenantId(state.TenantId); + var stateTable = GetQualifiedTableName("state"); + + await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "writer", cancellationToken) + .ConfigureAwait(false); + var sql = $$""" + INSERT INTO {{stateTable}} ( + id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, + staleness_budget, drift_baseline_seconds, content_budgets + ) + VALUES ( + @id, @tenant_id, @sealed, @policy_hash, @time_anchor, @last_transition_at, + @staleness_budget, @drift_baseline_seconds, @content_budgets + ) + ON CONFLICT (tenant_id) DO UPDATE SET + id = EXCLUDED.id, + sealed = EXCLUDED.sealed, + policy_hash = EXCLUDED.policy_hash, + time_anchor = EXCLUDED.time_anchor, + last_transition_at = EXCLUDED.last_transition_at, + staleness_budget = EXCLUDED.staleness_budget, + drift_baseline_seconds = EXCLUDED.drift_baseline_seconds, + content_budgets = EXCLUDED.content_budgets, + updated_at = NOW(); + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "id", state.Id); + AddParameter(command, "tenant_id", tenantKey); + AddParameter(command, "sealed", state.Sealed); + AddParameter(command, "policy_hash", (object?)state.PolicyHash ?? 
DBNull.Value); + AddJsonbParameter(command, "time_anchor", SerializeTimeAnchor(state.TimeAnchor)); + AddParameter(command, "last_transition_at", state.LastTransitionAt); + AddJsonbParameter(command, "staleness_budget", SerializeStalenessBudget(state.StalenessBudget)); + AddParameter(command, "drift_baseline_seconds", state.DriftBaselineSeconds); + AddJsonbParameter(command, "content_budgets", SerializeContentBudgets(state.ContentBudgets)); + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.cs index d92150db2..fe6a1402c 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresAirGapStateStore.cs @@ -1,18 +1,11 @@ - +using System.Threading; using Microsoft.Extensions.Logging; -using Npgsql; -using StellaOps.AirGap.Controller.Domain; using StellaOps.AirGap.Controller.Stores; -using StellaOps.AirGap.Time.Models; using StellaOps.Infrastructure.Postgres.Repositories; -using System.Text.Json; namespace StellaOps.AirGap.Persistence.Postgres.Repositories; -/// -/// PostgreSQL-backed store for AirGap sealing state. -/// -public sealed class PostgresAirGapStateStore : RepositoryBase, IAirGapStateStore +public sealed partial class PostgresAirGapStateStore : RepositoryBase, IAirGapStateStore { private volatile bool _initialized; private readonly SemaphoreSlim _initLock = new(1, 1); @@ -21,322 +14,4 @@ public sealed class PostgresAirGapStateStore : RepositoryBase, : base(dataSource, logger) { } - - public async Task GetAsync(string tenantId, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - await EnsureTableAsync(cancellationToken).ConfigureAwait(false); - - var tenantKey = NormalizeTenantId(tenantId); - var stateTable = GetQualifiedTableName("state"); - - await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", cancellationToken).ConfigureAwait(false); - var sql = $$""" - SELECT id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, - staleness_budget, drift_baseline_seconds, content_budgets - FROM {{stateTable}} - WHERE tenant_id = @tenant_id; - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "tenant_id", tenantKey); - - await using (var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false)) - { - if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return Map(reader); - } - } - - // Fallback for legacy rows stored without normalization. 
- await using var fallbackCommand = CreateCommand($$""" - SELECT id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, - staleness_budget, drift_baseline_seconds, content_budgets - FROM {{stateTable}} - WHERE LOWER(tenant_id) = LOWER(@tenant_id) - ORDER BY updated_at DESC, id DESC - LIMIT 1; - """, connection); - AddParameter(fallbackCommand, "tenant_id", tenantId); - - await using var fallbackReader = await fallbackCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - if (await fallbackReader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return Map(fallbackReader); - } - - // Return default state for tenant if not found - return new AirGapState { TenantId = tenantId }; - } - - public async Task SetAsync(AirGapState state, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(state); - await EnsureTableAsync(cancellationToken).ConfigureAwait(false); - - var tenantKey = NormalizeTenantId(state.TenantId); - var stateTable = GetQualifiedTableName("state"); - - await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "writer", cancellationToken).ConfigureAwait(false); - var sql = $$""" - INSERT INTO {{stateTable}} ( - id, tenant_id, sealed, policy_hash, time_anchor, last_transition_at, - staleness_budget, drift_baseline_seconds, content_budgets - ) - VALUES ( - @id, @tenant_id, @sealed, @policy_hash, @time_anchor, @last_transition_at, - @staleness_budget, @drift_baseline_seconds, @content_budgets - ) - ON CONFLICT (tenant_id) DO UPDATE SET - id = EXCLUDED.id, - sealed = EXCLUDED.sealed, - policy_hash = EXCLUDED.policy_hash, - time_anchor = EXCLUDED.time_anchor, - last_transition_at = EXCLUDED.last_transition_at, - staleness_budget = EXCLUDED.staleness_budget, - drift_baseline_seconds = EXCLUDED.drift_baseline_seconds, - content_budgets = EXCLUDED.content_budgets, - updated_at = NOW(); - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "id", state.Id); - AddParameter(command, "tenant_id", tenantKey); - AddParameter(command, "sealed", state.Sealed); - AddParameter(command, "policy_hash", (object?)state.PolicyHash ?? DBNull.Value); - AddJsonbParameter(command, "time_anchor", SerializeTimeAnchor(state.TimeAnchor)); - AddParameter(command, "last_transition_at", state.LastTransitionAt); - AddJsonbParameter(command, "staleness_budget", SerializeStalenessBudget(state.StalenessBudget)); - AddParameter(command, "drift_baseline_seconds", state.DriftBaselineSeconds); - AddJsonbParameter(command, "content_budgets", SerializeContentBudgets(state.ContentBudgets)); - - await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - } - - private AirGapState Map(NpgsqlDataReader reader) - { - var id = reader.GetString(0); - var tenantId = reader.GetString(1); - var sealed_ = reader.GetBoolean(2); - var policyHash = reader.IsDBNull(3) ? null : reader.GetString(3); - var timeAnchorJson = reader.GetFieldValue(4); - var lastTransitionAt = reader.GetFieldValue(5); - var stalenessBudgetJson = reader.GetFieldValue(6); - var driftBaselineSeconds = reader.GetInt64(7); - var contentBudgetsJson = reader.IsDBNull(8) ? 
null : reader.GetFieldValue(8); - - var timeAnchor = DeserializeTimeAnchor(timeAnchorJson); - var stalenessBudget = DeserializeStalenessBudget(stalenessBudgetJson); - var contentBudgets = DeserializeContentBudgets(contentBudgetsJson); - - return new AirGapState - { - Id = id, - TenantId = tenantId, - Sealed = sealed_, - PolicyHash = policyHash, - TimeAnchor = timeAnchor, - LastTransitionAt = lastTransitionAt, - StalenessBudget = stalenessBudget, - DriftBaselineSeconds = driftBaselineSeconds, - ContentBudgets = contentBudgets - }; - } - - #region Serialization - - private static string SerializeTimeAnchor(TimeAnchor anchor) - { - var obj = new - { - anchorTime = anchor.AnchorTime, - source = anchor.Source, - format = anchor.Format, - signatureFingerprint = anchor.SignatureFingerprint, - tokenDigest = anchor.TokenDigest - }; - return JsonSerializer.Serialize(obj); - } - - private TimeAnchor DeserializeTimeAnchor(string json) - { - try - { - using var doc = JsonDocument.Parse(json); - var root = doc.RootElement; - - var anchorTime = root.GetProperty("anchorTime").GetDateTimeOffset(); - var source = root.GetProperty("source").GetString() ?? "unknown"; - var format = root.GetProperty("format").GetString() ?? "unknown"; - var signatureFingerprint = root.TryGetProperty("signatureFingerprint", out var sf) && sf.ValueKind == JsonValueKind.String - ? sf.GetString() ?? "" - : ""; - var tokenDigest = root.TryGetProperty("tokenDigest", out var td) && td.ValueKind == JsonValueKind.String - ? td.GetString() ?? "" - : ""; - - return new TimeAnchor(anchorTime, source, format, signatureFingerprint, tokenDigest); - } - catch (Exception ex) - { - Logger.LogWarning(ex, "AirGap state: Failed to parse time anchor JSON; using default."); - return TimeAnchor.Unknown; - } - } - - private static string SerializeStalenessBudget(StalenessBudget budget) - { - var obj = new - { - warningSeconds = budget.WarningSeconds, - breachSeconds = budget.BreachSeconds - }; - return JsonSerializer.Serialize(obj); - } - - private StalenessBudget DeserializeStalenessBudget(string json) - { - try - { - using var doc = JsonDocument.Parse(json); - var root = doc.RootElement; - - var warningSeconds = root.GetProperty("warningSeconds").GetInt64(); - var breachSeconds = root.GetProperty("breachSeconds").GetInt64(); - - return new StalenessBudget(warningSeconds, breachSeconds); - } - catch (Exception ex) - { - Logger.LogWarning(ex, "AirGap state: Failed to parse staleness budget JSON; using default."); - return StalenessBudget.Default; - } - } - - private static string SerializeContentBudgets(IReadOnlyDictionary budgets) - { - if (budgets.Count == 0) - { - return "{}"; - } - - var dict = new SortedDictionary(StringComparer.Ordinal); - foreach (var kv in budgets.OrderBy(kv => kv.Key, StringComparer.Ordinal)) - { - dict[kv.Key] = new - { - warningSeconds = kv.Value.WarningSeconds, - breachSeconds = kv.Value.BreachSeconds - }; - } - - return JsonSerializer.Serialize(dict); - } - - private IReadOnlyDictionary DeserializeContentBudgets(string? 
json) - { - if (string.IsNullOrWhiteSpace(json)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - try - { - using var doc = JsonDocument.Parse(json); - var result = new Dictionary(StringComparer.OrdinalIgnoreCase); - - foreach (var property in doc.RootElement.EnumerateObject()) - { - var warningSeconds = property.Value.GetProperty("warningSeconds").GetInt64(); - var breachSeconds = property.Value.GetProperty("breachSeconds").GetInt64(); - result[property.Name] = new StalenessBudget(warningSeconds, breachSeconds); - } - - return result; - } - catch (Exception ex) - { - Logger.LogWarning(ex, "AirGap state: Failed to parse content budgets JSON; using defaults."); - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - } - - #endregion - - private async ValueTask EnsureTableAsync(CancellationToken cancellationToken) - { - if (_initialized) - { - return; - } - - await _initLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_initialized) - { - return; - } - - await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false); - var schemaName = GetSchemaName(); - if (!await TableExistsAsync(connection, schemaName, "state", cancellationToken).ConfigureAwait(false)) - { - throw new InvalidOperationException( - $"AirGap state table missing in schema '{schemaName}'. Run AirGap migrations before using the store."); - } - _initialized = true; - } - finally - { - _initLock.Release(); - } - } - - private async Task TableExistsAsync( - NpgsqlConnection connection, - string schemaName, - string tableName, - CancellationToken cancellationToken) - { - const string sql = """ - SELECT EXISTS ( - SELECT 1 - FROM information_schema.tables - WHERE table_schema = @schema AND table_name = @table - ); - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "schema", schemaName); - AddParameter(command, "table", tableName); - - var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false); - return result is true; - } - - private string GetQualifiedTableName(string tableName) - { - var schema = GetSchemaName(); - return $"{QuoteIdentifier(schema)}.{QuoteIdentifier(tableName)}"; - } - - private string GetSchemaName() - { - if (!string.IsNullOrWhiteSpace(DataSource.SchemaName)) - { - return DataSource.SchemaName!; - } - - return AirGapDataSource.DefaultSchemaName; - } - - private static string NormalizeTenantId(string tenantId) => tenantId.Trim().ToLowerInvariant(); - - private static string QuoteIdentifier(string identifier) - { - var escaped = identifier.Replace("\"", "\"\"", StringComparison.Ordinal); - return $"\"{escaped}\""; - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Mapping.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Mapping.cs new file mode 100644 index 000000000..8095eb4c3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Mapping.cs @@ -0,0 +1,63 @@ +using System.Threading; +using Npgsql; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + private static BundleVersionRecord Map(NpgsqlDataReader reader) + { + var tenantId = reader.GetString(0); + var bundleType = reader.GetString(1); + var versionString = reader.GetString(2); + 
var major = reader.GetInt32(3); + var minor = reader.GetInt32(4); + var patch = reader.GetInt32(5); + var prerelease = reader.IsDBNull(6) ? null : reader.GetString(6); + var bundleCreatedAt = reader.GetFieldValue(7); + var bundleDigest = reader.GetString(8); + var activatedAt = reader.GetFieldValue(9); + var wasForceActivated = reader.GetBoolean(10); + var forceActivateReason = reader.IsDBNull(11) ? null : reader.GetString(11); + + return new BundleVersionRecord( + TenantId: tenantId, + BundleType: bundleType, + VersionString: versionString, + Major: major, + Minor: minor, + Patch: patch, + Prerelease: prerelease, + BundleCreatedAt: bundleCreatedAt, + BundleDigest: bundleDigest, + ActivatedAt: activatedAt, + WasForceActivated: wasForceActivated, + ForceActivateReason: forceActivateReason); + } + + private async Task GetCurrentForUpdateAsync( + NpgsqlConnection connection, + NpgsqlTransaction transaction, + string versionTable, + string tenantKey, + string bundleTypeKey, + CancellationToken ct) + { + var sql = $$""" + SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + FROM {{versionTable}} + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type + FOR UPDATE; + """; + + await using var command = CreateCommand(sql, connection); + command.Transaction = transaction; + AddParameter(command, "tenant_id", tenantKey); + AddParameter(command, "bundle_type", bundleTypeKey); + + await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); + return await reader.ReadAsync(ct).ConfigureAwait(false) ? Map(reader) : null; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Read.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Read.cs new file mode 100644 index 000000000..762beea6e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Read.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using Npgsql; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + public async Task GetCurrentAsync( + string tenantId, + string bundleType, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(tenantId); + var bundleTypeKey = NormalizeKey(bundleType); + + var versionTable = GetQualifiedTableName("bundle_versions"); + await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", ct).ConfigureAwait(false); + var sql = $$""" + SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + FROM {{versionTable}} + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type; + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "tenant_id", tenantKey); + AddParameter(command, "bundle_type", bundleTypeKey); + + await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); + return await reader.ReadAsync(ct).ConfigureAwait(false) ? 
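
For context on how these read paths are typically consumed, here is a hedged usage sketch; the store variable, tenant id, and bundle type are illustrative assumptions, not values taken from this change.

```csharp
// Illustrative only: reading the currently activated bundle and recent history.
// Assumes an IBundleVersionStore instance (store) and a CancellationToken (ct).
var current = await store.GetCurrentAsync("tenant-a", "advisories", ct);
if (current is null)
{
    // No bundle of this type has been activated for this tenant yet.
}

var history = await store.GetHistoryAsync("tenant-a", "advisories", 5, ct);
foreach (var record in history)
{
    Console.WriteLine($"{record.VersionString} activated {record.ActivatedAt:O} (forced: {record.WasForceActivated})");
}
```
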
Map(reader) : null; + } + + public async Task> GetHistoryAsync( + string tenantId, + string bundleType, + int limit = 10, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + + if (limit <= 0) + { + return Array.Empty(); + } + + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(tenantId); + var bundleTypeKey = NormalizeKey(bundleType); + + var historyTable = GetQualifiedTableName("bundle_version_history"); + await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", ct).ConfigureAwait(false); + var sql = $$""" + SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + FROM {{historyTable}} + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type + ORDER BY activated_at DESC, id DESC + LIMIT @limit; + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "tenant_id", tenantKey); + AddParameter(command, "bundle_type", bundleTypeKey); + AddParameter(command, "limit", limit); + + await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); + var results = new List(); + while (await reader.ReadAsync(ct).ConfigureAwait(false)) + { + results.Add(Map(reader)); + } + + return results; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Schema.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Schema.cs new file mode 100644 index 000000000..0c11e8263 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Schema.cs @@ -0,0 +1,87 @@ +using System; +using System.Threading; +using Npgsql; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + private async ValueTask EnsureTablesAsync(CancellationToken ct) + { + if (_initialized) + { + return; + } + + await _initLock.WaitAsync(ct).ConfigureAwait(false); + try + { + if (_initialized) + { + return; + } + + await using var connection = await DataSource.OpenSystemConnectionAsync(ct).ConfigureAwait(false); + var schemaName = GetSchemaName(); + + if (!await TableExistsAsync(connection, schemaName, "bundle_versions", ct).ConfigureAwait(false) || + !await TableExistsAsync(connection, schemaName, "bundle_version_history", ct).ConfigureAwait(false)) + { + throw new InvalidOperationException( + $"AirGap bundle version tables missing in schema '{schemaName}'. 
Run AirGap migrations before using the store."); + } + + _initialized = true; + } + finally + { + _initLock.Release(); + } + } + + private async Task TableExistsAsync( + NpgsqlConnection connection, + string schemaName, + string tableName, + CancellationToken cancellationToken) + { + const string sql = """ + SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = @schema AND table_name = @table + ); + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "schema", schemaName); + AddParameter(command, "table", tableName); + + var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false); + return result is true; + } + + private string GetQualifiedTableName(string tableName) + { + var schema = GetSchemaName(); + return $"{QuoteIdentifier(schema)}.{QuoteIdentifier(tableName)}"; + } + + private string GetSchemaName() + { + if (!string.IsNullOrWhiteSpace(DataSource.SchemaName)) + { + return DataSource.SchemaName!; + } + + return AirGapDataSource.DefaultSchemaName; + } + + private static string NormalizeKey(string value) => value.Trim().ToLowerInvariant(); + + private static string QuoteIdentifier(string identifier) + { + var escaped = identifier.Replace("\"", "\"\"", StringComparison.Ordinal); + return $"\"{escaped}\""; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Current.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Current.cs new file mode 100644 index 000000000..8290af9cd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Current.cs @@ -0,0 +1,58 @@ +using System; +using System.Threading; +using Npgsql; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + private async Task UpsertCurrentAsync( + NpgsqlConnection connection, + NpgsqlTransaction tx, + string versionTable, + BundleVersionRecord record, + string tenantKey, + string bundleTypeKey, + CancellationToken ct) + { + var upsertSql = $$""" + INSERT INTO {{versionTable}} ( + tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + ) + VALUES ( + @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, + @bundle_created_at, @bundle_digest, @activated_at, @was_force_activated, @force_activate_reason + ) + ON CONFLICT (tenant_id, bundle_type) DO UPDATE SET + version_string = EXCLUDED.version_string, + major = EXCLUDED.major, + minor = EXCLUDED.minor, + patch = EXCLUDED.patch, + prerelease = EXCLUDED.prerelease, + bundle_created_at = EXCLUDED.bundle_created_at, + bundle_digest = EXCLUDED.bundle_digest, + activated_at = EXCLUDED.activated_at, + was_force_activated = EXCLUDED.was_force_activated, + force_activate_reason = EXCLUDED.force_activate_reason, + updated_at = NOW(); + """; + + await using var upsertCmd = CreateCommand(upsertSql, connection); + upsertCmd.Transaction = tx; + AddParameter(upsertCmd, "tenant_id", tenantKey); + AddParameter(upsertCmd, "bundle_type", bundleTypeKey); + AddParameter(upsertCmd, "version_string", record.VersionString); + AddParameter(upsertCmd, "major", record.Major); + AddParameter(upsertCmd, "minor", record.Minor); + AddParameter(upsertCmd, "patch", 
record.Patch); + AddParameter(upsertCmd, "prerelease", (object?)record.Prerelease ?? DBNull.Value); + AddParameter(upsertCmd, "bundle_created_at", record.BundleCreatedAt); + AddParameter(upsertCmd, "bundle_digest", record.BundleDigest); + AddParameter(upsertCmd, "activated_at", record.ActivatedAt); + AddParameter(upsertCmd, "was_force_activated", record.WasForceActivated); + AddParameter(upsertCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? DBNull.Value); + await upsertCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.History.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.History.cs new file mode 100644 index 000000000..a823e1930 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.History.cs @@ -0,0 +1,71 @@ +using System; +using System.Threading; +using Npgsql; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + private async Task CloseHistoryAsync( + NpgsqlConnection connection, + NpgsqlTransaction tx, + string historyTable, + BundleVersionRecord record, + string tenantKey, + string bundleTypeKey, + CancellationToken ct) + { + var closeHistorySql = $$""" + UPDATE {{historyTable}} + SET deactivated_at = @activated_at + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type AND deactivated_at IS NULL; + """; + + await using var closeCmd = CreateCommand(closeHistorySql, connection); + closeCmd.Transaction = tx; + AddParameter(closeCmd, "tenant_id", tenantKey); + AddParameter(closeCmd, "bundle_type", bundleTypeKey); + AddParameter(closeCmd, "activated_at", record.ActivatedAt); + await closeCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } + + private async Task InsertHistoryAsync( + NpgsqlConnection connection, + NpgsqlTransaction tx, + string historyTable, + BundleVersionRecord record, + string tenantKey, + string bundleTypeKey, + CancellationToken ct) + { + var historySql = $$""" + INSERT INTO {{historyTable}} ( + tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, deactivated_at, + was_force_activated, force_activate_reason + ) + VALUES ( + @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, + @bundle_created_at, @bundle_digest, @activated_at, NULL, + @was_force_activated, @force_activate_reason + ); + """; + + await using var historyCmd = CreateCommand(historySql, connection); + historyCmd.Transaction = tx; + AddParameter(historyCmd, "tenant_id", tenantKey); + AddParameter(historyCmd, "bundle_type", bundleTypeKey); + AddParameter(historyCmd, "version_string", record.VersionString); + AddParameter(historyCmd, "major", record.Major); + AddParameter(historyCmd, "minor", record.Minor); + AddParameter(historyCmd, "patch", record.Patch); + AddParameter(historyCmd, "prerelease", (object?)record.Prerelease ?? 
DBNull.Value); + AddParameter(historyCmd, "bundle_created_at", record.BundleCreatedAt); + AddParameter(historyCmd, "bundle_digest", record.BundleDigest); + AddParameter(historyCmd, "activated_at", record.ActivatedAt); + AddParameter(historyCmd, "was_force_activated", record.WasForceActivated); + AddParameter(historyCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? DBNull.Value); + await historyCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Validation.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Validation.cs new file mode 100644 index 000000000..378d61668 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.Validation.cs @@ -0,0 +1,34 @@ +using System; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + private static void EnsureMonotonicVersion(BundleVersionRecord incoming, BundleVersionRecord? current) + { + if (current is null || incoming.WasForceActivated) + { + return; + } + + var incomingVersion = new BundleVersion( + incoming.Major, + incoming.Minor, + incoming.Patch, + incoming.BundleCreatedAt, + incoming.Prerelease); + var currentVersion = new BundleVersion( + current.Major, + current.Minor, + current.Patch, + current.BundleCreatedAt, + current.Prerelease); + + if (!incomingVersion.IsNewerThan(currentVersion)) + { + throw new InvalidOperationException( + $"Incoming version '{incomingVersion.SemVer}' is not monotonic vs current '{currentVersion.SemVer}'."); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.cs new file mode 100644 index 000000000..422ec8c3a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.Upsert.cs @@ -0,0 +1,43 @@ +using System; +using System.Threading; +using StellaOps.AirGap.Importer.Versioning; + +namespace StellaOps.AirGap.Persistence.Postgres.Repositories; + +public sealed partial class PostgresBundleVersionStore +{ + public async Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(record); + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(record.TenantId); + var bundleTypeKey = NormalizeKey(record.BundleType); + + var versionTable = GetQualifiedTableName("bundle_versions"); + var historyTable = GetQualifiedTableName("bundle_version_history"); + + await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "writer", ct).ConfigureAwait(false); + await using var tx = await connection.BeginTransactionAsync(ct).ConfigureAwait(false); + + var current = await GetCurrentForUpdateAsync( + connection, + tx, + versionTable, + tenantKey, + bundleTypeKey, + ct) + .ConfigureAwait(false); + + EnsureMonotonicVersion(record, current); + + await CloseHistoryAsync(connection, tx, historyTable, record, tenantKey, bundleTypeKey, ct) + .ConfigureAwait(false); + await InsertHistoryAsync(connection, tx, historyTable, record, tenantKey, bundleTypeKey, ct) + .ConfigureAwait(false); + await 
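
To make the monotonicity rule concrete, a small hedged sketch follows; the version numbers and timestamps are invented, while `BundleVersion`, `IsNewerThan`, and `SemVer` are used exactly as in `EnsureMonotonicVersion` above.

```csharp
// Illustrative only: mirrors the check EnsureMonotonicVersion performs before an upsert.
var current = new BundleVersion(1, 4, 2, DateTimeOffset.Parse("2026-01-01T00:00:00Z"), null);
var incoming = new BundleVersion(1, 4, 2, DateTimeOffset.Parse("2025-12-30T00:00:00Z"), null);

if (!incoming.IsNewerThan(current))
{
    // Without WasForceActivated, UpsertAsync rejects this activation.
    Console.WriteLine($"'{incoming.SemVer}' is not monotonic vs current '{current.SemVer}'.");
}
```
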
UpsertCurrentAsync(connection, tx, versionTable, record, tenantKey, bundleTypeKey, ct) + .ConfigureAwait(false); + + await tx.CommitAsync(ct).ConfigureAwait(false); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.cs index 60b13317b..374e5eda4 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/Postgres/Repositories/PostgresBundleVersionStore.cs @@ -1,14 +1,11 @@ +using System.Threading; using Microsoft.Extensions.Logging; -using Npgsql; using StellaOps.AirGap.Importer.Versioning; using StellaOps.Infrastructure.Postgres.Repositories; namespace StellaOps.AirGap.Persistence.Postgres.Repositories; -/// -/// PostgreSQL-backed store for AirGap bundle version activation tracking. -/// -public sealed class PostgresBundleVersionStore : RepositoryBase, IBundleVersionStore +public sealed partial class PostgresBundleVersionStore : RepositoryBase, IBundleVersionStore { private volatile bool _initialized; private readonly SemaphoreSlim _initLock = new(1, 1); @@ -17,345 +14,4 @@ public sealed class PostgresBundleVersionStore : RepositoryBase GetCurrentAsync( - string tenantId, - string bundleType, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); - - await EnsureTablesAsync(ct).ConfigureAwait(false); - - var tenantKey = NormalizeKey(tenantId); - var bundleTypeKey = NormalizeKey(bundleType); - - var versionTable = GetQualifiedTableName("bundle_versions"); - await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", ct).ConfigureAwait(false); - var sql = $$""" - SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, - bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason - FROM {{versionTable}} - WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type; - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "tenant_id", tenantKey); - AddParameter(command, "bundle_type", bundleTypeKey); - - await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); - return await reader.ReadAsync(ct).ConfigureAwait(false) ? 
Map(reader) : null; - } - - public async Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(record); - await EnsureTablesAsync(ct).ConfigureAwait(false); - - var tenantKey = NormalizeKey(record.TenantId); - var bundleTypeKey = NormalizeKey(record.BundleType); - - var versionTable = GetQualifiedTableName("bundle_versions"); - var historyTable = GetQualifiedTableName("bundle_version_history"); - await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "writer", ct).ConfigureAwait(false); - await using var tx = await connection.BeginTransactionAsync(ct).ConfigureAwait(false); - - var current = await GetCurrentForUpdateAsync( - connection, - tx, - versionTable, - tenantKey, - bundleTypeKey, - ct) - .ConfigureAwait(false); - - if (current is not null && !record.WasForceActivated) - { - var incomingVersion = new BundleVersion( - record.Major, - record.Minor, - record.Patch, - record.BundleCreatedAt, - record.Prerelease); - var currentVersion = new BundleVersion( - current.Major, - current.Minor, - current.Patch, - current.BundleCreatedAt, - current.Prerelease); - - if (!incomingVersion.IsNewerThan(currentVersion)) - { - throw new InvalidOperationException( - $"Incoming version '{incomingVersion.SemVer}' is not monotonic vs current '{currentVersion.SemVer}'."); - } - } - - var closeHistorySql = $$""" - UPDATE {{historyTable}} - SET deactivated_at = @activated_at - WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type AND deactivated_at IS NULL; - """; - - await using (var closeCmd = CreateCommand(closeHistorySql, connection)) - { - closeCmd.Transaction = tx; - AddParameter(closeCmd, "tenant_id", tenantKey); - AddParameter(closeCmd, "bundle_type", bundleTypeKey); - AddParameter(closeCmd, "activated_at", record.ActivatedAt); - await closeCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); - } - - var historySql = $$""" - INSERT INTO {{historyTable}} ( - tenant_id, bundle_type, version_string, major, minor, patch, prerelease, - bundle_created_at, bundle_digest, activated_at, deactivated_at, was_force_activated, force_activate_reason - ) - VALUES ( - @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, - @bundle_created_at, @bundle_digest, @activated_at, NULL, @was_force_activated, @force_activate_reason - ); - """; - - await using (var historyCmd = CreateCommand(historySql, connection)) - { - historyCmd.Transaction = tx; - AddParameter(historyCmd, "tenant_id", tenantKey); - AddParameter(historyCmd, "bundle_type", bundleTypeKey); - AddParameter(historyCmd, "version_string", record.VersionString); - AddParameter(historyCmd, "major", record.Major); - AddParameter(historyCmd, "minor", record.Minor); - AddParameter(historyCmd, "patch", record.Patch); - AddParameter(historyCmd, "prerelease", (object?)record.Prerelease ?? DBNull.Value); - AddParameter(historyCmd, "bundle_created_at", record.BundleCreatedAt); - AddParameter(historyCmd, "bundle_digest", record.BundleDigest); - AddParameter(historyCmd, "activated_at", record.ActivatedAt); - AddParameter(historyCmd, "was_force_activated", record.WasForceActivated); - AddParameter(historyCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? 
DBNull.Value); - await historyCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); - } - - var upsertSql = $$""" - INSERT INTO {{versionTable}} ( - tenant_id, bundle_type, version_string, major, minor, patch, prerelease, - bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason - ) - VALUES ( - @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, - @bundle_created_at, @bundle_digest, @activated_at, @was_force_activated, @force_activate_reason - ) - ON CONFLICT (tenant_id, bundle_type) DO UPDATE SET - version_string = EXCLUDED.version_string, - major = EXCLUDED.major, - minor = EXCLUDED.minor, - patch = EXCLUDED.patch, - prerelease = EXCLUDED.prerelease, - bundle_created_at = EXCLUDED.bundle_created_at, - bundle_digest = EXCLUDED.bundle_digest, - activated_at = EXCLUDED.activated_at, - was_force_activated = EXCLUDED.was_force_activated, - force_activate_reason = EXCLUDED.force_activate_reason, - updated_at = NOW(); - """; - - await using (var upsertCmd = CreateCommand(upsertSql, connection)) - { - upsertCmd.Transaction = tx; - AddParameter(upsertCmd, "tenant_id", tenantKey); - AddParameter(upsertCmd, "bundle_type", bundleTypeKey); - AddParameter(upsertCmd, "version_string", record.VersionString); - AddParameter(upsertCmd, "major", record.Major); - AddParameter(upsertCmd, "minor", record.Minor); - AddParameter(upsertCmd, "patch", record.Patch); - AddParameter(upsertCmd, "prerelease", (object?)record.Prerelease ?? DBNull.Value); - AddParameter(upsertCmd, "bundle_created_at", record.BundleCreatedAt); - AddParameter(upsertCmd, "bundle_digest", record.BundleDigest); - AddParameter(upsertCmd, "activated_at", record.ActivatedAt); - AddParameter(upsertCmd, "was_force_activated", record.WasForceActivated); - AddParameter(upsertCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? 
DBNull.Value); - await upsertCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); - } - - await tx.CommitAsync(ct).ConfigureAwait(false); - } - - public async Task> GetHistoryAsync( - string tenantId, - string bundleType, - int limit = 10, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); - - if (limit <= 0) - { - return Array.Empty(); - } - - await EnsureTablesAsync(ct).ConfigureAwait(false); - - var tenantKey = NormalizeKey(tenantId); - var bundleTypeKey = NormalizeKey(bundleType); - - var historyTable = GetQualifiedTableName("bundle_version_history"); - await using var connection = await DataSource.OpenConnectionAsync(tenantKey, "reader", ct).ConfigureAwait(false); - var sql = $$""" - SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, - bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason - FROM {{historyTable}} - WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type - ORDER BY activated_at DESC, id DESC - LIMIT @limit; - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "tenant_id", tenantKey); - AddParameter(command, "bundle_type", bundleTypeKey); - AddParameter(command, "limit", limit); - - await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); - var results = new List(); - while (await reader.ReadAsync(ct).ConfigureAwait(false)) - { - results.Add(Map(reader)); - } - - return results; - } - - private static BundleVersionRecord Map(NpgsqlDataReader reader) - { - var tenantId = reader.GetString(0); - var bundleType = reader.GetString(1); - var versionString = reader.GetString(2); - var major = reader.GetInt32(3); - var minor = reader.GetInt32(4); - var patch = reader.GetInt32(5); - var prerelease = reader.IsDBNull(6) ? null : reader.GetString(6); - var bundleCreatedAt = reader.GetFieldValue(7); - var bundleDigest = reader.GetString(8); - var activatedAt = reader.GetFieldValue(9); - var wasForceActivated = reader.GetBoolean(10); - var forceActivateReason = reader.IsDBNull(11) ? null : reader.GetString(11); - - return new BundleVersionRecord( - TenantId: tenantId, - BundleType: bundleType, - VersionString: versionString, - Major: major, - Minor: minor, - Patch: patch, - Prerelease: prerelease, - BundleCreatedAt: bundleCreatedAt, - BundleDigest: bundleDigest, - ActivatedAt: activatedAt, - WasForceActivated: wasForceActivated, - ForceActivateReason: forceActivateReason); - } - - private async Task GetCurrentForUpdateAsync( - NpgsqlConnection connection, - NpgsqlTransaction transaction, - string versionTable, - string tenantKey, - string bundleTypeKey, - CancellationToken ct) - { - var sql = $$""" - SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, - bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason - FROM {{versionTable}} - WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type - FOR UPDATE; - """; - - await using var command = CreateCommand(sql, connection); - command.Transaction = transaction; - AddParameter(command, "tenant_id", tenantKey); - AddParameter(command, "bundle_type", bundleTypeKey); - - await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); - return await reader.ReadAsync(ct).ConfigureAwait(false) ? 
Map(reader) : null; - } - - private async ValueTask EnsureTablesAsync(CancellationToken ct) - { - if (_initialized) - { - return; - } - - await _initLock.WaitAsync(ct).ConfigureAwait(false); - try - { - if (_initialized) - { - return; - } - - await using var connection = await DataSource.OpenSystemConnectionAsync(ct).ConfigureAwait(false); - var schemaName = GetSchemaName(); - - if (!await TableExistsAsync(connection, schemaName, "bundle_versions", ct).ConfigureAwait(false) || - !await TableExistsAsync(connection, schemaName, "bundle_version_history", ct).ConfigureAwait(false)) - { - throw new InvalidOperationException( - $"AirGap bundle version tables missing in schema '{schemaName}'. Run AirGap migrations before using the store."); - } - - _initialized = true; - } - finally - { - _initLock.Release(); - } - } - - private async Task TableExistsAsync( - NpgsqlConnection connection, - string schemaName, - string tableName, - CancellationToken cancellationToken) - { - const string sql = """ - SELECT EXISTS ( - SELECT 1 - FROM information_schema.tables - WHERE table_schema = @schema AND table_name = @table - ); - """; - - await using var command = CreateCommand(sql, connection); - AddParameter(command, "schema", schemaName); - AddParameter(command, "table", tableName); - - var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false); - return result is true; - } - - private string GetQualifiedTableName(string tableName) - { - var schema = GetSchemaName(); - return $"{QuoteIdentifier(schema)}.{QuoteIdentifier(tableName)}"; - } - - private string GetSchemaName() - { - if (!string.IsNullOrWhiteSpace(DataSource.SchemaName)) - { - return DataSource.SchemaName!; - } - - return AirGapDataSource.DefaultSchemaName; - } - - private static string NormalizeKey(string value) => value.Trim().ToLowerInvariant(); - - private static string QuoteIdentifier(string identifier) - { - var escaped = identifier.Replace("\"", "\"\"", StringComparison.Ordinal); - return $"\"{escaped}\""; - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/TASKS.md b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/TASKS.md index 6a09c2e79..c01ddf968 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/TASKS.md +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/TASKS.md @@ -1,10 +1,8 @@ -# AirGap Persistence Task Board - +# StellaOps.AirGap.Persistence Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. | Task ID | Status | Notes | | --- | --- | --- | -| AUDIT-0028-M | DONE | Revalidated 2026-01-06; no new maintainability findings. | -| AUDIT-0028-T | DONE | Revalidated 2026-01-06; test coverage tracked in AUDIT-0029. | -| AUDIT-0028-A | DONE | Applied schema + determinism fixes and migration host wiring. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/__Libraries/StellaOps.AirGap.Persistence/StellaOps.AirGap.Persistence.md. | +| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. 
| diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncClockOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncClockOptions.cs new file mode 100644 index 000000000..3ce305893 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncClockOptions.cs @@ -0,0 +1,15 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync; + +internal sealed class AirGapSyncClockOptions +{ + public AirGapSyncClockOptions(string nodeId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); + NodeId = nodeId; + } + + public string NodeId { get; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Import.cs new file mode 100644 index 000000000..c2a6a0d19 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Import.cs @@ -0,0 +1,26 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.AirGap.Sync.Services; + +namespace StellaOps.AirGap.Sync; + +public static partial class AirGapSyncServiceCollectionExtensions +{ + /// + /// Adds the air-gap sync service for importing bundles to the central scheduler. + /// + /// The service collection. + /// The service collection for chaining. + /// + /// This requires ISyncSchedulerLogRepository to be registered separately, + /// as it depends on the Scheduler.Persistence module. + /// + public static IServiceCollection AddAirGapSyncImportService(this IServiceCollection services) + { + services.TryAddScoped(); + return services; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Options.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Options.cs new file mode 100644 index 000000000..4556abc47 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Options.cs @@ -0,0 +1,26 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.DependencyInjection; +using StellaOps.AirGap.Sync.Stores; + +namespace StellaOps.AirGap.Sync; + +public static partial class AirGapSyncServiceCollectionExtensions +{ + /// + /// Adds air-gap sync services with custom options. + /// + /// The service collection. + /// The node identifier for this instance. + /// Action to configure file-based store options. + /// The service collection for chaining. + public static IServiceCollection AddAirGapSyncServices( + this IServiceCollection services, + string nodeId, + Action configureOptions) + { + services.Configure(configureOptions); + return services.AddAirGapSyncServices(nodeId); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Transport.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Transport.cs new file mode 100644 index 000000000..cb0a3910b --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.Transport.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
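
A hedged sketch of how a host might consume the registration methods introduced above; the host builder, node id, and call ordering are illustrative assumptions rather than part of this change.

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using StellaOps.AirGap.Sync;

var builder = Host.CreateApplicationBuilder(args);

// Edge/offline node: registers the clock, stores, exporter, and signer used by the sync pipeline.
builder.Services.AddAirGapSyncServices("edge-node-01");

// Central scheduler side: requires ISyncSchedulerLogRepository from Scheduler.Persistence,
// as noted in the remarks on AddAirGapSyncImportService.
builder.Services.AddAirGapSyncImportService();

using var host = builder.Build();
```
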
+// +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.AirGap.Sync.Transport; + +namespace StellaOps.AirGap.Sync; + +public static partial class AirGapSyncServiceCollectionExtensions +{ + /// + /// Adds file-based transport for job sync bundles. + /// + /// The service collection. + /// The service collection for chaining. + public static IServiceCollection AddFileBasedJobSyncTransport(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } + + /// + /// Adds Router-based transport for job sync bundles. + /// + /// The service collection. + /// The service collection for chaining. + /// + /// Requires IRouterJobSyncClient to be registered separately. + /// + public static IServiceCollection AddRouterJobSyncTransport(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.TransportOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.TransportOptions.cs new file mode 100644 index 000000000..fa51166bc --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.TransportOptions.cs @@ -0,0 +1,38 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.DependencyInjection; +using StellaOps.AirGap.Sync.Transport; + +namespace StellaOps.AirGap.Sync; + +public static partial class AirGapSyncServiceCollectionExtensions +{ + /// + /// Adds file-based transport for job sync bundles with custom options. + /// + /// The service collection. + /// Action to configure transport options. + /// The service collection for chaining. + public static IServiceCollection AddFileBasedJobSyncTransport( + this IServiceCollection services, + Action configureOptions) + { + services.Configure(configureOptions); + return services.AddFileBasedJobSyncTransport(); + } + + /// + /// Adds Router-based transport for job sync bundles with custom options. + /// + /// The service collection. + /// Action to configure transport options. + /// The service collection for chaining. + public static IServiceCollection AddRouterJobSyncTransport( + this IServiceCollection services, + Action configureOptions) + { + services.Configure(configureOptions); + return services.AddRouterJobSyncTransport(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.cs index b9c9ae89e..92afabb69 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/AirGapSyncServiceCollectionExtensions.cs @@ -1,13 +1,10 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Logging; using StellaOps.AirGap.Sync.Services; using StellaOps.AirGap.Sync.Stores; -using StellaOps.AirGap.Sync.Transport; using StellaOps.Determinism; using StellaOps.HybridLogicalClock; @@ -16,7 +13,7 @@ namespace StellaOps.AirGap.Sync; /// /// Extension methods for registering air-gap sync services. 
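
The two transport registrations are mutually exclusive in practice; below is a hedged sketch of choosing one at startup. The helper name and the boolean flag are illustrative, not part of this change.

```csharp
using Microsoft.Extensions.DependencyInjection;
using StellaOps.AirGap.Sync;

static void AddJobSyncTransport(IServiceCollection services, bool routerAvailable)
{
    if (routerAvailable)
    {
        // Requires IRouterJobSyncClient to be registered separately.
        services.AddRouterJobSyncTransport();
    }
    else
    {
        // Removable-media deployments fall back to the file-based transport.
        services.AddFileBasedJobSyncTransport();
    }
}
```
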
/// -public static class AirGapSyncServiceCollectionExtensions +public static partial class AirGapSyncServiceCollectionExtensions { /// /// Adds air-gap sync services to the service collection. @@ -30,124 +27,22 @@ public static class AirGapSyncServiceCollectionExtensions { ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); - // Core services + services.TryAddSingleton(TimeProvider.System); + services.TryAddSingleton(new AirGapSyncClockOptions(nodeId)); + services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); - - // Register in-memory HLC state store for offline operation - services.TryAddSingleton(); - - // Register HLC clock with node ID - services.TryAddSingleton(sp => - { - var timeProvider = sp.GetService() ?? TimeProvider.System; - var stateStore = sp.GetRequiredService(); - var logger = sp.GetRequiredService>(); - return new HybridLogicalClock.HybridLogicalClock(timeProvider, nodeId, stateStore, logger); - }); - - // Register deterministic GUID provider - services.TryAddSingleton(SystemGuidProvider.Instance); - - // File-based store (can be overridden) - services.TryAddSingleton(); - - // Offline HLC manager - services.TryAddSingleton(); - - // Bundle exporter services.TryAddSingleton(); - // Bundle DSSE signer (OMP-010) + services.TryAddSingleton(); + services.TryAddSingleton(); + + services.TryAddSingleton(SystemGuidProvider.Instance); + services.TryAddSingleton(); + services.TryAddSingleton(); services.TryAddSingleton(); return services; } - - /// - /// Adds air-gap sync services with custom options. - /// - /// The service collection. - /// The node identifier for this instance. - /// Action to configure file-based store options. - /// The service collection for chaining. - public static IServiceCollection AddAirGapSyncServices( - this IServiceCollection services, - string nodeId, - Action configureOptions) - { - // Configure file-based store options - services.Configure(configureOptions); - - return services.AddAirGapSyncServices(nodeId); - } - - /// - /// Adds the air-gap sync service for importing bundles to the central scheduler. - /// - /// The service collection. - /// The service collection for chaining. - /// - /// This requires ISyncSchedulerLogRepository to be registered separately, - /// as it depends on the Scheduler.Persistence module. - /// - public static IServiceCollection AddAirGapSyncImportService(this IServiceCollection services) - { - services.TryAddScoped(); - return services; - } - - /// - /// Adds file-based transport for job sync bundles. - /// - /// The service collection. - /// The service collection for chaining. - public static IServiceCollection AddFileBasedJobSyncTransport(this IServiceCollection services) - { - services.TryAddSingleton(); - return services; - } - - /// - /// Adds file-based transport for job sync bundles with custom options. - /// - /// The service collection. - /// Action to configure transport options. - /// The service collection for chaining. - public static IServiceCollection AddFileBasedJobSyncTransport( - this IServiceCollection services, - Action configureOptions) - { - services.Configure(configureOptions); - return services.AddFileBasedJobSyncTransport(); - } - - /// - /// Adds Router-based transport for job sync bundles. - /// - /// The service collection. - /// The service collection for chaining. - /// - /// Requires IRouterJobSyncClient to be registered separately. 
- /// - public static IServiceCollection AddRouterJobSyncTransport(this IServiceCollection services) - { - services.TryAddSingleton(); - return services; - } - - /// - /// Adds Router-based transport for job sync bundles with custom options. - /// - /// The service collection. - /// Action to configure transport options. - /// The service collection for chaining. - public static IServiceCollection AddRouterJobSyncTransport( - this IServiceCollection services, - Action configureOptions) - { - services.Configure(configureOptions); - return services.AddRouterJobSyncTransport(); - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseOptions.cs new file mode 100644 index 000000000..229df7bf3 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseOptions.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Options for air-gap bundle DSSE signing. +/// +public sealed class AirGapBundleDsseOptions +{ + /// + /// Configuration section name. + /// + public const string SectionName = "AirGap:BundleSigning"; + + /// + /// Gets or sets the signing mode: "hmac" for HMAC-SHA256, "none" to disable. + /// + public string Mode { get; set; } = "none"; + + /// + /// Gets or sets the HMAC secret key as Base64. + /// Required when Mode is "hmac". + /// + public string? SecretBase64 { get; set; } + + /// + /// Gets or sets the key identifier for the signature. + /// + public string KeyId { get; set; } = "airgap-bundle-signer"; + + /// + /// Gets or sets the payload type for DSSE envelope. + /// + public string PayloadType { get; set; } = "application/vnd.stellaops.airgap.bundle+json"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Helpers.cs new file mode 100644 index 000000000..bea8155d9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Helpers.cs @@ -0,0 +1,33 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using System.Globalization; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleDsseSigner +{ + /// + /// Computes DSSE Pre-Authentication Encoding (PAE). + /// PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload. 
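
A hedged, worked illustration of the PAE layout described above; the digest value is invented and only the string shape matters.

```csharp
using System.Text;

// Mirrors ComputePreAuthenticationEncoding: "DSSEv1 <len> <payloadType> <len> <digest>".
var payloadType = "application/vnd.stellaops.airgap.bundle+json";
var manifestDigest = "sha256:deadbeef";   // hypothetical digest, for illustration only

var pae = $"DSSEv1 {Encoding.UTF8.GetByteCount(payloadType)} {payloadType} " +
          $"{Encoding.UTF8.GetByteCount(manifestDigest)} {manifestDigest}";
// pae == "DSSEv1 44 application/vnd.stellaops.airgap.bundle+json 15 sha256:deadbeef"
var paeBytes = Encoding.UTF8.GetBytes(pae);
```
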
+ /// + private static byte[] ComputePreAuthenticationEncoding(string payloadType, string manifestDigest) + { + var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType); + var manifestDigestBytes = Encoding.UTF8.GetBytes(manifestDigest); + + var paeString = string.Create( + CultureInfo.InvariantCulture, + $"{DssePrefix}{payloadTypeBytes.Length} {payloadType} {manifestDigestBytes.Length} {manifestDigest}"); + + return Encoding.UTF8.GetBytes(paeString); + } + + private static byte[] ComputeHmacSha256(byte[] key, byte[] data) + { + using var hmac = new HMACSHA256(key); + return hmac.ComputeHash(data); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Sign.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Sign.cs new file mode 100644 index 000000000..c54aeb157 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Sign.cs @@ -0,0 +1,54 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleDsseSigner +{ + /// + public Task SignAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundle); + cancellationToken.ThrowIfCancellationRequested(); + + var opts = _options.Value; + + if (!IsEnabled) + { + _logger.LogDebug("Air-gap bundle DSSE signing is disabled"); + return Task.FromResult(null); + } + + if (string.IsNullOrWhiteSpace(opts.SecretBase64)) + { + throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured"); + } + + byte[] secret; + try + { + secret = Convert.FromBase64String(opts.SecretBase64); + } + catch (FormatException ex) + { + throw new InvalidOperationException("SecretBase64 is not valid Base64", ex); + } + + var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest); + var signature = ComputeHmacSha256(secret, pae); + var signatureBase64 = Convert.ToBase64String(signature); + + _logger.LogInformation( + "Signed air-gap bundle {BundleId} with key {KeyId}", + bundle.BundleId, + opts.KeyId); + + return Task.FromResult( + new AirGapBundleSignatureResult(opts.KeyId, signature, signatureBase64)); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Verify.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Verify.cs new file mode 100644 index 000000000..2dc0857e9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.Verify.cs @@ -0,0 +1,76 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
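
A hedged sketch of enabling and invoking the signer; the configuration keys follow `AirGapBundleDsseOptions.SectionName`, and the secret shown is a placeholder rather than a real key.

```csharp
// appsettings.json shape (section "AirGap:BundleSigning"):
//   "AirGap": { "BundleSigning": { "Mode": "hmac", "SecretBase64": "<base64 key>", "KeyId": "airgap-bundle-signer" } }

// Assumes an IAirGapBundleDsseSigner (signer), an AirGapBundle (bundle), and a CancellationToken (ct).
var result = await signer.SignAsync(bundle, ct);
if (result is not null)   // null when Mode is "none"
{
    Console.WriteLine($"Signed {bundle.BundleId} with key {result.KeyId}: {result.SignatureBase64}");
}
```
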
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using System.Security.Cryptography; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleDsseSigner +{ + /// + public Task VerifyAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundle); + cancellationToken.ThrowIfCancellationRequested(); + + var opts = _options.Value; + + if (!IsEnabled) + { + _logger.LogDebug("Air-gap bundle DSSE signing is disabled, skipping verification"); + return Task.FromResult(AirGapBundleVerificationResult.SigningDisabled); + } + + if (string.IsNullOrWhiteSpace(bundle.Signature)) + { + _logger.LogWarning("Air-gap bundle {BundleId} has no signature", bundle.BundleId); + return Task.FromResult(AirGapBundleVerificationResult.MissingSignature); + } + + if (string.IsNullOrWhiteSpace(opts.SecretBase64)) + { + throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured"); + } + + byte[] secret; + try + { + secret = Convert.FromBase64String(opts.SecretBase64); + } + catch (FormatException ex) + { + throw new InvalidOperationException("SecretBase64 is not valid Base64", ex); + } + + byte[] expectedSignature; + try + { + expectedSignature = Convert.FromBase64String(bundle.Signature); + } + catch (FormatException) + { + _logger.LogWarning("Air-gap bundle {BundleId} has invalid Base64 signature", bundle.BundleId); + return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature); + } + + var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest); + var computedSignature = ComputeHmacSha256(secret, pae); + + if (!CryptographicOperations.FixedTimeEquals(expectedSignature, computedSignature)) + { + _logger.LogWarning( + "Air-gap bundle {BundleId} signature verification failed", + bundle.BundleId); + return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature); + } + + _logger.LogDebug( + "Air-gap bundle {BundleId} signature verified successfully", + bundle.BundleId); + return Task.FromResult(AirGapBundleVerificationResult.Valid); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.cs index 0739bec22..56704b4ed 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleDsseSigner.cs @@ -1,124 +1,15 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using StellaOps.AirGap.Sync.Models; -using StellaOps.Canonical.Json; -using System.Globalization; -using System.Security.Cryptography; -using System.Text; namespace StellaOps.AirGap.Sync.Services; -/// -/// Options for air-gap bundle DSSE signing. -/// -public sealed class AirGapBundleDsseOptions -{ - /// - /// Configuration section name. - /// - public const string SectionName = "AirGap:BundleSigning"; - - /// - /// Gets or sets the signing mode: "hmac" for HMAC-SHA256, "none" to disable. - /// - public string Mode { get; set; } = "none"; - - /// - /// Gets or sets the HMAC secret key as Base64. - /// Required when Mode is "hmac". - /// - public string? SecretBase64 { get; set; } - - /// - /// Gets or sets the key identifier for the signature. 
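
The corresponding hedged sketch for the verification path; throwing on an invalid result is illustrative import policy, not something this change prescribes.

```csharp
// Assumes the same signer/bundle/ct as in the signing sketch above.
var verification = await signer.VerifyAsync(bundle, ct);
if (!verification.IsValid)
{
    // Reason is e.g. "Bundle is not signed" or "Signature verification failed".
    throw new InvalidOperationException($"Refusing bundle {bundle.BundleId}: {verification.Reason}");
}
```
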
- /// - public string KeyId { get; set; } = "airgap-bundle-signer"; - - /// - /// Gets or sets the payload type for DSSE envelope. - /// - public string PayloadType { get; set; } = "application/vnd.stellaops.airgap.bundle+json"; -} - -/// -/// Result of a bundle signature operation. -/// -/// The key ID used for signing. -/// The signature bytes. -/// The signature as Base64 string. -public sealed record AirGapBundleSignatureResult( - string KeyId, - byte[] Signature, - string SignatureBase64); - -/// -/// Interface for air-gap bundle DSSE signing. -/// -public interface IAirGapBundleDsseSigner -{ - /// - /// Signs an air-gap bundle manifest and returns the signature result. - /// - /// The bundle to sign. - /// Cancellation token. - /// Signature result with key ID and signature. - Task SignAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default); - - /// - /// Verifies an air-gap bundle signature. - /// - /// The bundle to verify. - /// Cancellation token. - /// True if signature is valid or signing is disabled; false if invalid. - Task VerifyAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default); - - /// - /// Gets whether signing is enabled. - /// - bool IsEnabled { get; } -} - -/// -/// Result of bundle signature verification. -/// -/// Whether the signature is valid. -/// The reason for the result. -public sealed record AirGapBundleVerificationResult(bool IsValid, string Reason) -{ - /// - /// Verification succeeded. - /// - public static AirGapBundleVerificationResult Valid { get; } = new(true, "Signature verified"); - - /// - /// Signing is disabled, so verification is skipped. - /// - public static AirGapBundleVerificationResult SigningDisabled { get; } = new(true, "Signing disabled"); - - /// - /// Bundle has no signature but signing is enabled. - /// - public static AirGapBundleVerificationResult MissingSignature { get; } = new(false, "Bundle is not signed"); - - /// - /// Signature verification failed. - /// - public static AirGapBundleVerificationResult InvalidSignature { get; } = new(false, "Signature verification failed"); -} - /// /// DSSE signer for air-gap bundles using HMAC-SHA256. 
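
Because the signer interface surface (SignAsync, VerifyAsync, IsEnabled) is small, a no-op test double is easy to keep next to unit tests. The sketch below is an assumption about how consumers might stub it, including the inferred generic return types; it is not part of this change.

```csharp
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

// Hypothetical test double: signing disabled, every bundle treated as acceptable.
internal sealed class NullAirGapBundleDsseSigner : IAirGapBundleDsseSigner
{
    public bool IsEnabled => false;

    public Task<AirGapBundleSignatureResult?> SignAsync(AirGapBundle bundle, CancellationToken cancellationToken = default)
        => Task.FromResult<AirGapBundleSignatureResult?>(null);

    public Task<AirGapBundleVerificationResult> VerifyAsync(AirGapBundle bundle, CancellationToken cancellationToken = default)
        => Task.FromResult(AirGapBundleVerificationResult.SigningDisabled);
}
```
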
/// -public sealed class AirGapBundleDsseSigner : IAirGapBundleDsseSigner +public sealed partial class AirGapBundleDsseSigner : IAirGapBundleDsseSigner { private const string DssePrefix = "DSSEv1 "; @@ -137,140 +28,8 @@ public sealed class AirGapBundleDsseSigner : IAirGapBundleDsseSigner } /// - public bool IsEnabled => string.Equals(_options.Value.Mode, "hmac", StringComparison.OrdinalIgnoreCase); - - /// - public Task SignAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(bundle); - cancellationToken.ThrowIfCancellationRequested(); - - var opts = _options.Value; - - if (!IsEnabled) - { - _logger.LogDebug("Air-gap bundle DSSE signing is disabled"); - return Task.FromResult(null); - } - - if (string.IsNullOrWhiteSpace(opts.SecretBase64)) - { - throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured"); - } - - byte[] secret; - try - { - secret = Convert.FromBase64String(opts.SecretBase64); - } - catch (FormatException ex) - { - throw new InvalidOperationException("SecretBase64 is not valid Base64", ex); - } - - // Compute PAE (Pre-Authentication Encoding) per DSSE spec - var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest); - var signature = ComputeHmacSha256(secret, pae); - var signatureBase64 = Convert.ToBase64String(signature); - - _logger.LogInformation( - "Signed air-gap bundle {BundleId} with key {KeyId}", - bundle.BundleId, - opts.KeyId); - - return Task.FromResult( - new AirGapBundleSignatureResult(opts.KeyId, signature, signatureBase64)); - } - - /// - public Task VerifyAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(bundle); - cancellationToken.ThrowIfCancellationRequested(); - - var opts = _options.Value; - - if (!IsEnabled) - { - _logger.LogDebug("Air-gap bundle DSSE signing is disabled, skipping verification"); - return Task.FromResult(AirGapBundleVerificationResult.SigningDisabled); - } - - if (string.IsNullOrWhiteSpace(bundle.Signature)) - { - _logger.LogWarning("Air-gap bundle {BundleId} has no signature", bundle.BundleId); - return Task.FromResult(AirGapBundleVerificationResult.MissingSignature); - } - - if (string.IsNullOrWhiteSpace(opts.SecretBase64)) - { - throw new InvalidOperationException("HMAC signing mode requires SecretBase64 to be configured"); - } - - byte[] secret; - try - { - secret = Convert.FromBase64String(opts.SecretBase64); - } - catch (FormatException ex) - { - throw new InvalidOperationException("SecretBase64 is not valid Base64", ex); - } - - byte[] expectedSignature; - try - { - expectedSignature = Convert.FromBase64String(bundle.Signature); - } - catch (FormatException) - { - _logger.LogWarning("Air-gap bundle {BundleId} has invalid Base64 signature", bundle.BundleId); - return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature); - } - - // Compute PAE and expected signature - var pae = ComputePreAuthenticationEncoding(opts.PayloadType, bundle.ManifestDigest); - var computedSignature = ComputeHmacSha256(secret, pae); - - if (!CryptographicOperations.FixedTimeEquals(expectedSignature, computedSignature)) - { - _logger.LogWarning( - "Air-gap bundle {BundleId} signature verification failed", - bundle.BundleId); - return Task.FromResult(AirGapBundleVerificationResult.InvalidSignature); - } - - _logger.LogDebug( - "Air-gap bundle {BundleId} signature verified successfully", - bundle.BundleId); - return 
Task.FromResult(AirGapBundleVerificationResult.Valid); - } - - /// - /// Computes DSSE Pre-Authentication Encoding (PAE). - /// PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload - /// where len() returns ASCII decimal length, and SP is a space character. - /// - private static byte[] ComputePreAuthenticationEncoding(string payloadType, string manifestDigest) - { - var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType); - var manifestDigestBytes = Encoding.UTF8.GetBytes(manifestDigest); - - // Format: "DSSEv1 {payloadType.Length} {payloadType} {payload.Length} {payload}" - var paeString = string.Create( - CultureInfo.InvariantCulture, - $"{DssePrefix}{payloadTypeBytes.Length} {payloadType} {manifestDigestBytes.Length} {manifestDigest}"); - - return Encoding.UTF8.GetBytes(paeString); - } - - private static byte[] ComputeHmacSha256(byte[] key, byte[] data) - { - using var hmac = new HMACSHA256(key); - return hmac.ComputeHash(data); - } + public bool IsEnabled => string.Equals( + _options.Value.Mode, + "hmac", + StringComparison.OrdinalIgnoreCase); } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Dtos.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Dtos.cs new file mode 100644 index 000000000..e87965947 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Dtos.cs @@ -0,0 +1,40 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleExporter +{ + private sealed record AirGapBundleExportDto + { + public required Guid BundleId { get; init; } + public required string TenantId { get; init; } + public required DateTimeOffset CreatedAt { get; init; } + public required string CreatedByNodeId { get; init; } + public required string ManifestDigest { get; init; } + public string? Signature { get; init; } + public string? SignedBy { get; init; } + public required IReadOnlyList JobLogs { get; init; } + } + + private sealed record NodeJobLogExportDto + { + public required string NodeId { get; init; } + public required string LastHlc { get; init; } + public required string ChainHead { get; init; } + public required IReadOnlyList Entries { get; init; } + } + + private sealed record OfflineJobLogEntryExportDto + { + public required string NodeId { get; init; } + public required string THlc { get; init; } + public required Guid JobId { get; init; } + public string? PartitionKey { get; init; } + public required string Payload { get; init; } + public required string PayloadHash { get; init; } + public string? PrevLink { get; init; } + public required string Link { get; init; } + public DateTimeOffset EnqueuedAt { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Export.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Export.cs new file mode 100644 index 000000000..b41167b55 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Export.cs @@ -0,0 +1,68 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleExporter +{ + /// + public async Task ExportAsync( + string tenantId, + IReadOnlyList? 
nodeIds = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var effectiveNodeIds = nodeIds ?? new[] { _hlcManager.NodeId }; + + _logger.LogInformation( + "Exporting air-gap bundle for tenant {TenantId} with {NodeCount} nodes", + tenantId, + effectiveNodeIds.Count); + + var jobLogs = new List(); + + foreach (var nodeId in effectiveNodeIds) + { + cancellationToken.ThrowIfCancellationRequested(); + + var nodeLog = await _jobLogStore.GetNodeJobLogAsync(nodeId, cancellationToken) + .ConfigureAwait(false); + + if (nodeLog is not null && nodeLog.Entries.Count > 0) + { + jobLogs.Add(nodeLog); + _logger.LogDebug( + "Added node {NodeId} with {EntryCount} entries to bundle", + nodeId, + nodeLog.Entries.Count); + } + } + + if (jobLogs.Count == 0) + { + _logger.LogWarning("No offline job logs found for export"); + } + + var bundle = new AirGapBundle + { + BundleId = _guidProvider.NewGuid(), + TenantId = tenantId, + CreatedAt = _timeProvider.GetUtcNow(), + CreatedByNodeId = _hlcManager.NodeId, + JobLogs = jobLogs, + ManifestDigest = ComputeManifestDigest(jobLogs) + }; + + _logger.LogInformation( + "Created bundle {BundleId} with {LogCount} node logs, {TotalEntries} total entries", + bundle.BundleId, + jobLogs.Count, + jobLogs.Sum(l => l.Entries.Count)); + + return bundle; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.ExportFile.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.ExportFile.cs new file mode 100644 index 000000000..4cdab479c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.ExportFile.cs @@ -0,0 +1,56 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
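// Illustrative usage sketch (not part of this change): shows how calling code
// might drive the exporter defined here. The tenant id and output path are
// placeholders, and the assumption that ExportAsync yields the AirGapBundle
// model follows the "exported bundle" wording in IAirGapBundleExporter.
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

public static class AirGapExportExample // hypothetical helper, not in the codebase
{
    public static async Task ExportCurrentNodeAsync(
        IAirGapBundleExporter exporter,
        CancellationToken cancellationToken)
    {
        // Passing null node ids exports only the current node's offline job log.
        var bundle = await exporter.ExportAsync("tenant-a", nodeIds: null, cancellationToken);

        // Writes indented camelCase JSON, creating the target directory if needed.
        await exporter.ExportToFileAsync(bundle, "/mnt/transfer/bundle.json", cancellationToken);
    }
}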
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleExporter +{ + /// + public async Task ExportToFileAsync( + AirGapBundle bundle, + string outputPath, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundle); + ArgumentException.ThrowIfNullOrWhiteSpace(outputPath); + + var dto = ToExportDto(bundle); + var json = JsonSerializer.Serialize(dto, _jsonOptions); + + var directory = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Exported bundle {BundleId} to {OutputPath}", + bundle.BundleId, + outputPath); + } + + /// + public Task ExportToStringAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundle); + cancellationToken.ThrowIfCancellationRequested(); + + var dto = ToExportDto(bundle); + var json = JsonSerializer.Serialize(dto, _jsonOptions); + + _logger.LogDebug( + "Exported bundle {BundleId} to string ({Length} chars)", + bundle.BundleId, + json.Length); + + return Task.FromResult(json); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Helpers.cs new file mode 100644 index 000000000..5da56551c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.Helpers.cs @@ -0,0 +1,62 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using StellaOps.Canonical.Json; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleExporter +{ + private static string ComputeManifestDigest(IReadOnlyList jobLogs) + { + var manifest = jobLogs + .OrderBy(l => l.NodeId, StringComparer.Ordinal) + .Select(l => new + { + l.NodeId, + LastHlc = l.LastHlc.ToSortableString(), + ChainHead = Convert.ToHexString(l.ChainHead) + }) + .ToList(); + + var json = CanonJson.Serialize(manifest); + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static AirGapBundleExportDto ToExportDto(AirGapBundle bundle) => new() + { + BundleId = bundle.BundleId, + TenantId = bundle.TenantId, + CreatedAt = bundle.CreatedAt, + CreatedByNodeId = bundle.CreatedByNodeId, + ManifestDigest = bundle.ManifestDigest, + Signature = bundle.Signature, + SignedBy = bundle.SignedBy, + JobLogs = bundle.JobLogs.Select(ToNodeJobLogDto).ToList() + }; + + private static NodeJobLogExportDto ToNodeJobLogDto(NodeJobLog log) => new() + { + NodeId = log.NodeId, + LastHlc = log.LastHlc.ToSortableString(), + ChainHead = Convert.ToBase64String(log.ChainHead), + Entries = log.Entries.Select(ToEntryDto).ToList() + }; + + private static OfflineJobLogEntryExportDto ToEntryDto(OfflineJobLogEntry entry) => new() + { + NodeId = entry.NodeId, + THlc = entry.THlc.ToSortableString(), + JobId = entry.JobId, + PartitionKey = entry.PartitionKey, + Payload = entry.Payload, + PayloadHash = Convert.ToBase64String(entry.PayloadHash), + PrevLink = entry.PrevLink is not null ? 
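// Rough shape of the bundle JSON written by ExportToFileAsync, for orientation
// only (not part of this change): field names follow the exporter's camelCase
// JSON options and the export DTOs; every value below is invented.
// {
//   "bundleId": "9b2c3a0e-....",
//   "tenantId": "tenant-a",
//   "createdAt": "2025-01-01T00:00:00+00:00",
//   "createdByNodeId": "edge-node-01",
//   "manifestDigest": "sha256:ab12....",
//   "signature": null,
//   "signedBy": null,
//   "jobLogs": [
//     {
//       "nodeId": "edge-node-01",
//       "lastHlc": "<HlcTimestamp.ToSortableString()>",
//       "chainHead": "<base64>",
//       "entries": [
//         {
//           "nodeId": "edge-node-01",
//           "tHlc": "<HlcTimestamp.ToSortableString()>",
//           "jobId": "d41d8cd9-....",
//           "partitionKey": null,
//           "payload": "<opaque job payload>",
//           "payloadHash": "<base64>",
//           "prevLink": null,
//           "link": "<base64>",
//           "enqueuedAt": "2025-01-01T00:00:00+00:00"
//         }
//       ]
//     }
//   ]
// }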
Convert.ToBase64String(entry.PrevLink) : null, + Link = Convert.ToBase64String(entry.Link), + EnqueuedAt = entry.EnqueuedAt + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.cs index 7fac34689..d9317bbe4 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleExporter.cs @@ -1,75 +1,30 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Stores; -using StellaOps.Canonical.Json; using StellaOps.Determinism; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for air-gap bundle export operations. -/// -public interface IAirGapBundleExporter -{ - /// - /// Exports an air-gap bundle containing offline job logs. - /// - /// The tenant ID. - /// The node IDs to include (null for current node only). - /// Cancellation token. - /// The exported bundle. - Task ExportAsync( - string tenantId, - IReadOnlyList? nodeIds = null, - CancellationToken cancellationToken = default); - - /// - /// Exports an air-gap bundle to a file. - /// - /// The bundle to export. - /// The output file path. - /// Cancellation token. - Task ExportToFileAsync( - AirGapBundle bundle, - string outputPath, - CancellationToken cancellationToken = default); - - /// - /// Exports an air-gap bundle to a JSON string. - /// - /// The bundle to export. - /// Cancellation token. - /// The JSON string representation. - Task ExportToStringAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default); -} - /// /// Service for exporting air-gap bundles. /// -public sealed class AirGapBundleExporter : IAirGapBundleExporter +public sealed partial class AirGapBundleExporter : IAirGapBundleExporter { + private static readonly JsonSerializerOptions _jsonOptions = new() + { + WriteIndented = true, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + private readonly IOfflineJobLogStore _jobLogStore; private readonly IOfflineHlcManager _hlcManager; private readonly IGuidProvider _guidProvider; private readonly TimeProvider _timeProvider; private readonly ILogger _logger; - private static readonly JsonSerializerOptions JsonOptions = new() - { - WriteIndented = true, - PropertyNamingPolicy = JsonNamingPolicy.CamelCase - }; - /// /// Initializes a new instance of the class. /// @@ -86,186 +41,4 @@ public sealed class AirGapBundleExporter : IAirGapBundleExporter _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - /// - public async Task ExportAsync( - string tenantId, - IReadOnlyList? nodeIds = null, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - - var effectiveNodeIds = nodeIds ?? 
new[] { _hlcManager.NodeId }; - - _logger.LogInformation( - "Exporting air-gap bundle for tenant {TenantId} with {NodeCount} nodes", - tenantId, effectiveNodeIds.Count); - - var jobLogs = new List(); - - foreach (var nodeId in effectiveNodeIds) - { - cancellationToken.ThrowIfCancellationRequested(); - - var nodeLog = await _jobLogStore.GetNodeJobLogAsync(nodeId, cancellationToken) - .ConfigureAwait(false); - - if (nodeLog is not null && nodeLog.Entries.Count > 0) - { - jobLogs.Add(nodeLog); - _logger.LogDebug( - "Added node {NodeId} with {EntryCount} entries to bundle", - nodeId, nodeLog.Entries.Count); - } - } - - if (jobLogs.Count == 0) - { - _logger.LogWarning("No offline job logs found for export"); - } - - var bundle = new AirGapBundle - { - BundleId = _guidProvider.NewGuid(), - TenantId = tenantId, - CreatedAt = _timeProvider.GetUtcNow(), - CreatedByNodeId = _hlcManager.NodeId, - JobLogs = jobLogs, - ManifestDigest = ComputeManifestDigest(jobLogs) - }; - - _logger.LogInformation( - "Created bundle {BundleId} with {LogCount} node logs, {TotalEntries} total entries", - bundle.BundleId, jobLogs.Count, jobLogs.Sum(l => l.Entries.Count)); - - return bundle; - } - - /// - public async Task ExportToFileAsync( - AirGapBundle bundle, - string outputPath, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(bundle); - ArgumentException.ThrowIfNullOrWhiteSpace(outputPath); - - var dto = ToExportDto(bundle); - var json = JsonSerializer.Serialize(dto, JsonOptions); - - var directory = Path.GetDirectoryName(outputPath); - if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); - - _logger.LogInformation( - "Exported bundle {BundleId} to {OutputPath}", - bundle.BundleId, outputPath); - } - - /// - public Task ExportToStringAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(bundle); - cancellationToken.ThrowIfCancellationRequested(); - - var dto = ToExportDto(bundle); - var json = JsonSerializer.Serialize(dto, JsonOptions); - - _logger.LogDebug( - "Exported bundle {BundleId} to string ({Length} chars)", - bundle.BundleId, json.Length); - - return Task.FromResult(json); - } - - private static string ComputeManifestDigest(IReadOnlyList jobLogs) - { - // Create manifest of all chain heads for integrity - var manifest = jobLogs - .OrderBy(l => l.NodeId, StringComparer.Ordinal) - .Select(l => new - { - l.NodeId, - LastHlc = l.LastHlc.ToSortableString(), - ChainHead = Convert.ToHexString(l.ChainHead) - }) - .ToList(); - - var json = CanonJson.Serialize(manifest); - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static AirGapBundleExportDto ToExportDto(AirGapBundle bundle) => new() - { - BundleId = bundle.BundleId, - TenantId = bundle.TenantId, - CreatedAt = bundle.CreatedAt, - CreatedByNodeId = bundle.CreatedByNodeId, - ManifestDigest = bundle.ManifestDigest, - Signature = bundle.Signature, - SignedBy = bundle.SignedBy, - JobLogs = bundle.JobLogs.Select(ToNodeJobLogDto).ToList() - }; - - private static NodeJobLogExportDto ToNodeJobLogDto(NodeJobLog log) => new() - { - NodeId = log.NodeId, - LastHlc = log.LastHlc.ToSortableString(), - ChainHead = Convert.ToBase64String(log.ChainHead), - Entries = log.Entries.Select(ToEntryDto).ToList() - }; - - private static 
OfflineJobLogEntryExportDto ToEntryDto(OfflineJobLogEntry entry) => new() - { - NodeId = entry.NodeId, - THlc = entry.THlc.ToSortableString(), - JobId = entry.JobId, - PartitionKey = entry.PartitionKey, - Payload = entry.Payload, - PayloadHash = Convert.ToBase64String(entry.PayloadHash), - PrevLink = entry.PrevLink is not null ? Convert.ToBase64String(entry.PrevLink) : null, - Link = Convert.ToBase64String(entry.Link), - EnqueuedAt = entry.EnqueuedAt - }; - - // Export DTOs - private sealed record AirGapBundleExportDto - { - public required Guid BundleId { get; init; } - public required string TenantId { get; init; } - public required DateTimeOffset CreatedAt { get; init; } - public required string CreatedByNodeId { get; init; } - public required string ManifestDigest { get; init; } - public string? Signature { get; init; } - public string? SignedBy { get; init; } - public required IReadOnlyList JobLogs { get; init; } - } - - private sealed record NodeJobLogExportDto - { - public required string NodeId { get; init; } - public required string LastHlc { get; init; } - public required string ChainHead { get; init; } - public required IReadOnlyList Entries { get; init; } - } - - private sealed record OfflineJobLogEntryExportDto - { - public required string NodeId { get; init; } - public required string THlc { get; init; } - public required Guid JobId { get; init; } - public string? PartitionKey { get; init; } - public required string Payload { get; init; } - public required string PayloadHash { get; init; } - public string? PrevLink { get; init; } - public required string Link { get; init; } - public DateTimeOffset EnqueuedAt { get; init; } - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Dtos.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Dtos.cs new file mode 100644 index 000000000..1560ea5c9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Dtos.cs @@ -0,0 +1,40 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleImporter +{ + private sealed record AirGapBundleImportDto + { + public required Guid BundleId { get; init; } + public required string TenantId { get; init; } + public required DateTimeOffset CreatedAt { get; init; } + public required string CreatedByNodeId { get; init; } + public required string ManifestDigest { get; init; } + public string? Signature { get; init; } + public string? SignedBy { get; init; } + public required IReadOnlyList JobLogs { get; init; } + } + + private sealed record NodeJobLogImportDto + { + public required string NodeId { get; init; } + public required string LastHlc { get; init; } + public required string ChainHead { get; init; } + public required IReadOnlyList Entries { get; init; } + } + + private sealed record OfflineJobLogEntryImportDto + { + public required string NodeId { get; init; } + public required string THlc { get; init; } + public required Guid JobId { get; init; } + public string? PartitionKey { get; init; } + public required string Payload { get; init; } + public required string PayloadHash { get; init; } + public string? 
PrevLink { get; init; } + public required string Link { get; init; } + public DateTimeOffset EnqueuedAt { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Import.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Import.cs new file mode 100644 index 000000000..8a45a73d1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Import.cs @@ -0,0 +1,73 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleImporter +{ + /// + public async Task ImportFromFileAsync( + string inputPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(inputPath); + + if (!File.Exists(inputPath)) + { + throw new FileNotFoundException($"Bundle file not found: {inputPath}", inputPath); + } + + _logger.LogInformation("Importing air-gap bundle from {InputPath}", inputPath); + + var json = await File.ReadAllTextAsync(inputPath, cancellationToken).ConfigureAwait(false); + var dto = JsonSerializer.Deserialize(json, _jsonOptions); + + if (dto is null) + { + throw new InvalidOperationException("Failed to deserialize bundle file"); + } + + var bundle = FromImportDto(dto); + + _logger.LogInformation( + "Imported bundle {BundleId} from {InputPath}: {LogCount} node logs, {TotalEntries} total entries", + bundle.BundleId, + inputPath, + bundle.JobLogs.Count, + bundle.JobLogs.Sum(l => l.Entries.Count)); + + return bundle; + } + + /// + public Task ImportFromStringAsync( + string json, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(json); + cancellationToken.ThrowIfCancellationRequested(); + + _logger.LogDebug("Importing air-gap bundle from string ({Length} chars)", json.Length); + + var dto = JsonSerializer.Deserialize(json, _jsonOptions); + + if (dto is null) + { + throw new InvalidOperationException("Failed to deserialize bundle JSON"); + } + + var bundle = FromImportDto(dto); + + _logger.LogInformation( + "Imported bundle {BundleId} from string: {LogCount} node logs, {TotalEntries} total entries", + bundle.BundleId, + bundle.JobLogs.Count, + bundle.JobLogs.Sum(l => l.Entries.Count)); + + return Task.FromResult(bundle); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Mappings.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Mappings.cs new file mode 100644 index 000000000..5bf588344 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Mappings.cs @@ -0,0 +1,43 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
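// Illustrative round-trip check (not part of this change): the exporter and
// importer use matching camelCase JSON options, so a bundle is expected to
// survive export-to-string followed by import-from-string. The helper name and
// the exact return types (string / AirGapBundle) are assumptions taken from the
// interface documentation.
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

public static class BundleRoundTripExample // hypothetical helper
{
    public static async Task AssertRoundTripsAsync(
        IAirGapBundleExporter exporter,
        IAirGapBundleImporter importer,
        AirGapBundle bundle,
        CancellationToken cancellationToken)
    {
        var json = await exporter.ExportToStringAsync(bundle, cancellationToken);
        var restored = await importer.ImportFromStringAsync(json, cancellationToken);

        // Identity and manifest digest should be unchanged by serialization.
        if (restored.BundleId != bundle.BundleId ||
            restored.ManifestDigest != bundle.ManifestDigest)
        {
            throw new InvalidOperationException("Round trip changed the bundle identity.");
        }
    }
}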
+// +using StellaOps.AirGap.Sync.Models; +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleImporter +{ + private static AirGapBundle FromImportDto(AirGapBundleImportDto dto) => new() + { + BundleId = dto.BundleId, + TenantId = dto.TenantId, + CreatedAt = dto.CreatedAt, + CreatedByNodeId = dto.CreatedByNodeId, + ManifestDigest = dto.ManifestDigest, + Signature = dto.Signature, + SignedBy = dto.SignedBy, + JobLogs = dto.JobLogs.Select(FromNodeJobLogDto).ToList() + }; + + private static NodeJobLog FromNodeJobLogDto(NodeJobLogImportDto dto) => new() + { + NodeId = dto.NodeId, + LastHlc = HlcTimestamp.Parse(dto.LastHlc), + ChainHead = Convert.FromBase64String(dto.ChainHead), + Entries = dto.Entries.Select(FromEntryDto).ToList() + }; + + private static OfflineJobLogEntry FromEntryDto(OfflineJobLogEntryImportDto dto) => new() + { + NodeId = dto.NodeId, + THlc = HlcTimestamp.Parse(dto.THlc), + JobId = dto.JobId, + PartitionKey = dto.PartitionKey, + Payload = dto.Payload, + PayloadHash = Convert.FromBase64String(dto.PayloadHash), + PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null, + Link = Convert.FromBase64String(dto.Link), + EnqueuedAt = dto.EnqueuedAt + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.Helpers.cs new file mode 100644 index 000000000..6d18bdd34 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.Helpers.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using StellaOps.Canonical.Json; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleImporter +{ + private static string ComputeManifestDigest(IReadOnlyList jobLogs) + { + var manifest = jobLogs + .OrderBy(l => l.NodeId, StringComparer.Ordinal) + .Select(l => new + { + l.NodeId, + LastHlc = l.LastHlc.ToSortableString(), + ChainHead = Convert.ToHexString(l.ChainHead) + }) + .ToList(); + + var json = CanonJson.Serialize(manifest); + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static bool ByteArrayEquals(byte[]? a, byte[]? b) + { + if (a is null && b is null) return true; + if (a is null || b is null) return false; + return a.AsSpan().SequenceEqual(b); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.cs new file mode 100644 index 000000000..eacb3c3c1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.Validation.cs @@ -0,0 +1,91 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
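// Illustrative chain recomputation (not part of this change): condenses the
// check performed by the validation partial below. Every entry's link commits
// to its predecessor, so replaying ComputeLink over the entries must land
// exactly on the recorded chain head.
using System;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

public static class ChainHeadExample // hypothetical helper
{
    public static bool ChainHeadMatches(NodeJobLog log)
    {
        if (log.Entries.Count == 0)
        {
            return true; // nothing to check; the validator skips empty logs too
        }

        byte[]? link = null;
        foreach (var entry in log.Entries)
        {
            // Same static helper the validator uses to recompute each link.
            link = OfflineHlcManager.ComputeLink(link, entry.JobId, entry.THlc, entry.PayloadHash);
        }

        return link!.AsSpan().SequenceEqual(log.ChainHead);
    }
}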
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapBundleImporter +{ + /// + public BundleValidationResult Validate(AirGapBundle bundle) + { + ArgumentNullException.ThrowIfNull(bundle); + + var issues = new List(); + + var computedDigest = ComputeManifestDigest(bundle.JobLogs); + if (!string.Equals(computedDigest, bundle.ManifestDigest, StringComparison.Ordinal)) + { + issues.Add($"Manifest digest mismatch: expected {bundle.ManifestDigest}, computed {computedDigest}"); + } + + foreach (var nodeLog in bundle.JobLogs) + { + var nodeIssues = ValidateNodeLog(nodeLog); + issues.AddRange(nodeIssues); + } + + foreach (var nodeLog in bundle.JobLogs) + { + if (nodeLog.Entries.Count > 0) + { + var lastEntry = nodeLog.Entries[^1]; + if (!ByteArrayEquals(nodeLog.ChainHead, lastEntry.Link)) + { + issues.Add($"Node {nodeLog.NodeId}: chain head doesn't match last entry link"); + } + } + } + + var isValid = issues.Count == 0; + + if (!isValid) + { + _logger.LogWarning( + "Bundle {BundleId} validation failed with {IssueCount} issues", + bundle.BundleId, + issues.Count); + } + else + { + _logger.LogDebug("Bundle {BundleId} validation passed", bundle.BundleId); + } + + return new BundleValidationResult + { + IsValid = isValid, + Issues = issues + }; + } + + private static IEnumerable ValidateNodeLog(NodeJobLog nodeLog) + { + byte[]? expectedPrevLink = null; + + for (var i = 0; i < nodeLog.Entries.Count; i++) + { + var entry = nodeLog.Entries[i]; + + if (!ByteArrayEquals(entry.PrevLink, expectedPrevLink)) + { + yield return $"Node {nodeLog.NodeId}, entry {i}: prev_link mismatch"; + } + + var computedLink = OfflineHlcManager.ComputeLink( + entry.PrevLink, + entry.JobId, + entry.THlc, + entry.PayloadHash); + + if (!ByteArrayEquals(entry.Link, computedLink)) + { + yield return $"Node {nodeLog.NodeId}, entry {i} (JobId {entry.JobId}): link mismatch"; + } + + expectedPrevLink = entry.Link; + } + } + +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.cs index 340e34af2..4c7598263 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleImporter.cs @@ -1,80 +1,24 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; -using StellaOps.AirGap.Sync.Models; -using StellaOps.Canonical.Json; -using StellaOps.HybridLogicalClock; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for air-gap bundle import operations. -/// -public interface IAirGapBundleImporter -{ - /// - /// Imports an air-gap bundle from a file. - /// - /// The input file path. - /// Cancellation token. - /// The imported bundle. - Task ImportFromFileAsync( - string inputPath, - CancellationToken cancellationToken = default); - - /// - /// Validates a bundle's integrity. - /// - /// The bundle to validate. - /// Validation result with any issues found. - BundleValidationResult Validate(AirGapBundle bundle); - - /// - /// Imports an air-gap bundle from a JSON string. - /// - /// The JSON string representation. - /// Cancellation token. - /// The imported bundle. 
- Task ImportFromStringAsync( - string json, - CancellationToken cancellationToken = default); -} - -/// -/// Result of bundle validation. -/// -public sealed record BundleValidationResult -{ - /// - /// Gets whether the bundle is valid. - /// - public required bool IsValid { get; init; } - - /// - /// Gets validation issues found. - /// - public required IReadOnlyList Issues { get; init; } -} - /// /// Service for importing air-gap bundles. /// -public sealed class AirGapBundleImporter : IAirGapBundleImporter +public sealed partial class AirGapBundleImporter : IAirGapBundleImporter { - private readonly ILogger _logger; - - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, PropertyNameCaseInsensitive = true }; + private readonly ILogger _logger; + /// /// Initializes a new instance of the class. /// @@ -82,236 +26,4 @@ public sealed class AirGapBundleImporter : IAirGapBundleImporter { _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - /// - public async Task ImportFromFileAsync( - string inputPath, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(inputPath); - - if (!File.Exists(inputPath)) - { - throw new FileNotFoundException($"Bundle file not found: {inputPath}", inputPath); - } - - _logger.LogInformation("Importing air-gap bundle from {InputPath}", inputPath); - - var json = await File.ReadAllTextAsync(inputPath, cancellationToken).ConfigureAwait(false); - var dto = JsonSerializer.Deserialize(json, JsonOptions); - - if (dto is null) - { - throw new InvalidOperationException("Failed to deserialize bundle file"); - } - - var bundle = FromImportDto(dto); - - _logger.LogInformation( - "Imported bundle {BundleId} from {InputPath}: {LogCount} node logs, {TotalEntries} total entries", - bundle.BundleId, inputPath, bundle.JobLogs.Count, bundle.JobLogs.Sum(l => l.Entries.Count)); - - return bundle; - } - - /// - public Task ImportFromStringAsync( - string json, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(json); - cancellationToken.ThrowIfCancellationRequested(); - - _logger.LogDebug("Importing air-gap bundle from string ({Length} chars)", json.Length); - - var dto = JsonSerializer.Deserialize(json, JsonOptions); - - if (dto is null) - { - throw new InvalidOperationException("Failed to deserialize bundle JSON"); - } - - var bundle = FromImportDto(dto); - - _logger.LogInformation( - "Imported bundle {BundleId} from string: {LogCount} node logs, {TotalEntries} total entries", - bundle.BundleId, bundle.JobLogs.Count, bundle.JobLogs.Sum(l => l.Entries.Count)); - - return Task.FromResult(bundle); - } - - /// - public BundleValidationResult Validate(AirGapBundle bundle) - { - ArgumentNullException.ThrowIfNull(bundle); - - var issues = new List(); - - // 1. Validate manifest digest - var computedDigest = ComputeManifestDigest(bundle.JobLogs); - if (!string.Equals(computedDigest, bundle.ManifestDigest, StringComparison.Ordinal)) - { - issues.Add($"Manifest digest mismatch: expected {bundle.ManifestDigest}, computed {computedDigest}"); - } - - // 2. Validate each node log's chain integrity - foreach (var nodeLog in bundle.JobLogs) - { - var nodeIssues = ValidateNodeLog(nodeLog); - issues.AddRange(nodeIssues); - } - - // 3. 
Validate chain heads match last entry links - foreach (var nodeLog in bundle.JobLogs) - { - if (nodeLog.Entries.Count > 0) - { - var lastEntry = nodeLog.Entries[^1]; - if (!ByteArrayEquals(nodeLog.ChainHead, lastEntry.Link)) - { - issues.Add($"Node {nodeLog.NodeId}: chain head doesn't match last entry link"); - } - } - } - - var isValid = issues.Count == 0; - - if (!isValid) - { - _logger.LogWarning( - "Bundle {BundleId} validation failed with {IssueCount} issues", - bundle.BundleId, issues.Count); - } - else - { - _logger.LogDebug("Bundle {BundleId} validation passed", bundle.BundleId); - } - - return new BundleValidationResult - { - IsValid = isValid, - Issues = issues - }; - } - - private static IEnumerable ValidateNodeLog(NodeJobLog nodeLog) - { - byte[]? expectedPrevLink = null; - - for (var i = 0; i < nodeLog.Entries.Count; i++) - { - var entry = nodeLog.Entries[i]; - - // Verify prev_link matches expected - if (!ByteArrayEquals(entry.PrevLink, expectedPrevLink)) - { - yield return $"Node {nodeLog.NodeId}, entry {i}: prev_link mismatch"; - } - - // Recompute and verify link - var computedLink = OfflineHlcManager.ComputeLink( - entry.PrevLink, - entry.JobId, - entry.THlc, - entry.PayloadHash); - - if (!ByteArrayEquals(entry.Link, computedLink)) - { - yield return $"Node {nodeLog.NodeId}, entry {i} (JobId {entry.JobId}): link mismatch"; - } - - expectedPrevLink = entry.Link; - } - } - - private static string ComputeManifestDigest(IReadOnlyList jobLogs) - { - var manifest = jobLogs - .OrderBy(l => l.NodeId, StringComparer.Ordinal) - .Select(l => new - { - l.NodeId, - LastHlc = l.LastHlc.ToSortableString(), - ChainHead = Convert.ToHexString(l.ChainHead) - }) - .ToList(); - - var json = CanonJson.Serialize(manifest); - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static bool ByteArrayEquals(byte[]? a, byte[]? b) - { - if (a is null && b is null) return true; - if (a is null || b is null) return false; - return a.AsSpan().SequenceEqual(b); - } - - private static AirGapBundle FromImportDto(AirGapBundleImportDto dto) => new() - { - BundleId = dto.BundleId, - TenantId = dto.TenantId, - CreatedAt = dto.CreatedAt, - CreatedByNodeId = dto.CreatedByNodeId, - ManifestDigest = dto.ManifestDigest, - Signature = dto.Signature, - SignedBy = dto.SignedBy, - JobLogs = dto.JobLogs.Select(FromNodeJobLogDto).ToList() - }; - - private static NodeJobLog FromNodeJobLogDto(NodeJobLogImportDto dto) => new() - { - NodeId = dto.NodeId, - LastHlc = HlcTimestamp.Parse(dto.LastHlc), - ChainHead = Convert.FromBase64String(dto.ChainHead), - Entries = dto.Entries.Select(FromEntryDto).ToList() - }; - - private static OfflineJobLogEntry FromEntryDto(OfflineJobLogEntryImportDto dto) => new() - { - NodeId = dto.NodeId, - THlc = HlcTimestamp.Parse(dto.THlc), - JobId = dto.JobId, - PartitionKey = dto.PartitionKey, - Payload = dto.Payload, - PayloadHash = Convert.FromBase64String(dto.PayloadHash), - PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null, - Link = Convert.FromBase64String(dto.Link), - EnqueuedAt = dto.EnqueuedAt - }; - - // Import DTOs - private sealed record AirGapBundleImportDto - { - public required Guid BundleId { get; init; } - public required string TenantId { get; init; } - public required DateTimeOffset CreatedAt { get; init; } - public required string CreatedByNodeId { get; init; } - public required string ManifestDigest { get; init; } - public string? 
Signature { get; init; } - public string? SignedBy { get; init; } - public required IReadOnlyList JobLogs { get; init; } - } - - private sealed record NodeJobLogImportDto - { - public required string NodeId { get; init; } - public required string LastHlc { get; init; } - public required string ChainHead { get; init; } - public required IReadOnlyList Entries { get; init; } - } - - private sealed record OfflineJobLogEntryImportDto - { - public required string NodeId { get; init; } - public required string THlc { get; init; } - public required Guid JobId { get; init; } - public string? PartitionKey { get; init; } - public required string Payload { get; init; } - public required string PayloadHash { get; init; } - public string? PrevLink { get; init; } - public required string Link { get; init; } - public DateTimeOffset EnqueuedAt { get; init; } - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleSignatureResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleSignatureResult.cs new file mode 100644 index 000000000..e3b0eaba5 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleSignatureResult.cs @@ -0,0 +1,15 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Result of a bundle signature operation. +/// +/// The key ID used for signing. +/// The signature bytes. +/// The signature as Base64 string. +public sealed record AirGapBundleSignatureResult( + string KeyId, + byte[] Signature, + string SignatureBase64); diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleVerificationResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleVerificationResult.cs new file mode 100644 index 000000000..dfda65e7d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapBundleVerificationResult.cs @@ -0,0 +1,35 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Result of bundle signature verification. +/// +/// Whether the signature is valid. +/// The reason for the result. +public sealed record AirGapBundleVerificationResult(bool IsValid, string Reason) +{ + /// + /// Verification succeeded. + /// + public static AirGapBundleVerificationResult Valid { get; } = new(true, "Signature verified"); + + /// + /// Signing is disabled, so verification is skipped. + /// + public static AirGapBundleVerificationResult SigningDisabled { get; } = + new(true, "Signing disabled"); + + /// + /// Bundle has no signature but signing is enabled. + /// + public static AirGapBundleVerificationResult MissingSignature { get; } = + new(false, "Bundle is not signed"); + + /// + /// Signature verification failed. + /// + public static AirGapBundleVerificationResult InvalidSignature { get; } = + new(false, "Signature verification failed"); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapHybridLogicalClock.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapHybridLogicalClock.cs new file mode 100644 index 000000000..f0cb2a78d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapHybridLogicalClock.cs @@ -0,0 +1,38 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
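// Illustrative out-of-band signature check (not part of this change): in HMAC
// mode the signer computes HMAC-SHA256 over the DSSE Pre-Authentication
// Encoding of the manifest digest, so an operator holding the shared secret can
// recheck a bundle without the service. Parameter values are placeholders.
using System;
using System.Security.Cryptography;
using System.Text;

public static class ManifestSignatureExample // hypothetical helper
{
    public static bool SignatureMatches(
        string payloadType,       // the configured DSSE payload type
        string manifestDigest,    // bundle.ManifestDigest, e.g. "sha256:..."
        string signatureBase64,   // bundle.Signature
        byte[] secret)            // the decoded HMAC secret (SecretBase64)
    {
        var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);
        var digestBytes = Encoding.UTF8.GetBytes(manifestDigest);

        // PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload
        var pae = Encoding.UTF8.GetBytes(
            $"DSSEv1 {payloadTypeBytes.Length} {payloadType} {digestBytes.Length} {manifestDigest}");

        using var hmac = new HMACSHA256(secret);
        var expected = hmac.ComputeHash(pae);

        return CryptographicOperations.FixedTimeEquals(
            expected,
            Convert.FromBase64String(signatureBase64));
    }
}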
+// +using Microsoft.Extensions.Logging; +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Services; + +internal sealed class AirGapHybridLogicalClock : IHybridLogicalClock +{ + private readonly HybridLogicalClock.HybridLogicalClock _clock; + + public AirGapHybridLogicalClock( + TimeProvider timeProvider, + IHlcStateStore stateStore, + AirGapSyncClockOptions options, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(stateStore); + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(logger); + + _clock = new HybridLogicalClock.HybridLogicalClock( + timeProvider, + options.NodeId, + stateStore, + logger); + } + + public string NodeId => _clock.NodeId; + + public HlcTimestamp Current => _clock.Current; + + public HlcTimestamp Tick() => _clock.Tick(); + + public HlcTimestamp Receive(HlcTimestamp remote) => _clock.Receive(remote); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.Sync.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.Sync.cs new file mode 100644 index 000000000..8b35e5c32 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.Sync.cs @@ -0,0 +1,65 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapSyncService +{ + /// + public async Task SyncFromBundleAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(bundle); + + _logger.LogInformation( + "Starting sync from bundle {BundleId} with {LogCount} node logs for tenant {TenantId}", + bundle.BundleId, + bundle.JobLogs.Count, + bundle.TenantId); + + var merged = await _mergeService.MergeAsync(bundle.JobLogs, cancellationToken) + .ConfigureAwait(false); + + if (merged.MergedEntries.Count == 0) + { + _logger.LogInformation("Bundle {BundleId} has no entries to sync", bundle.BundleId); + return new SyncResult + { + BundleId = bundle.BundleId, + TotalInBundle = 0, + Appended = 0, + Duplicates = 0, + AlreadyExisted = 0 + }; + } + + var warnings = new List(); + var (appended, alreadyExisted, newChainHead) = await AppendMergedEntriesAsync( + bundle, + merged, + cancellationToken) + .ConfigureAwait(false); + + _logger.LogInformation( + "Sync complete for bundle {BundleId}: {Appended} appended, {Duplicates} duplicates, {AlreadyExisted} already existed", + bundle.BundleId, + appended, + merged.Duplicates.Count, + alreadyExisted); + + return new SyncResult + { + BundleId = bundle.BundleId, + TotalInBundle = merged.MergedEntries.Count, + Appended = appended, + Duplicates = merged.Duplicates.Count, + AlreadyExisted = alreadyExisted, + NewChainHead = newChainHead, + Warnings = warnings.Count > 0 ? warnings : null + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.SyncEntries.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.SyncEntries.cs new file mode 100644 index 000000000..35b9bc540 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.SyncEntries.cs @@ -0,0 +1,71 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
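// Illustrative replay expectation (not part of this change): the append logic
// below checks ExistsByJobIdAsync before inserting, so replaying the same
// bundle should append nothing the second time. The helper name, the exception,
// and the assumption that SyncFromBundleAsync returns the SyncResult model are
// illustrative.
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

public static class SyncReplayExample // hypothetical helper
{
    public static async Task AssertReplayIsNoOpAsync(
        IAirGapSyncService syncService,
        AirGapBundle bundle,
        CancellationToken cancellationToken)
    {
        var first = await syncService.SyncFromBundleAsync(bundle, cancellationToken);
        var second = await syncService.SyncFromBundleAsync(bundle, cancellationToken);

        // Every entry appended on the first pass should be reported as already
        // existing on the second pass.
        if (second.Appended != 0 || second.AlreadyExisted != first.Appended)
        {
            throw new InvalidOperationException("Bundle replay was not idempotent.");
        }
    }
}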
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class AirGapSyncService +{ + private async Task<(int Appended, int AlreadyExisted, byte[]? NewChainHead)> AppendMergedEntriesAsync( + AirGapBundle bundle, + MergeResult merged, + CancellationToken cancellationToken) + { + var (currentLink, _) = await _schedulerLogRepo.GetChainHeadAsync( + bundle.TenantId, + cancellationToken: cancellationToken).ConfigureAwait(false); + + foreach (var entry in merged.MergedEntries) + { + _hlc.Receive(entry.THlc); + } + + byte[]? prevLink = currentLink; + var appended = 0; + var alreadyExisted = 0; + + foreach (var entry in merged.MergedEntries) + { + cancellationToken.ThrowIfCancellationRequested(); + + var exists = await _schedulerLogRepo.ExistsByJobIdAsync( + bundle.TenantId, + entry.JobId, + cancellationToken).ConfigureAwait(false); + + if (exists) + { + _logger.LogDebug( + "Job {JobId} already exists in scheduler log, skipping", + entry.JobId); + alreadyExisted++; + continue; + } + + var newLink = OfflineHlcManager.ComputeLink( + prevLink, + entry.JobId, + entry.THlc, + entry.PayloadHash); + + await _schedulerLogRepo.InsertSyncedEntryAsync( + bundle.TenantId, + entry.THlc.ToSortableString(), + entry.PartitionKey, + entry.JobId, + entry.PayloadHash, + prevLink, + newLink, + entry.SourceNodeId, + bundle.BundleId, + cancellationToken).ConfigureAwait(false); + + prevLink = newLink; + appended++; + } + + return (appended, alreadyExisted, prevLink); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.cs index e4deb5fc1..06c2d9dd1 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/AirGapSyncService.cs @@ -1,74 +1,15 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - using Microsoft.Extensions.Logging; -using StellaOps.AirGap.Sync.Models; using StellaOps.HybridLogicalClock; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for the scheduler log repository used by sync. -/// -/// -/// This is a subset of the full ISchedulerLogRepository to avoid circular dependencies. -/// Implementations should delegate to the actual repository. -/// -public interface ISyncSchedulerLogRepository -{ - /// - /// Gets the chain head for a tenant/partition. - /// - Task<(byte[]? Link, string? THlc)> GetChainHeadAsync( - string tenantId, - string? partitionKey = null, - CancellationToken cancellationToken = default); - - /// - /// Gets an entry by job ID. - /// - Task ExistsByJobIdAsync( - string tenantId, - Guid jobId, - CancellationToken cancellationToken = default); - - /// - /// Inserts a synced entry. - /// - Task InsertSyncedEntryAsync( - string tenantId, - string tHlc, - string? partitionKey, - Guid jobId, - byte[] payloadHash, - byte[]? prevLink, - byte[] link, - string sourceNodeId, - Guid syncedFromBundle, - CancellationToken cancellationToken = default); -} - -/// -/// Interface for air-gap sync operations. -/// -public interface IAirGapSyncService -{ - /// - /// Syncs offline jobs from an air-gap bundle to the central scheduler. - /// - /// The bundle to sync. - /// Cancellation token. - /// The sync result. - Task SyncFromBundleAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default); -} - /// /// Service for syncing air-gap bundles to the central scheduler. 
/// -public sealed class AirGapSyncService : IAirGapSyncService +public sealed partial class AirGapSyncService : IAirGapSyncService { private readonly IHlcMergeService _mergeService; private readonly ISyncSchedulerLogRepository _schedulerLogRepo; @@ -89,110 +30,4 @@ public sealed class AirGapSyncService : IAirGapSyncService _hlc = hlc ?? throw new ArgumentNullException(nameof(hlc)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - /// - public async Task SyncFromBundleAsync( - AirGapBundle bundle, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(bundle); - - _logger.LogInformation( - "Starting sync from bundle {BundleId} with {LogCount} node logs for tenant {TenantId}", - bundle.BundleId, bundle.JobLogs.Count, bundle.TenantId); - - // 1. Merge all offline logs - var merged = await _mergeService.MergeAsync(bundle.JobLogs, cancellationToken) - .ConfigureAwait(false); - - if (merged.MergedEntries.Count == 0) - { - _logger.LogInformation("Bundle {BundleId} has no entries to sync", bundle.BundleId); - return new SyncResult - { - BundleId = bundle.BundleId, - TotalInBundle = 0, - Appended = 0, - Duplicates = 0, - AlreadyExisted = 0 - }; - } - - // 2. Get current scheduler chain head - var (currentLink, _) = await _schedulerLogRepo.GetChainHeadAsync( - bundle.TenantId, - cancellationToken: cancellationToken).ConfigureAwait(false); - - // 3. For each merged entry, update HLC clock (receive) - // This ensures central clock advances past all offline timestamps - foreach (var entry in merged.MergedEntries) - { - _hlc.Receive(entry.THlc); - } - - // 4. Append merged entries to scheduler log - // Chain links recomputed to extend from current head - byte[]? prevLink = currentLink; - var appended = 0; - var alreadyExisted = 0; - var warnings = new List(); - - foreach (var entry in merged.MergedEntries) - { - cancellationToken.ThrowIfCancellationRequested(); - - // Check if job already exists (idempotency) - var exists = await _schedulerLogRepo.ExistsByJobIdAsync( - bundle.TenantId, - entry.JobId, - cancellationToken).ConfigureAwait(false); - - if (exists) - { - _logger.LogDebug( - "Job {JobId} already exists in scheduler log, skipping", - entry.JobId); - alreadyExisted++; - continue; - } - - // Compute new chain link extending from current chain - var newLink = OfflineHlcManager.ComputeLink( - prevLink, - entry.JobId, - entry.THlc, - entry.PayloadHash); - - // Insert the entry - await _schedulerLogRepo.InsertSyncedEntryAsync( - bundle.TenantId, - entry.THlc.ToSortableString(), - entry.PartitionKey, - entry.JobId, - entry.PayloadHash, - prevLink, - newLink, - entry.SourceNodeId, - bundle.BundleId, - cancellationToken).ConfigureAwait(false); - - prevLink = newLink; - appended++; - } - - _logger.LogInformation( - "Sync complete for bundle {BundleId}: {Appended} appended, {Duplicates} duplicates, {AlreadyExisted} already existed", - bundle.BundleId, appended, merged.Duplicates.Count, alreadyExisted); - - return new SyncResult - { - BundleId = bundle.BundleId, - TotalInBundle = merged.MergedEntries.Count, - Appended = appended, - Duplicates = merged.Duplicates.Count, - AlreadyExisted = alreadyExisted, - NewChainHead = prevLink, - Warnings = warnings.Count > 0 ? 
warnings : null - }; - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/BundleValidationResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/BundleValidationResult.cs new file mode 100644 index 000000000..91860bf42 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/BundleValidationResult.cs @@ -0,0 +1,20 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Result of bundle validation. +/// +public sealed record BundleValidationResult +{ + /// + /// Gets whether the bundle is valid. + /// + public required bool IsValid { get; init; } + + /// + /// Gets validation issues found. + /// + public required IReadOnlyList Issues { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ConflictResolver.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ConflictResolver.cs index ce3fb074b..e72409625 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ConflictResolver.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ConflictResolver.cs @@ -1,28 +1,11 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - using Microsoft.Extensions.Logging; using StellaOps.AirGap.Sync.Models; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for conflict resolution during merge. -/// -public interface IConflictResolver -{ - /// - /// Resolves conflicts when the same JobId appears in multiple entries. - /// - /// The conflicting job ID. - /// The conflicting entries with their source nodes. - /// The resolution result. - ConflictResolution Resolve( - Guid jobId, - IReadOnlyList<(string NodeId, OfflineJobLogEntry Entry)> conflicting); -} - /// /// Resolves conflicts during HLC merge operations. 
/// @@ -52,7 +35,6 @@ public sealed class ConflictResolver : IConflictResolver if (conflicting.Count == 1) { - // No conflict return new ConflictResolution { Type = ConflictType.DuplicateTimestamp, @@ -62,7 +44,6 @@ public sealed class ConflictResolver : IConflictResolver }; } - // Verify payloads are actually different var uniquePayloads = conflicting .Select(c => Convert.ToHexString(c.Entry.PayloadHash)) .Distinct() @@ -70,8 +51,6 @@ public sealed class ConflictResolver : IConflictResolver if (uniquePayloads.Count == 1) { - // Same payload, different HLC timestamps - not a real conflict - // Take the earliest HLC (preserves causality) var sorted = conflicting .OrderBy(c => c.Entry.THlc.PhysicalTime) .ThenBy(c => c.Entry.THlc.LogicalCounter) @@ -83,7 +62,10 @@ public sealed class ConflictResolver : IConflictResolver _logger.LogDebug( "Resolved duplicate timestamp conflict for JobId {JobId}: selected entry from node {NodeId} at {THlc}, dropped {DroppedCount} duplicates", - jobId, earliest.NodeId, earliest.Entry.THlc, dropped.Count); + jobId, + earliest.NodeId, + earliest.Entry.THlc, + dropped.Count); return new ConflictResolution { @@ -94,14 +76,14 @@ public sealed class ConflictResolver : IConflictResolver }; } - // Actual conflict: same JobId, different payloads - // This indicates a bug in deterministic ID computation var nodeIds = string.Join(", ", conflicting.Select(c => c.NodeId)); var payloadHashes = string.Join(", ", conflicting.Select(c => Convert.ToHexString(c.Entry.PayloadHash)[..16] + "...")); _logger.LogError( "Payload mismatch conflict for JobId {JobId}: different payloads from nodes [{NodeIds}] with hashes [{PayloadHashes}]", - jobId, nodeIds, payloadHashes); + jobId, + nodeIds, + payloadHashes); return new ConflictResolution { diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Mappings.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Mappings.cs new file mode 100644 index 000000000..b33628af6 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Mappings.cs @@ -0,0 +1,20 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class HlcMergeService +{ + private static MergedJobEntry CreateMergedEntry(string nodeId, OfflineJobLogEntry entry) => new() + { + SourceNodeId = nodeId, + THlc = entry.THlc, + JobId = entry.JobId, + PartitionKey = entry.PartitionKey, + Payload = entry.Payload, + PayloadHash = entry.PayloadHash, + OriginalLink = entry.Link + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.Result.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.Result.cs new file mode 100644 index 000000000..03aa2cee6 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.Result.cs @@ -0,0 +1,47 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class HlcMergeService +{ + private MergeResult BuildMergeResult( + IReadOnlyList nodeLogs, + List deduplicated, + List duplicates) + { + var ordered = deduplicated + .OrderBy(x => x.THlc.PhysicalTime) + .ThenBy(x => x.THlc.LogicalCounter) + .ThenBy(x => x.THlc.NodeId, StringComparer.Ordinal) + .ThenBy(x => x.JobId) + .ToList(); + + byte[]? 
prevLink = null; + foreach (var entry in ordered) + { + entry.MergedLink = OfflineHlcManager.ComputeLink( + prevLink, + entry.JobId, + entry.THlc, + entry.PayloadHash); + prevLink = entry.MergedLink; + } + + _logger.LogInformation( + "Merge complete: {MergedCount} entries, {DuplicateCount} duplicates dropped", + ordered.Count, + duplicates.Count); + + return new MergeResult + { + MergedEntries = ordered, + Duplicates = duplicates, + MergedChainHead = prevLink, + SourceNodes = nodeLogs.Select(l => l.NodeId).ToList() + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.cs new file mode 100644 index 000000000..5d98875e0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.Merge.cs @@ -0,0 +1,87 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class HlcMergeService +{ + /// + public Task MergeAsync( + IReadOnlyList nodeLogs, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(nodeLogs); + cancellationToken.ThrowIfCancellationRequested(); + + if (nodeLogs.Count == 0) + { + return Task.FromResult(new MergeResult + { + MergedEntries = Array.Empty(), + Duplicates = Array.Empty(), + SourceNodes = Array.Empty() + }); + } + + _logger.LogInformation( + "Starting merge of {NodeCount} node logs with {TotalEntries} total entries", + nodeLogs.Count, + nodeLogs.Sum(l => l.Entries.Count)); + + var allEntries = nodeLogs + .SelectMany(log => log.Entries.Select(e => (log.NodeId, Entry: e))) + .ToList(); + + var sorted = allEntries + .OrderBy(x => x.Entry.THlc.PhysicalTime) + .ThenBy(x => x.Entry.THlc.LogicalCounter) + .ThenBy(x => x.Entry.THlc.NodeId, StringComparer.Ordinal) + .ThenBy(x => x.Entry.JobId) + .ToList(); + + var groupedByJobId = sorted.GroupBy(x => x.Entry.JobId).ToList(); + + var deduplicated = new List(); + var duplicates = new List(); + + foreach (var group in groupedByJobId) + { + var entries = group.ToList(); + + if (entries.Count == 1) + { + var (nodeId, entry) = entries[0]; + deduplicated.Add(CreateMergedEntry(nodeId, entry)); + continue; + } + + var resolution = _conflictResolver.Resolve(group.Key, entries); + + if (resolution.Resolution == ResolutionStrategy.Error) + { + _logger.LogError( + "Conflict resolution failed for JobId {JobId}: {Error}", + group.Key, + resolution.Error); + throw new InvalidOperationException(resolution.Error); + } + + if (resolution.SelectedEntry is not null) + { + var sourceEntry = entries.First(e => e.Entry == resolution.SelectedEntry); + deduplicated.Add(CreateMergedEntry(sourceEntry.NodeId, resolution.SelectedEntry)); + } + + foreach (var dropped in resolution.DroppedEntries ?? 
Array.Empty()) + { + var sourceEntry = entries.First(e => e.Entry == dropped); + duplicates.Add(new DuplicateEntry(dropped.JobId, sourceEntry.NodeId, dropped.THlc)); + } + } + + return Task.FromResult(BuildMergeResult(nodeLogs, deduplicated, duplicates)); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.cs index 4c11e896e..f7c6482a8 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/HlcMergeService.cs @@ -1,32 +1,14 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - using Microsoft.Extensions.Logging; -using StellaOps.AirGap.Sync.Models; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for HLC-based merge operations. -/// -public interface IHlcMergeService -{ - /// - /// Merges job logs from multiple offline nodes into a unified, HLC-ordered stream. - /// - /// The node logs to merge. - /// Cancellation token. - /// The merge result. - Task MergeAsync( - IReadOnlyList nodeLogs, - CancellationToken cancellationToken = default); -} - /// /// Service for merging job logs from multiple offline nodes using HLC total ordering. /// -public sealed class HlcMergeService : IHlcMergeService +public sealed partial class HlcMergeService : IHlcMergeService { private readonly IConflictResolver _conflictResolver; private readonly ILogger _logger; @@ -41,129 +23,4 @@ public sealed class HlcMergeService : IHlcMergeService _conflictResolver = conflictResolver ?? throw new ArgumentNullException(nameof(conflictResolver)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - /// - public Task MergeAsync( - IReadOnlyList nodeLogs, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(nodeLogs); - cancellationToken.ThrowIfCancellationRequested(); - - if (nodeLogs.Count == 0) - { - return Task.FromResult(new MergeResult - { - MergedEntries = Array.Empty(), - Duplicates = Array.Empty(), - SourceNodes = Array.Empty() - }); - } - - _logger.LogInformation( - "Starting merge of {NodeCount} node logs with {TotalEntries} total entries", - nodeLogs.Count, - nodeLogs.Sum(l => l.Entries.Count)); - - // 1. Collect all entries from all nodes - var allEntries = nodeLogs - .SelectMany(log => log.Entries.Select(e => (log.NodeId, Entry: e))) - .ToList(); - - // 2. Sort by HLC total order: (PhysicalTime, LogicalCounter, NodeId, JobId) - var sorted = allEntries - .OrderBy(x => x.Entry.THlc.PhysicalTime) - .ThenBy(x => x.Entry.THlc.LogicalCounter) - .ThenBy(x => x.Entry.THlc.NodeId, StringComparer.Ordinal) - .ThenBy(x => x.Entry.JobId) - .ToList(); - - // 3. 
Group by JobId to detect duplicates - var groupedByJobId = sorted.GroupBy(x => x.Entry.JobId).ToList(); - - var deduplicated = new List(); - var duplicates = new List(); - - foreach (var group in groupedByJobId) - { - var entries = group.ToList(); - - if (entries.Count == 1) - { - // No conflict - add directly - var (nodeId, entry) = entries[0]; - deduplicated.Add(CreateMergedEntry(nodeId, entry)); - } - else - { - // Multiple entries with same JobId - resolve conflict - var resolution = _conflictResolver.Resolve(group.Key, entries); - - if (resolution.Resolution == ResolutionStrategy.Error) - { - _logger.LogError( - "Conflict resolution failed for JobId {JobId}: {Error}", - group.Key, resolution.Error); - throw new InvalidOperationException(resolution.Error); - } - - // Add the selected entry - if (resolution.SelectedEntry is not null) - { - var sourceEntry = entries.First(e => e.Entry == resolution.SelectedEntry); - deduplicated.Add(CreateMergedEntry(sourceEntry.NodeId, resolution.SelectedEntry)); - } - - // Record duplicates - foreach (var dropped in resolution.DroppedEntries ?? Array.Empty()) - { - var sourceEntry = entries.First(e => e.Entry == dropped); - duplicates.Add(new DuplicateEntry(dropped.JobId, sourceEntry.NodeId, dropped.THlc)); - } - } - } - - // 4. Sort deduplicated entries by HLC order - deduplicated = deduplicated - .OrderBy(x => x.THlc.PhysicalTime) - .ThenBy(x => x.THlc.LogicalCounter) - .ThenBy(x => x.THlc.NodeId, StringComparer.Ordinal) - .ThenBy(x => x.JobId) - .ToList(); - - // 5. Recompute unified chain - byte[]? prevLink = null; - foreach (var entry in deduplicated) - { - entry.MergedLink = OfflineHlcManager.ComputeLink( - prevLink, - entry.JobId, - entry.THlc, - entry.PayloadHash); - prevLink = entry.MergedLink; - } - - _logger.LogInformation( - "Merge complete: {MergedCount} entries, {DuplicateCount} duplicates dropped", - deduplicated.Count, duplicates.Count); - - return Task.FromResult(new MergeResult - { - MergedEntries = deduplicated, - Duplicates = duplicates, - MergedChainHead = prevLink, - SourceNodes = nodeLogs.Select(l => l.NodeId).ToList() - }); - } - - private static MergedJobEntry CreateMergedEntry(string nodeId, OfflineJobLogEntry entry) => new() - { - SourceNodeId = nodeId, - THlc = entry.THlc, - JobId = entry.JobId, - PartitionKey = entry.PartitionKey, - Payload = entry.Payload, - PayloadHash = entry.PayloadHash, - OriginalLink = entry.Link - }; } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleDsseSigner.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleDsseSigner.cs new file mode 100644 index 000000000..86fde5d16 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleDsseSigner.cs @@ -0,0 +1,37 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for air-gap bundle DSSE signing. +/// +public interface IAirGapBundleDsseSigner +{ + /// + /// Signs an air-gap bundle manifest and returns the signature result. + /// + /// The bundle to sign. + /// Cancellation token. + /// Signature result with key ID and signature. + Task SignAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default); + + /// + /// Verifies an air-gap bundle signature. + /// + /// The bundle to verify. + /// Cancellation token. + /// True if signature is valid or signing is disabled; false if invalid. 
+ Task VerifyAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default); + + /// + /// Gets whether signing is enabled. + /// + bool IsEnabled { get; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleExporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleExporter.cs new file mode 100644 index 000000000..b31ed5831 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleExporter.cs @@ -0,0 +1,45 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for air-gap bundle export operations. +/// +public interface IAirGapBundleExporter +{ + /// + /// Exports an air-gap bundle containing offline job logs. + /// + /// The tenant ID. + /// The node IDs to include (null for current node only). + /// Cancellation token. + /// The exported bundle. + Task ExportAsync( + string tenantId, + IReadOnlyList? nodeIds = null, + CancellationToken cancellationToken = default); + + /// + /// Exports an air-gap bundle to a file. + /// + /// The bundle to export. + /// The output file path. + /// Cancellation token. + Task ExportToFileAsync( + AirGapBundle bundle, + string outputPath, + CancellationToken cancellationToken = default); + + /// + /// Exports an air-gap bundle to a JSON string. + /// + /// The bundle to export. + /// Cancellation token. + /// The JSON string representation. + Task ExportToStringAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleImporter.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleImporter.cs new file mode 100644 index 000000000..32db3937a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapBundleImporter.cs @@ -0,0 +1,39 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for air-gap bundle import operations. +/// +public interface IAirGapBundleImporter +{ + /// + /// Imports an air-gap bundle from a file. + /// + /// The input file path. + /// Cancellation token. + /// The imported bundle. + Task ImportFromFileAsync( + string inputPath, + CancellationToken cancellationToken = default); + + /// + /// Validates a bundle's integrity. + /// + /// The bundle to validate. + /// Validation result with any issues found. + BundleValidationResult Validate(AirGapBundle bundle); + + /// + /// Imports an air-gap bundle from a JSON string. + /// + /// The JSON string representation. + /// Cancellation token. + /// The imported bundle. + Task ImportFromStringAsync( + string json, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapSyncService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapSyncService.cs new file mode 100644 index 000000000..3490bc993 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IAirGapSyncService.cs @@ -0,0 +1,22 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for air-gap sync operations. +/// +public interface IAirGapSyncService +{ + /// + /// Syncs offline jobs from an air-gap bundle to the central scheduler. 
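// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. It shows one
// plausible round trip across the air gap using the exporter/importer
// interfaces declared above. The tenant id, the bundle path, and the injected
// service instances are assumptions made for the example; error handling is elided.
// --------------------------------------------------------------------------
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

internal static class BundleRoundTripSketch
{
    public static async Task RoundTripAsync(
        IAirGapBundleExporter exporter,
        IAirGapBundleImporter importer,
        CancellationToken cancellationToken)
    {
        // Connected side: export the current node's offline job log to removable media.
        AirGapBundle bundle = await exporter.ExportAsync("tenant-a", null, cancellationToken);
        await exporter.ExportToFileAsync(bundle, "/media/usb/job-sync-bundle.json", cancellationToken);

        // Air-gapped side: import the file and check integrity before any further processing.
        AirGapBundle imported = await importer.ImportFromFileAsync("/media/usb/job-sync-bundle.json", cancellationToken);
        BundleValidationResult validation = importer.Validate(imported);

        // Only hand the bundle to the sync service once validation reports no issues;
        // the exact shape of BundleValidationResult lives in Models and is not shown here.
        _ = validation;
    }
}
// ------------------------- end editor's sketch ----------------------------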
+ /// + /// The bundle to sync. + /// Cancellation token. + /// The sync result. + Task SyncFromBundleAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IConflictResolver.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IConflictResolver.cs new file mode 100644 index 000000000..efc431808 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IConflictResolver.cs @@ -0,0 +1,22 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for conflict resolution during merge. +/// +public interface IConflictResolver +{ + /// + /// Resolves conflicts when the same JobId appears in multiple entries. + /// + /// The conflicting job ID. + /// The conflicting entries with their source nodes. + /// The resolution result. + ConflictResolution Resolve( + Guid jobId, + IReadOnlyList<(string NodeId, OfflineJobLogEntry Entry)> conflicting); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IHlcMergeService.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IHlcMergeService.cs new file mode 100644 index 000000000..bca3a73df --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IHlcMergeService.cs @@ -0,0 +1,22 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for HLC-based merge operations. +/// +public interface IHlcMergeService +{ + /// + /// Merges job logs from multiple offline nodes into a unified, HLC-ordered stream. + /// + /// The node logs to merge. + /// Cancellation token. + /// The merge result. + Task MergeAsync( + IReadOnlyList nodeLogs, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IOfflineHlcManager.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IOfflineHlcManager.cs new file mode 100644 index 000000000..20c9921c9 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/IOfflineHlcManager.cs @@ -0,0 +1,39 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for offline HLC management. +/// +public interface IOfflineHlcManager +{ + /// + /// Enqueues a job locally while offline, maintaining the local chain. + /// + /// The payload type. + /// The job payload. + /// The idempotency key for deterministic job ID. + /// Optional partition key. + /// Cancellation token. + /// The enqueue result. + Task EnqueueOfflineAsync( + T payload, + string idempotencyKey, + string? partitionKey = null, + CancellationToken cancellationToken = default) where T : notnull; + + /// + /// Gets the current node's job log for export. + /// + /// Cancellation token. + /// The node job log, or null if empty. + Task GetNodeJobLogAsync(CancellationToken cancellationToken = default); + + /// + /// Gets the node ID. 
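// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. It shows how
// a caller might use IOfflineHlcManager while disconnected: enqueue with a
// stable idempotency key (so replays produce the same job id), then pull the
// node's log for export. The payload record and the key format are assumptions.
// --------------------------------------------------------------------------
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

internal static class OfflineEnqueueSketch
{
    // Hypothetical payload; any notnull type the caller serializes canonically will do.
    private sealed record ScanRequest(string ImageDigest, string Profile);

    public static async Task EnqueueWhileOfflineAsync(
        IOfflineHlcManager hlc,
        CancellationToken cancellationToken)
    {
        var payload = new ScanRequest("sha256:abc123", "default");

        // Re-submitting with the same key yields the same deterministic job id,
        // which is what lets the central merge drop duplicates safely.
        var result = await hlc.EnqueueOfflineAsync(
            payload,
            idempotencyKey: $"scan:{payload.ImageDigest}:{payload.Profile}",
            partitionKey: null,
            cancellationToken);

        // Later, the accumulated log for this node becomes the bundle contents.
        NodeJobLog? log = await hlc.GetNodeJobLogAsync(cancellationToken);
        _ = (result, log);
    }
}
// ------------------------- end editor's sketch ----------------------------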
+ /// + string NodeId { get; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ISyncSchedulerLogRepository.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ISyncSchedulerLogRepository.cs new file mode 100644 index 000000000..8c3e56303 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/ISyncSchedulerLogRepository.cs @@ -0,0 +1,45 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Services; + +/// +/// Interface for the scheduler log repository used by sync. +/// +/// +/// This is a subset of the full ISchedulerLogRepository to avoid circular dependencies. +/// Implementations should delegate to the actual repository. +/// +public interface ISyncSchedulerLogRepository +{ + /// + /// Gets the chain head for a tenant/partition. + /// + Task<(byte[]? Link, string? THlc)> GetChainHeadAsync( + string tenantId, + string? partitionKey = null, + CancellationToken cancellationToken = default); + + /// + /// Gets an entry by job ID. + /// + Task ExistsByJobIdAsync( + string tenantId, + Guid jobId, + CancellationToken cancellationToken = default); + + /// + /// Inserts a synced entry. + /// + Task InsertSyncedEntryAsync( + string tenantId, + string tHlc, + string? partitionKey, + Guid jobId, + byte[] payloadHash, + byte[]? prevLink, + byte[] link, + string sourceNodeId, + Guid syncedFromBundle, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Enqueue.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Enqueue.cs new file mode 100644 index 000000000..dfceccd70 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Enqueue.cs @@ -0,0 +1,68 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using StellaOps.Canonical.Json; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class OfflineHlcManager +{ + /// + public async Task EnqueueOfflineAsync( + T payload, + string idempotencyKey, + string? 
partitionKey = null, + CancellationToken cancellationToken = default) where T : notnull + { + ArgumentNullException.ThrowIfNull(payload); + ArgumentException.ThrowIfNullOrWhiteSpace(idempotencyKey); + + var tHlc = _hlc.Tick(); + var jobId = ComputeDeterministicJobId(idempotencyKey); + + var payloadJson = CanonJson.Serialize(payload); + var payloadHash = SHA256.HashData(Encoding.UTF8.GetBytes(payloadJson)); + + var prevLink = await _jobLogStore.GetLastLinkAsync(NodeId, cancellationToken) + .ConfigureAwait(false); + + var link = ComputeLink(prevLink, jobId, tHlc, payloadHash); + + var entry = new OfflineJobLogEntry + { + NodeId = NodeId, + THlc = tHlc, + JobId = jobId, + PartitionKey = partitionKey, + Payload = payloadJson, + PayloadHash = payloadHash, + PrevLink = prevLink, + Link = link, + EnqueuedAt = _timeProvider.GetUtcNow() + }; + + await _jobLogStore.AppendAsync(entry, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Enqueued offline job {JobId} with HLC {THlc} on node {NodeId}", + jobId, + tHlc, + NodeId); + + return new OfflineEnqueueResult + { + THlc = tHlc, + JobId = jobId, + Link = link, + NodeId = NodeId + }; + } + + /// + public Task GetNodeJobLogAsync(CancellationToken cancellationToken = default) + => _jobLogStore.GetNodeJobLogAsync(NodeId, cancellationToken); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Helpers.cs new file mode 100644 index 000000000..d4b1e87ec --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.Helpers.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.HybridLogicalClock; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Sync.Services; + +public sealed partial class OfflineHlcManager +{ + private static Guid ComputeDeterministicJobId(string idempotencyKey) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(idempotencyKey)); + return new Guid(hash.AsSpan(0, 16)); + } + + /// + /// Computes chain link: Hash(prev_link || job_id || t_hlc || payload_hash). + /// + internal static byte[] ComputeLink( + byte[]? prevLink, + Guid jobId, + HlcTimestamp tHlc, + byte[] payloadHash) + { + using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256); + + hasher.AppendData(prevLink ?? new byte[32]); + hasher.AppendData(jobId.ToByteArray()); + hasher.AppendData(Encoding.UTF8.GetBytes(tHlc.ToSortableString())); + hasher.AppendData(payloadHash); + + return hasher.GetHashAndReset(); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.cs index 2d5b03aad..82f6abc84 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Services/OfflineHlcManager.cs @@ -1,60 +1,22 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Stores; -using StellaOps.Canonical.Json; using StellaOps.Determinism; using StellaOps.HybridLogicalClock; -using System.Security.Cryptography; -using System.Text; namespace StellaOps.AirGap.Sync.Services; -/// -/// Interface for offline HLC management. 
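// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. It walks a
// node's log in order and recomputes each link exactly as ComputeLink above
// does (Hash(prev_link || job_id || t_hlc || payload_hash)), flagging any
// tampered or reordered entry. ComputeLink is internal, so this assumes the
// check runs inside the library or from tests via InternalsVisibleTo.
// --------------------------------------------------------------------------
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;

internal static class ChainVerificationSketch
{
    public static bool VerifyChain(IReadOnlyList<OfflineJobLogEntry> entriesInHlcOrder)
    {
        byte[]? prevLink = null;

        foreach (var entry in entriesInHlcOrder)
        {
            var expected = OfflineHlcManager.ComputeLink(
                prevLink, entry.JobId, entry.THlc, entry.PayloadHash);

            if (!expected.SequenceEqual(entry.Link))
            {
                return false; // broken link: payload, timestamp, or ordering changed
            }

            prevLink = entry.Link;
        }

        return true;
    }
}
// ------------------------- end editor's sketch ----------------------------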
-/// -public interface IOfflineHlcManager -{ - /// - /// Enqueues a job locally while offline, maintaining the local chain. - /// - /// The payload type. - /// The job payload. - /// The idempotency key for deterministic job ID. - /// Optional partition key. - /// Cancellation token. - /// The enqueue result. - Task EnqueueOfflineAsync( - T payload, - string idempotencyKey, - string? partitionKey = null, - CancellationToken cancellationToken = default) where T : notnull; - - /// - /// Gets the current node's job log for export. - /// - /// Cancellation token. - /// The node job log, or null if empty. - Task GetNodeJobLogAsync(CancellationToken cancellationToken = default); - - /// - /// Gets the node ID. - /// - string NodeId { get; } -} - /// /// Manages HLC operations for offline/air-gap scenarios. /// -public sealed class OfflineHlcManager : IOfflineHlcManager +public sealed partial class OfflineHlcManager : IOfflineHlcManager { private readonly IHybridLogicalClock _hlc; private readonly IOfflineJobLogStore _jobLogStore; private readonly IGuidProvider _guidProvider; + private readonly TimeProvider _timeProvider; private readonly ILogger _logger; /// @@ -64,110 +26,16 @@ public sealed class OfflineHlcManager : IOfflineHlcManager IHybridLogicalClock hlc, IOfflineJobLogStore jobLogStore, IGuidProvider guidProvider, + TimeProvider timeProvider, ILogger logger) { _hlc = hlc ?? throw new ArgumentNullException(nameof(hlc)); _jobLogStore = jobLogStore ?? throw new ArgumentNullException(nameof(jobLogStore)); _guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } /// public string NodeId => _hlc.NodeId; - - /// - public async Task EnqueueOfflineAsync( - T payload, - string idempotencyKey, - string? partitionKey = null, - CancellationToken cancellationToken = default) where T : notnull - { - ArgumentNullException.ThrowIfNull(payload); - ArgumentException.ThrowIfNullOrWhiteSpace(idempotencyKey); - - // 1. Generate HLC timestamp - var tHlc = _hlc.Tick(); - - // 2. Compute deterministic job ID from idempotency key - var jobId = ComputeDeterministicJobId(idempotencyKey); - - // 3. Serialize and hash payload - var payloadJson = CanonJson.Serialize(payload); - var payloadHash = SHA256.HashData(Encoding.UTF8.GetBytes(payloadJson)); - - // 4. Get previous chain link - var prevLink = await _jobLogStore.GetLastLinkAsync(NodeId, cancellationToken) - .ConfigureAwait(false); - - // 5. Compute chain link - var link = ComputeLink(prevLink, jobId, tHlc, payloadHash); - - // 6. Create and store entry - var entry = new OfflineJobLogEntry - { - NodeId = NodeId, - THlc = tHlc, - JobId = jobId, - PartitionKey = partitionKey, - Payload = payloadJson, - PayloadHash = payloadHash, - PrevLink = prevLink, - Link = link, - EnqueuedAt = DateTimeOffset.UtcNow - }; - - await _jobLogStore.AppendAsync(entry, cancellationToken).ConfigureAwait(false); - - _logger.LogInformation( - "Enqueued offline job {JobId} with HLC {THlc} on node {NodeId}", - jobId, tHlc, NodeId); - - return new OfflineEnqueueResult - { - THlc = tHlc, - JobId = jobId, - Link = link, - NodeId = NodeId - }; - } - - /// - public Task GetNodeJobLogAsync(CancellationToken cancellationToken = default) - => _jobLogStore.GetNodeJobLogAsync(NodeId, cancellationToken); - - /// - /// Computes deterministic job ID from idempotency key. 
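// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. The
// constructor now takes a TimeProvider, so EnqueuedAt becomes controllable in
// tests. Microsoft.Extensions.Time.Testing.FakeTimeProvider is one option; the
// hlc/store/guids arguments below are assumed test doubles, not shown here.
// --------------------------------------------------------------------------
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.AirGap.Sync.Services;
using StellaOps.AirGap.Sync.Stores;
using StellaOps.Determinism;
using StellaOps.HybridLogicalClock;

internal static class DeterministicTimeSketch
{
    public static OfflineHlcManager CreateForTests(
        IHybridLogicalClock hlc,
        IOfflineJobLogStore store,
        IGuidProvider guids)
    {
        // Freeze the wall clock so EnqueuedAt is reproducible across test runs.
        var time = new FakeTimeProvider(new DateTimeOffset(2026, 1, 1, 0, 0, 0, TimeSpan.Zero));

        return new OfflineHlcManager(
            hlc, store, guids, time, NullLogger<OfflineHlcManager>.Instance);
    }
}
// ------------------------- end editor's sketch ----------------------------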
- /// - private Guid ComputeDeterministicJobId(string idempotencyKey) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(idempotencyKey)); - // Use first 16 bytes of SHA-256 as deterministic GUID - return new Guid(hash.AsSpan(0, 16)); - } - - /// - /// Computes chain link: Hash(prev_link || job_id || t_hlc || payload_hash). - /// - internal static byte[] ComputeLink( - byte[]? prevLink, - Guid jobId, - HlcTimestamp tHlc, - byte[] payloadHash) - { - using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256); - - // Previous link (or 32 zero bytes for first entry) - hasher.AppendData(prevLink ?? new byte[32]); - - // Job ID as bytes - hasher.AppendData(jobId.ToByteArray()); - - // HLC timestamp as UTF-8 bytes - hasher.AppendData(Encoding.UTF8.GetBytes(tHlc.ToSortableString())); - - // Payload hash - hasher.AppendData(payloadHash); - - return hasher.GetHashAndReset(); - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Append.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Append.cs new file mode 100644 index 000000000..4e551b17d --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Append.cs @@ -0,0 +1,37 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Stores; + +public sealed partial class FileBasedOfflineJobLogStore +{ + /// + public async Task AppendAsync(OfflineJobLogEntry entry, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var filePath = GetNodeLogFilePath(entry.NodeId); + var dto = ToDto(entry); + var line = JsonSerializer.Serialize(dto, _jsonOptions); + + await File.AppendAllTextAsync(filePath, line + LineEnding, cancellationToken) + .ConfigureAwait(false); + + _logger.LogDebug( + "Appended offline job entry {JobId} for node {NodeId}", + entry.JobId, + entry.NodeId); + } + finally + { + _lock.Release(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Clear.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Clear.cs new file mode 100644 index 000000000..7601f9519 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Clear.cs @@ -0,0 +1,63 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using Microsoft.Extensions.Logging; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Stores; + +public sealed partial class FileBasedOfflineJobLogStore +{ + /// + public async Task ClearEntriesAsync( + string nodeId, + string upToHlc, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); + + await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); + var remaining = entries + .Where(e => string.CompareOrdinal(e.THlc.ToSortableString(), upToHlc) > 0) + .ToList(); + + var cleared = entries.Count - remaining.Count; + var filePath = GetNodeLogFilePath(nodeId); + + if (remaining.Count == 0) + { + if (File.Exists(filePath)) + { + File.Delete(filePath); + } + } + else + { + var lines = remaining.Select(e => JsonSerializer.Serialize(ToDto(e), _jsonOptions)); + var content = string.Join(LineEnding, lines); + if (content.Length > 0) + { + content += LineEnding; + } + + await File.WriteAllTextAsync(filePath, content, cancellationToken) + .ConfigureAwait(false); + } + + _logger.LogInformation( + "Cleared {Count} offline job entries for node {NodeId} up to HLC {UpToHlc}", + cleared, + nodeId, + upToHlc); + + return cleared; + } + finally + { + _lock.Release(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Dto.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Dto.cs new file mode 100644 index 000000000..8ac84e358 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Dto.cs @@ -0,0 +1,20 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Stores; + +public sealed partial class FileBasedOfflineJobLogStore +{ + private sealed record OfflineJobLogEntryDto + { + public required string NodeId { get; init; } + public required string THlc { get; init; } + public required Guid JobId { get; init; } + public string? PartitionKey { get; init; } + public required string Payload { get; init; } + public required string PayloadHash { get; init; } + public string? PrevLink { get; init; } + public required string Link { get; init; } + public DateTimeOffset EnqueuedAt { get; init; } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Helpers.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Helpers.cs new file mode 100644 index 000000000..1939108bd --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Helpers.cs @@ -0,0 +1,56 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
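// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. It shows the
// intended pruning flow for ClearEntriesAsync above: entries whose sortable HLC
// string is at or below the given value are removed, newer unsynced entries are
// kept. Where the acknowledged HLC comes from is an assumption for the example.
// --------------------------------------------------------------------------
using StellaOps.AirGap.Sync.Stores;

internal static class LogPruningSketch
{
    public static async Task PruneAfterAckAsync(
        IOfflineJobLogStore store,
        string nodeId,
        string acknowledgedHlcSortableString,
        CancellationToken cancellationToken)
    {
        // Returns how many entries were dropped from the node's local NDJSON log.
        int cleared = await store.ClearEntriesAsync(
            nodeId, acknowledgedHlcSortableString, cancellationToken);
        _ = cleared;
    }
}
// ------------------------- end editor's sketch ----------------------------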
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Stores; + +public sealed partial class FileBasedOfflineJobLogStore +{ + private string GetNodeLogFilePath(string nodeId) + { + var safeNodeId = nodeId + .Replace('/', '_') + .Replace('\\', '_') + .Replace(':', '_'); + return Path.Combine(_options.Value.DataDirectory, $"offline-jobs-{safeNodeId}.ndjson"); + } + + private void EnsureDirectoryExists() + { + var dir = _options.Value.DataDirectory; + if (!Directory.Exists(dir)) + { + Directory.CreateDirectory(dir); + _logger.LogInformation("Created offline job log directory: {Directory}", dir); + } + } + + private static OfflineJobLogEntryDto ToDto(OfflineJobLogEntry entry) => new() + { + NodeId = entry.NodeId, + THlc = entry.THlc.ToSortableString(), + JobId = entry.JobId, + PartitionKey = entry.PartitionKey, + Payload = entry.Payload, + PayloadHash = Convert.ToBase64String(entry.PayloadHash), + PrevLink = entry.PrevLink is not null ? Convert.ToBase64String(entry.PrevLink) : null, + Link = Convert.ToBase64String(entry.Link), + EnqueuedAt = entry.EnqueuedAt + }; + + private static OfflineJobLogEntry FromDto(OfflineJobLogEntryDto dto) => new() + { + NodeId = dto.NodeId, + THlc = HlcTimestamp.Parse(dto.THlc), + JobId = dto.JobId, + PartitionKey = dto.PartitionKey, + Payload = dto.Payload, + PayloadHash = Convert.FromBase64String(dto.PayloadHash), + PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null, + Link = Convert.FromBase64String(dto.Link), + EnqueuedAt = dto.EnqueuedAt + }; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Read.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Read.cs new file mode 100644 index 000000000..564bc6cfc --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.Read.cs @@ -0,0 +1,77 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Stores; + +public sealed partial class FileBasedOfflineJobLogStore +{ + /// + public async Task> GetEntriesAsync( + string nodeId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); + + var filePath = GetNodeLogFilePath(nodeId); + if (!File.Exists(filePath)) + { + return Array.Empty(); + } + + await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var lines = await File.ReadAllLinesAsync(filePath, cancellationToken).ConfigureAwait(false); + var entries = new List(lines.Length); + + foreach (var line in lines) + { + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var dto = JsonSerializer.Deserialize(line, _jsonOptions); + if (dto is not null) + { + entries.Add(FromDto(dto)); + } + } + + return entries.OrderBy(e => e.THlc).ToList(); + } + finally + { + _lock.Release(); + } + } + + /// + public async Task GetLastLinkAsync(string nodeId, CancellationToken cancellationToken = default) + { + var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); + return entries.Count > 0 ? 
entries[^1].Link : null; + } + + /// + public async Task GetNodeJobLogAsync(string nodeId, CancellationToken cancellationToken = default) + { + var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); + if (entries.Count == 0) + { + return null; + } + + var lastEntry = entries[^1]; + return new NodeJobLog + { + NodeId = nodeId, + LastHlc = lastEntry.THlc, + ChainHead = lastEntry.Link, + Entries = entries + }; + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.cs index 16f3f903d..50ab6fd2e 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStore.cs @@ -1,43 +1,29 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using StellaOps.AirGap.Sync.Models; -using StellaOps.Canonical.Json; -using StellaOps.HybridLogicalClock; using System.Text.Json; namespace StellaOps.AirGap.Sync.Stores; -/// -/// Options for the file-based offline job log store. -/// -public sealed class FileBasedOfflineJobLogStoreOptions -{ - /// - /// Gets or sets the directory for storing offline job logs. - /// - public string DataDirectory { get; set; } = "./offline-job-logs"; -} - /// /// File-based implementation of for air-gap scenarios. /// -public sealed class FileBasedOfflineJobLogStore : IOfflineJobLogStore +public sealed partial class FileBasedOfflineJobLogStore : IOfflineJobLogStore { - private readonly IOptions _options; - private readonly ILogger _logger; - private readonly SemaphoreSlim _lock = new(1, 1); + private const string LineEnding = "\n"; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + private readonly IOptions _options; + private readonly ILogger _logger; + private readonly SemaphoreSlim _lock = new(1, 1); + /// /// Initializes a new instance of the class. 
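// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. With the
// options type split out, wiring could look roughly like this. The diff does
// not show a composition root or an AddAirGapSync extension, so this
// registration shape and the data directory are assumptions, not the library's API.
// --------------------------------------------------------------------------
using Microsoft.Extensions.DependencyInjection;
using StellaOps.AirGap.Sync.Stores;

internal static class StoreRegistrationSketch
{
    public static IServiceCollection AddOfflineJobLogStore(this IServiceCollection services)
    {
        // Bind the store options; the directory is only an illustrative value.
        services.Configure<FileBasedOfflineJobLogStoreOptions>(
            options => options.DataDirectory = "/var/lib/stellaops/offline-job-logs");

        // Single shared instance because the store serializes file access with a SemaphoreSlim.
        services.AddSingleton<IOfflineJobLogStore, FileBasedOfflineJobLogStore>();
        return services;
    }
}
// ------------------------- end editor's sketch ----------------------------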
/// @@ -50,198 +36,4 @@ public sealed class FileBasedOfflineJobLogStore : IOfflineJobLogStore EnsureDirectoryExists(); } - - /// - public async Task AppendAsync(OfflineJobLogEntry entry, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(entry); - - await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var filePath = GetNodeLogFilePath(entry.NodeId); - var dto = ToDto(entry); - var line = JsonSerializer.Serialize(dto, JsonOptions); - - await File.AppendAllTextAsync(filePath, line + Environment.NewLine, cancellationToken) - .ConfigureAwait(false); - - _logger.LogDebug( - "Appended offline job entry {JobId} for node {NodeId}", - entry.JobId, entry.NodeId); - } - finally - { - _lock.Release(); - } - } - - /// - public async Task> GetEntriesAsync( - string nodeId, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); - - var filePath = GetNodeLogFilePath(nodeId); - if (!File.Exists(filePath)) - { - return Array.Empty(); - } - - await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var lines = await File.ReadAllLinesAsync(filePath, cancellationToken).ConfigureAwait(false); - var entries = new List(lines.Length); - - foreach (var line in lines) - { - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var dto = JsonSerializer.Deserialize(line, JsonOptions); - if (dto is not null) - { - entries.Add(FromDto(dto)); - } - } - - // Return in HLC order - return entries.OrderBy(e => e.THlc).ToList(); - } - finally - { - _lock.Release(); - } - } - - /// - public async Task GetLastLinkAsync(string nodeId, CancellationToken cancellationToken = default) - { - var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); - return entries.Count > 0 ? 
entries[^1].Link : null; - } - - /// - public async Task GetNodeJobLogAsync(string nodeId, CancellationToken cancellationToken = default) - { - var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); - if (entries.Count == 0) - { - return null; - } - - var lastEntry = entries[^1]; - return new NodeJobLog - { - NodeId = nodeId, - LastHlc = lastEntry.THlc, - ChainHead = lastEntry.Link, - Entries = entries - }; - } - - /// - public async Task ClearEntriesAsync( - string nodeId, - string upToHlc, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(nodeId); - - await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var entries = await GetEntriesAsync(nodeId, cancellationToken).ConfigureAwait(false); - var remaining = entries - .Where(e => string.CompareOrdinal(e.THlc.ToSortableString(), upToHlc) > 0) - .ToList(); - - var cleared = entries.Count - remaining.Count; - - if (remaining.Count == 0) - { - var filePath = GetNodeLogFilePath(nodeId); - if (File.Exists(filePath)) - { - File.Delete(filePath); - } - } - else - { - // Rewrite with remaining entries - var filePath = GetNodeLogFilePath(nodeId); - var lines = remaining.Select(e => JsonSerializer.Serialize(ToDto(e), JsonOptions)); - await File.WriteAllLinesAsync(filePath, lines, cancellationToken).ConfigureAwait(false); - } - - _logger.LogInformation( - "Cleared {Count} offline job entries for node {NodeId} up to HLC {UpToHlc}", - cleared, nodeId, upToHlc); - - return cleared; - } - finally - { - _lock.Release(); - } - } - - private string GetNodeLogFilePath(string nodeId) - { - var safeNodeId = nodeId.Replace('/', '_').Replace('\\', '_').Replace(':', '_'); - return Path.Combine(_options.Value.DataDirectory, $"offline-jobs-{safeNodeId}.ndjson"); - } - - private void EnsureDirectoryExists() - { - var dir = _options.Value.DataDirectory; - if (!Directory.Exists(dir)) - { - Directory.CreateDirectory(dir); - _logger.LogInformation("Created offline job log directory: {Directory}", dir); - } - } - - private static OfflineJobLogEntryDto ToDto(OfflineJobLogEntry entry) => new() - { - NodeId = entry.NodeId, - THlc = entry.THlc.ToSortableString(), - JobId = entry.JobId, - PartitionKey = entry.PartitionKey, - Payload = entry.Payload, - PayloadHash = Convert.ToBase64String(entry.PayloadHash), - PrevLink = entry.PrevLink is not null ? Convert.ToBase64String(entry.PrevLink) : null, - Link = Convert.ToBase64String(entry.Link), - EnqueuedAt = entry.EnqueuedAt - }; - - private static OfflineJobLogEntry FromDto(OfflineJobLogEntryDto dto) => new() - { - NodeId = dto.NodeId, - THlc = HlcTimestamp.Parse(dto.THlc), - JobId = dto.JobId, - PartitionKey = dto.PartitionKey, - Payload = dto.Payload, - PayloadHash = Convert.FromBase64String(dto.PayloadHash), - PrevLink = dto.PrevLink is not null ? Convert.FromBase64String(dto.PrevLink) : null, - Link = Convert.FromBase64String(dto.Link), - EnqueuedAt = dto.EnqueuedAt - }; - - private sealed record OfflineJobLogEntryDto - { - public required string NodeId { get; init; } - public required string THlc { get; init; } - public required Guid JobId { get; init; } - public string? PartitionKey { get; init; } - public required string Payload { get; init; } - public required string PayloadHash { get; init; } - public string? 
PrevLink { get; init; } - public required string Link { get; init; } - public DateTimeOffset EnqueuedAt { get; init; } - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStoreOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStoreOptions.cs new file mode 100644 index 000000000..741e2a5c0 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Stores/FileBasedOfflineJobLogStoreOptions.cs @@ -0,0 +1,15 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Stores; + +/// +/// Options for the file-based offline job log store. +/// +public sealed class FileBasedOfflineJobLogStoreOptions +{ + /// + /// Gets or sets the directory for storing offline job logs. + /// + public string DataDirectory { get; set; } = "./offline-job-logs"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/TASKS.md b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/TASKS.md index f8b56367a..9d3c020fd 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/TASKS.md +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/TASKS.md @@ -1,10 +1,9 @@ # AirGap Sync Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. | Task ID | Status | Notes | | --- | --- | --- | -| AUDIT-0792-M | DONE | Revalidated 2026-01-07. | -| AUDIT-0792-T | DONE | Revalidated 2026-01-07. | -| AUDIT-0792-A | TODO | Open findings (TimeProvider, DSSE helper, InvariantCulture, path validation, line endings, tests). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/__Libraries/StellaOps.AirGap.Sync/StellaOps.AirGap.Sync.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.Counters.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.Counters.cs new file mode 100644 index 000000000..67bb364bc --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.Counters.cs @@ -0,0 +1,56 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using System.Diagnostics.Metrics; + +namespace StellaOps.AirGap.Sync.Telemetry; + +public static partial class AirGapSyncMetrics +{ + private static readonly Meter _meter = new("StellaOps.AirGap.Sync"); + + private static readonly Counter _bundlesExportedCounter = _meter.CreateCounter( + "airgap_bundles_exported_total", + unit: "{bundle}", + description: "Total number of air-gap bundles exported"); + + private static readonly Counter _bundlesImportedCounter = _meter.CreateCounter( + "airgap_bundles_imported_total", + unit: "{bundle}", + description: "Total number of air-gap bundles imported"); + + private static readonly Counter _jobsSyncedCounter = _meter.CreateCounter( + "airgap_jobs_synced_total", + unit: "{job}", + description: "Total number of jobs synced from air-gap bundles"); + + private static readonly Counter _duplicatesDroppedCounter = _meter.CreateCounter( + "airgap_duplicates_dropped_total", + unit: "{duplicate}", + description: "Total number of duplicate entries dropped during merge"); + + private static readonly Counter _mergeConflictsCounter = _meter.CreateCounter( + "airgap_merge_conflicts_total", + unit: "{conflict}", + description: "Total number of merge conflicts by type"); + + private static readonly Counter _offlineEnqueuesCounter = _meter.CreateCounter( + "airgap_offline_enqueues_total", + unit: "{enqueue}", + description: "Total number of offline enqueue operations"); + + private static readonly Histogram _bundleSizeHistogram = _meter.CreateHistogram( + "airgap_bundle_size_bytes", + unit: "By", + description: "Size of air-gap bundles in bytes"); + + private static readonly Histogram _syncDurationHistogram = _meter.CreateHistogram( + "airgap_sync_duration_seconds", + unit: "s", + description: "Duration of air-gap sync operations"); + + private static readonly Histogram _mergeEntriesHistogram = _meter.CreateHistogram( + "airgap_merge_entries_count", + unit: "{entry}", + description: "Number of entries in merge operations"); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordBundles.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordBundles.cs new file mode 100644 index 000000000..c6e7812bc --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordBundles.cs @@ -0,0 +1,45 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Telemetry; + +public static partial class AirGapSyncMetrics +{ + /// + /// Records a bundle export. + /// + /// The node ID that exported. + /// The tenant ID. + /// Number of entries in the bundle. + public static void RecordBundleExported(string nodeId, string tenantId, int entryCount) + { + _bundlesExportedCounter.Add(1, + new KeyValuePair(NodeIdTag, nodeId), + new KeyValuePair(TenantIdTag, tenantId)); + _mergeEntriesHistogram.Record(entryCount, + new KeyValuePair(NodeIdTag, nodeId)); + } + + /// + /// Records a bundle import. + /// + /// The node ID that imported. + /// The tenant ID. + public static void RecordBundleImported(string nodeId, string tenantId) + { + _bundlesImportedCounter.Add(1, + new KeyValuePair(NodeIdTag, nodeId), + new KeyValuePair(TenantIdTag, tenantId)); + } + + /// + /// Records bundle size. + /// + /// The node ID. + /// Size in bytes. 
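// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. The meter
// name "StellaOps.AirGap.Sync" comes from the counters file above; the rest is
// a plain System.Diagnostics.Metrics listener, shown as one way to observe the
// counters in a test or a minimal host without an OpenTelemetry pipeline.
// --------------------------------------------------------------------------
using System.Diagnostics.Metrics;

internal static class MetricsListenerSketch
{
    public static MeterListener StartListening()
    {
        var listener = new MeterListener();

        // Subscribe only to instruments published by the air-gap sync meter.
        listener.InstrumentPublished = (instrument, l) =>
        {
            if (instrument.Meter.Name == "StellaOps.AirGap.Sync")
            {
                l.EnableMeasurementEvents(instrument);
            }
        };

        // The counters above are long-valued; histograms would need their own callbacks.
        listener.SetMeasurementEventCallback<long>(OnMeasurement);
        listener.Start();
        return listener;
    }

    private static void OnMeasurement(
        Instrument instrument,
        long value,
        ReadOnlySpan<KeyValuePair<string, object?>> tags,
        object? state)
        => Console.WriteLine($"{instrument.Name}: {value}");
}
// ------------------------- end editor's sketch ----------------------------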
+ public static void RecordBundleSize(string nodeId, long sizeBytes) + { + _bundleSizeHistogram.Record(sizeBytes, + new KeyValuePair(NodeIdTag, nodeId)); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordSync.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordSync.cs new file mode 100644 index 000000000..5ed64ab6e --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.RecordSync.cs @@ -0,0 +1,65 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Telemetry; + +public static partial class AirGapSyncMetrics +{ + /// + /// Records jobs synced from a bundle. + /// + /// The node ID. + /// Number of jobs synced. + public static void RecordJobsSynced(string nodeId, int count) + { + _jobsSyncedCounter.Add(count, + new KeyValuePair(NodeIdTag, nodeId)); + } + + /// + /// Records duplicates dropped during merge. + /// + /// The node ID. + /// Number of duplicates dropped. + public static void RecordDuplicatesDropped(string nodeId, int count) + { + if (count > 0) + { + _duplicatesDroppedCounter.Add(count, + new KeyValuePair(NodeIdTag, nodeId)); + } + } + + /// + /// Records a merge conflict. + /// + /// The type of conflict. + public static void RecordMergeConflict(ConflictType conflictType) + { + _mergeConflictsCounter.Add(1, + new KeyValuePair(ConflictTypeTag, conflictType.ToString())); + } + + /// + /// Records an offline enqueue operation. + /// + /// The node ID. + public static void RecordOfflineEnqueue(string nodeId) + { + _offlineEnqueuesCounter.Add(1, + new KeyValuePair(NodeIdTag, nodeId)); + } + + /// + /// Records sync duration. + /// + /// The node ID. + /// Duration in seconds. + public static void RecordSyncDuration(string nodeId, double durationSeconds) + { + _syncDurationHistogram.Record(durationSeconds, + new KeyValuePair(NodeIdTag, nodeId)); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.cs index ba31e9922..c0451e5b6 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Telemetry/AirGapSyncMetrics.cs @@ -1,9 +1,6 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - -using StellaOps.AirGap.Sync.Models; using System.Diagnostics.Metrics; namespace StellaOps.AirGap.Sync.Telemetry; @@ -11,152 +8,10 @@ namespace StellaOps.AirGap.Sync.Telemetry; /// /// Metrics for air-gap sync operations. 
/// -public static class AirGapSyncMetrics +public static partial class AirGapSyncMetrics { private const string NodeIdTag = "node_id"; private const string TenantIdTag = "tenant_id"; private const string ConflictTypeTag = "conflict_type"; - private static readonly Meter Meter = new("StellaOps.AirGap.Sync"); - - // Counters - private static readonly Counter BundlesExportedCounter = Meter.CreateCounter( - "airgap_bundles_exported_total", - unit: "{bundle}", - description: "Total number of air-gap bundles exported"); - - private static readonly Counter BundlesImportedCounter = Meter.CreateCounter( - "airgap_bundles_imported_total", - unit: "{bundle}", - description: "Total number of air-gap bundles imported"); - - private static readonly Counter JobsSyncedCounter = Meter.CreateCounter( - "airgap_jobs_synced_total", - unit: "{job}", - description: "Total number of jobs synced from air-gap bundles"); - - private static readonly Counter DuplicatesDroppedCounter = Meter.CreateCounter( - "airgap_duplicates_dropped_total", - unit: "{duplicate}", - description: "Total number of duplicate entries dropped during merge"); - - private static readonly Counter MergeConflictsCounter = Meter.CreateCounter( - "airgap_merge_conflicts_total", - unit: "{conflict}", - description: "Total number of merge conflicts by type"); - - private static readonly Counter OfflineEnqueuesCounter = Meter.CreateCounter( - "airgap_offline_enqueues_total", - unit: "{enqueue}", - description: "Total number of offline enqueue operations"); - - // Histograms - private static readonly Histogram BundleSizeHistogram = Meter.CreateHistogram( - "airgap_bundle_size_bytes", - unit: "By", - description: "Size of air-gap bundles in bytes"); - - private static readonly Histogram SyncDurationHistogram = Meter.CreateHistogram( - "airgap_sync_duration_seconds", - unit: "s", - description: "Duration of air-gap sync operations"); - - private static readonly Histogram MergeEntriesHistogram = Meter.CreateHistogram( - "airgap_merge_entries_count", - unit: "{entry}", - description: "Number of entries in merge operations"); - - /// - /// Records a bundle export. - /// - /// The node ID that exported. - /// The tenant ID. - /// Number of entries in the bundle. - public static void RecordBundleExported(string nodeId, string tenantId, int entryCount) - { - BundlesExportedCounter.Add(1, - new KeyValuePair(NodeIdTag, nodeId), - new KeyValuePair(TenantIdTag, tenantId)); - MergeEntriesHistogram.Record(entryCount, - new KeyValuePair(NodeIdTag, nodeId)); - } - - /// - /// Records a bundle import. - /// - /// The node ID that imported. - /// The tenant ID. - public static void RecordBundleImported(string nodeId, string tenantId) - { - BundlesImportedCounter.Add(1, - new KeyValuePair(NodeIdTag, nodeId), - new KeyValuePair(TenantIdTag, tenantId)); - } - - /// - /// Records jobs synced from a bundle. - /// - /// The node ID. - /// Number of jobs synced. - public static void RecordJobsSynced(string nodeId, int count) - { - JobsSyncedCounter.Add(count, - new KeyValuePair(NodeIdTag, nodeId)); - } - - /// - /// Records duplicates dropped during merge. - /// - /// The node ID. - /// Number of duplicates dropped. - public static void RecordDuplicatesDropped(string nodeId, int count) - { - if (count > 0) - { - DuplicatesDroppedCounter.Add(count, - new KeyValuePair(NodeIdTag, nodeId)); - } - } - - /// - /// Records a merge conflict. - /// - /// The type of conflict. 
- public static void RecordMergeConflict(ConflictType conflictType) - { - MergeConflictsCounter.Add(1, - new KeyValuePair(ConflictTypeTag, conflictType.ToString())); - } - - /// - /// Records an offline enqueue operation. - /// - /// The node ID. - public static void RecordOfflineEnqueue(string nodeId) - { - OfflineEnqueuesCounter.Add(1, - new KeyValuePair(NodeIdTag, nodeId)); - } - - /// - /// Records bundle size. - /// - /// The node ID. - /// Size in bytes. - public static void RecordBundleSize(string nodeId, long sizeBytes) - { - BundleSizeHistogram.Record(sizeBytes, - new KeyValuePair(NodeIdTag, nodeId)); - } - - /// - /// Records sync duration. - /// - /// The node ID. - /// Duration in seconds. - public static void RecordSyncDuration(string nodeId, double durationSeconds) - { - SyncDurationHistogram.Record(durationSeconds, - new KeyValuePair(NodeIdTag, nodeId)); - } } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/BundleInfo.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/BundleInfo.cs new file mode 100644 index 000000000..e0610e4e6 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/BundleInfo.cs @@ -0,0 +1,40 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Information about an available bundle. +/// +public sealed record BundleInfo +{ + /// + /// Gets the bundle ID. + /// + public required Guid BundleId { get; init; } + + /// + /// Gets the tenant ID. + /// + public required string TenantId { get; init; } + + /// + /// Gets the source node ID. + /// + public required string SourceNodeId { get; init; } + + /// + /// Gets the creation timestamp. + /// + public required DateTimeOffset CreatedAt { get; init; } + + /// + /// Gets the entry count in the bundle. + /// + public int EntryCount { get; init; } + + /// + /// Gets the bundle size in bytes. + /// + public long SizeBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.List.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.List.cs new file mode 100644 index 000000000..89f97d188 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.List.cs @@ -0,0 +1,76 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
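// --------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of this diff. It shows a
// consumer-side use of the BundleInfo metadata above together with the file
// transport's listing/receiving methods that follow: list the inbox, take the
// newest bundle, and receive it by the sender's file-name convention. The "."
// source name and the injected transport instance are assumptions.
// --------------------------------------------------------------------------
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Transport;

internal static class InboxPollingSketch
{
    public static async Task<AirGapBundle?> ReceiveNewestAsync(
        IJobSyncTransport transport,
        CancellationToken cancellationToken)
    {
        // ListAvailableBundlesAsync returns newest-first (ordered by CreatedAt descending).
        IReadOnlyList<BundleInfo> available =
            await transport.ListAvailableBundlesAsync(".", cancellationToken);

        if (available.Count == 0)
        {
            return null;
        }

        // The sending side writes bundles as job-sync-<bundle id, N format>.json.
        return await transport.ReceiveBundleAsync(
            $"job-sync-{available[0].BundleId:N}.json", cancellationToken);
    }
}
// ------------------------- end editor's sketch ----------------------------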
+// +using Microsoft.Extensions.Logging; +using System.Globalization; +using System.Text.Json; + +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class FileBasedJobSyncTransport +{ + /// + public Task> ListAvailableBundlesAsync( + string source, + CancellationToken cancellationToken = default) + { + var sourcePath = ResolveInputPath(source); + var bundles = new List(); + + if (!Directory.Exists(sourcePath)) + { + return Task.FromResult>(bundles); + } + + var files = Directory.GetFiles(sourcePath, "job-sync-*.json"); + + foreach (var file in files) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var json = File.ReadAllText(file); + using var doc = JsonDocument.Parse(json); + var root = doc.RootElement; + + if (root.TryGetProperty("bundleId", out var bundleIdProp) && + root.TryGetProperty("tenantId", out var tenantIdProp) && + root.TryGetProperty("createdByNodeId", out var nodeIdProp) && + root.TryGetProperty("createdAt", out var createdAtProp)) + { + var entryCount = 0; + if (root.TryGetProperty("jobLogs", out var jobLogs)) + { + foreach (var log in jobLogs.EnumerateArray()) + { + if (log.TryGetProperty("entries", out var entries)) + { + entryCount += entries.GetArrayLength(); + } + } + } + + bundles.Add(new BundleInfo + { + BundleId = Guid.Parse(bundleIdProp.GetString()!), + TenantId = tenantIdProp.GetString()!, + SourceNodeId = nodeIdProp.GetString()!, + CreatedAt = DateTimeOffset.Parse( + createdAtProp.GetString()!, + CultureInfo.InvariantCulture), + EntryCount = entryCount, + SizeBytes = new FileInfo(file).Length + }); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse bundle metadata from {File}", file); + } + } + + return Task.FromResult>( + bundles.OrderByDescending(b => b.CreatedAt).ToList()); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Paths.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Paths.cs new file mode 100644 index 000000000..5b312a1ca --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Paths.cs @@ -0,0 +1,46 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class FileBasedJobSyncTransport +{ + private string ResolveOutputPath(string destination) + => ResolvePath(_options.OutputDirectory, destination, nameof(destination)); + + private string ResolveInputPath(string source) + => ResolvePath(_options.InputDirectory, source, nameof(source)); + + private static string ResolvePath(string rootPath, string value, string paramName) + { + ArgumentException.ThrowIfNullOrWhiteSpace(value); + + var rootFull = Path.GetFullPath(rootPath); + var candidate = Path.IsPathRooted(value) + ? Path.GetFullPath(value) + : Path.GetFullPath(Path.Combine(rootFull, value)); + + if (!IsUnderRoot(rootFull, candidate)) + { + throw new ArgumentException( + $"Path '{value}' escapes configured root '{rootFull}'.", + paramName); + } + + return candidate; + } + + private static bool IsUnderRoot(string rootPath, string candidate) + { + if (string.Equals(rootPath, candidate, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + var normalizedRoot = rootPath.EndsWith(Path.DirectorySeparatorChar) + ? 
rootPath + : rootPath + Path.DirectorySeparatorChar; + + return candidate.StartsWith(normalizedRoot, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Receive.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Receive.cs new file mode 100644 index 000000000..8d2ade3a1 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Receive.cs @@ -0,0 +1,42 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; + +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class FileBasedJobSyncTransport +{ + /// + public async Task ReceiveBundleAsync( + string source, + CancellationToken cancellationToken = default) + { + try + { + var sourcePath = ResolveInputPath(source); + + if (!File.Exists(sourcePath)) + { + _logger.LogWarning("Job sync bundle file not found: {Path}", sourcePath); + return null; + } + + var bundle = await _importer.ImportFromFileAsync(sourcePath, cancellationToken) + .ConfigureAwait(false); + + _logger.LogInformation( + "Imported job sync bundle {BundleId} from {Path}", + bundle.BundleId, + sourcePath); + + return bundle; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to import job sync bundle from {Source}", source); + return null; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Send.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Send.cs new file mode 100644 index 000000000..1c17bbace --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.Send.cs @@ -0,0 +1,63 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Telemetry; + +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class FileBasedJobSyncTransport +{ + /// + public async Task SendBundleAsync( + AirGapBundle bundle, + string destination, + CancellationToken cancellationToken = default) + { + var startTime = _timeProvider.GetUtcNow(); + + try + { + var destPath = ResolveOutputPath(destination); + Directory.CreateDirectory(destPath); + + var filePath = Path.Combine(destPath, $"job-sync-{bundle.BundleId:N}.json"); + await _exporter.ExportToFileAsync(bundle, filePath, cancellationToken) + .ConfigureAwait(false); + + var fileInfo = new FileInfo(filePath); + var sizeBytes = fileInfo.Exists ? 
fileInfo.Length : 0; + + _logger.LogInformation( + "Exported job sync bundle {BundleId} to {Path} ({Size} bytes)", + bundle.BundleId, + filePath, + sizeBytes); + + AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, sizeBytes); + + return new JobSyncSendResult + { + Success = true, + BundleId = bundle.BundleId, + Destination = filePath, + TransmittedAt = startTime, + SizeBytes = sizeBytes + }; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to export job sync bundle {BundleId}", bundle.BundleId); + + return new JobSyncSendResult + { + Success = false, + BundleId = bundle.BundleId, + Destination = destination, + Error = ex.Message, + TransmittedAt = startTime + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.cs index 4cb35fd0d..071e582db 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransport.cs @@ -1,26 +1,21 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Services; -using StellaOps.AirGap.Sync.Telemetry; -using System.Globalization; -using System.Text.Json; namespace StellaOps.AirGap.Sync.Transport; /// /// File-based transport for job sync bundles in air-gapped scenarios. /// -public sealed class FileBasedJobSyncTransport : IJobSyncTransport +public sealed partial class FileBasedJobSyncTransport : IJobSyncTransport { private readonly IAirGapBundleExporter _exporter; private readonly IAirGapBundleImporter _importer; private readonly FileBasedJobSyncTransportOptions _options; + private readonly TimeProvider _timeProvider; private readonly ILogger _logger; /// @@ -30,194 +25,16 @@ public sealed class FileBasedJobSyncTransport : IJobSyncTransport IAirGapBundleExporter exporter, IAirGapBundleImporter importer, IOptions options, + TimeProvider timeProvider, ILogger logger) { _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter)); _importer = importer ?? throw new ArgumentNullException(nameof(importer)); _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } /// public string TransportId => "file"; - - /// - public async Task SendBundleAsync( - AirGapBundle bundle, - string destination, - CancellationToken cancellationToken = default) - { - var startTime = DateTimeOffset.UtcNow; - - try - { - // Ensure destination directory exists - var destPath = Path.IsPathRooted(destination) - ? destination - : Path.Combine(_options.OutputDirectory, destination); - - Directory.CreateDirectory(destPath); - - // Export to file - var filePath = Path.Combine(destPath, $"job-sync-{bundle.BundleId:N}.json"); - await _exporter.ExportToFileAsync(bundle, filePath, cancellationToken) - .ConfigureAwait(false); - - var fileInfo = new FileInfo(filePath); - var sizeBytes = fileInfo.Exists ? 
fileInfo.Length : 0; - - _logger.LogInformation( - "Exported job sync bundle {BundleId} to {Path} ({Size} bytes)", - bundle.BundleId, - filePath, - sizeBytes); - - AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, sizeBytes); - - return new JobSyncSendResult - { - Success = true, - BundleId = bundle.BundleId, - Destination = filePath, - TransmittedAt = startTime, - SizeBytes = sizeBytes - }; - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to export job sync bundle {BundleId}", bundle.BundleId); - - return new JobSyncSendResult - { - Success = false, - BundleId = bundle.BundleId, - Destination = destination, - Error = ex.Message, - TransmittedAt = startTime - }; - } - } - - /// - public async Task ReceiveBundleAsync( - string source, - CancellationToken cancellationToken = default) - { - try - { - var sourcePath = Path.IsPathRooted(source) - ? source - : Path.Combine(_options.InputDirectory, source); - - if (!File.Exists(sourcePath)) - { - _logger.LogWarning("Job sync bundle file not found: {Path}", sourcePath); - return null; - } - - var bundle = await _importer.ImportFromFileAsync(sourcePath, cancellationToken) - .ConfigureAwait(false); - - _logger.LogInformation( - "Imported job sync bundle {BundleId} from {Path}", - bundle.BundleId, - sourcePath); - - return bundle; - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to import job sync bundle from {Source}", source); - return null; - } - } - - /// - public Task> ListAvailableBundlesAsync( - string source, - CancellationToken cancellationToken = default) - { - var sourcePath = Path.IsPathRooted(source) - ? source - : Path.Combine(_options.InputDirectory, source); - - var bundles = new List(); - - if (!Directory.Exists(sourcePath)) - { - return Task.FromResult>(bundles); - } - - var files = Directory.GetFiles(sourcePath, "job-sync-*.json"); - - foreach (var file in files) - { - try - { - // Quick parse to extract bundle metadata - var json = File.ReadAllText(file); - var doc = JsonDocument.Parse(json); - var root = doc.RootElement; - - if (root.TryGetProperty("bundleId", out var bundleIdProp) && - root.TryGetProperty("tenantId", out var tenantIdProp) && - root.TryGetProperty("createdByNodeId", out var nodeIdProp) && - root.TryGetProperty("createdAt", out var createdAtProp)) - { - var entryCount = 0; - if (root.TryGetProperty("jobLogs", out var jobLogs)) - { - foreach (var log in jobLogs.EnumerateArray()) - { - if (log.TryGetProperty("entries", out var entries)) - { - entryCount += entries.GetArrayLength(); - } - } - } - - bundles.Add(new BundleInfo - { - BundleId = Guid.Parse(bundleIdProp.GetString()!), - TenantId = tenantIdProp.GetString()!, - SourceNodeId = nodeIdProp.GetString()!, - CreatedAt = DateTimeOffset.Parse(createdAtProp.GetString()!, CultureInfo.InvariantCulture), - EntryCount = entryCount, - SizeBytes = new FileInfo(file).Length - }); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse bundle metadata from {File}", file); - } - } - - return Task.FromResult>( - bundles.OrderByDescending(b => b.CreatedAt).ToList()); - } -} - -/// -/// Options for file-based job sync transport. -/// -public sealed class FileBasedJobSyncTransportOptions -{ - /// - /// Gets or sets the output directory for exporting bundles. - /// - public string OutputDirectory { get; set; } = Path.Combine( - Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), - "stellaops", - "airgap", - "outbox"); - - /// - /// Gets or sets the input directory for importing bundles. 
- /// - public string InputDirectory { get; set; } = Path.Combine( - Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), - "stellaops", - "airgap", - "inbox"); } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransportOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransportOptions.cs new file mode 100644 index 000000000..dbf3a648a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/FileBasedJobSyncTransportOptions.cs @@ -0,0 +1,28 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Options for file-based job sync transport. +/// +public sealed class FileBasedJobSyncTransportOptions +{ + /// + /// Gets or sets the output directory for exporting bundles. + /// + public string OutputDirectory { get; set; } = Path.Combine( + Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), + "stellaops", + "airgap", + "outbox"); + + /// + /// Gets or sets the input directory for importing bundles. + /// + public string InputDirectory { get; set; } = Path.Combine( + Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), + "stellaops", + "airgap", + "inbox"); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IJobSyncTransport.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IJobSyncTransport.cs index b67171354..b75e9e811 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IJobSyncTransport.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IJobSyncTransport.cs @@ -1,7 +1,6 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - using StellaOps.AirGap.Sync.Models; namespace StellaOps.AirGap.Sync.Transport; @@ -49,75 +48,3 @@ public interface IJobSyncTransport string source, CancellationToken cancellationToken = default); } - -/// -/// Result of sending a job sync bundle. -/// -public sealed record JobSyncSendResult -{ - /// - /// Gets a value indicating whether the send was successful. - /// - public required bool Success { get; init; } - - /// - /// Gets the bundle ID. - /// - public required Guid BundleId { get; init; } - - /// - /// Gets the destination where the bundle was sent. - /// - public required string Destination { get; init; } - - /// - /// Gets the error message if the send failed. - /// - public string? Error { get; init; } - - /// - /// Gets the transmission timestamp. - /// - public DateTimeOffset TransmittedAt { get; init; } - - /// - /// Gets the size of the transmitted data in bytes. - /// - public long SizeBytes { get; init; } -} - -/// -/// Information about an available bundle. -/// -public sealed record BundleInfo -{ - /// - /// Gets the bundle ID. - /// - public required Guid BundleId { get; init; } - - /// - /// Gets the tenant ID. - /// - public required string TenantId { get; init; } - - /// - /// Gets the source node ID. - /// - public required string SourceNodeId { get; init; } - - /// - /// Gets the creation timestamp. - /// - public required DateTimeOffset CreatedAt { get; init; } - - /// - /// Gets the entry count in the bundle. - /// - public int EntryCount { get; init; } - - /// - /// Gets the bundle size in bytes. 
- /// - public long SizeBytes { get; init; } -} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IRouterJobSyncClient.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IRouterJobSyncClient.cs new file mode 100644 index 000000000..5e5cfa74c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/IRouterJobSyncClient.cs @@ -0,0 +1,37 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Client interface for Router job sync operations. +/// +public interface IRouterJobSyncClient +{ + /// + /// Sends a job sync bundle via the Router. + /// + Task SendJobSyncBundleAsync( + string destination, + Guid bundleId, + string tenantId, + byte[] payload, + TimeSpan timeout, + CancellationToken cancellationToken = default); + + /// + /// Receives a job sync bundle via the Router. + /// + Task ReceiveJobSyncBundleAsync( + string source, + TimeSpan timeout, + CancellationToken cancellationToken = default); + + /// + /// Lists available bundles via the Router. + /// + Task ListAvailableBundlesAsync( + string source, + TimeSpan timeout, + CancellationToken cancellationToken = default); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/JobSyncSendResult.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/JobSyncSendResult.cs new file mode 100644 index 000000000..b89faae21 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/JobSyncSendResult.cs @@ -0,0 +1,40 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Result of sending a job sync bundle. +/// +public sealed record JobSyncSendResult +{ + /// + /// Gets a value indicating whether the send was successful. + /// + public required bool Success { get; init; } + + /// + /// Gets the bundle ID. + /// + public required Guid BundleId { get; init; } + + /// + /// Gets the destination where the bundle was sent. + /// + public required string Destination { get; init; } + + /// + /// Gets the error message if the send failed. + /// + public string? Error { get; init; } + + /// + /// Gets the transmission timestamp. + /// + public DateTimeOffset TransmittedAt { get; init; } + + /// + /// Gets the size of the transmitted data in bytes. + /// + public long SizeBytes { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.List.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.List.cs new file mode 100644 index 000000000..449461b2f --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.List.cs @@ -0,0 +1,29 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using Microsoft.Extensions.Logging; +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class RouterJobSyncTransport +{ + /// + public async Task> ListAvailableBundlesAsync( + string source, + CancellationToken cancellationToken = default) + { + try + { + var response = await _routerClient.ListAvailableBundlesAsync( + source, + _options.ListTimeout, + cancellationToken).ConfigureAwait(false); + + return response.Bundles; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error listing available bundles from {Source}", source); + return Array.Empty(); + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Receive.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Receive.cs new file mode 100644 index 000000000..fd5ac25ef --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Receive.cs @@ -0,0 +1,47 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using System.Text; + +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class RouterJobSyncTransport +{ + /// + public async Task ReceiveBundleAsync( + string source, + CancellationToken cancellationToken = default) + { + try + { + var response = await _routerClient.ReceiveJobSyncBundleAsync( + source, + _options.ReceiveTimeout, + cancellationToken).ConfigureAwait(false); + + if (response.Payload is null || response.Payload.Length == 0) + { + _logger.LogDebug("No bundle available from {Source}", source); + return null; + } + + var json = Encoding.UTF8.GetString(response.Payload); + var bundle = await _importer.ImportFromStringAsync(json, cancellationToken) + .ConfigureAwait(false); + + _logger.LogInformation( + "Received job sync bundle {BundleId} from {Source}", + bundle.BundleId, + source); + + return bundle; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error receiving job sync bundle from {Source}", source); + return null; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Send.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Send.cs new file mode 100644 index 000000000..c858e5a29 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.Send.cs @@ -0,0 +1,87 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using Microsoft.Extensions.Logging; +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Telemetry; +using System.Text; + +namespace StellaOps.AirGap.Sync.Transport; + +public sealed partial class RouterJobSyncTransport +{ + /// + public async Task SendBundleAsync( + AirGapBundle bundle, + string destination, + CancellationToken cancellationToken = default) + { + var startTime = _timeProvider.GetUtcNow(); + + try + { + var json = await _exporter.ExportToStringAsync(bundle, cancellationToken) + .ConfigureAwait(false); + var payload = Encoding.UTF8.GetBytes(json); + + _logger.LogDebug( + "Sending job sync bundle {BundleId} to {Destination} ({Size} bytes)", + bundle.BundleId, + destination, + payload.Length); + + var response = await _routerClient.SendJobSyncBundleAsync( + destination, + bundle.BundleId, + bundle.TenantId, + payload, + _options.SendTimeout, + cancellationToken).ConfigureAwait(false); + + if (response.Success) + { + AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, payload.Length); + + _logger.LogInformation( + "Sent job sync bundle {BundleId} to {Destination}", + bundle.BundleId, + destination); + } + else + { + _logger.LogWarning( + "Failed to send job sync bundle {BundleId} to {Destination}: {Error}", + bundle.BundleId, + destination, + response.Error); + } + + return new JobSyncSendResult + { + Success = response.Success, + BundleId = bundle.BundleId, + Destination = destination, + Error = response.Error, + TransmittedAt = startTime, + SizeBytes = payload.Length + }; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Error sending job sync bundle {BundleId} to {Destination}", + bundle.BundleId, + destination); + + return new JobSyncSendResult + { + Success = false, + BundleId = bundle.BundleId, + Destination = destination, + Error = ex.Message, + TransmittedAt = startTime + }; + } + } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.cs index 8d6aa38c4..0d309d618 100644 --- a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.cs +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransport.cs @@ -1,15 +1,9 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Services; -using StellaOps.AirGap.Sync.Telemetry; -using System.Text; -using System.Text.Json; namespace StellaOps.AirGap.Sync.Transport; @@ -17,12 +11,13 @@ namespace StellaOps.AirGap.Sync.Transport; /// Router-based transport for job sync bundles when network is available. /// This transport uses the Router messaging infrastructure for real-time sync. /// -public sealed class RouterJobSyncTransport : IJobSyncTransport +public sealed partial class RouterJobSyncTransport : IJobSyncTransport { private readonly IAirGapBundleExporter _exporter; private readonly IAirGapBundleImporter _importer; private readonly IRouterJobSyncClient _routerClient; private readonly RouterJobSyncTransportOptions _options; + private readonly TimeProvider _timeProvider; private readonly ILogger _logger; /// @@ -33,241 +28,17 @@ public sealed class RouterJobSyncTransport : IJobSyncTransport IAirGapBundleImporter importer, IRouterJobSyncClient routerClient, IOptions options, + TimeProvider timeProvider, ILogger logger) { _exporter = exporter ?? 
throw new ArgumentNullException(nameof(exporter)); _importer = importer ?? throw new ArgumentNullException(nameof(importer)); _routerClient = routerClient ?? throw new ArgumentNullException(nameof(routerClient)); _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } /// public string TransportId => "router"; - - /// - public async Task SendBundleAsync( - AirGapBundle bundle, - string destination, - CancellationToken cancellationToken = default) - { - var startTime = DateTimeOffset.UtcNow; - - try - { - // Serialize bundle - var json = await _exporter.ExportToStringAsync(bundle, cancellationToken) - .ConfigureAwait(false); - var payload = Encoding.UTF8.GetBytes(json); - - _logger.LogDebug( - "Sending job sync bundle {BundleId} to {Destination} ({Size} bytes)", - bundle.BundleId, - destination, - payload.Length); - - // Send via Router - var response = await _routerClient.SendJobSyncBundleAsync( - destination, - bundle.BundleId, - bundle.TenantId, - payload, - _options.SendTimeout, - cancellationToken).ConfigureAwait(false); - - if (response.Success) - { - AirGapSyncMetrics.RecordBundleSize(bundle.CreatedByNodeId, payload.Length); - - _logger.LogInformation( - "Sent job sync bundle {BundleId} to {Destination}", - bundle.BundleId, - destination); - } - else - { - _logger.LogWarning( - "Failed to send job sync bundle {BundleId} to {Destination}: {Error}", - bundle.BundleId, - destination, - response.Error); - } - - return new JobSyncSendResult - { - Success = response.Success, - BundleId = bundle.BundleId, - Destination = destination, - Error = response.Error, - TransmittedAt = startTime, - SizeBytes = payload.Length - }; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Error sending job sync bundle {BundleId} to {Destination}", - bundle.BundleId, - destination); - - return new JobSyncSendResult - { - Success = false, - BundleId = bundle.BundleId, - Destination = destination, - Error = ex.Message, - TransmittedAt = startTime - }; - } - } - - /// - public async Task ReceiveBundleAsync( - string source, - CancellationToken cancellationToken = default) - { - try - { - var response = await _routerClient.ReceiveJobSyncBundleAsync( - source, - _options.ReceiveTimeout, - cancellationToken).ConfigureAwait(false); - - if (response.Payload is null || response.Payload.Length == 0) - { - _logger.LogDebug("No bundle available from {Source}", source); - return null; - } - - var json = Encoding.UTF8.GetString(response.Payload); - var bundle = await _importer.ImportFromStringAsync(json, cancellationToken) - .ConfigureAwait(false); - - _logger.LogInformation( - "Received job sync bundle {BundleId} from {Source}", - bundle.BundleId, - source); - - return bundle; - } - catch (Exception ex) - { - _logger.LogError(ex, "Error receiving job sync bundle from {Source}", source); - return null; - } - } - - /// - public async Task> ListAvailableBundlesAsync( - string source, - CancellationToken cancellationToken = default) - { - try - { - var response = await _routerClient.ListAvailableBundlesAsync( - source, - _options.ListTimeout, - cancellationToken).ConfigureAwait(false); - - return response.Bundles; - } - catch (Exception ex) - { - _logger.LogError(ex, "Error listing available bundles from {Source}", source); - return Array.Empty(); - } - } -} - -/// -/// Options for Router-based job sync transport. 
-/// -public sealed class RouterJobSyncTransportOptions -{ - /// - /// Gets or sets the timeout for send operations. - /// - public TimeSpan SendTimeout { get; set; } = TimeSpan.FromSeconds(30); - - /// - /// Gets or sets the timeout for receive operations. - /// - public TimeSpan ReceiveTimeout { get; set; } = TimeSpan.FromSeconds(30); - - /// - /// Gets or sets the timeout for list operations. - /// - public TimeSpan ListTimeout { get; set; } = TimeSpan.FromSeconds(10); - - /// - /// Gets or sets the service endpoint for job sync. - /// - public string ServiceEndpoint { get; set; } = "scheduler.job-sync"; -} - -/// -/// Client interface for Router job sync operations. -/// -public interface IRouterJobSyncClient -{ - /// - /// Sends a job sync bundle via the Router. - /// - Task SendJobSyncBundleAsync( - string destination, - Guid bundleId, - string tenantId, - byte[] payload, - TimeSpan timeout, - CancellationToken cancellationToken = default); - - /// - /// Receives a job sync bundle via the Router. - /// - Task ReceiveJobSyncBundleAsync( - string source, - TimeSpan timeout, - CancellationToken cancellationToken = default); - - /// - /// Lists available bundles via the Router. - /// - Task ListAvailableBundlesAsync( - string source, - TimeSpan timeout, - CancellationToken cancellationToken = default); -} - -/// -/// Response from a Router send operation. -/// -public sealed record RouterSendResponse -{ - /// Gets a value indicating whether the send was successful. - public bool Success { get; init; } - - /// Gets the error message if failed. - public string? Error { get; init; } -} - -/// -/// Response from a Router receive operation. -/// -public sealed record RouterReceiveResponse -{ - /// Gets the received payload. - public byte[]? Payload { get; init; } - - /// Gets the bundle ID. - public Guid? BundleId { get; init; } -} - -/// -/// Response from a Router list operation. -/// -public sealed record RouterListResponse -{ - /// Gets the available bundles. - public IReadOnlyList Bundles { get; init; } = Array.Empty(); } diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransportOptions.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransportOptions.cs new file mode 100644 index 000000000..882dca84a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterJobSyncTransportOptions.cs @@ -0,0 +1,30 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Options for Router-based job sync transport. +/// +public sealed class RouterJobSyncTransportOptions +{ + /// + /// Gets or sets the timeout for send operations. + /// + public TimeSpan SendTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Gets or sets the timeout for receive operations. + /// + public TimeSpan ReceiveTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Gets or sets the timeout for list operations. + /// + public TimeSpan ListTimeout { get; set; } = TimeSpan.FromSeconds(10); + + /// + /// Gets or sets the service endpoint for job sync. 
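+    /// Defaults to "scheduler.job-sync", matching the initializer below.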
+ /// + public string ServiceEndpoint { get; set; } = "scheduler.job-sync"; +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterListResponse.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterListResponse.cs new file mode 100644 index 000000000..db9fae125 --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterListResponse.cs @@ -0,0 +1,15 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Response from a Router list operation. +/// +public sealed record RouterListResponse +{ + /// + /// Gets the available bundles. + /// + public IReadOnlyList Bundles { get; init; } = Array.Empty(); +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterReceiveResponse.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterReceiveResponse.cs new file mode 100644 index 000000000..a674bb36c --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterReceiveResponse.cs @@ -0,0 +1,20 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Response from a Router receive operation. +/// +public sealed record RouterReceiveResponse +{ + /// + /// Gets the received payload. + /// + public byte[]? Payload { get; init; } + + /// + /// Gets the bundle ID. + /// + public Guid? BundleId { get; init; } +} diff --git a/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterSendResponse.cs b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterSendResponse.cs new file mode 100644 index 000000000..b6b85d33a --- /dev/null +++ b/src/AirGap/__Libraries/StellaOps.AirGap.Sync/Transport/RouterSendResponse.cs @@ -0,0 +1,20 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Transport; + +/// +/// Response from a Router send operation. +/// +public sealed record RouterSendResponse +{ + /// + /// Gets a value indicating whether the send was successful. + /// + public bool Success { get; init; } + + /// + /// Gets the error message if failed. + /// + public string? 
Error { get; init; } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Basic.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Basic.cs new file mode 100644 index 000000000..83511dae2 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Basic.cs @@ -0,0 +1,48 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_SameInputs_SameOutputDigest() + { + var input1 = """{"feed":"nvd","data":"test"}"""; + var input2 = """{"feed":"nvd","data":"test"}"""; + + var digest1 = ComputeSha256Hex(input1); + var digest2 = ComputeSha256Hex(input2); + + digest1.Should().Be(digest2, "Same inputs should produce same digest"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_OutputBundleName_IsDeterministic() + { + var bundleName = "offline-kit"; + var version = "1.0.0"; + var timestamp = DateTimeOffset.Parse("2025-06-15T12:00:00Z"); + + var filename1 = GenerateBundleFilename(bundleName, version, timestamp); + var filename2 = GenerateBundleFilename(bundleName, version, timestamp); + + filename1.Should().Be(filename2, "Same parameters should produce same filename"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_ManifestJson_IsDeterministic() + { + var manifest1 = CreateDeterministicManifest(); + var manifest2 = CreateDeterministicManifest(); + + var json1 = System.Text.Json.JsonSerializer.Serialize(manifest1); + var json2 = System.Text.Json.JsonSerializer.Serialize(manifest2); + + json1.Should().Be(json2); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Ordering.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Ordering.cs new file mode 100644 index 000000000..f36dff408 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Determinism.Ordering.cs @@ -0,0 +1,51 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_FeedOrdering_IsDeterministic() + { + var feeds1 = new[] { "nvd", "github", "redhat" }; + var feeds2 = new[] { "github", "redhat", "nvd" }; + + var sorted1 = feeds1.OrderBy(f => f).ToList(); + var sorted2 = feeds2.OrderBy(f => f).ToList(); + + sorted1.Should().BeEquivalentTo(sorted2, options => options.WithStrictOrdering(), + "Canonical ordering should be deterministic"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_DigestComputation_IsDeterministic() + { + var content = "deterministic content for digest test"; + var expectedDigest = ComputeSha256Hex(content); + + var digest1 = ComputeSha256Hex(content); + var digest2 = ComputeSha256Hex(content); + var digest3 = ComputeSha256Hex(content); + + digest1.Should().Be(expectedDigest); + digest2.Should().Be(expectedDigest); + digest3.Should().Be(expectedDigest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CliDeterminism_TimestampFormat_IsDeterministic() + { + var timestamp = DateTimeOffset.Parse("2025-06-15T12:00:00Z"); + + 
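+        // "O" is the round-trip (ISO 8601) specifier: for a given instant it always produces the same string, e.g. 2025-06-15T12:00:00.0000000+00:00, as asserted below.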
var formatted1 = timestamp.ToString("O"); + var formatted2 = timestamp.ToString("O"); + + formatted1.Should().Be(formatted2); + formatted1.Should().Be("2025-06-15T12:00:00.0000000+00:00"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.ExitCodes.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.ExitCodes.cs new file mode 100644 index 000000000..c4aa2ae8d --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.ExitCodes.cs @@ -0,0 +1,85 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_SuccessfulExport_ReturnsZero() + { + var expectedExitCode = 0; + + expectedExitCode.Should().Be(0, "Successful operations should return exit code 0"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_UserError_ReturnsOne() + { + var expectedExitCode = 1; + + expectedExitCode.Should().Be(1, "User errors should return exit code 1"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_SystemError_ReturnsTwo() + { + var expectedExitCode = 2; + + expectedExitCode.Should().Be(2, "System errors should return exit code 2"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_MissingRequiredArgument_ReturnsOne() + { + var args = new[] { "export" }; + var expectedExitCode = 1; + + args.Should().NotContain("--name", "Missing required argument"); + expectedExitCode.Should().Be(1); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_InvalidFeedPath_ReturnsOne() + { + var args = new[] + { + "export", + "--name", "test-bundle", + "--version", "1.0.0", + "--feed", "/nonexistent/path/feed.json" + }; + var expectedExitCode = 1; + + args.Should().Contain("--feed"); + expectedExitCode.Should().Be(1, "Invalid feed path should return exit code 1"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_HelpFlag_ReturnsZero() + { + var args = new[] { "--help" }; + var expectedExitCode = 0; + + args.Should().Contain("--help"); + expectedExitCode.Should().Be(0, "--help should return exit code 0"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExitCode_VersionFlag_ReturnsZero() + { + var args = new[] { "--version" }; + var expectedExitCode = 0; + + args.Should().Contain("--version"); + expectedExitCode.Should().Be(0, "--version should return exit code 0"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Export.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Export.cs new file mode 100644 index 000000000..b3937d48d --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Export.cs @@ -0,0 +1,58 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ExportCommand_IncludesManifestSummary() + { + var expectedOutputLines = new[] + { + "Creating bundle: test-bundle v1.0.0", + "Processing feeds...", + " - nvd (v2025-06-15)", + "Processing policies...", + " - default (v1.0)", + "Bundle created 
successfully", + " Bundle ID: ", + " Digest: sha256:", + " Size: ", + " Output: " + }; + + expectedOutputLines.Should().Contain(l => l.Contains("Bundle created")); + expectedOutputLines.Should().Contain(l => l.Contains("Digest:")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ExportCommand_IncludesBundleDigest() + { + var digestPattern = "sha256:[a-f0-9]{64}"; + + digestPattern.Should().Contain("sha256:"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ImportCommand_IncludesImportSummary() + { + var expectedOutputLines = new[] + { + "Importing bundle: ", + "Verifying bundle integrity...", + " Digest verified: sha256:", + "Importing feeds...", + " - nvd: imported", + "Importing policies...", + " - default: imported", + "Bundle imported successfully" + }; + + expectedOutputLines.Should().Contain(l => l.Contains("imported successfully")); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Validation.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Validation.cs new file mode 100644 index 000000000..e82ca94ba --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.GoldenOutput.Validation.cs @@ -0,0 +1,46 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ListCommand_IncludesBundleTable() + { + var expectedHeaders = new[] { "Bundle ID", "Name", "Version", "Created At", "Size" }; + + expectedHeaders.Should().Contain("Bundle ID"); + expectedHeaders.Should().Contain("Name"); + expectedHeaders.Should().Contain("Version"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ValidateCommand_IncludesValidationResult() + { + var expectedOutputLines = new[] + { + "Validating bundle: ", + " Manifest: valid", + " Feeds: ", + " Policies: ", + " Digest: verified", + "Validation: PASSED" + }; + + expectedOutputLines.Should().Contain(l => l.Contains("Validation:")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GoldenOutput_ErrorMessage_IncludesContext() + { + var errorMessageFormat = "Error: {message}\nContext: {details}\nSuggestion: {help}"; + + errorMessageFormat.Should().Contain("Error:"); + errorMessageFormat.Should().Contain("Context:"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Helpers.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Helpers.cs new file mode 100644 index 000000000..22e683444 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.Helpers.cs @@ -0,0 +1,34 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapCliToolTests +{ + private static string ComputeSha256Hex(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string GenerateBundleFilename(string name, string version, DateTimeOffset timestamp) + { + return $"{name}-{version}-{timestamp:yyyyMMddHHmmss}.tar.gz"; + } + + private static object CreateDeterministicManifest() + { + return new + { + bundleId = 
"fixed-bundle-id-123", + name = "offline-kit", + version = "1.0.0", + createdAt = "2025-06-15T12:00:00Z", + feeds = new[] + { + new { feedId = "nvd", name = "nvd", version = "v1" } + } + }; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.cs index e6c9a6f0b..50582bc57 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapCliToolTests.cs @@ -5,356 +5,14 @@ // Description: CLI1 AirGap tool tests - exit codes, golden output, determinism // ----------------------------------------------------------------------------- -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using Xunit; - -using StellaOps.TestKit; namespace StellaOps.AirGap.Bundle.Tests; /// /// CLI1 AirGap Tool Tests -/// Task AIRGAP-5100-013: Exit code tests (export → exit 0; errors → non-zero) -/// Task AIRGAP-5100-014: Golden output tests (export command → stdout snapshot) -/// Task AIRGAP-5100-015: Determinism test (same inputs → same output bundle) +/// Task AIRGAP-5100-013: Exit code tests (export -> exit 0; errors -> non-zero) +/// Task AIRGAP-5100-014: Golden output tests (export command -> stdout snapshot) +/// Task AIRGAP-5100-015: Determinism test (same inputs -> same output bundle) /// -public sealed class AirGapCliToolTests +public sealed partial class AirGapCliToolTests { - #region AIRGAP-5100-013: Exit Code Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_SuccessfulExport_ReturnsZero() - { - // Arrange - var expectedExitCode = 0; - - // Assert - Document expected behavior - expectedExitCode.Should().Be(0, "Successful operations should return exit code 0"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_UserError_ReturnsOne() - { - // Arrange - var expectedExitCode = 1; - - // Assert - Document expected behavior for user errors - // User errors: invalid arguments, missing required files, validation failures - expectedExitCode.Should().Be(1, "User errors should return exit code 1"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_SystemError_ReturnsTwo() - { - // Arrange - var expectedExitCode = 2; - - // Assert - Document expected behavior for system errors - // System errors: I/O failures, network errors, internal exceptions - expectedExitCode.Should().Be(2, "System errors should return exit code 2"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_MissingRequiredArgument_ReturnsOne() - { - // Arrange - Missing required argument scenario - var args = new[] { "export" }; // Missing --name, --version - var expectedExitCode = 1; - - // Assert - args.Should().NotContain("--name", "Missing required argument"); - expectedExitCode.Should().Be(1); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_InvalidFeedPath_ReturnsOne() - { - // Arrange - Invalid feed path scenario - var args = new[] - { - "export", - "--name", "test-bundle", - "--version", "1.0.0", - "--feed", "/nonexistent/path/feed.json" - }; - var expectedExitCode = 1; - - // Assert - args.Should().Contain("--feed"); - expectedExitCode.Should().Be(1, "Invalid feed path should return exit code 1"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_HelpFlag_ReturnsZero() - { - // Arrange - var args 
= new[] { "--help" }; - var expectedExitCode = 0; - - // Assert - args.Should().Contain("--help"); - expectedExitCode.Should().Be(0, "--help should return exit code 0"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExitCode_VersionFlag_ReturnsZero() - { - // Arrange - var args = new[] { "--version" }; - var expectedExitCode = 0; - - // Assert - args.Should().Contain("--version"); - expectedExitCode.Should().Be(0, "--version should return exit code 0"); - } - - #endregion - - #region AIRGAP-5100-014: Golden Output Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ExportCommand_IncludesManifestSummary() - { - // Arrange - Expected output structure for export command - var expectedOutputLines = new[] - { - "Creating bundle: test-bundle v1.0.0", - "Processing feeds...", - " - nvd (v2025-06-15)", - "Processing policies...", - " - default (v1.0)", - "Bundle created successfully", - " Bundle ID: ", - " Digest: sha256:", - " Size: ", - " Output: " - }; - - // Assert - Document expected output structure - expectedOutputLines.Should().Contain(l => l.Contains("Bundle created")); - expectedOutputLines.Should().Contain(l => l.Contains("Digest:")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ExportCommand_IncludesBundleDigest() - { - // Arrange - var digestPattern = "sha256:[a-f0-9]{64}"; - - // Assert - digestPattern.Should().Contain("sha256:"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ImportCommand_IncludesImportSummary() - { - // Arrange - Expected output structure for import command - var expectedOutputLines = new[] - { - "Importing bundle: ", - "Verifying bundle integrity...", - " Digest verified: sha256:", - "Importing feeds...", - " - nvd: imported", - "Importing policies...", - " - default: imported", - "Bundle imported successfully" - }; - - // Assert - expectedOutputLines.Should().Contain(l => l.Contains("imported successfully")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ListCommand_IncludesBundleTable() - { - // Arrange - Expected output structure for list command - var expectedHeaders = new[] { "Bundle ID", "Name", "Version", "Created At", "Size" }; - - // Assert - expectedHeaders.Should().Contain("Bundle ID"); - expectedHeaders.Should().Contain("Name"); - expectedHeaders.Should().Contain("Version"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ValidateCommand_IncludesValidationResult() - { - // Arrange - Expected output structure for validate command - var expectedOutputLines = new[] - { - "Validating bundle: ", - " Manifest: valid", - " Feeds: ", - " Policies: ", - " Digest: verified", - "Validation: PASSED" - }; - - // Assert - expectedOutputLines.Should().Contain(l => l.Contains("Validation:")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GoldenOutput_ErrorMessage_IncludesContext() - { - // Arrange - Error message format - var errorMessageFormat = "Error: {message}\nContext: {details}\nSuggestion: {help}"; - - // Assert - Error messages should include context - errorMessageFormat.Should().Contain("Error:"); - errorMessageFormat.Should().Contain("Context:"); - } - - #endregion - - #region AIRGAP-5100-015: CLI Determinism Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_SameInputs_SameOutputDigest() - { - // Arrange - Simulate CLI determinism - var input1 = """{"feed":"nvd","data":"test"}"""; - 
var input2 = """{"feed":"nvd","data":"test"}"""; - - // Act - var digest1 = ComputeSha256Hex(input1); - var digest2 = ComputeSha256Hex(input2); - - // Assert - digest1.Should().Be(digest2, "Same inputs should produce same digest"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_OutputBundleName_IsDeterministic() - { - // Arrange - var bundleName = "offline-kit"; - var version = "1.0.0"; - var timestamp = DateTimeOffset.Parse("2025-06-15T12:00:00Z"); - - // Act - Generate bundle filename - var filename1 = GenerateBundleFilename(bundleName, version, timestamp); - var filename2 = GenerateBundleFilename(bundleName, version, timestamp); - - // Assert - filename1.Should().Be(filename2, "Same parameters should produce same filename"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_ManifestJson_IsDeterministic() - { - // Arrange - var manifest1 = CreateDeterministicManifest(); - var manifest2 = CreateDeterministicManifest(); - - // Act - var json1 = System.Text.Json.JsonSerializer.Serialize(manifest1); - var json2 = System.Text.Json.JsonSerializer.Serialize(manifest2); - - // Assert - Same manifest should serialize identically - json1.Should().Be(json2); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_FeedOrdering_IsDeterministic() - { - // Arrange - Feeds in different order - var feeds1 = new[] { "nvd", "github", "redhat" }; - var feeds2 = new[] { "github", "redhat", "nvd" }; - - // Act - Sort both to canonical order - var sorted1 = feeds1.OrderBy(f => f).ToList(); - var sorted2 = feeds2.OrderBy(f => f).ToList(); - - // Assert - sorted1.Should().BeEquivalentTo(sorted2, options => options.WithStrictOrdering(), - "Canonical ordering should be deterministic"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_DigestComputation_IsDeterministic() - { - // Arrange - var content = "deterministic content for digest test"; - var expectedDigest = ComputeSha256Hex(content); - - // Act - Compute multiple times - var digest1 = ComputeSha256Hex(content); - var digest2 = ComputeSha256Hex(content); - var digest3 = ComputeSha256Hex(content); - - // Assert - digest1.Should().Be(expectedDigest); - digest2.Should().Be(expectedDigest); - digest3.Should().Be(expectedDigest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CliDeterminism_TimestampFormat_IsDeterministic() - { - // Arrange - var timestamp = DateTimeOffset.Parse("2025-06-15T12:00:00Z"); - - // Act - var formatted1 = timestamp.ToString("O"); // ISO 8601 - var formatted2 = timestamp.ToString("O"); - - // Assert - formatted1.Should().Be(formatted2); - formatted1.Should().Be("2025-06-15T12:00:00.0000000+00:00"); - } - - #endregion - - #region Helpers - - private static string ComputeSha256Hex(string content) - { - var bytes = Encoding.UTF8.GetBytes(content); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static string GenerateBundleFilename(string name, string version, DateTimeOffset timestamp) - { - return $"{name}-{version}-{timestamp:yyyyMMddHHmmss}.tar.gz"; - } - - private static object CreateDeterministicManifest() - { - return new - { - bundleId = "fixed-bundle-id-123", - name = "offline-kit", - version = "1.0.0", - createdAt = "2025-06-15T12:00:00Z", - feeds = new[] - { - new { feedId = "nvd", name = "nvd", version = "v1" } - } - }; - } - - #endregion } diff --git 
a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Copy.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Copy.cs new file mode 100644 index 000000000..b71cdf5bd --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Copy.cs @@ -0,0 +1,21 @@ +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + private static void CopyDirectory(string sourceDir, string destDir) + { + Directory.CreateDirectory(destDir); + + foreach (var file in Directory.GetFiles(sourceDir)) + { + var destFile = Path.Combine(destDir, Path.GetFileName(file)); + File.Copy(file, destFile, overwrite: true); + } + + foreach (var subDir in Directory.GetDirectories(sourceDir)) + { + var destSubDir = Path.Combine(destDir, Path.GetFileName(subDir)); + CopyDirectory(subDir, destSubDir); + } + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Digest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Digest.cs new file mode 100644 index 000000000..e860b7e77 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.Digest.cs @@ -0,0 +1,14 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + private static string ComputeSha256Hex(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.IO.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.IO.cs new file mode 100644 index 000000000..62e448a7d --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Helpers.IO.cs @@ -0,0 +1,17 @@ +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + private async Task CreateFileInEnvAsync(string envPath, string relativePath, string content) + { + var fullPath = Path.Combine(envPath, relativePath); + var dir = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(dir)) + { + Directory.CreateDirectory(dir); + } + + await File.WriteAllTextAsync(fullPath, content); + return fullPath; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Components.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Components.cs new file mode 100644 index 000000000..8b65b5beb --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Components.cs @@ -0,0 +1,64 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_BundleTransfer_PreservesAllComponents() + { + var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/all-feeds.json", """{"feeds":[]}"""); + var policyPath = 
await CreateFileInEnvAsync(_onlineEnvPath, "policies/default.rego", + """package default\ndefault allow = false"""); + var certPath = await CreateFileInEnvAsync(_onlineEnvPath, "certs/root.pem", + "-----BEGIN CERTIFICATE-----\ntest\n-----END CERTIFICATE-----"); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "multi-component-bundle", + "2.0.0", + DateTimeOffset.UtcNow.AddDays(30), + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedPath, "feeds/all-feeds.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + new[] + { + new PolicyBuildConfig("policy-1", "default", "1.0", policyPath, "policies/default.rego", + PolicyType.OpaRego) + }, + new[] + { + new CryptoBuildConfig("crypto-1", "trust-root", certPath, "certs/root.pem", + CryptoComponentType.TrustRoot, null) + }, + Array.Empty()); + + var bundlePath = Path.Combine(_onlineEnvPath, "multi-bundle"); + + var manifest = await builder.BuildAsync(request, bundlePath); + await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), + BundleManifestSerializer.Serialize(manifest)); + + var offlinePath = Path.Combine(_offlineEnvPath, "multi-imported"); + CopyDirectory(bundlePath, offlinePath); + + var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + imported.Feeds.Should().HaveCount(1); + imported.Policies.Should().HaveCount(1); + imported.CryptoMaterials.Should().HaveCount(1); + + File.Exists(Path.Combine(offlinePath, "feeds/all-feeds.json")).Should().BeTrue(); + File.Exists(Path.Combine(offlinePath, "policies/default.rego")).Should().BeTrue(); + File.Exists(Path.Combine(offlinePath, "certs/root.pem")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Corrupted.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Corrupted.cs new file mode 100644 index 000000000..115119c99 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.Corrupted.cs @@ -0,0 +1,52 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_CorruptedBundle_ImportFails() + { + var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/corrupt-test.json", + """{"original":"data"}"""); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "corrupt-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed", "nvd", "v1", feedPath, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var bundlePath = Path.Combine(_onlineEnvPath, "corrupt-source"); + var manifest = await builder.BuildAsync(request, bundlePath); + await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), + BundleManifestSerializer.Serialize(manifest)); + + var offlinePath = Path.Combine(_offlineEnvPath, "corrupt-imported"); + CopyDirectory(bundlePath, offlinePath); + + await File.WriteAllTextAsync(Path.Combine(offlinePath, "feeds/nvd.json"), + """{"corrupted":"malicious data"}"""); + + var loadedJson = 
await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + var actualContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "feeds/nvd.json")); + var actualDigest = ComputeSha256Hex(actualContent); + + imported.Feeds[0].Digest.Should().NotBe(actualDigest, "Digest should not match corrupted content"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.DataIntegrity.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.DataIntegrity.cs new file mode 100644 index 000000000..0255994bc --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.OnlineOffline.DataIntegrity.cs @@ -0,0 +1,64 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_OnlineExport_OfflineImport_DataIntegrity() + { + var feedData = """ + { + "vulnerabilities": [ + {"cve": "CVE-2024-0001", "severity": "HIGH"}, + {"cve": "CVE-2024-0002", "severity": "MEDIUM"} + ], + "lastUpdated": "2025-06-15T00:00:00Z" + } + """; + var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/nvd.json", feedData); + + var builder = new BundleBuilder(); + var exportRequest = new BundleBuildRequest( + "online-offline-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("nvd-feed", "nvd", "2025-06-15", feedPath, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var bundleOutputPath = Path.Combine(_onlineEnvPath, "bundle"); + + var manifest = await builder.BuildAsync(exportRequest, bundleOutputPath); + + var manifestPath = Path.Combine(bundleOutputPath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var offlineBundlePath = Path.Combine(_offlineEnvPath, "imported-bundle"); + CopyDirectory(bundleOutputPath, offlineBundlePath); + + var importedManifestJson = await File.ReadAllTextAsync(Path.Combine(offlineBundlePath, "manifest.json")); + var importedManifest = BundleManifestSerializer.Deserialize(importedManifestJson); + + var importedFeedPath = Path.Combine(offlineBundlePath, "feeds/nvd.json"); + var importedFeedContent = await File.ReadAllTextAsync(importedFeedPath); + var importedFeedDigest = ComputeSha256Hex(importedFeedContent); + + importedManifest.Should().NotBeNull(); + importedManifest.Name.Should().Be("online-offline-test"); + importedManifest.Feeds.Should().HaveCount(1); + importedManifest.Feeds[0].Digest.Should().Be(importedFeedDigest, "Feed digest should match content"); + importedFeedContent.Should().Contain("CVE-2024-0001"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Crypto.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Crypto.cs new file mode 100644 index 000000000..fc2fb8bd0 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Crypto.cs @@ -0,0 +1,61 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using 
StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_PolicyWithCrypto_BothTransferred() + { + var policyContent = "package signed\ndefault allow = false"; + var certContent = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"; + + var policyPath = await CreateFileInEnvAsync(_onlineEnvPath, "policies/signed.rego", policyContent); + var certPath = await CreateFileInEnvAsync(_onlineEnvPath, "certs/signing.pem", certContent); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "signed-policy-bundle", + "1.0.0", + null, + Array.Empty(), + new[] + { + new PolicyBuildConfig("signed-policy", "signed", "1.0", policyPath, "policies/signed.rego", + PolicyType.OpaRego) + }, + new[] + { + new CryptoBuildConfig("signing-cert", "signing", certPath, "certs/signing.pem", + CryptoComponentType.SigningKey, null) + }, + Array.Empty()); + + var bundlePath = Path.Combine(_onlineEnvPath, "signed-bundle"); + + var manifest = await builder.BuildAsync(request, bundlePath); + await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), + BundleManifestSerializer.Serialize(manifest)); + + var offlinePath = Path.Combine(_offlineEnvPath, "signed-imported"); + CopyDirectory(bundlePath, offlinePath); + + var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + imported.Policies.Should().HaveCount(1); + imported.CryptoMaterials.Should().HaveCount(1); + + var importedPolicyContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "policies/signed.rego")); + var importedCertContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "certs/signing.pem")); + + importedPolicyContent.Should().Be(policyContent); + importedCertContent.Should().Be(certContent); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Multiple.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Multiple.cs new file mode 100644 index 000000000..194ba8bf0 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Multiple.cs @@ -0,0 +1,60 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_MultiplePolices_MaintainOrder() + { + var policy1Content = "package policy1\ndefault allow = true"; + var policy2Content = "package policy2\ndefault deny = false"; + var policy3Content = "package policy3\ndefault audit = true"; + + var policy1Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy1.rego", policy1Content); + var policy2Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy2.rego", policy2Content); + var policy3Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy3.rego", policy3Content); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "multi-policy-bundle", + "1.0.0", + null, + Array.Empty(), + new[] 
+ { + new PolicyBuildConfig("policy-1", "policy1", "1.0", policy1Path, "policies/policy1.rego", + PolicyType.OpaRego), + new PolicyBuildConfig("policy-2", "policy2", "1.0", policy2Path, "policies/policy2.rego", + PolicyType.OpaRego), + new PolicyBuildConfig("policy-3", "policy3", "1.0", policy3Path, "policies/policy3.rego", + PolicyType.OpaRego) + }, + Array.Empty(), + Array.Empty()); + + var bundlePath = Path.Combine(_onlineEnvPath, "multi-policy"); + + var manifest = await builder.BuildAsync(request, bundlePath); + await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), + BundleManifestSerializer.Serialize(manifest)); + + var offlinePath = Path.Combine(_offlineEnvPath, "multi-policy-imported"); + CopyDirectory(bundlePath, offlinePath); + + var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + imported.Policies.Should().HaveCount(3); + + File.Exists(Path.Combine(offlinePath, "policies/policy1.rego")).Should().BeTrue(); + File.Exists(Path.Combine(offlinePath, "policies/policy2.rego")).Should().BeTrue(); + File.Exists(Path.Combine(offlinePath, "policies/policy3.rego")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Verdict.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Verdict.cs new file mode 100644 index 000000000..fecc6b4bb --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.Policy.Verdict.cs @@ -0,0 +1,65 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class AirGapIntegrationTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Integration_PolicyExport_PolicyImport_IdenticalVerdict() + { + var policyContent = """ + package security + default allow = false + allow { + input.severity != "CRITICAL" + input.has_mitigation == true + } + deny { + input.severity == "CRITICAL" + input.has_mitigation == false + } + """; + var policyPath = await CreateFileInEnvAsync(_onlineEnvPath, "policies/security.rego", policyContent); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "policy-test-bundle", + "1.0.0", + null, + Array.Empty(), + new[] + { + new PolicyBuildConfig("security-policy", "security", "1.0", policyPath, "policies/security.rego", + PolicyType.OpaRego) + }, + Array.Empty(), + Array.Empty()); + + var bundlePath = Path.Combine(_onlineEnvPath, "policy-bundle"); + + var manifest = await builder.BuildAsync(request, bundlePath); + await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), + BundleManifestSerializer.Serialize(manifest)); + + var offlinePath = Path.Combine(_offlineEnvPath, "policy-imported"); + CopyDirectory(bundlePath, offlinePath); + + var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + var importedPolicyPath = Path.Combine(offlinePath, "policies/security.rego"); + var importedPolicyContent = await File.ReadAllTextAsync(importedPolicyPath); + + importedPolicyContent.Should().Be(policyContent, "Policy content should be identical after transfer"); + + var originalDigest = 
ComputeSha256Hex(policyContent); + var importedDigest = imported.Policies[0].Digest; + importedDigest.Should().Be(originalDigest, "Policy digest should match"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.cs index f9df14b78..2505f4eb9 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/AirGapIntegrationTests.cs @@ -2,30 +2,23 @@ // AirGapIntegrationTests.cs // Sprint: SPRINT_5100_0010_0004_airgap_tests // Tasks: AIRGAP-5100-016, AIRGAP-5100-017 -// Description: Integration tests for online→offline bundle workflow +// Description: Integration tests for online->offline bundle workflow // ----------------------------------------------------------------------------- -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.AirGap.Bundle.Tests; /// /// Integration Tests for AirGap Module -/// Task AIRGAP-5100-016: Export bundle (online env) → import bundle (offline env) → verify data integrity -/// Task AIRGAP-5100-017: Policy export → policy import → policy evaluation → verify identical verdict +/// Task AIRGAP-5100-016: Export bundle (online env) -> import bundle (offline env) -> verify data integrity +/// Task AIRGAP-5100-017: Policy export -> policy import -> policy evaluation -> verify identical verdict /// [Trait("Category", TestCategories.Integration)] [Trait("BlastRadius", TestCategories.BlastRadius.Integrations)] [Trait("BlastRadius", TestCategories.BlastRadius.Persistence)] -public sealed class AirGapIntegrationTests : IDisposable +public sealed partial class AirGapIntegrationTests : IDisposable { private readonly string _tempRoot; private readonly string _onlineEnvPath; @@ -36,360 +29,25 @@ public sealed class AirGapIntegrationTests : IDisposable _tempRoot = Path.Combine(Path.GetTempPath(), $"airgap-integration-{Guid.NewGuid():N}"); _onlineEnvPath = Path.Combine(_tempRoot, "online"); _offlineEnvPath = Path.Combine(_tempRoot, "offline"); - + Directory.CreateDirectory(_onlineEnvPath); Directory.CreateDirectory(_offlineEnvPath); } public void Dispose() { - if (Directory.Exists(_tempRoot)) + if (!Directory.Exists(_tempRoot)) { - try { Directory.Delete(_tempRoot, recursive: true); } - catch { /* Ignore cleanup errors */ } + return; } - } - #region AIRGAP-5100-016: Online → Offline Bundle Transfer Integration - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_OnlineExport_OfflineImport_DataIntegrity() - { - // Arrange - Create source data in "online" environment - var feedData = """ + try { - "vulnerabilities": [ - {"cve": "CVE-2024-0001", "severity": "HIGH"}, - {"cve": "CVE-2024-0002", "severity": "MEDIUM"} - ], - "lastUpdated": "2025-06-15T00:00:00Z" + Directory.Delete(_tempRoot, recursive: true); } - """; - var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/nvd.json", feedData); - - var builder = new BundleBuilder(); - var exportRequest = new BundleBuildRequest( - "online-offline-test", - "1.0.0", - null, - new[] { new FeedBuildConfig("nvd-feed", "nvd", "2025-06-15", feedPath, "feeds/nvd.json", 
DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var bundleOutputPath = Path.Combine(_onlineEnvPath, "bundle"); - - // Act - Export in online environment - var manifest = await builder.BuildAsync(exportRequest, bundleOutputPath); - - // Write manifest to bundle - var manifestPath = Path.Combine(bundleOutputPath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - // Simulate transfer to offline environment (copy files) - var offlineBundlePath = Path.Combine(_offlineEnvPath, "imported-bundle"); - CopyDirectory(bundleOutputPath, offlineBundlePath); - - // Import in offline environment - load manifest directly - var importedManifestJson = await File.ReadAllTextAsync(Path.Combine(offlineBundlePath, "manifest.json")); - var importedManifest = BundleManifestSerializer.Deserialize(importedManifestJson); - - // Verify data integrity - var importedFeedPath = Path.Combine(offlineBundlePath, "feeds/nvd.json"); - var importedFeedContent = await File.ReadAllTextAsync(importedFeedPath); - var importedFeedDigest = ComputeSha256Hex(importedFeedContent); - - // Assert - importedManifest.Should().NotBeNull(); - importedManifest.Name.Should().Be("online-offline-test"); - importedManifest.Feeds.Should().HaveCount(1); - importedManifest.Feeds[0].Digest.Should().Be(importedFeedDigest, "Feed digest should match content"); - importedFeedContent.Should().Contain("CVE-2024-0001"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_BundleTransfer_PreservesAllComponents() - { - // Arrange - Create multi-component bundle - var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/all-feeds.json", """{"feeds":[]}"""); - var policyPath = await CreateFileInEnvAsync(_onlineEnvPath, "policies/default.rego", """package default\ndefault allow = false"""); - var certPath = await CreateFileInEnvAsync(_onlineEnvPath, "certs/root.pem", "-----BEGIN CERTIFICATE-----\ntest\n-----END CERTIFICATE-----"); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "multi-component-bundle", - "2.0.0", - DateTimeOffset.UtcNow.AddDays(30), - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedPath, "feeds/all-feeds.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - new[] { new PolicyBuildConfig("policy-1", "default", "1.0", policyPath, "policies/default.rego", PolicyType.OpaRego) }, - new[] { new CryptoBuildConfig("crypto-1", "trust-root", certPath, "certs/root.pem", CryptoComponentType.TrustRoot, null) }, - Array.Empty()); - - var bundlePath = Path.Combine(_onlineEnvPath, "multi-bundle"); - - // Act - var manifest = await builder.BuildAsync(request, bundlePath); - await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), BundleManifestSerializer.Serialize(manifest)); - - // Transfer to offline - var offlinePath = Path.Combine(_offlineEnvPath, "multi-imported"); - CopyDirectory(bundlePath, offlinePath); - - // Load manifest directly - var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - All components transferred - imported.Feeds.Should().HaveCount(1); - imported.Policies.Should().HaveCount(1); - imported.CryptoMaterials.Should().HaveCount(1); - - // Verify files exist - File.Exists(Path.Combine(offlinePath, "feeds/all-feeds.json")).Should().BeTrue(); - File.Exists(Path.Combine(offlinePath, 
"policies/default.rego")).Should().BeTrue(); - File.Exists(Path.Combine(offlinePath, "certs/root.pem")).Should().BeTrue(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_CorruptedBundle_ImportFails() - { - // Arrange - var feedPath = await CreateFileInEnvAsync(_onlineEnvPath, "feeds/corrupt-test.json", """{"original":"data"}"""); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "corrupt-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed", "nvd", "v1", feedPath, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var bundlePath = Path.Combine(_onlineEnvPath, "corrupt-source"); - var manifest = await builder.BuildAsync(request, bundlePath); - await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), BundleManifestSerializer.Serialize(manifest)); - - // Transfer and corrupt - var offlinePath = Path.Combine(_offlineEnvPath, "corrupt-imported"); - CopyDirectory(bundlePath, offlinePath); - - // Corrupt the feed file after transfer - await File.WriteAllTextAsync(Path.Combine(offlinePath, "feeds/nvd.json"), """{"corrupted":"malicious data"}"""); - - // Act - Load manifest directly (digest verification would fail if validated) - var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Verify digest mismatch - var actualContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "feeds/nvd.json")); - var actualDigest = ComputeSha256Hex(actualContent); - - // Assert - imported.Feeds[0].Digest.Should().NotBe(actualDigest, "Digest should not match corrupted content"); - } - - #endregion - - #region AIRGAP-5100-017: Policy Export/Import/Evaluation Integration - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_PolicyExport_PolicyImport_IdenticalVerdict() - { - // Arrange - Create a policy in online environment - var policyContent = """ - package security - - default allow = false - - allow { - input.severity != "CRITICAL" - input.has_mitigation == true - } - - deny { - input.severity == "CRITICAL" - input.has_mitigation == false - } - """; - var policyPath = await CreateFileInEnvAsync(_onlineEnvPath, "policies/security.rego", policyContent); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "policy-test-bundle", - "1.0.0", - null, - Array.Empty(), - new[] { new PolicyBuildConfig("security-policy", "security", "1.0", policyPath, "policies/security.rego", PolicyType.OpaRego) }, - Array.Empty(), - Array.Empty()); - - var bundlePath = Path.Combine(_onlineEnvPath, "policy-bundle"); - - // Act - Export - var manifest = await builder.BuildAsync(request, bundlePath); - await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), BundleManifestSerializer.Serialize(manifest)); - - // Transfer to offline - var offlinePath = Path.Combine(_offlineEnvPath, "policy-imported"); - CopyDirectory(bundlePath, offlinePath); - - // Load manifest directly - var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Verify policy content - var importedPolicyPath = Path.Combine(offlinePath, "policies/security.rego"); - var importedPolicyContent = await File.ReadAllTextAsync(importedPolicyPath); - - // Assert - Policy content is identical - 
importedPolicyContent.Should().Be(policyContent, "Policy content should be identical after transfer"); - - // Assert - Policy digest matches - var originalDigest = ComputeSha256Hex(policyContent); - var importedDigest = imported.Policies[0].Digest; - importedDigest.Should().Be(originalDigest, "Policy digest should match"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_MultiplePolices_MaintainOrder() - { - // Arrange - Create multiple policies - var policy1Content = "package policy1\ndefault allow = true"; - var policy2Content = "package policy2\ndefault deny = false"; - var policy3Content = "package policy3\ndefault audit = true"; - - var policy1Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy1.rego", policy1Content); - var policy2Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy2.rego", policy2Content); - var policy3Path = await CreateFileInEnvAsync(_onlineEnvPath, "policies/policy3.rego", policy3Content); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "multi-policy-bundle", - "1.0.0", - null, - Array.Empty(), - new[] - { - new PolicyBuildConfig("policy-1", "policy1", "1.0", policy1Path, "policies/policy1.rego", PolicyType.OpaRego), - new PolicyBuildConfig("policy-2", "policy2", "1.0", policy2Path, "policies/policy2.rego", PolicyType.OpaRego), - new PolicyBuildConfig("policy-3", "policy3", "1.0", policy3Path, "policies/policy3.rego", PolicyType.OpaRego) - }, - Array.Empty(), - Array.Empty()); - - var bundlePath = Path.Combine(_onlineEnvPath, "multi-policy"); - - // Act - var manifest = await builder.BuildAsync(request, bundlePath); - await File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), BundleManifestSerializer.Serialize(manifest)); - - var offlinePath = Path.Combine(_offlineEnvPath, "multi-policy-imported"); - CopyDirectory(bundlePath, offlinePath); - - // Load manifest directly - var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - imported.Policies.Should().HaveCount(3); - - // All policy files should exist - File.Exists(Path.Combine(offlinePath, "policies/policy1.rego")).Should().BeTrue(); - File.Exists(Path.Combine(offlinePath, "policies/policy2.rego")).Should().BeTrue(); - File.Exists(Path.Combine(offlinePath, "policies/policy3.rego")).Should().BeTrue(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Integration_PolicyWithCrypto_BothTransferred() - { - // Arrange - var policyContent = "package signed\ndefault allow = false"; - var certContent = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"; - - var policyPath = await CreateFileInEnvAsync(_onlineEnvPath, "policies/signed.rego", policyContent); - var certPath = await CreateFileInEnvAsync(_onlineEnvPath, "certs/signing.pem", certContent); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "signed-policy-bundle", - "1.0.0", - null, - Array.Empty(), - new[] { new PolicyBuildConfig("signed-policy", "signed", "1.0", policyPath, "policies/signed.rego", PolicyType.OpaRego) }, - new[] { new CryptoBuildConfig("signing-cert", "signing", certPath, "certs/signing.pem", CryptoComponentType.SigningKey, null) }, - Array.Empty()); - - var bundlePath = Path.Combine(_onlineEnvPath, "signed-bundle"); - - // Act - var manifest = await builder.BuildAsync(request, bundlePath); - await 
File.WriteAllTextAsync(Path.Combine(bundlePath, "manifest.json"), BundleManifestSerializer.Serialize(manifest)); - - var offlinePath = Path.Combine(_offlineEnvPath, "signed-imported"); - CopyDirectory(bundlePath, offlinePath); - - // Load manifest directly - var loadedJson = await File.ReadAllTextAsync(Path.Combine(offlinePath, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - imported.Policies.Should().HaveCount(1); - imported.CryptoMaterials.Should().HaveCount(1); - - // Verify content integrity - var importedPolicyContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "policies/signed.rego")); - var importedCertContent = await File.ReadAllTextAsync(Path.Combine(offlinePath, "certs/signing.pem")); - - importedPolicyContent.Should().Be(policyContent); - importedCertContent.Should().Be(certContent); - } - - #endregion - - #region Helpers - - private async Task CreateFileInEnvAsync(string envPath, string relativePath, string content) - { - var fullPath = Path.Combine(envPath, relativePath); - var dir = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrEmpty(dir)) + catch { - Directory.CreateDirectory(dir); - } - await File.WriteAllTextAsync(fullPath, content); - return fullPath; - } - - private static void CopyDirectory(string sourceDir, string destDir) - { - Directory.CreateDirectory(destDir); - - foreach (var file in Directory.GetFiles(sourceDir)) - { - var destFile = Path.Combine(destDir, Path.GetFileName(file)); - File.Copy(file, destFile, overwrite: true); - } - - foreach (var subDir in Directory.GetDirectories(sourceDir)) - { - var destSubDir = Path.Combine(destDir, Path.GetFileName(subDir)); - CopyDirectory(subDir, destSubDir); + // Ignore cleanup errors. } } - - private static string ComputeSha256Hex(string content) - { - var bytes = Encoding.UTF8.GetBytes(content); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - #endregion } diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.BinaryContent.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.BinaryContent.cs new file mode 100644 index 000000000..81c26587c --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.BinaryContent.cs @@ -0,0 +1,71 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_BinaryContent_SameDigest() + { + var builder = new BundleBuilder(); + var binaryContent = new byte[] { 0x00, 0x01, 0x02, 0x03, 0xFF, 0xFE, 0xFD }; + + var source1 = CreateSourceFileBytes("binary1.bin", binaryContent); + var source2 = CreateSourceFileBytes("binary2.bin", binaryContent); + + var request1 = new BundleBuildRequest( + "binary-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "binary", "v1", source1, "data/binary.bin", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var request2 = new BundleBuildRequest( + "binary-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "binary", "v1", source2, "data/binary.bin", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + 
var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "bin1")); + var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "bin2")); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_LargeContent_SameDigest() + { + var builder = new BundleBuilder(); + var largeContent = new string('x', 1_000_000); + + var source1 = CreateSourceFile("large1.json", largeContent); + var source2 = CreateSourceFile("large2.json", largeContent); + + var request1 = CreateRequest(source1, "large1"); + var request2 = CreateRequest(source2, "large2"); + + var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "large1")); + var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "large2")); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.ContentIndependence.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.ContentIndependence.cs new file mode 100644 index 000000000..896db0ebe --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.ContentIndependence.cs @@ -0,0 +1,46 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_SameContent_DifferentSourcePath_SameDigest() + { + var builder = new BundleBuilder(); + var content = "identical content"; + + var source1 = CreateSourceFile("path1/file.json", content); + var source2 = CreateSourceFile("path2/file.json", content); + + var request1 = CreateRequest(source1, "out1"); + var request2 = CreateRequest(source2, "out2"); + + var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "out1")); + var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "out2")); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_DifferentContent_DifferentDigest() + { + var builder = new BundleBuilder(); + + var source1 = CreateSourceFile("diff1.json", "content A"); + var source2 = CreateSourceFile("diff2.json", "content B"); + + var request1 = CreateRequest(source1, "diffout1"); + var request2 = CreateRequest(source2, "diffout2"); + + var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "diffout1")); + var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "diffout2")); + + manifest1.Feeds[0].Digest.Should().NotBe(manifest2.Feeds[0].Digest); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Digest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Digest.cs new file mode 100644 index 000000000..f8a473363 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Digest.cs @@ -0,0 +1,13 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + private static string ComputeSha256(string content) + { + var hash = 
SHA256.HashData(Encoding.UTF8.GetBytes(content)); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.IO.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.IO.cs new file mode 100644 index 000000000..56e4235c6 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.IO.cs @@ -0,0 +1,20 @@ +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + private string CreateSourceFile(string relativePath, string content) + { + var path = Path.Combine(_tempRoot, "source", relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + File.WriteAllText(path, content); + return path; + } + + private string CreateSourceFileBytes(string relativePath, byte[] content) + { + var path = Path.Combine(_tempRoot, "source", relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + File.WriteAllBytes(path, content); + return path; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Manifest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Manifest.cs new file mode 100644 index 000000000..1db3c4188 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Manifest.cs @@ -0,0 +1,28 @@ +using System.Collections.Immutable; +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + private BundleManifest CreateDeterministicManifest(string name) + { + return new BundleManifest + { + BundleId = "fixed-bundle-id", + Name = name, + Version = "1.0.0", + CreatedAt = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), + Feeds = ImmutableArray.Create( + new FeedComponent("f1", "nvd", "v1", "feeds/nvd.json", + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 100, new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), + FeedFormat.StellaOpsNative)), + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Create( + new CryptoComponent("c1", "root", "certs/root.pem", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 50, CryptoComponentType.TrustRoot, null)) + }; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Request.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Request.cs new file mode 100644 index 000000000..a923e667e --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Helpers.Request.cs @@ -0,0 +1,23 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + private BundleBuildRequest CreateRequest(string feedSource, string name) + { + return new BundleBuildRequest( + name, + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "test", "v1", feedSource, "feeds/test.json", + new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.MultipleComponent.cs 
b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.MultipleComponent.cs new file mode 100644 index 000000000..f8ff4c74b --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.MultipleComponent.cs @@ -0,0 +1,61 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_MultipleFeeds_EachHasCorrectDigest() + { + var builder = new BundleBuilder(); + var content1 = "feed 1 content"; + var content2 = "feed 2 content"; + var content3 = "feed 3 content"; + + var feed1 = CreateSourceFile("feeds/f1.json", content1); + var feed2 = CreateSourceFile("feeds/f2.json", content2); + var feed3 = CreateSourceFile("feeds/f3.json", content3); + + var request = new BundleBuildRequest( + "multi-feed", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "nvd", "v1", feed1, "feeds/f1.json", DateTimeOffset.UtcNow, + FeedFormat.StellaOpsNative), + new FeedBuildConfig("f2", "ghsa", "v1", feed2, "feeds/f2.json", DateTimeOffset.UtcNow, + FeedFormat.OsvJson), + new FeedBuildConfig("f3", "osv", "v1", feed3, "feeds/f3.json", DateTimeOffset.UtcNow, + FeedFormat.OsvJson) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, Path.Combine(_tempRoot, "multi")); + + manifest.Feeds[0].Digest.Should().Be(ComputeSha256(content1)); + manifest.Feeds[1].Digest.Should().Be(ComputeSha256(content2)); + manifest.Feeds[2].Digest.Should().Be(ComputeSha256(content3)); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_OrderIndependence_SameManifestDigest() + { + var manifest1 = CreateDeterministicManifest("order-test"); + var manifest2 = CreateDeterministicManifest("order-test"); + + var withDigest1 = BundleManifestSerializer.WithDigest(manifest1); + var withDigest2 = BundleManifestSerializer.WithDigest(manifest2); + + withDigest1.BundleDigest.Should().Be(withDigest2.BundleDigest); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.ExportImport.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.ExportImport.cs new file mode 100644 index 000000000..b75d2937b --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.ExportImport.cs @@ -0,0 +1,58 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Roundtrip_ExportImportReexport_IdenticalBundle() + { + var builder = new BundleBuilder(); + var content = "{\"vulns\": []}"; + var feedFile = CreateSourceFile("feed.json", content); + + var outputPath1 = Path.Combine(_tempRoot, "export1"); + var outputPath2 = Path.Combine(_tempRoot, "export2"); + + var request = new BundleBuildRequest( + "roundtrip-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "nvd", "v1", feedFile, "feeds/nvd.json", + new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), 
FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest1 = await builder.BuildAsync(request, outputPath1); + + var exportedPath = Path.Combine(outputPath1, "feeds/nvd.json"); + var importedContent = await File.ReadAllTextAsync(exportedPath); + + var reimportFeedFile = CreateSourceFile("reimport/feed.json", importedContent); + var request2 = new BundleBuildRequest( + "roundtrip-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "nvd", "v1", reimportFeedFile, "feeds/nvd.json", + new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest2 = await builder.BuildAsync(request2, outputPath2); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.Serialization.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.Serialization.cs new file mode 100644 index 000000000..82d887b86 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.Roundtrip.Serialization.cs @@ -0,0 +1,34 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Roundtrip_ManifestSerialize_Deserialize_Identical() + { + var original = CreateDeterministicManifest("roundtrip"); + + var json = BundleManifestSerializer.Serialize(original); + var restored = BundleManifestSerializer.Deserialize(json); + + restored.Should().BeEquivalentTo(original); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Roundtrip_ManifestSerialize_Reserialize_SameJson() + { + var original = CreateDeterministicManifest("json-roundtrip"); + + var json1 = BundleManifestSerializer.Serialize(original); + var restored = BundleManifestSerializer.Deserialize(json1); + var json2 = BundleManifestSerializer.Serialize(restored); + + json1.Should().Be(json2); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Digests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Digests.cs new file mode 100644 index 000000000..bc3614c52 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Digests.cs @@ -0,0 +1,42 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_SameInputs_SameComponentDigests() + { + var builder = new BundleBuilder(); + var content = "deterministic content"; + var feedFile1 = CreateSourceFile("feed1.json", content); + var feedFile2 = CreateSourceFile("feed2.json", content); + + var request1 = CreateRequest(feedFile1, "output1"); + var request2 = CreateRequest(feedFile2, "output2"); + + var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "output1")); + var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, 
"output2")); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_SameManifestContent_SameBundleDigest() + { + var manifest1 = CreateDeterministicManifest("bundle-1"); + var manifest2 = CreateDeterministicManifest("bundle-1"); + + var digest1 = BundleManifestSerializer.WithDigest(manifest1).BundleDigest; + var digest2 = BundleManifestSerializer.WithDigest(manifest2).BundleDigest; + + digest1.Should().Be(digest2); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Stability.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Stability.cs new file mode 100644 index 000000000..0c402f694 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.SameInputs.Stability.cs @@ -0,0 +1,48 @@ +using System.Security.Cryptography; +using System.Text; +using FluentAssertions; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleDeterminismTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_MultipleBuilds_SameDigests() + { + var builder = new BundleBuilder(); + var content = "consistent content"; + var digests = new List(); + + for (int i = 0; i < 5; i++) + { + var feedFile = CreateSourceFile($"run{i}/feed.json", content); + var outputPath = Path.Combine(_tempRoot, $"run{i}/output"); + var request = CreateRequest(feedFile, $"run{i}"); + + var manifest = await builder.BuildAsync(request, outputPath); + digests.Add(manifest.Feeds[0].Digest); + } + + digests.Distinct().Should().HaveCount(1, "All builds should produce the same digest"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Determinism_Sha256_StableAcrossCalls() + { + var content = Encoding.UTF8.GetBytes("test content"); + var hashes = new List(); + + for (int i = 0; i < 10; i++) + { + var hash = SHA256.HashData(content); + hashes.Add(Convert.ToHexString(hash).ToLowerInvariant()); + } + + hashes.Distinct().Should().HaveCount(1); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.cs index 10b6c7fb0..823d1df84 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleDeterminismTests.cs @@ -1,20 +1,12 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.TestKit; using Xunit; namespace StellaOps.AirGap.Bundle.Tests; /// -/// Determinism tests: same inputs → same bundle hash (SHA-256). +/// Determinism tests: same inputs -> same bundle hash (SHA-256). /// Tests that bundle export is deterministic and roundtrip produces identical bundles. 
/// -public sealed class BundleDeterminismTests : IAsyncLifetime +public sealed partial class BundleDeterminismTests : IAsyncLifetime { private string _tempRoot = null!; @@ -31,420 +23,7 @@ public sealed class BundleDeterminismTests : IAsyncLifetime { Directory.Delete(_tempRoot, recursive: true); } + return ValueTask.CompletedTask; } - - #region Same Inputs → Same Hash Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_SameInputs_SameComponentDigests() - { - // Arrange - var builder = new BundleBuilder(); - var content = "deterministic content"; - var feedFile1 = CreateSourceFile("feed1.json", content); - var feedFile2 = CreateSourceFile("feed2.json", content); - - var request1 = CreateRequest(feedFile1, "output1"); - var request2 = CreateRequest(feedFile2, "output2"); - - // Act - var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "output1")); - var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "output2")); - - // Assert - Same content produces same digest - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_SameManifestContent_SameBundleDigest() - { - // Arrange - var manifest1 = CreateDeterministicManifest("bundle-1"); - var manifest2 = CreateDeterministicManifest("bundle-1"); - - // Act - var digest1 = BundleManifestSerializer.WithDigest(manifest1).BundleDigest; - var digest2 = BundleManifestSerializer.WithDigest(manifest2).BundleDigest; - - // Assert - digest1.Should().Be(digest2); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_MultipleBuilds_SameDigests() - { - // Arrange - var builder = new BundleBuilder(); - var content = "consistent content"; - var digests = new List(); - - // Act - Build the same bundle 5 times - for (int i = 0; i < 5; i++) - { - var feedFile = CreateSourceFile($"run{i}/feed.json", content); - var outputPath = Path.Combine(_tempRoot, $"run{i}/output"); - var request = CreateRequest(feedFile, $"run{i}"); - - var manifest = await builder.BuildAsync(request, outputPath); - digests.Add(manifest.Feeds[0].Digest); - } - - // Assert - All digests should be identical - digests.Distinct().Should().HaveCount(1, "All builds should produce the same digest"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Determinism_Sha256_StableAcrossCalls() - { - // Arrange - var content = Encoding.UTF8.GetBytes("test content"); - var hashes = new List(); - - // Act - Compute hash multiple times - for (int i = 0; i < 10; i++) - { - var hash = SHA256.HashData(content); - hashes.Add(Convert.ToHexString(hash).ToLowerInvariant()); - } - - // Assert - hashes.Distinct().Should().HaveCount(1); - } - - #endregion - - #region Roundtrip Determinism Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Roundtrip_ExportImportReexport_IdenticalBundle() - { - // Arrange - var builder = new BundleBuilder(); - var content = "{\"vulns\": []}"; - var feedFile = CreateSourceFile("feed.json", content); - - var outputPath1 = Path.Combine(_tempRoot, "export1"); - var outputPath2 = Path.Combine(_tempRoot, "export2"); - - var request = new BundleBuildRequest( - "roundtrip-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "nvd", "v1", feedFile, "feeds/nvd.json", - new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - 
First export - var manifest1 = await builder.BuildAsync(request, outputPath1); - - // Simulate import by reading the exported file - var exportedPath = Path.Combine(outputPath1, "feeds/nvd.json"); - var importedContent = await File.ReadAllTextAsync(exportedPath); - - // Re-export using the imported file - var reimportFeedFile = CreateSourceFile("reimport/feed.json", importedContent); - var request2 = new BundleBuildRequest( - "roundtrip-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "nvd", "v1", reimportFeedFile, "feeds/nvd.json", - new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var manifest2 = await builder.BuildAsync(request2, outputPath2); - - // Assert - Feed digests should be identical - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Roundtrip_ManifestSerialize_Deserialize_Identical() - { - // Arrange - var original = CreateDeterministicManifest("roundtrip"); - - // Act - Serialize and deserialize - var json = BundleManifestSerializer.Serialize(original); - var restored = BundleManifestSerializer.Deserialize(json); - - // Assert - All fields preserved - restored.Should().BeEquivalentTo(original); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Roundtrip_ManifestSerialize_Reserialize_SameJson() - { - // Arrange - var original = CreateDeterministicManifest("json-roundtrip"); - - // Act - var json1 = BundleManifestSerializer.Serialize(original); - var restored = BundleManifestSerializer.Deserialize(json1); - var json2 = BundleManifestSerializer.Serialize(restored); - - // Assert - JSON should be identical - json1.Should().Be(json2); - } - - #endregion - - #region Content Independence Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_SameContent_DifferentSourcePath_SameDigest() - { - // Arrange - var builder = new BundleBuilder(); - var content = "identical content"; - - var source1 = CreateSourceFile("path1/file.json", content); - var source2 = CreateSourceFile("path2/file.json", content); - - var request1 = CreateRequest(source1, "out1"); - var request2 = CreateRequest(source2, "out2"); - - // Act - var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "out1")); - var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "out2")); - - // Assert - Digest depends on content, not source path - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_DifferentContent_DifferentDigest() - { - // Arrange - var builder = new BundleBuilder(); - - var source1 = CreateSourceFile("diff1.json", "content A"); - var source2 = CreateSourceFile("diff2.json", "content B"); - - var request1 = CreateRequest(source1, "diffout1"); - var request2 = CreateRequest(source2, "diffout2"); - - // Act - var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "diffout1")); - var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "diffout2")); - - // Assert - Different content produces different digest - manifest1.Feeds[0].Digest.Should().NotBe(manifest2.Feeds[0].Digest); - } - - #endregion - - #region Multiple Component Determinism - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_MultipleFeeds_EachHasCorrectDigest() - { - // Arrange - var 
builder = new BundleBuilder(); - var content1 = "feed 1 content"; - var content2 = "feed 2 content"; - var content3 = "feed 3 content"; - - var feed1 = CreateSourceFile("feeds/f1.json", content1); - var feed2 = CreateSourceFile("feeds/f2.json", content2); - var feed3 = CreateSourceFile("feeds/f3.json", content3); - - var request = new BundleBuildRequest( - "multi-feed", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "nvd", "v1", feed1, "feeds/f1.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative), - new FeedBuildConfig("f2", "ghsa", "v1", feed2, "feeds/f2.json", DateTimeOffset.UtcNow, FeedFormat.OsvJson), - new FeedBuildConfig("f3", "osv", "v1", feed3, "feeds/f3.json", DateTimeOffset.UtcNow, FeedFormat.OsvJson) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, Path.Combine(_tempRoot, "multi")); - - // Assert - Each feed has its own correct digest - manifest.Feeds[0].Digest.Should().Be(ComputeSha256(content1)); - manifest.Feeds[1].Digest.Should().Be(ComputeSha256(content2)); - manifest.Feeds[2].Digest.Should().Be(ComputeSha256(content3)); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_OrderIndependence_SameManifestDigest() - { - // Note: This test verifies that the bundle digest is computed deterministically - // even when components might be processed in different orders internally - - // Arrange - var manifest1 = CreateDeterministicManifest("order-test"); - var manifest2 = CreateDeterministicManifest("order-test"); - - // Act - var withDigest1 = BundleManifestSerializer.WithDigest(manifest1); - var withDigest2 = BundleManifestSerializer.WithDigest(manifest2); - - // Assert - withDigest1.BundleDigest.Should().Be(withDigest2.BundleDigest); - } - - #endregion - - #region Binary Content Determinism - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_BinaryContent_SameDigest() - { - // Arrange - var builder = new BundleBuilder(); - var binaryContent = new byte[] { 0x00, 0x01, 0x02, 0x03, 0xFF, 0xFE, 0xFD }; - - var source1 = CreateSourceFileBytes("binary1.bin", binaryContent); - var source2 = CreateSourceFileBytes("binary2.bin", binaryContent); - - var request1 = new BundleBuildRequest( - "binary-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "binary", "v1", source1, "data/binary.bin", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var request2 = new BundleBuildRequest( - "binary-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "binary", "v1", source2, "data/binary.bin", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - var manifest1 = await builder.BuildAsync(request1, Path.Combine(_tempRoot, "bin1")); - var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "bin2")); - - // Assert - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_LargeContent_SameDigest() - { - // Arrange - var builder = new BundleBuilder(); - var largeContent = new string('x', 1_000_000); // 1MB - - var source1 = CreateSourceFile("large1.json", largeContent); - var source2 = CreateSourceFile("large2.json", largeContent); - - var request1 = CreateRequest(source1, "large1"); - var request2 = CreateRequest(source2, "large2"); - - // Act - var manifest1 = await 
builder.BuildAsync(request1, Path.Combine(_tempRoot, "large1")); - var manifest2 = await builder.BuildAsync(request2, Path.Combine(_tempRoot, "large2")); - - // Assert - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest); - } - - #endregion - - #region Helpers - - private string CreateSourceFile(string relativePath, string content) - { - var path = Path.Combine(_tempRoot, "source", relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(path)!); - File.WriteAllText(path, content); - return path; - } - - private string CreateSourceFileBytes(string relativePath, byte[] content) - { - var path = Path.Combine(_tempRoot, "source", relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(path)!); - File.WriteAllBytes(path, content); - return path; - } - - private BundleBuildRequest CreateRequest(string feedSource, string name) - { - return new BundleBuildRequest( - name, - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "test", "v1", feedSource, "feeds/test.json", - new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - } - - private BundleManifest CreateDeterministicManifest(string name) - { - // Use fixed values for determinism - return new BundleManifest - { - BundleId = "fixed-bundle-id", - Name = name, - Version = "1.0.0", - CreatedAt = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), - Feeds = ImmutableArray.Create( - new FeedComponent("f1", "nvd", "v1", "feeds/nvd.json", - "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - 100, new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero), FeedFormat.StellaOpsNative)), - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - 50, CryptoComponentType.TrustRoot, null)) - }; - } - - private static string ComputeSha256(string content) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - #endregion } - - - diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Bundle.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Bundle.cs new file mode 100644 index 000000000..ecdf846cc --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Bundle.cs @@ -0,0 +1,78 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_SameInputs_ProduceSameBundleDigest() + { + var feedContent = """{"determinism":"test-001"}"""; + + var feedFile1 = await CreateTestFileAsync("source1", "feed.json", feedContent); + var feedFile2 = await CreateTestFileAsync("source2", "feed.json", feedContent); + + var builder = new BundleBuilder(); + + var request1 = new BundleBuildRequest( + "determinism-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile1, "feeds/nvd.json", + DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var request2 = new BundleBuildRequest( + 
"determinism-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile2, "feeds/nvd.json", + DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var outputPath1 = Path.Combine(_tempRoot, "determinism-output1"); + var outputPath2 = Path.Combine(_tempRoot, "determinism-output2"); + + var manifest1 = await builder.BuildAsync(request1, outputPath1); + var manifest2 = await builder.BuildAsync(request2, outputPath2); + + manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest, + "Same content should produce same file digest"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Determinism_DifferentInputs_ProduceDifferentDigests() + { + var feedFile1 = await CreateTestFileAsync("diff1", "feed.json", """{"version":1}"""); + var feedFile2 = await CreateTestFileAsync("diff2", "feed.json", """{"version":2}"""); + + var builder = new BundleBuilder(); + var request1 = CreateBuildRequest("diff-test", "1.0.0", feedFile1); + var request2 = CreateBuildRequest("diff-test", "1.0.0", feedFile2); + + var outputPath1 = Path.Combine(_tempRoot, "diff-output1"); + var outputPath2 = Path.Combine(_tempRoot, "diff-output2"); + + var manifest1 = await builder.BuildAsync(request1, outputPath1); + var manifest2 = await builder.BuildAsync(request2, outputPath2); + + manifest1.Feeds[0].Digest.Should().NotBe(manifest2.Feeds[0].Digest, + "Different content should produce different digests"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Serialization.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Serialization.cs new file mode 100644 index 000000000..9e2060755 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Determinism.Serialization.cs @@ -0,0 +1,23 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Determinism_ManifestSerialization_IsStable() + { + var manifest = CreateTestManifest(); + + var json1 = BundleManifestSerializer.Serialize(manifest); + var json2 = BundleManifestSerializer.Serialize(manifest); + var json3 = BundleManifestSerializer.Serialize(manifest); + + json1.Should().Be(json2); + json2.Should().Be(json3); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Basic.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Basic.cs new file mode 100644 index 000000000..528867db1 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Basic.cs @@ -0,0 +1,45 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_CreatesValidBundleStructure() + { + var feedFile = await CreateTestFileAsync("feeds", "nvd.json", """{"vulnerabilities":[]}"""); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("export-test", "1.0.0", 
feedFile); + var outputPath = Path.Combine(_tempRoot, "output"); + + var manifest = await builder.BuildAsync(request, outputPath); + + Directory.Exists(outputPath).Should().BeTrue("Output directory should be created"); + File.Exists(Path.Combine(outputPath, "feeds", "nvd.json")).Should().BeTrue("Feed file should be copied"); + manifest.Should().NotBeNull(); + manifest.Feeds.Should().HaveCount(1); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_SetsCorrectManifestFields() + { + var feedFile = await CreateTestFileAsync("feeds", "test-feed.json", """{"data":"test"}"""); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("manifest-test", "2.0.0", feedFile); + var outputPath = Path.Combine(_tempRoot, "manifest-output"); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Name.Should().Be("manifest-test"); + manifest.Version.Should().Be("2.0.0"); + manifest.SchemaVersion.Should().Be("1.0.0"); + manifest.BundleId.Should().NotBeNullOrEmpty(); + manifest.CreatedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5)); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Digests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Digests.cs new file mode 100644 index 000000000..0f525e9a0 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Export.Digests.cs @@ -0,0 +1,62 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_ComputesCorrectFileDigests() + { + var content = """{"content":"digest-test"}"""; + var feedFile = await CreateTestFileAsync("feeds", "digest-feed.json", content); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("digest-test", "1.0.0", feedFile); + var outputPath = Path.Combine(_tempRoot, "digest-output"); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Feeds.Should().ContainSingle(); + var feedDigest = manifest.Feeds[0].Digest; + feedDigest.Should().NotBeNullOrEmpty(); + feedDigest.Should().HaveLength(64, "SHA-256 hex digest should be 64 characters"); + + var expectedDigest = ComputeSha256Hex(content); + feedDigest.Should().Be(expectedDigest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_ComputesCorrectBundleDigest() + { + var feedFile = await CreateTestFileAsync("feeds", "bundle-digest.json", """{"data":"bundle"}"""); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("bundle-digest-test", "1.0.0", feedFile); + var outputPath = Path.Combine(_tempRoot, "bundle-digest-output"); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.BundleDigest.Should().NotBeNullOrEmpty(); + manifest.BundleDigest.Should().HaveLength(64); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_TracksCorrectFileSizes() + { + var content = new string('x', 1024); + var feedFile = await CreateTestFileAsync("feeds", "size-test.json", content); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("size-test", "1.0.0", feedFile); + var outputPath = Path.Combine(_tempRoot, "size-output"); + + var 
manifest = await builder.BuildAsync(request, outputPath); + + manifest.Feeds[0].SizeBytes.Should().Be(1024); + manifest.TotalSizeBytes.Should().Be(1024); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.IO.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.IO.cs new file mode 100644 index 000000000..cd52fbb2e --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.IO.cs @@ -0,0 +1,32 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + private async Task CreateTestFileAsync(string subdir, string filename, string content) + { + var dir = Path.Combine(_tempRoot, subdir); + Directory.CreateDirectory(dir); + var path = Path.Combine(dir, filename); + await File.WriteAllTextAsync(path, content); + return path; + } + + private static BundleBuildRequest CreateBuildRequest(string name, string version, string feedSourcePath) + { + return new BundleBuildRequest( + name, + version, + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedSourcePath, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.Manifest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.Manifest.cs new file mode 100644 index 000000000..39717d18e --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Helpers.Manifest.cs @@ -0,0 +1,41 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + private static BundleManifest CreateTestManifest() + { + return new BundleManifest + { + BundleId = "test-bundle-123", + SchemaVersion = "1.0.0", + Name = "test-bundle", + Version = "1.0.0", + CreatedAt = DateTimeOffset.Parse("2025-06-15T12:00:00Z"), + Feeds = ImmutableArray.Create(new FeedComponent( + "feed-1", + "nvd", + "v1", + "feeds/nvd.json", + "abcd1234" + new string('0', 56), + 1024, + DateTimeOffset.Parse("2025-06-15T12:00:00Z"), + FeedFormat.StellaOpsNative)), + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Empty, + TotalSizeBytes = 1024, + BundleDigest = "digest1234" + new string('0', 54) + }; + } + + private static string ComputeSha256Hex(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Import.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Import.cs new file mode 100644 index 000000000..2635139b5 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Import.cs @@ -0,0 +1,80 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + 
[Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_LoadsManifestCorrectly() + { + var feedFile = await CreateTestFileAsync("feeds", "import-test.json", """{"import":"test"}"""); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("import-test", "1.0.0", feedFile); + var bundlePath = Path.Combine(_tempRoot, "import-bundle"); + var manifest = await builder.BuildAsync(request, bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var loadedJson = await File.ReadAllTextAsync(manifestPath); + var loaded = BundleManifestSerializer.Deserialize(loadedJson); + + loaded.Should().NotBeNull(); + loaded.Name.Should().Be("import-test"); + loaded.Version.Should().Be("1.0.0"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_VerifiesFileIntegrity() + { + var feedContent = """{"integrity":"test"}"""; + var feedFile = await CreateTestFileAsync("feeds", "integrity.json", feedContent); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("integrity-test", "1.0.0", feedFile); + var bundlePath = Path.Combine(_tempRoot, "integrity-bundle"); + var manifest = await builder.BuildAsync(request, bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var loadedJson = await File.ReadAllTextAsync(manifestPath); + var loaded = BundleManifestSerializer.Deserialize(loadedJson); + + var feedPath = Path.Combine(bundlePath, "feeds", "nvd.json"); + File.Exists(feedPath).Should().BeTrue(); + + var actualContent = await File.ReadAllTextAsync(feedPath); + var actualDigest = ComputeSha256Hex(actualContent); + loaded.Feeds[0].Digest.Should().Be(actualDigest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_FailsOnCorruptedFile() + { + var feedFile = await CreateTestFileAsync("feeds", "corrupt.json", """{"original":"data"}"""); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("corrupt-test", "1.0.0", feedFile); + var bundlePath = Path.Combine(_tempRoot, "corrupt-bundle"); + var manifest = await builder.BuildAsync(request, bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var corruptPath = Path.Combine(bundlePath, "feeds", "nvd.json"); + await File.WriteAllTextAsync(corruptPath, """{"corrupted":"data"}"""); + + var loadedJson = await File.ReadAllTextAsync(manifestPath); + var loaded = BundleManifestSerializer.Deserialize(loadedJson); + + var actualContent = await File.ReadAllTextAsync(corruptPath); + var actualDigest = ComputeSha256Hex(actualContent); + loaded.Feeds[0].Digest.Should().NotBe(actualDigest, "Digest was computed before corruption"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Reexport.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Reexport.cs new file mode 100644 index 000000000..c93624835 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Reexport.cs @@ -0,0 +1,57 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; 
+using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Roundtrip_ExportImportReexport_ProducesIdenticalFileDigests() + { + var feedContent = """{"roundtrip":"determinism-test"}"""; + var feedFile = await CreateTestFileAsync("roundtrip", "feed.json", feedContent); + var builder = new BundleBuilder(); + var request = CreateBuildRequest("roundtrip-test", "1.0.0", feedFile); + var bundlePath1 = Path.Combine(_tempRoot, "roundtrip1"); + + var manifest1 = await builder.BuildAsync(request, bundlePath1); + var digest1 = manifest1.Feeds[0].Digest; + + var manifestJson = BundleManifestSerializer.Serialize(manifest1); + await File.WriteAllTextAsync(Path.Combine(bundlePath1, "manifest.json"), manifestJson); + + var loadedJson = await File.ReadAllTextAsync(Path.Combine(bundlePath1, "manifest.json")); + var imported = BundleManifestSerializer.Deserialize(loadedJson); + + var reexportFeedFile = Path.Combine(bundlePath1, "feeds", "nvd.json"); + var reexportRequest = new BundleBuildRequest( + imported.Name, + imported.Version, + imported.ExpiresAt, + new[] + { + new FeedBuildConfig( + imported.Feeds[0].FeedId, + imported.Feeds[0].Name, + imported.Feeds[0].Version, + reexportFeedFile, + imported.Feeds[0].RelativePath, + imported.Feeds[0].SnapshotAt, + imported.Feeds[0].Format) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var bundlePath2 = Path.Combine(_tempRoot, "roundtrip2"); + var manifest2 = await builder.BuildAsync(reexportRequest, bundlePath2); + var digest2 = manifest2.Feeds[0].Digest; + + digest1.Should().Be(digest2, "Roundtrip should produce identical file digests"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Serialization.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Serialization.cs new file mode 100644 index 000000000..bf0aabf09 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.Roundtrip.Serialization.cs @@ -0,0 +1,21 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Roundtrip_ManifestSerialization_PreservesAllFields() + { + var original = CreateTestManifest(); + + var json = BundleManifestSerializer.Serialize(original); + var deserialized = BundleManifestSerializer.Deserialize(json); + + deserialized.Should().BeEquivalentTo(original); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.cs index 5b0d2a130..29bd0b202 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportImportTests.cs @@ -5,26 +5,16 @@ // Description: L0 unit tests for bundle export/import and determinism tests // ----------------------------------------------------------------------------- -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; 
-using StellaOps.AirGap.Bundle.Services;
-using StellaOps.TestKit;
-using Xunit;
-
 namespace StellaOps.AirGap.Bundle.Tests;
 
 /// <summary>
 /// L0 Unit Tests for Bundle Export/Import
-/// Task AIRGAP-5100-001: Unit tests for bundle export (data → bundle → verify structure)
-/// Task AIRGAP-5100-002: Unit tests for bundle import (bundle → data → verify integrity)
-/// Task AIRGAP-5100-003: Determinism test (same inputs → same bundle hash)
-/// Task AIRGAP-5100-004: Determinism test (export → import → re-export → identical bundle)
+/// Task AIRGAP-5100-001: Unit tests for bundle export (data -> bundle -> verify structure)
+/// Task AIRGAP-5100-002: Unit tests for bundle import (bundle -> data -> verify integrity)
+/// Task AIRGAP-5100-003: Determinism test (same inputs -> same bundle hash)
+/// Task AIRGAP-5100-004: Determinism test (export -> import -> re-export -> identical bundle)
 /// </summary>
-public sealed class BundleExportImportTests : IDisposable
+public sealed partial class BundleExportImportTests : IDisposable
 {
     private readonly string _tempRoot;
@@ -36,417 +26,18 @@ public sealed class BundleExportImportTests : IDisposable
     public void Dispose()
     {
-        if (Directory.Exists(_tempRoot))
+        if (!Directory.Exists(_tempRoot))
         {
-            try { Directory.Delete(_tempRoot, recursive: true); }
-            catch { /* Ignore cleanup errors */ }
+            return;
+        }
+
+        try
+        {
+            Directory.Delete(_tempRoot, recursive: true);
+        }
+        catch
+        {
+            // Ignore cleanup errors.
         }
     }
-
-    #region AIRGAP-5100-001: Bundle Export Tests
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_CreatesValidBundleStructure()
-    {
-        // Arrange
-        var feedFile = await CreateTestFileAsync("feeds", "nvd.json", """{"vulnerabilities":[]}""");
-        var builder = new BundleBuilder();
-        var request = CreateBuildRequest("export-test", "1.0.0", feedFile);
-        var outputPath = Path.Combine(_tempRoot, "output");
-
-        // Act
-        var manifest = await builder.BuildAsync(request, outputPath);
-
-        // Assert
-        Directory.Exists(outputPath).Should().BeTrue("Output directory should be created");
-        File.Exists(Path.Combine(outputPath, "feeds", "nvd.json")).Should().BeTrue("Feed file should be copied");
-        manifest.Should().NotBeNull();
-        manifest.Feeds.Should().HaveCount(1);
-    }
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_SetsCorrectManifestFields()
-    {
-        // Arrange
-        var feedFile = await CreateTestFileAsync("feeds", "test-feed.json", """{"data":"test"}""");
-        var builder = new BundleBuilder();
-        var request = CreateBuildRequest("manifest-test", "2.0.0", feedFile);
-        var outputPath = Path.Combine(_tempRoot, "manifest-output");
-
-        // Act
-        var manifest = await builder.BuildAsync(request, outputPath);
-
-        // Assert
-        manifest.Name.Should().Be("manifest-test");
-        manifest.Version.Should().Be("2.0.0");
-        manifest.SchemaVersion.Should().Be("1.0.0");
-        manifest.BundleId.Should().NotBeNullOrEmpty();
-        manifest.CreatedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
-    }
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_ComputesCorrectFileDigests()
-    {
-        // Arrange
-        var content = """{"content":"digest-test"}""";
-        var feedFile = await CreateTestFileAsync("feeds", "digest-feed.json", content);
-        var builder = new BundleBuilder();
-        var request = CreateBuildRequest("digest-test", "1.0.0", feedFile);
-        var outputPath = Path.Combine(_tempRoot, "digest-output");
-
-        // Act
-        var manifest = await builder.BuildAsync(request, outputPath);
-
-        // Assert
-        manifest.Feeds.Should().ContainSingle();
-
var feedDigest = manifest.Feeds[0].Digest; - feedDigest.Should().NotBeNullOrEmpty(); - feedDigest.Should().HaveLength(64, "SHA-256 hex digest should be 64 characters"); - - // Verify digest manually - var expectedDigest = ComputeSha256Hex(content); - feedDigest.Should().Be(expectedDigest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_ComputesCorrectBundleDigest() - { - // Arrange - var feedFile = await CreateTestFileAsync("feeds", "bundle-digest.json", """{"data":"bundle"}"""); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("bundle-digest-test", "1.0.0", feedFile); - var outputPath = Path.Combine(_tempRoot, "bundle-digest-output"); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.BundleDigest.Should().NotBeNullOrEmpty(); - manifest.BundleDigest.Should().HaveLength(64); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_TracksCorrectFileSizes() - { - // Arrange - var content = new string('x', 1024); // 1KB of data - var feedFile = await CreateTestFileAsync("feeds", "size-test.json", content); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("size-test", "1.0.0", feedFile); - var outputPath = Path.Combine(_tempRoot, "size-output"); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Feeds[0].SizeBytes.Should().Be(1024); - manifest.TotalSizeBytes.Should().Be(1024); - } - - #endregion - - #region AIRGAP-5100-002: Bundle Import Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_LoadsManifestCorrectly() - { - // Arrange - First export a bundle - var feedFile = await CreateTestFileAsync("feeds", "import-test.json", """{"import":"test"}"""); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("import-test", "1.0.0", feedFile); - var bundlePath = Path.Combine(_tempRoot, "import-bundle"); - var manifest = await builder.BuildAsync(request, bundlePath); - - // Write manifest to bundle - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - // Act - Load the bundle manifest directly - var loadedJson = await File.ReadAllTextAsync(manifestPath); - var loaded = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - loaded.Should().NotBeNull(); - loaded.Name.Should().Be("import-test"); - loaded.Version.Should().Be("1.0.0"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_VerifiesFileIntegrity() - { - // Arrange - var feedContent = """{"integrity":"test"}"""; - var feedFile = await CreateTestFileAsync("feeds", "integrity.json", feedContent); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("integrity-test", "1.0.0", feedFile); - var bundlePath = Path.Combine(_tempRoot, "integrity-bundle"); - var manifest = await builder.BuildAsync(request, bundlePath); - - // Write manifest - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - // Act - Load manifest directly - var loadedJson = await File.ReadAllTextAsync(manifestPath); - var loaded = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - Verify file exists and digest matches - var feedPath = Path.Combine(bundlePath, "feeds", "nvd.json"); - File.Exists(feedPath).Should().BeTrue(); - - var 
actualContent = await File.ReadAllTextAsync(feedPath); - var actualDigest = ComputeSha256Hex(actualContent); - loaded.Feeds[0].Digest.Should().Be(actualDigest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_FailsOnCorruptedFile() - { - // Arrange - var feedFile = await CreateTestFileAsync("feeds", "corrupt.json", """{"original":"data"}"""); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("corrupt-test", "1.0.0", feedFile); - var bundlePath = Path.Combine(_tempRoot, "corrupt-bundle"); - var manifest = await builder.BuildAsync(request, bundlePath); - - // Write manifest - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - // Corrupt the feed file - var corruptPath = Path.Combine(bundlePath, "feeds", "nvd.json"); - await File.WriteAllTextAsync(corruptPath, """{"corrupted":"data"}"""); - - // Act - Load manifest directly (original digest was computed before corruption) - var loadedJson = await File.ReadAllTextAsync(manifestPath); - var loaded = BundleManifestSerializer.Deserialize(loadedJson); - - // Assert - File content has changed, digest no longer matches - var actualContent = await File.ReadAllTextAsync(corruptPath); - var actualDigest = ComputeSha256Hex(actualContent); - loaded.Feeds[0].Digest.Should().NotBe(actualDigest, "Digest was computed before corruption"); - } - - #endregion - - #region AIRGAP-5100-003: Determinism Tests (Same Inputs → Same Hash) - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_SameInputs_ProduceSameBundleDigest() - { - // Arrange - var feedContent = """{"determinism":"test-001"}"""; - - // Create two identical source files - var feedFile1 = await CreateTestFileAsync("source1", "feed.json", feedContent); - var feedFile2 = await CreateTestFileAsync("source2", "feed.json", feedContent); - - var builder = new BundleBuilder(); - - // Create identical requests (except file paths) - var request1 = new BundleBuildRequest( - "determinism-test", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile1, "feeds/nvd.json", DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var request2 = new BundleBuildRequest( - "determinism-test", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile2, "feeds/nvd.json", DateTimeOffset.Parse("2025-01-01T00:00:00Z"), FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var outputPath1 = Path.Combine(_tempRoot, "determinism-output1"); - var outputPath2 = Path.Combine(_tempRoot, "determinism-output2"); - - // Act - var manifest1 = await builder.BuildAsync(request1, outputPath1); - var manifest2 = await builder.BuildAsync(request2, outputPath2); - - // Assert - File digests should be identical (content-based) - manifest1.Feeds[0].Digest.Should().Be(manifest2.Feeds[0].Digest, - "Same content should produce same file digest"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Determinism_DifferentInputs_ProduceDifferentDigests() - { - // Arrange - var feedFile1 = await CreateTestFileAsync("diff1", "feed.json", """{"version":1}"""); - var feedFile2 = await CreateTestFileAsync("diff2", "feed.json", """{"version":2}"""); - - var builder = new BundleBuilder(); - var request1 = CreateBuildRequest("diff-test", "1.0.0", feedFile1); - var request2 = 
CreateBuildRequest("diff-test", "1.0.0", feedFile2); - - var outputPath1 = Path.Combine(_tempRoot, "diff-output1"); - var outputPath2 = Path.Combine(_tempRoot, "diff-output2"); - - // Act - var manifest1 = await builder.BuildAsync(request1, outputPath1); - var manifest2 = await builder.BuildAsync(request2, outputPath2); - - // Assert - manifest1.Feeds[0].Digest.Should().NotBe(manifest2.Feeds[0].Digest, - "Different content should produce different digests"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Determinism_ManifestSerialization_IsStable() - { - // Arrange - var manifest = CreateTestManifest(); - - // Act - Serialize multiple times - var json1 = BundleManifestSerializer.Serialize(manifest); - var json2 = BundleManifestSerializer.Serialize(manifest); - var json3 = BundleManifestSerializer.Serialize(manifest); - - // Assert - json1.Should().Be(json2); - json2.Should().Be(json3); - } - - #endregion - - #region AIRGAP-5100-004: Roundtrip Determinism (Export → Import → Re-export) - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Roundtrip_ExportImportReexport_ProducesIdenticalFileDigests() - { - // Arrange - Initial export - var feedContent = """{"roundtrip":"determinism-test"}"""; - var feedFile = await CreateTestFileAsync("roundtrip", "feed.json", feedContent); - var builder = new BundleBuilder(); - var request = CreateBuildRequest("roundtrip-test", "1.0.0", feedFile); - var bundlePath1 = Path.Combine(_tempRoot, "roundtrip1"); - - // Act - Export first time - var manifest1 = await builder.BuildAsync(request, bundlePath1); - var digest1 = manifest1.Feeds[0].Digest; - - // Import by loading manifest directly - var manifestJson = BundleManifestSerializer.Serialize(manifest1); - await File.WriteAllTextAsync(Path.Combine(bundlePath1, "manifest.json"), manifestJson); - - var loadedJson = await File.ReadAllTextAsync(Path.Combine(bundlePath1, "manifest.json")); - var imported = BundleManifestSerializer.Deserialize(loadedJson); - - // Re-export using the imported bundle's files - var reexportFeedFile = Path.Combine(bundlePath1, "feeds", "nvd.json"); - var reexportRequest = new BundleBuildRequest( - imported.Name, - imported.Version, - imported.ExpiresAt, - new[] { new FeedBuildConfig( - imported.Feeds[0].FeedId, - imported.Feeds[0].Name, - imported.Feeds[0].Version, - reexportFeedFile, - imported.Feeds[0].RelativePath, - imported.Feeds[0].SnapshotAt, - imported.Feeds[0].Format) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var bundlePath2 = Path.Combine(_tempRoot, "roundtrip2"); - var manifest2 = await builder.BuildAsync(reexportRequest, bundlePath2); - var digest2 = manifest2.Feeds[0].Digest; - - // Assert - digest1.Should().Be(digest2, "Roundtrip should produce identical file digests"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Roundtrip_ManifestSerialization_PreservesAllFields() - { - // Arrange - var original = CreateTestManifest(); - - // Act - var json = BundleManifestSerializer.Serialize(original); - var deserialized = BundleManifestSerializer.Deserialize(json); - - // Assert - deserialized.Should().BeEquivalentTo(original); - } - - #endregion - - #region Helpers - - private async Task CreateTestFileAsync(string subdir, string filename, string content) - { - var dir = Path.Combine(_tempRoot, subdir); - Directory.CreateDirectory(dir); - var path = Path.Combine(dir, filename); - await File.WriteAllTextAsync(path, content); - return path; - } - - private static BundleBuildRequest 
CreateBuildRequest(string name, string version, string feedSourcePath) - { - return new BundleBuildRequest( - name, - version, - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedSourcePath, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - } - - private static BundleManifest CreateTestManifest() - { - return new BundleManifest - { - BundleId = "test-bundle-123", - SchemaVersion = "1.0.0", - Name = "test-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.Parse("2025-06-15T12:00:00Z"), - Feeds = ImmutableArray.Create(new FeedComponent( - "feed-1", - "nvd", - "v1", - "feeds/nvd.json", - "abcd1234" + new string('0', 56), - 1024, - DateTimeOffset.Parse("2025-06-15T12:00:00Z"), - FeedFormat.StellaOpsNative)), - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Empty, - TotalSizeBytes = 1024, - BundleDigest = "digest1234" + new string('0', 54) - }; - } - - private static string ComputeSha256Hex(string content) - { - var bytes = Encoding.UTF8.GetBytes(content); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - #endregion } diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Builder.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Builder.cs new file mode 100644 index 000000000..d039a2918 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Builder.cs @@ -0,0 +1,90 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportModeTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Builder_LightMode_SetsExportModeInManifest() + { + var outputPath = Path.Combine(_testDir, "light-bundle"); + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + Name: "light-test", + Version: "1.0.0", + ExpiresAt: null, + Feeds: Array.Empty(), + Policies: Array.Empty(), + CryptoMaterials: Array.Empty(), + RuleBundles: Array.Empty(), + ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Light }); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.ExportMode.Should().Be("light"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Builder_FullMode_SetsExportModeInManifest() + { + var outputPath = Path.Combine(_testDir, "full-bundle"); + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + Name: "full-test", + Version: "1.0.0", + ExpiresAt: null, + Feeds: Array.Empty(), + Policies: Array.Empty(), + CryptoMaterials: Array.Empty(), + RuleBundles: Array.Empty(), + ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Full }); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.ExportMode.Should().Be("full"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Builder_NoExportOptions_DefaultsToLight() + { + var outputPath = Path.Combine(_testDir, "default-bundle"); + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + Name: "default-test", + Version: "1.0.0", + ExpiresAt: null, + Feeds: Array.Empty(), + Policies: Array.Empty(), + CryptoMaterials: Array.Empty(), + RuleBundles: Array.Empty()); + + var manifest = await 
builder.BuildAsync(request, outputPath); + + manifest.ExportMode.Should().Be("light"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleManifest_ExportMode_IsNullable() + { + var manifest = new BundleManifest + { + BundleId = "test", + Name = "test", + Version = "1.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = System.Collections.Immutable.ImmutableArray.Empty, + Policies = System.Collections.Immutable.ImmutableArray.Empty, + CryptoMaterials = System.Collections.Immutable.ImmutableArray.Empty + }; + + manifest.ExportMode.Should().BeNull(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Tests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Tests.cs new file mode 100644 index 000000000..4a891315f --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.Tests.cs @@ -0,0 +1,75 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportModeTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleExportMode_Enum_HasLightAndFull() + { + var values = Enum.GetValues(); + values.Should().Contain(BundleExportMode.Light); + values.Should().Contain(BundleExportMode.Full); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleBuilderOptions_DefaultMode_IsLight() + { + var options = new BundleBuilderOptions(); + options.Mode.Should().Be(BundleExportMode.Light); + options.MaxBlobSizeBytes.Should().BeNull(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleBuilderOptions_FullMode_CanSetMaxBlobSize() + { + var options = new BundleBuilderOptions + { + Mode = BundleExportMode.Full, + MaxBlobSizeBytes = 100 * 1024 * 1024 + }; + options.Mode.Should().Be(BundleExportMode.Full); + options.MaxBlobSizeBytes.Should().Be(100 * 1024 * 1024); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleBuildRequest_ExportOptions_DefaultsToNull() + { + var request = new BundleBuildRequest( + Name: "test", + Version: "1.0.0", + ExpiresAt: null, + Feeds: Array.Empty(), + Policies: Array.Empty(), + CryptoMaterials: Array.Empty(), + RuleBundles: Array.Empty()); + + request.ExportOptions.Should().BeNull(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BundleBuildRequest_WithExportOptions_AcceptsFullMode() + { + var request = new BundleBuildRequest( + Name: "test-full", + Version: "2.0.0", + ExpiresAt: null, + Feeds: Array.Empty(), + Policies: Array.Empty(), + CryptoMaterials: Array.Empty(), + RuleBundles: Array.Empty(), + ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Full }); + + request.ExportOptions.Should().NotBeNull(); + request.ExportOptions!.Mode.Should().Be(BundleExportMode.Full); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.cs index 47fdf6c29..3e0d15d00 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportModeTests.cs @@ -1,18 +1,6 @@ -// ----------------------------------------------------------------------------- -// BundleExportModeTests.cs -// Sprint: 
SPRINT_20260122_040_Platform_oci_delta_attestation_pipeline (040-04) -// Description: Unit tests for two-tier bundle export mode (light/full) -// ----------------------------------------------------------------------------- - -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.TestKit; -using Xunit; - namespace StellaOps.AirGap.Bundle.Tests; -public sealed class BundleExportModeTests : IDisposable +public sealed partial class BundleExportModeTests : IDisposable { private readonly string _testDir; @@ -24,161 +12,13 @@ public sealed class BundleExportModeTests : IDisposable public void Dispose() { - try { Directory.Delete(_testDir, recursive: true); } catch { /* best-effort */ } - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleExportMode_Enum_HasLightAndFull() - { - var values = Enum.GetValues(); - values.Should().Contain(BundleExportMode.Light); - values.Should().Contain(BundleExportMode.Full); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleBuilderOptions_DefaultMode_IsLight() - { - var options = new BundleBuilderOptions(); - options.Mode.Should().Be(BundleExportMode.Light); - options.MaxBlobSizeBytes.Should().BeNull(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleBuilderOptions_FullMode_CanSetMaxBlobSize() - { - var options = new BundleBuilderOptions + try { - Mode = BundleExportMode.Full, - MaxBlobSizeBytes = 100 * 1024 * 1024 // 100MB - }; - options.Mode.Should().Be(BundleExportMode.Full); - options.MaxBlobSizeBytes.Should().Be(100 * 1024 * 1024); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleBuildRequest_ExportOptions_DefaultsToNull() - { - var request = new BundleBuildRequest( - Name: "test", - Version: "1.0.0", - ExpiresAt: null, - Feeds: Array.Empty(), - Policies: Array.Empty(), - CryptoMaterials: Array.Empty(), - RuleBundles: Array.Empty()); - - request.ExportOptions.Should().BeNull(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleBuildRequest_WithExportOptions_AcceptsFullMode() - { - var request = new BundleBuildRequest( - Name: "test-full", - Version: "2.0.0", - ExpiresAt: null, - Feeds: Array.Empty(), - Policies: Array.Empty(), - CryptoMaterials: Array.Empty(), - RuleBundles: Array.Empty(), - ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Full }); - - request.ExportOptions.Should().NotBeNull(); - request.ExportOptions!.Mode.Should().Be(BundleExportMode.Full); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Builder_LightMode_SetsExportModeInManifest() - { - // Arrange - var outputPath = Path.Combine(_testDir, "light-bundle"); - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - Name: "light-test", - Version: "1.0.0", - ExpiresAt: null, - Feeds: Array.Empty(), - Policies: Array.Empty(), - CryptoMaterials: Array.Empty(), - RuleBundles: Array.Empty(), - ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Light }); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.ExportMode.Should().Be("light"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Builder_FullMode_SetsExportModeInManifest() - { - // Arrange - var outputPath = Path.Combine(_testDir, "full-bundle"); - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - Name: "full-test", - Version: "1.0.0", - ExpiresAt: null, - 
Feeds: Array.Empty(), - Policies: Array.Empty(), - CryptoMaterials: Array.Empty(), - RuleBundles: Array.Empty(), - ExportOptions: new BundleBuilderOptions { Mode = BundleExportMode.Full }); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.ExportMode.Should().Be("full"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Builder_NoExportOptions_DefaultsToLight() - { - // Arrange - var outputPath = Path.Combine(_testDir, "default-bundle"); - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - Name: "default-test", - Version: "1.0.0", - ExpiresAt: null, - Feeds: Array.Empty(), - Policies: Array.Empty(), - CryptoMaterials: Array.Empty(), - RuleBundles: Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.ExportMode.Should().Be("light"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BundleManifest_ExportMode_IsNullable() - { - // Backwards compat: old manifests won't have exportMode - var manifest = new BundleManifest + Directory.Delete(_testDir, recursive: true); + } + catch { - BundleId = "test", - Name = "test", - Version = "1.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = System.Collections.Immutable.ImmutableArray.Empty, - Policies = System.Collections.Immutable.ImmutableArray.Empty, - CryptoMaterials = System.Collections.Immutable.ImmutableArray.Empty - }; - - manifest.ExportMode.Should().BeNull(); + // best-effort cleanup + } } } diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.CryptoType.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.CryptoType.cs new file mode 100644 index 000000000..baee8e0e3 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.CryptoType.cs @@ -0,0 +1,40 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData(CryptoComponentType.TrustRoot)] + [InlineData(CryptoComponentType.IntermediateCa)] + [InlineData(CryptoComponentType.TimestampRoot)] + [InlineData(CryptoComponentType.SigningKey)] + [InlineData(CryptoComponentType.FulcioRoot)] + public async Task Export_CryptoType_Preserved(CryptoComponentType type) + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, $"crypto-{type}"); + var certFile = CreateTempFile("cert", "content"); + + var request = new BundleBuildRequest( + "crypto-type-test", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + new[] + { + new CryptoBuildConfig("c1", "test", certFile, "certs/test", type, null) + }, + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.CryptoMaterials[0].Type.Should().Be(type); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Digest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Digest.cs new file mode 100644 index 000000000..404bd53c0 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Digest.cs @@ -0,0 +1,65 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using 
Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_DigestComputation_MatchesSha256() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "digest"); + var content = "test content for hashing"; + var feedFile = CreateTempFile("test.json", content); + var expectedDigest = ComputeSha256(content); + + var request = new BundleBuildRequest( + "digest-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Feeds[0].Digest.Should().BeEquivalentTo(expectedDigest, options => options.IgnoringCase()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_BundleDigest_ComputedFromManifest() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "bundle-digest"); + var feedFile = CreateTempFile("feed.json", "{}"); + + var request = new BundleBuildRequest( + "bundle-digest-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.BundleDigest.Should().NotBeNullOrEmpty(); + manifest.BundleDigest.Should().HaveLength(64); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.DirectoryStructure.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.DirectoryStructure.cs new file mode 100644 index 000000000..f6265b870 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.DirectoryStructure.cs @@ -0,0 +1,52 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_CreatesNestedDirectories() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "nested"); + var feedFile = CreateTempFile("feed.json", "{}"); + var policyFile = CreateTempFile("policy.rego", "package test"); + var certFile = CreateTempFile("cert.pem", "cert"); + + var request = new BundleBuildRequest( + "nested-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "nvd", "v1", feedFile, "feeds/nvd/v1/data.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + new[] + { + new PolicyBuildConfig("p1", "default", "1.0", policyFile, "policies/rego/default.rego", + PolicyType.OpaRego) + }, + new[] + { + new CryptoBuildConfig("c1", "root", certFile, "crypto/certs/ca/root.pem", + CryptoComponentType.TrustRoot, null) + }, + Array.Empty()); + + await builder.BuildAsync(request, outputPath); + + Directory.Exists(Path.Combine(outputPath, "feeds", "nvd", "v1")).Should().BeTrue(); + Directory.Exists(Path.Combine(outputPath, "policies", "rego")).Should().BeTrue(); + Directory.Exists(Path.Combine(outputPath, "crypto", "certs", "ca")).Should().BeTrue(); + + File.Exists(Path.Combine(outputPath, "feeds", "nvd", "v1", 
"data.json")).Should().BeTrue(); + File.Exists(Path.Combine(outputPath, "policies", "rego", "default.rego")).Should().BeTrue(); + File.Exists(Path.Combine(outputPath, "crypto", "certs", "ca", "root.pem")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Expiration.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Expiration.cs new file mode 100644 index 000000000..6d23a49ce --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Expiration.cs @@ -0,0 +1,59 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_WithExpiration_PreservesExpiryDate() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "expiry"); + var expiresAt = DateTimeOffset.UtcNow.AddDays(30); + + var request = new BundleBuildRequest( + "expiry-test", + "1.0.0", + expiresAt, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.ExpiresAt.Should().BeCloseTo(expiresAt, TimeSpan.FromSeconds(1)); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_CryptoWithExpiration_PreservesComponentExpiry() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "crypto-expiry"); + var certFile = CreateTempFile("cert.pem", "cert"); + var componentExpiry = DateTimeOffset.UtcNow.AddYears(5); + + var request = new BundleBuildRequest( + "crypto-expiry-test", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + new[] + { + new CryptoBuildConfig("c1", "root", certFile, "certs/root.pem", + CryptoComponentType.TrustRoot, componentExpiry) + }, + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.CryptoMaterials[0].ExpiresAt.Should().BeCloseTo(componentExpiry, TimeSpan.FromSeconds(1)); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.FeedFormat.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.FeedFormat.cs new file mode 100644 index 000000000..c0c291e31 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.FeedFormat.cs @@ -0,0 +1,40 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData(FeedFormat.StellaOpsNative)] + [InlineData(FeedFormat.TrivyDb)] + [InlineData(FeedFormat.GrypeDb)] + [InlineData(FeedFormat.OsvJson)] + public async Task Export_FeedFormat_Preserved(FeedFormat format) + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, $"format-{format}"); + var feedFile = CreateTempFile("feed.json", "{}"); + + var request = new BundleBuildRequest( + "format-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", + DateTimeOffset.UtcNow, format) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await 
builder.BuildAsync(request, outputPath); + + manifest.Feeds[0].Format.Should().Be(format); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Helpers.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Helpers.cs new file mode 100644 index 000000000..6ec1d9f97 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Helpers.cs @@ -0,0 +1,21 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + private string CreateTempFile(string name, string content) + { + var path = Path.Combine(_tempRoot, "source", name); + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + File.WriteAllText(path, content); + return path; + } + + private static string ComputeSha256(string content) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.PolicyType.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.PolicyType.cs new file mode 100644 index 000000000..943d814fb --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.PolicyType.cs @@ -0,0 +1,39 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData(PolicyType.OpaRego)] + [InlineData(PolicyType.LatticeRules)] + [InlineData(PolicyType.UnknownBudgets)] + [InlineData(PolicyType.ScoringWeights)] + public async Task Export_PolicyType_Preserved(PolicyType type) + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, $"policy-{type}"); + var policyFile = CreateTempFile("policy", "content"); + + var request = new BundleBuildRequest( + "policy-type-test", + "1.0.0", + null, + Array.Empty(), + new[] + { + new PolicyBuildConfig("p1", "test", "1.0", policyFile, "policies/test", type) + }, + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Policies[0].Type.Should().Be(type); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Basic.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Basic.cs new file mode 100644 index 000000000..f73d6bc71 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Basic.cs @@ -0,0 +1,84 @@ +using System.Text; +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_EmptyBundle_CreatesValidManifest() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "empty"); + var request = new BundleBuildRequest( + "empty-bundle", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + 
manifest.Should().NotBeNull(); + manifest.BundleId.Should().NotBeNullOrEmpty(); + manifest.Name.Should().Be("empty-bundle"); + manifest.Version.Should().Be("1.0.0"); + manifest.SchemaVersion.Should().Be("1.0.0"); + manifest.CreatedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromMinutes(1)); + manifest.Feeds.Should().BeEmpty(); + manifest.Policies.Should().BeEmpty(); + manifest.CryptoMaterials.Should().BeEmpty(); + manifest.TotalSizeBytes.Should().Be(0); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_WithFeed_CopiesFileAndComputesDigest() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "with-feed"); + var feedContent = "{\"vulns\": []}"; + var feedFile = CreateTempFile("feed.json", feedContent); + + var request = new BundleBuildRequest( + "feed-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig( + "feed-1", + "nvd", + "v1", + feedFile, + "feeds/nvd.json", + DateTimeOffset.UtcNow, + FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Feeds.Should().HaveCount(1); + var feed = manifest.Feeds[0]; + feed.FeedId.Should().Be("feed-1"); + feed.Name.Should().Be("nvd"); + feed.Version.Should().Be("v1"); + feed.RelativePath.Should().Be("feeds/nvd.json"); + feed.Digest.Should().NotBeNullOrEmpty(); + feed.Digest.Should().HaveLength(64); + feed.SizeBytes.Should().Be(Encoding.UTF8.GetByteCount(feedContent)); + feed.Format.Should().Be(FeedFormat.StellaOpsNative); + + File.Exists(Path.Combine(outputPath, "feeds/nvd.json")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Components.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Components.cs new file mode 100644 index 000000000..79ac8f504 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.Components.cs @@ -0,0 +1,92 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_WithPolicy_CopiesFileAndComputesDigest() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "with-policy"); + var policyContent = "package policy\ndefault allow = false"; + var policyFile = CreateTempFile("default.rego", policyContent); + + var request = new BundleBuildRequest( + "policy-bundle", + "1.0.0", + null, + Array.Empty(), + new[] + { + new PolicyBuildConfig( + "policy-1", + "default", + "1.0", + policyFile, + "policies/default.rego", + PolicyType.OpaRego) + }, + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Policies.Should().HaveCount(1); + var policy = manifest.Policies[0]; + policy.PolicyId.Should().Be("policy-1"); + policy.Name.Should().Be("default"); + policy.Version.Should().Be("1.0"); + policy.RelativePath.Should().Be("policies/default.rego"); + policy.Digest.Should().HaveLength(64); + policy.Type.Should().Be(PolicyType.OpaRego); + + File.Exists(Path.Combine(outputPath, "policies/default.rego")).Should().BeTrue(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task 
Export_WithCryptoMaterial_CopiesFileAndComputesDigest() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "with-crypto"); + var certContent = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"; + var certFile = CreateTempFile("root.pem", certContent); + + var request = new BundleBuildRequest( + "crypto-bundle", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + new[] + { + new CryptoBuildConfig( + "crypto-1", + "trust-root", + certFile, + "certs/root.pem", + CryptoComponentType.TrustRoot, + DateTimeOffset.UtcNow.AddYears(10)) + }, + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.CryptoMaterials.Should().HaveCount(1); + var crypto = manifest.CryptoMaterials[0]; + crypto.ComponentId.Should().Be("crypto-1"); + crypto.Name.Should().Be("trust-root"); + crypto.RelativePath.Should().Be("certs/root.pem"); + crypto.Digest.Should().HaveLength(64); + crypto.Type.Should().Be(CryptoComponentType.TrustRoot); + crypto.ExpiresAt.Should().NotBeNull(); + + File.Exists(Path.Combine(outputPath, "certs/root.pem")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.TotalSize.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.TotalSize.cs new file mode 100644 index 000000000..435aa3406 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.Structure.TotalSize.cs @@ -0,0 +1,47 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleExportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Export_MultipleComponents_CalculatesTotalSize() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "multi"); + + var feed1 = CreateTempFile("feed1.json", new string('a', 100)); + var feed2 = CreateTempFile("feed2.json", new string('b', 200)); + var policy = CreateTempFile("policy.rego", new string('c', 50)); + + var request = new BundleBuildRequest( + "multi-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("f1", "nvd", "v1", feed1, "feeds/f1.json", DateTimeOffset.UtcNow, + FeedFormat.StellaOpsNative), + new FeedBuildConfig("f2", "ghsa", "v1", feed2, "feeds/f2.json", DateTimeOffset.UtcNow, + FeedFormat.StellaOpsNative) + }, + new[] + { + new PolicyBuildConfig("p1", "default", "1.0", policy, "policies/default.rego", + PolicyType.OpaRego) + }, + Array.Empty(), + Array.Empty()); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Feeds.Should().HaveCount(2); + manifest.Policies.Should().HaveCount(1); + manifest.TotalSizeBytes.Should().Be(100 + 200 + 50); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.cs index 6cec88f9c..8f74ef68c 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleExportTests.cs @@ -1,20 +1,11 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Services; 
 using Xunit;
-using StellaOps.TestKit;
 
 namespace StellaOps.AirGap.Bundle.Tests;
 
 /// <summary>
-/// Unit tests for bundle export: data → bundle → verify structure.
-/// Tests that bundle export produces correct structure with all components.
+/// Unit tests for bundle export: data -> bundle -> verify structure.
 /// </summary>
-public sealed class BundleExportTests : IAsyncLifetime
+public sealed partial class BundleExportTests : IAsyncLifetime
 {
     private string _tempRoot = null!;
@@ -31,513 +22,7 @@ public sealed class BundleExportTests : IAsyncLifetime
         {
             Directory.Delete(_tempRoot, recursive: true);
         }
+        return ValueTask.CompletedTask;
     }
-
-    #region L0 Export Structure Tests
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_EmptyBundle_CreatesValidManifest()
-    {
-        // Arrange
-        var builder = new BundleBuilder();
-        var outputPath = Path.Combine(_tempRoot, "empty");
-        var request = new BundleBuildRequest(
-            "empty-bundle",
-            "1.0.0",
-            null,
-            Array.Empty(),
-            Array.Empty(),
-            Array.Empty(),
-            Array.Empty());
-
-        // Act
-        var manifest = await builder.BuildAsync(request, outputPath);
-
-        // Assert - Structure valid
-        manifest.Should().NotBeNull();
-        manifest.BundleId.Should().NotBeNullOrEmpty();
-        manifest.Name.Should().Be("empty-bundle");
-        manifest.Version.Should().Be("1.0.0");
-        manifest.SchemaVersion.Should().Be("1.0.0");
-        manifest.CreatedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromMinutes(1));
-        manifest.Feeds.Should().BeEmpty();
-        manifest.Policies.Should().BeEmpty();
-        manifest.CryptoMaterials.Should().BeEmpty();
-        manifest.TotalSizeBytes.Should().Be(0);
-    }
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_WithFeed_CopiesFileAndComputesDigest()
-    {
-        // Arrange
-        var builder = new BundleBuilder();
-        var outputPath = Path.Combine(_tempRoot, "with-feed");
-        var feedContent = "{\"vulns\": []}";
-        var feedFile = CreateTempFile("feed.json", feedContent);
-
-        var request = new BundleBuildRequest(
-            "feed-bundle",
-            "1.0.0",
-            null,
-            new[]
-            {
-                new FeedBuildConfig(
-                    "feed-1",
-                    "nvd",
-                    "v1",
-                    feedFile,
-                    "feeds/nvd.json",
-                    DateTimeOffset.UtcNow,
-                    FeedFormat.StellaOpsNative)
-            },
-            Array.Empty(),
-            Array.Empty(),
-            Array.Empty());
-
-        // Act
-        var manifest = await builder.BuildAsync(request, outputPath);
-
-        // Assert - Feed copied and hashed
-        manifest.Feeds.Should().HaveCount(1);
-        var feed = manifest.Feeds[0];
-        feed.FeedId.Should().Be("feed-1");
-        feed.Name.Should().Be("nvd");
-        feed.Version.Should().Be("v1");
-        feed.RelativePath.Should().Be("feeds/nvd.json");
-        feed.Digest.Should().NotBeNullOrEmpty();
-        feed.Digest.Should().HaveLength(64); // SHA-256 hex
-        feed.SizeBytes.Should().Be(Encoding.UTF8.GetByteCount(feedContent));
-        feed.Format.Should().Be(FeedFormat.StellaOpsNative);
-
-        // File exists in output
-        File.Exists(Path.Combine(outputPath, "feeds/nvd.json")).Should().BeTrue();
-    }
-
-    [Trait("Category", TestCategories.Unit)]
-    [Fact]
-    public async Task Export_WithPolicy_CopiesFileAndComputesDigest()
-    {
-        // Arrange
-        var builder = new BundleBuilder();
-        var outputPath = Path.Combine(_tempRoot, "with-policy");
-        var policyContent = "package policy\ndefault allow = false";
-        var policyFile = CreateTempFile("default.rego", policyContent);
-
-        var request = new BundleBuildRequest(
-            "policy-bundle",
-            "1.0.0",
-            null,
-            Array.Empty(),
-            new[]
-            {
-                new PolicyBuildConfig(
-                    "policy-1",
-                    "default",
-                    "1.0",
-                    policyFile,
-                    "policies/default.rego",
-                    PolicyType.OpaRego)
-            },
-            Array.Empty(),
-            Array.Empty());
-
-        // Act
-        var
manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Policies.Should().HaveCount(1); - var policy = manifest.Policies[0]; - policy.PolicyId.Should().Be("policy-1"); - policy.Name.Should().Be("default"); - policy.Version.Should().Be("1.0"); - policy.RelativePath.Should().Be("policies/default.rego"); - policy.Digest.Should().HaveLength(64); - policy.Type.Should().Be(PolicyType.OpaRego); - - File.Exists(Path.Combine(outputPath, "policies/default.rego")).Should().BeTrue(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_WithCryptoMaterial_CopiesFileAndComputesDigest() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "with-crypto"); - var certContent = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"; - var certFile = CreateTempFile("root.pem", certContent); - - var request = new BundleBuildRequest( - "crypto-bundle", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - new[] - { - new CryptoBuildConfig( - "crypto-1", - "trust-root", - certFile, - "certs/root.pem", - CryptoComponentType.TrustRoot, - DateTimeOffset.UtcNow.AddYears(10)) - }, - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.CryptoMaterials.Should().HaveCount(1); - var crypto = manifest.CryptoMaterials[0]; - crypto.ComponentId.Should().Be("crypto-1"); - crypto.Name.Should().Be("trust-root"); - crypto.RelativePath.Should().Be("certs/root.pem"); - crypto.Digest.Should().HaveLength(64); - crypto.Type.Should().Be(CryptoComponentType.TrustRoot); - crypto.ExpiresAt.Should().NotBeNull(); - - File.Exists(Path.Combine(outputPath, "certs/root.pem")).Should().BeTrue(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_MultipleComponents_CalculatesTotalSize() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "multi"); - - var feed1 = CreateTempFile("feed1.json", new string('a', 100)); - var feed2 = CreateTempFile("feed2.json", new string('b', 200)); - var policy = CreateTempFile("policy.rego", new string('c', 50)); - - var request = new BundleBuildRequest( - "multi-bundle", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "nvd", "v1", feed1, "feeds/f1.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative), - new FeedBuildConfig("f2", "ghsa", "v1", feed2, "feeds/f2.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - new[] - { - new PolicyBuildConfig("p1", "default", "1.0", policy, "policies/default.rego", PolicyType.OpaRego) - }, - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Feeds.Should().HaveCount(2); - manifest.Policies.Should().HaveCount(1); - manifest.TotalSizeBytes.Should().Be(100 + 200 + 50); - } - - #endregion - - #region Digest Computation Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_DigestComputation_MatchesSha256() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "digest"); - var content = "test content for hashing"; - var feedFile = CreateTempFile("test.json", content); - - var expectedDigest = ComputeSha256(content); - - var request = new BundleBuildRequest( - "digest-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - Array.Empty(), - 
Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Feeds[0].Digest.Should().BeEquivalentTo(expectedDigest, options => options.IgnoringCase()); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_BundleDigest_ComputedFromManifest() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "bundle-digest"); - var feedFile = CreateTempFile("feed.json", "{}"); - - var request = new BundleBuildRequest( - "bundle-digest-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - Bundle digest is computed - manifest.BundleDigest.Should().NotBeNullOrEmpty(); - manifest.BundleDigest.Should().HaveLength(64); - } - - #endregion - - #region Directory Structure Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_CreatesNestedDirectories() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "nested"); - var feedFile = CreateTempFile("feed.json", "{}"); - var policyFile = CreateTempFile("policy.rego", "package test"); - var certFile = CreateTempFile("cert.pem", "cert"); - - var request = new BundleBuildRequest( - "nested-bundle", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "nvd", "v1", feedFile, "feeds/nvd/v1/data.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) - }, - new[] - { - new PolicyBuildConfig("p1", "default", "1.0", policyFile, "policies/rego/default.rego", PolicyType.OpaRego) - }, - new[] - { - new CryptoBuildConfig("c1", "root", certFile, "crypto/certs/ca/root.pem", CryptoComponentType.TrustRoot, null) - }, - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - All nested directories created - Directory.Exists(Path.Combine(outputPath, "feeds", "nvd", "v1")).Should().BeTrue(); - Directory.Exists(Path.Combine(outputPath, "policies", "rego")).Should().BeTrue(); - Directory.Exists(Path.Combine(outputPath, "crypto", "certs", "ca")).Should().BeTrue(); - - File.Exists(Path.Combine(outputPath, "feeds", "nvd", "v1", "data.json")).Should().BeTrue(); - File.Exists(Path.Combine(outputPath, "policies", "rego", "default.rego")).Should().BeTrue(); - File.Exists(Path.Combine(outputPath, "crypto", "certs", "ca", "root.pem")).Should().BeTrue(); - } - - #endregion - - #region Feed Format Tests - - [Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData(FeedFormat.StellaOpsNative)] - [InlineData(FeedFormat.TrivyDb)] - [InlineData(FeedFormat.GrypeDb)] - [InlineData(FeedFormat.OsvJson)] - public async Task Export_FeedFormat_Preserved(FeedFormat format) - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, $"format-{format}"); - var feedFile = CreateTempFile("feed.json", "{}"); - - var request = new BundleBuildRequest( - "format-test", - "1.0.0", - null, - new[] - { - new FeedBuildConfig("f1", "test", "v1", feedFile, "feeds/test.json", DateTimeOffset.UtcNow, format) - }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Feeds[0].Format.Should().Be(format); - } - - #endregion - - #region Policy Type Tests - - 
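// [Editor's aside: illustrative sketch, not part of this patch.] The format/type theories in
// these regions pin each enum member by hand through [InlineData]. A minimal exhaustiveness
// guard, assuming the four PolicyType members named in this diff are the only ones, could be:

[Fact]
public void Export_PolicyType_InlineDataCoversEveryEnumMember()
{
    var pinned = new[]
    {
        PolicyType.OpaRego, PolicyType.LatticeRules,
        PolicyType.UnknownBudgets, PolicyType.ScoringWeights
    };

    // Enum.GetValues<TEnum>() returns every declared member, so adding a new PolicyType
    // without extending the theory's [InlineData] rows makes this guard fail first.
    Enum.GetValues<PolicyType>().Should().BeEquivalentTo(pinned);
}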
[Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData(PolicyType.OpaRego)] - [InlineData(PolicyType.LatticeRules)] - [InlineData(PolicyType.UnknownBudgets)] - [InlineData(PolicyType.ScoringWeights)] - public async Task Export_PolicyType_Preserved(PolicyType type) - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, $"policy-{type}"); - var policyFile = CreateTempFile("policy", "content"); - - var request = new BundleBuildRequest( - "policy-type-test", - "1.0.0", - null, - Array.Empty(), - new[] - { - new PolicyBuildConfig("p1", "test", "1.0", policyFile, "policies/test", type) - }, - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Policies[0].Type.Should().Be(type); - } - - #endregion - - #region Crypto Component Type Tests - - [Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData(CryptoComponentType.TrustRoot)] - [InlineData(CryptoComponentType.IntermediateCa)] - [InlineData(CryptoComponentType.TimestampRoot)] - [InlineData(CryptoComponentType.SigningKey)] - [InlineData(CryptoComponentType.FulcioRoot)] - public async Task Export_CryptoType_Preserved(CryptoComponentType type) - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, $"crypto-{type}"); - var certFile = CreateTempFile("cert", "content"); - - var request = new BundleBuildRequest( - "crypto-type-test", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - new[] - { - new CryptoBuildConfig("c1", "test", certFile, "certs/test", type, null) - }, - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.CryptoMaterials[0].Type.Should().Be(type); - } - - #endregion - - #region Expiration Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_WithExpiration_PreservesExpiryDate() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "expiry"); - var expiresAt = DateTimeOffset.UtcNow.AddDays(30); - - var request = new BundleBuildRequest( - "expiry-test", - "1.0.0", - expiresAt, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.ExpiresAt.Should().BeCloseTo(expiresAt, TimeSpan.FromSeconds(1)); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Export_CryptoWithExpiration_PreservesComponentExpiry() - { - // Arrange - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "crypto-expiry"); - var certFile = CreateTempFile("cert.pem", "cert"); - var componentExpiry = DateTimeOffset.UtcNow.AddYears(5); - - var request = new BundleBuildRequest( - "crypto-expiry-test", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - new[] - { - new CryptoBuildConfig("c1", "root", certFile, "certs/root.pem", CryptoComponentType.TrustRoot, componentExpiry) - }, - Array.Empty()); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.CryptoMaterials[0].ExpiresAt.Should().BeCloseTo(componentExpiry, TimeSpan.FromSeconds(1)); - } - - #endregion - - #region Helpers - - private string CreateTempFile(string name, string content) - { - var path = Path.Combine(_tempRoot, "source", name); - Directory.CreateDirectory(Path.GetDirectoryName(path)!); - File.WriteAllText(path, content); - return path; - } - - private static string 
ComputeSha256(string content) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - #endregion } - - - diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Digest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Digest.cs new file mode 100644 index 000000000..d52e032cb --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Digest.cs @@ -0,0 +1,36 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_DigestVerification_MatchesExpected() + { + var content = "test content"; + var expectedDigest = ComputeSha256(content); + var filePath = Path.Combine(_tempRoot, "digest-test.txt"); + await File.WriteAllTextAsync(filePath, content); + + var actualDigest = await ComputeFileDigestAsync(filePath); + + actualDigest.Should().BeEquivalentTo(expectedDigest, options => options.IgnoringCase()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_DigestVerification_FailsOnTamperedFile() + { + var originalContent = "original content"; + var expectedDigest = ComputeSha256(originalContent); + var filePath = Path.Combine(_tempRoot, "tampered.txt"); + await File.WriteAllTextAsync(filePath, "tampered content"); + + var actualDigest = await ComputeFileDigestAsync(filePath); + + actualDigest.Should().NotBeEquivalentTo(expectedDigest); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Bundle.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Bundle.cs new file mode 100644 index 000000000..ac6bf3d36 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Bundle.cs @@ -0,0 +1,99 @@ +using System.Collections.Immutable; +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + private string CreateBundleWithWrongContent() + { + var bundlePath = Path.Combine(_tempRoot, $"wrong-content-{Guid.NewGuid():N}"); + Directory.CreateDirectory(bundlePath); + + var feedsDir = Path.Combine(bundlePath, "feeds"); + Directory.CreateDirectory(feedsDir); + + File.WriteAllText(Path.Combine(feedsDir, "nvd.json"), "wrong content"); + File.WriteAllText(Path.Combine(feedsDir, "ghsa.json"), "also wrong"); + + var certsDir = Path.Combine(bundlePath, "certs"); + Directory.CreateDirectory(certsDir); + File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert"); + + return bundlePath; + } + + private string CreateValidBundle() + { + var bundlePath = Path.Combine(_tempRoot, $"valid-{Guid.NewGuid():N}"); + Directory.CreateDirectory(bundlePath); + + var feedsDir = Path.Combine(bundlePath, "feeds"); + Directory.CreateDirectory(feedsDir); + File.WriteAllText(Path.Combine(feedsDir, "nvd.json"), "nvd-content"); + File.WriteAllText(Path.Combine(feedsDir, "ghsa.json"), "ghsa-content"); + + var certsDir = Path.Combine(bundlePath, "certs"); + Directory.CreateDirectory(certsDir); + File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert-content"); + + return bundlePath; + } + + private BundleManifest CreateMatchingManifest(string bundlePath) + { + return new BundleManifest + { + BundleId = Guid.NewGuid().ToString(), + 
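// [Editor's note] The size arguments used just below (11, 12 and 12) are the UTF-8 byte counts
// of the literal payloads "nvd-content", "ghsa-content" and "cert-content" that
// CreateValidBundle() writes to disk, and the digests are recomputed from those same literals,
// so this manifest describes the bundle files byte-for-byte.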
            Name = "valid-bundle",
+            Version = "1.0.0",
+            CreatedAt = DateTimeOffset.UtcNow,
+            Feeds = ImmutableArray.Create(
+                new FeedComponent("nvd-feed", "nvd", "v1", "feeds/nvd.json",
+                    ComputeSha256("nvd-content"), 11, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative),
+                new FeedComponent("ghsa-feed", "ghsa", "v1", "feeds/ghsa.json",
+                    ComputeSha256("ghsa-content"), 12, DateTimeOffset.UtcNow, FeedFormat.OsvJson)),
+            Policies = ImmutableArray<PolicyComponent>.Empty,
+            CryptoMaterials = ImmutableArray.Create(
+                new CryptoComponent("c1", "root", "certs/root.pem",
+                    ComputeSha256("cert-content"), 12, CryptoComponentType.TrustRoot, null))
+        };
+    }
+
+    private string CreateValidBundleWithPolicies()
+    {
+        var bundlePath = Path.Combine(_tempRoot, $"valid-policies-{Guid.NewGuid():N}");
+        Directory.CreateDirectory(bundlePath);
+
+        var policiesDir = Path.Combine(bundlePath, "policies");
+        Directory.CreateDirectory(policiesDir);
+        File.WriteAllText(Path.Combine(policiesDir, "default.rego"), "package default");
+        File.WriteAllText(Path.Combine(policiesDir, "lattice.json"), "{}");
+
+        var certsDir = Path.Combine(bundlePath, "certs");
+        Directory.CreateDirectory(certsDir);
+        File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert-content");
+
+        return bundlePath;
+    }
+
+    private BundleManifest CreateMatchingManifestWithPolicies(string bundlePath)
+    {
+        return new BundleManifest
+        {
+            BundleId = Guid.NewGuid().ToString(),
+            Name = "policy-bundle",
+            Version = "1.0.0",
+            CreatedAt = DateTimeOffset.UtcNow,
+            Feeds = ImmutableArray<FeedComponent>.Empty,
+            Policies = ImmutableArray.Create(
+                new PolicyComponent("p1", "default", "1.0", "policies/default.rego",
+                    ComputeSha256("package default"), 15, PolicyType.OpaRego),
+                new PolicyComponent("p2", "lattice", "1.0", "policies/lattice.json",
+                    ComputeSha256("{}"), 2, PolicyType.LatticeRules)),
+            CryptoMaterials = ImmutableArray.Create(
+                new CryptoComponent("c1", "root", "certs/root.pem",
+                    ComputeSha256("cert-content"), 12, CryptoComponentType.TrustRoot, null))
+        };
+    }
+}
diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Digest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Digest.cs
new file mode 100644
index 000000000..614a9a72f
--- /dev/null
+++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Digest.cs
@@ -0,0 +1,20 @@
+using System.Security.Cryptography;
+using System.Text;
+
+namespace StellaOps.AirGap.Bundle.Tests;
+
+public sealed partial class BundleImportTests
+{
+    private static string ComputeSha256(string content)
+    {
+        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
+        return Convert.ToHexString(hash).ToLowerInvariant();
+    }
+
+    private static async Task<string> ComputeFileDigestAsync(string filePath)
+    {
+        await using var stream = File.OpenRead(filePath);
+        var hash = await SHA256.HashDataAsync(stream);
+        return Convert.ToHexString(hash).ToLowerInvariant();
+    }
+}
diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Manifest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Manifest.cs
new file mode 100644
index 000000000..57de4c2e8
--- /dev/null
+++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Helpers.Manifest.cs
@@ -0,0 +1,86 @@
+using System.Collections.Immutable;
+using StellaOps.AirGap.Bundle.Models;
+
+namespace StellaOps.AirGap.Bundle.Tests;
+
+public sealed partial class BundleImportTests
+{
+    private
BundleManifest CreateEmptyManifest() => new() + { + BundleId = Guid.NewGuid().ToString(), + Name = "empty", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Empty, + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Empty + }; + + private BundleManifest CreateFullManifest() => new() + { + BundleId = Guid.NewGuid().ToString(), + Name = "full-bundle", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + ExpiresAt = DateTimeOffset.UtcNow.AddDays(30), + Feeds = ImmutableArray.Create( + new FeedComponent("f1", "nvd", "v1", "feeds/nvd.json", new string('a', 64), 100, + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)), + Policies = ImmutableArray.Create( + new PolicyComponent("p1", "default", "1.0", "policies/default.rego", new string('b', 64), 50, + PolicyType.OpaRego)), + CryptoMaterials = ImmutableArray.Create( + new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, + CryptoComponentType.TrustRoot, null)), + TotalSizeBytes = 180 + }; + + private BundleManifest CreateManifestWithFeeds() => new() + { + BundleId = Guid.NewGuid().ToString(), + Name = "feed-bundle", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Create( + new FeedComponent("nvd-feed", "nvd", "v1", "feeds/nvd.json", new string('a', 64), 100, + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative), + new FeedComponent("ghsa-feed", "ghsa", "v1", "feeds/ghsa.json", new string('b', 64), 200, + DateTimeOffset.UtcNow, FeedFormat.OsvJson)), + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Create( + new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, + CryptoComponentType.TrustRoot, null)) + }; + + private BundleManifest CreateManifestWithPolicies() => new() + { + BundleId = Guid.NewGuid().ToString(), + Name = "policy-bundle", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Empty, + Policies = ImmutableArray.Create( + new PolicyComponent("p1", "rego-policy", "1.0", "policies/rego.rego", new string('a', 64), 50, + PolicyType.OpaRego), + new PolicyComponent("p2", "lattice-policy", "1.0", "policies/lattice.json", new string('b', 64), + 60, PolicyType.LatticeRules)), + CryptoMaterials = ImmutableArray.Create( + new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, + CryptoComponentType.TrustRoot, null)) + }; + + private BundleManifest CreateManifestWithCrypto() => new() + { + BundleId = Guid.NewGuid().ToString(), + Name = "crypto-bundle", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Empty, + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Create( + new CryptoComponent("c1", "trust-root", "certs/root.pem", new string('a', 64), 30, + CryptoComponentType.TrustRoot, DateTimeOffset.UtcNow.AddYears(10)), + new CryptoComponent("c2", "fulcio-root", "certs/fulcio.pem", new string('b', 64), 40, + CryptoComponentType.FulcioRoot, null)) + }; +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.Errors.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.Errors.cs new file mode 100644 index 000000000..c4c498ef4 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.Errors.cs @@ -0,0 +1,57 @@ +using FluentAssertions; +using NSubstitute; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using 
StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Loader_ThrowsOnValidationFailure() + { + var bundlePath = CreateValidBundle(); + var manifest = CreateMatchingManifest(bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var feedRegistry = Substitute.For(); + var policyRegistry = Substitute.For(); + var cryptoRegistry = Substitute.For(); + var validator = Substitute.For(); + validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(new BundleValidationResult(false, + new[] { new BundleValidationError("Test", "Test error") }, + Array.Empty(), 0)); + + var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); + + var action = async () => await loader.LoadAsync(bundlePath); + await action.Should().ThrowAsync() + .WithMessage("*validation failed*"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Loader_ThrowsOnMissingManifest() + { + var bundlePath = Path.Combine(_tempRoot, "no-manifest"); + Directory.CreateDirectory(bundlePath); + + var feedRegistry = Substitute.For(); + var policyRegistry = Substitute.For(); + var cryptoRegistry = Substitute.For(); + var validator = Substitute.For(); + + var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); + + var action = async () => await loader.LoadAsync(bundlePath); + await action.Should().ThrowAsync(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.cs new file mode 100644 index 000000000..91bfb4eed --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Loader.cs @@ -0,0 +1,65 @@ +using FluentAssertions; +using NSubstitute; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Loader_RegistersAllFeeds() + { + var bundlePath = CreateValidBundle(); + var manifest = CreateMatchingManifest(bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var feedRegistry = Substitute.For(); + var policyRegistry = Substitute.For(); + var cryptoRegistry = Substitute.For(); + var validator = Substitute.For(); + validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(new BundleValidationResult(true, Array.Empty(), + Array.Empty(), 0)); + + var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); + + await loader.LoadAsync(bundlePath); + + feedRegistry.Received(manifest.Feeds.Length) + .Register(Arg.Any(), Arg.Any()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Loader_RegistersAllPolicies() + { + var bundlePath = CreateValidBundleWithPolicies(); + var manifest = 
CreateMatchingManifestWithPolicies(bundlePath); + + var manifestPath = Path.Combine(bundlePath, "manifest.json"); + await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); + + var feedRegistry = Substitute.For(); + var policyRegistry = Substitute.For(); + var cryptoRegistry = Substitute.For(); + var validator = Substitute.For(); + validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(new BundleValidationResult(true, Array.Empty(), + Array.Empty(), 0)); + + var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); + + await loader.LoadAsync(bundlePath); + + policyRegistry.Received(manifest.Policies.Length) + .Register(Arg.Any(), Arg.Any()); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.ManifestParsing.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.ManifestParsing.cs new file mode 100644 index 000000000..b089909fd --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.ManifestParsing.cs @@ -0,0 +1,80 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Import_ManifestDeserialization_PreservesAllFields() + { + var manifest = CreateFullManifest(); + var json = BundleManifestSerializer.Serialize(manifest); + + var imported = BundleManifestSerializer.Deserialize(json); + + imported.Should().BeEquivalentTo(manifest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Import_ManifestDeserialization_HandlesEmptyCollections() + { + var manifest = CreateEmptyManifest(); + var json = BundleManifestSerializer.Serialize(manifest); + + var imported = BundleManifestSerializer.Deserialize(json); + + imported.Feeds.Should().BeEmpty(); + imported.Policies.Should().BeEmpty(); + imported.CryptoMaterials.Should().BeEmpty(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Import_ManifestDeserialization_PreservesFeedComponents() + { + var manifest = CreateManifestWithFeeds(); + var json = BundleManifestSerializer.Serialize(manifest); + + var imported = BundleManifestSerializer.Deserialize(json); + + imported.Feeds.Should().HaveCount(2); + imported.Feeds[0].FeedId.Should().Be("nvd-feed"); + imported.Feeds[0].Format.Should().Be(FeedFormat.StellaOpsNative); + imported.Feeds[1].FeedId.Should().Be("ghsa-feed"); + imported.Feeds[1].Format.Should().Be(FeedFormat.OsvJson); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Import_ManifestDeserialization_PreservesPolicyComponents() + { + var manifest = CreateManifestWithPolicies(); + var json = BundleManifestSerializer.Serialize(manifest); + + var imported = BundleManifestSerializer.Deserialize(json); + + imported.Policies.Should().HaveCount(2); + imported.Policies[0].Type.Should().Be(PolicyType.OpaRego); + imported.Policies[1].Type.Should().Be(PolicyType.LatticeRules); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Import_ManifestDeserialization_PreservesCryptoComponents() + { + var manifest = CreateManifestWithCrypto(); + var json = BundleManifestSerializer.Serialize(manifest); + + var imported = BundleManifestSerializer.Deserialize(json); + + imported.CryptoMaterials.Should().HaveCount(2); + 
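// [Editor's note] Checking HaveCount(2) before the index-based assertions that follow means a
// short collection fails with a clear count message rather than an IndexOutOfRangeException
// from CryptoMaterials[1].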
imported.CryptoMaterials[0].Type.Should().Be(CryptoComponentType.TrustRoot); + imported.CryptoMaterials[1].Type.Should().Be(CryptoComponentType.FulcioRoot); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Validation.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Validation.cs new file mode 100644 index 000000000..9f69a6cfe --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.Validation.cs @@ -0,0 +1,96 @@ +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleImportTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Validation_FailsWhenFilesMissing() + { + var bundlePath = Path.Combine(_tempRoot, "missing-files"); + Directory.CreateDirectory(bundlePath); + + var manifest = CreateManifestWithFeeds(); + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, bundlePath); + + result.IsValid.Should().BeFalse(); + result.Errors.Should().Contain(e => + e.Message.Contains("digest mismatch") || e.Message.Contains("FILE_NOT_FOUND")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Validation_FailsWhenDigestMismatch() + { + var bundlePath = CreateBundleWithWrongContent(); + var manifest = CreateManifestWithFeeds(); + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, bundlePath); + + result.IsValid.Should().BeFalse(); + result.Errors.Should().Contain(e => e.Message.Contains("digest mismatch")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Validation_SucceedsWhenAllDigestsMatch() + { + var bundlePath = CreateValidBundle(); + var manifest = CreateMatchingManifest(bundlePath); + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, bundlePath); + + result.IsValid.Should().BeTrue(); + result.Errors.Should().BeEmpty(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Validation_WarnsWhenExpired() + { + var bundlePath = CreateValidBundle(); + var manifest = CreateMatchingManifest(bundlePath) with + { + ExpiresAt = DateTimeOffset.UtcNow.AddDays(-1) + }; + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, bundlePath); + + result.Warnings.Should().Contain(w => w.Message.Contains("expired")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Import_Validation_WarnsWhenFeedsOld() + { + var bundlePath = CreateValidBundle(); + var manifest = CreateMatchingManifest(bundlePath); + + var oldManifest = manifest with + { + Feeds = manifest.Feeds.Select(f => f with + { + SnapshotAt = DateTimeOffset.UtcNow.AddDays(-30) + }).ToImmutableArray() + }; + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(oldManifest, bundlePath); + + result.Warnings.Should().Contain(w => w.Message.Contains("days old")); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.cs index 1650833dd..e4bcc4147 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.cs +++ 
b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleImportTests.cs @@ -1,23 +1,11 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using FluentAssertions; -using NSubstitute; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.AirGap.Bundle.Validation; using Xunit; - -using StellaOps.TestKit; namespace StellaOps.AirGap.Bundle.Tests; /// -/// Unit tests for bundle import: bundle → data → verify integrity. -/// Tests that bundle import correctly validates and loads all components. +/// Unit tests for bundle import: bundle -> data -> verify integrity. /// -public sealed class BundleImportTests : IAsyncLifetime +public sealed partial class BundleImportTests : IAsyncLifetime { private string _tempRoot = null!; @@ -34,532 +22,7 @@ public sealed class BundleImportTests : IAsyncLifetime { Directory.Delete(_tempRoot, recursive: true); } + return ValueTask.CompletedTask; } - - #region Manifest Parsing Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Import_ManifestDeserialization_PreservesAllFields() - { - // Arrange - var manifest = CreateFullManifest(); - var json = BundleManifestSerializer.Serialize(manifest); - - // Act - var imported = BundleManifestSerializer.Deserialize(json); - - // Assert - imported.Should().BeEquivalentTo(manifest); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Import_ManifestDeserialization_HandlesEmptyCollections() - { - // Arrange - var manifest = CreateEmptyManifest(); - var json = BundleManifestSerializer.Serialize(manifest); - - // Act - var imported = BundleManifestSerializer.Deserialize(json); - - // Assert - imported.Feeds.Should().BeEmpty(); - imported.Policies.Should().BeEmpty(); - imported.CryptoMaterials.Should().BeEmpty(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Import_ManifestDeserialization_PreservesFeedComponents() - { - // Arrange - var manifest = CreateManifestWithFeeds(); - var json = BundleManifestSerializer.Serialize(manifest); - - // Act - var imported = BundleManifestSerializer.Deserialize(json); - - // Assert - imported.Feeds.Should().HaveCount(2); - imported.Feeds[0].FeedId.Should().Be("nvd-feed"); - imported.Feeds[0].Format.Should().Be(FeedFormat.StellaOpsNative); - imported.Feeds[1].FeedId.Should().Be("ghsa-feed"); - imported.Feeds[1].Format.Should().Be(FeedFormat.OsvJson); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Import_ManifestDeserialization_PreservesPolicyComponents() - { - // Arrange - var manifest = CreateManifestWithPolicies(); - var json = BundleManifestSerializer.Serialize(manifest); - - // Act - var imported = BundleManifestSerializer.Deserialize(json); - - // Assert - imported.Policies.Should().HaveCount(2); - imported.Policies[0].Type.Should().Be(PolicyType.OpaRego); - imported.Policies[1].Type.Should().Be(PolicyType.LatticeRules); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Import_ManifestDeserialization_PreservesCryptoComponents() - { - // Arrange - var manifest = CreateManifestWithCrypto(); - var json = BundleManifestSerializer.Serialize(manifest); - - // Act - var imported = BundleManifestSerializer.Deserialize(json); - - // Assert - imported.CryptoMaterials.Should().HaveCount(2); - imported.CryptoMaterials[0].Type.Should().Be(CryptoComponentType.TrustRoot); - imported.CryptoMaterials[1].Type.Should().Be(CryptoComponentType.FulcioRoot); 
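// [Editor's note] Everything removed from the old monolithic BundleImportTests.cs in this hunk
// reappears, minus the "// Arrange / Act / Assert" banners, in the new partial-class files added
// earlier in this diff (BundleImportTests.ManifestParsing.cs, .Validation.cs, .Loader.cs,
// .Loader.Errors.cs, .Digest.cs and the Helpers.* files), so the split does not drop any tests.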
- } - - #endregion - - #region Validation Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Validation_FailsWhenFilesMissing() - { - // Arrange - var bundlePath = Path.Combine(_tempRoot, "missing-files"); - Directory.CreateDirectory(bundlePath); - - var manifest = CreateManifestWithFeeds(); - // Don't create the actual feed files - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, bundlePath); - - // Assert - result.IsValid.Should().BeFalse(); - result.Errors.Should().Contain(e => e.Message.Contains("digest mismatch") || e.Message.Contains("FILE_NOT_FOUND")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Validation_FailsWhenDigestMismatch() - { - // Arrange - var bundlePath = CreateBundleWithWrongContent(); - var manifest = CreateManifestWithFeeds(); - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, bundlePath); - - // Assert - result.IsValid.Should().BeFalse(); - result.Errors.Should().Contain(e => e.Message.Contains("digest mismatch")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Validation_SucceedsWhenAllDigestsMatch() - { - // Arrange - var bundlePath = CreateValidBundle(); - var manifest = CreateMatchingManifest(bundlePath); - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, bundlePath); - - // Assert - result.IsValid.Should().BeTrue(); - result.Errors.Should().BeEmpty(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Validation_WarnsWhenExpired() - { - // Arrange - var bundlePath = CreateValidBundle(); - var manifest = CreateMatchingManifest(bundlePath) with - { - ExpiresAt = DateTimeOffset.UtcNow.AddDays(-1) // Expired - }; - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, bundlePath); - - // Assert - Validation succeeds but with warning - // (depends on implementation - may fail if expiry is enforced) - result.Warnings.Should().Contain(w => w.Message.Contains("expired")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Validation_WarnsWhenFeedsOld() - { - // Arrange - var bundlePath = CreateValidBundle(); - var manifest = CreateMatchingManifest(bundlePath); - - // Modify feed snapshot time to be old - var oldManifest = manifest with - { - Feeds = manifest.Feeds.Select(f => f with - { - SnapshotAt = DateTimeOffset.UtcNow.AddDays(-30) - }).ToImmutableArray() - }; - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(oldManifest, bundlePath); - - // Assert - result.Warnings.Should().Contain(w => w.Message.Contains("days old")); - } - - #endregion - - #region Bundle Loader Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Loader_RegistersAllFeeds() - { - // Arrange - var bundlePath = CreateValidBundle(); - var manifest = CreateMatchingManifest(bundlePath); - - // Write manifest file - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - var feedRegistry = Substitute.For(); - var policyRegistry = Substitute.For(); - var cryptoRegistry = Substitute.For(); - var validator = Substitute.For(); - validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) - .Returns(new 
BundleValidationResult(true, Array.Empty(), - Array.Empty(), 0)); - - var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); - - // Act - await loader.LoadAsync(bundlePath); - - // Assert - feedRegistry.Received(manifest.Feeds.Length).Register(Arg.Any(), Arg.Any()); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Loader_RegistersAllPolicies() - { - // Arrange - var bundlePath = CreateValidBundleWithPolicies(); - var manifest = CreateMatchingManifestWithPolicies(bundlePath); - - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - var feedRegistry = Substitute.For(); - var policyRegistry = Substitute.For(); - var cryptoRegistry = Substitute.For(); - var validator = Substitute.For(); - validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) - .Returns(new BundleValidationResult(true, Array.Empty(), - Array.Empty(), 0)); - - var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); - - // Act - await loader.LoadAsync(bundlePath); - - // Assert - policyRegistry.Received(manifest.Policies.Length).Register(Arg.Any(), Arg.Any()); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Loader_ThrowsOnValidationFailure() - { - // Arrange - var bundlePath = CreateValidBundle(); - var manifest = CreateMatchingManifest(bundlePath); - - var manifestPath = Path.Combine(bundlePath, "manifest.json"); - await File.WriteAllTextAsync(manifestPath, BundleManifestSerializer.Serialize(manifest)); - - var feedRegistry = Substitute.For(); - var policyRegistry = Substitute.For(); - var cryptoRegistry = Substitute.For(); - var validator = Substitute.For(); - validator.ValidateAsync(Arg.Any(), Arg.Any(), Arg.Any()) - .Returns(new BundleValidationResult(false, - new[] { new BundleValidationError("Test", "Test error") }, - Array.Empty(), 0)); - - var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); - - // Act & Assert - var action = async () => await loader.LoadAsync(bundlePath); - await action.Should().ThrowAsync() - .WithMessage("*validation failed*"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_Loader_ThrowsOnMissingManifest() - { - // Arrange - var bundlePath = Path.Combine(_tempRoot, "no-manifest"); - Directory.CreateDirectory(bundlePath); - // Don't create manifest.json - - var feedRegistry = Substitute.For(); - var policyRegistry = Substitute.For(); - var cryptoRegistry = Substitute.For(); - var validator = Substitute.For(); - - var loader = new BundleLoader(validator, feedRegistry, policyRegistry, cryptoRegistry); - - // Act & Assert - var action = async () => await loader.LoadAsync(bundlePath); - await action.Should().ThrowAsync(); - } - - #endregion - - #region Digest Verification Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_DigestVerification_MatchesExpected() - { - // Arrange - var content = "test content"; - var expectedDigest = ComputeSha256(content); - var filePath = Path.Combine(_tempRoot, "digest-test.txt"); - await File.WriteAllTextAsync(filePath, content); - - // Act - var actualDigest = await ComputeFileDigestAsync(filePath); - - // Assert - actualDigest.Should().BeEquivalentTo(expectedDigest, options => options.IgnoringCase()); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Import_DigestVerification_FailsOnTamperedFile() 
- { - // Arrange - var originalContent = "original content"; - var expectedDigest = ComputeSha256(originalContent); - var filePath = Path.Combine(_tempRoot, "tampered.txt"); - await File.WriteAllTextAsync(filePath, "tampered content"); - - // Act - var actualDigest = await ComputeFileDigestAsync(filePath); - - // Assert - actualDigest.Should().NotBeEquivalentTo(expectedDigest); - } - - #endregion - - #region Helpers - - private BundleManifest CreateEmptyManifest() => new() - { - BundleId = Guid.NewGuid().ToString(), - Name = "empty", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Empty, - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Empty - }; - - private BundleManifest CreateFullManifest() => new() - { - BundleId = Guid.NewGuid().ToString(), - Name = "full-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - ExpiresAt = DateTimeOffset.UtcNow.AddDays(30), - Feeds = ImmutableArray.Create( - new FeedComponent("f1", "nvd", "v1", "feeds/nvd.json", new string('a', 64), 100, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)), - Policies = ImmutableArray.Create( - new PolicyComponent("p1", "default", "1.0", "policies/default.rego", new string('b', 64), 50, PolicyType.OpaRego)), - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, CryptoComponentType.TrustRoot, null)), - TotalSizeBytes = 180 - }; - - private BundleManifest CreateManifestWithFeeds() => new() - { - BundleId = Guid.NewGuid().ToString(), - Name = "feed-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Create( - new FeedComponent("nvd-feed", "nvd", "v1", "feeds/nvd.json", new string('a', 64), 100, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative), - new FeedComponent("ghsa-feed", "ghsa", "v1", "feeds/ghsa.json", new string('b', 64), 200, DateTimeOffset.UtcNow, FeedFormat.OsvJson)), - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, CryptoComponentType.TrustRoot, null)) - }; - - private BundleManifest CreateManifestWithPolicies() => new() - { - BundleId = Guid.NewGuid().ToString(), - Name = "policy-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Empty, - Policies = ImmutableArray.Create( - new PolicyComponent("p1", "rego-policy", "1.0", "policies/rego.rego", new string('a', 64), 50, PolicyType.OpaRego), - new PolicyComponent("p2", "lattice-policy", "1.0", "policies/lattice.json", new string('b', 64), 60, PolicyType.LatticeRules)), - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", new string('c', 64), 30, CryptoComponentType.TrustRoot, null)) - }; - - private BundleManifest CreateManifestWithCrypto() => new() - { - BundleId = Guid.NewGuid().ToString(), - Name = "crypto-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Empty, - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "trust-root", "certs/root.pem", new string('a', 64), 30, CryptoComponentType.TrustRoot, DateTimeOffset.UtcNow.AddYears(10)), - new CryptoComponent("c2", "fulcio-root", "certs/fulcio.pem", new string('b', 64), 40, CryptoComponentType.FulcioRoot, null)) - }; - - private string CreateBundleWithWrongContent() - { - var bundlePath = Path.Combine(_tempRoot, $"wrong-content-{Guid.NewGuid():N}"); - 
Directory.CreateDirectory(bundlePath); - - var feedsDir = Path.Combine(bundlePath, "feeds"); - Directory.CreateDirectory(feedsDir); - - // Write content that doesn't match the expected digest - File.WriteAllText(Path.Combine(feedsDir, "nvd.json"), "wrong content"); - File.WriteAllText(Path.Combine(feedsDir, "ghsa.json"), "also wrong"); - - var certsDir = Path.Combine(bundlePath, "certs"); - Directory.CreateDirectory(certsDir); - File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert"); - - return bundlePath; - } - - private string CreateValidBundle() - { - var bundlePath = Path.Combine(_tempRoot, $"valid-{Guid.NewGuid():N}"); - Directory.CreateDirectory(bundlePath); - - var feedsDir = Path.Combine(bundlePath, "feeds"); - Directory.CreateDirectory(feedsDir); - File.WriteAllText(Path.Combine(feedsDir, "nvd.json"), "nvd-content"); - File.WriteAllText(Path.Combine(feedsDir, "ghsa.json"), "ghsa-content"); - - var certsDir = Path.Combine(bundlePath, "certs"); - Directory.CreateDirectory(certsDir); - File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert-content"); - - return bundlePath; - } - - private BundleManifest CreateMatchingManifest(string bundlePath) - { - return new BundleManifest - { - BundleId = Guid.NewGuid().ToString(), - Name = "valid-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Create( - new FeedComponent("nvd-feed", "nvd", "v1", "feeds/nvd.json", - ComputeSha256("nvd-content"), 11, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative), - new FeedComponent("ghsa-feed", "ghsa", "v1", "feeds/ghsa.json", - ComputeSha256("ghsa-content"), 12, DateTimeOffset.UtcNow, FeedFormat.OsvJson)), - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", - ComputeSha256("cert-content"), 12, CryptoComponentType.TrustRoot, null)) - }; - } - - private string CreateValidBundleWithPolicies() - { - var bundlePath = Path.Combine(_tempRoot, $"valid-policies-{Guid.NewGuid():N}"); - Directory.CreateDirectory(bundlePath); - - var policiesDir = Path.Combine(bundlePath, "policies"); - Directory.CreateDirectory(policiesDir); - File.WriteAllText(Path.Combine(policiesDir, "default.rego"), "package default"); - File.WriteAllText(Path.Combine(policiesDir, "lattice.json"), "{}"); - - var certsDir = Path.Combine(bundlePath, "certs"); - Directory.CreateDirectory(certsDir); - File.WriteAllText(Path.Combine(certsDir, "root.pem"), "cert-content"); - - return bundlePath; - } - - private BundleManifest CreateMatchingManifestWithPolicies(string bundlePath) - { - return new BundleManifest - { - BundleId = Guid.NewGuid().ToString(), - Name = "policy-bundle", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Empty, - Policies = ImmutableArray.Create( - new PolicyComponent("p1", "default", "1.0", "policies/default.rego", - ComputeSha256("package default"), 15, PolicyType.OpaRego), - new PolicyComponent("p2", "lattice", "1.0", "policies/lattice.json", - ComputeSha256("{}"), 2, PolicyType.LatticeRules)), - CryptoMaterials = ImmutableArray.Create( - new CryptoComponent("c1", "root", "certs/root.pem", - ComputeSha256("cert-content"), 12, CryptoComponentType.TrustRoot, null)) - }; - } - - private static string ComputeSha256(string content) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static async Task ComputeFileDigestAsync(string filePath) - { - await using var stream = 
File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - #endregion } - - - diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Build.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Build.cs new file mode 100644 index 000000000..e52dda7c2 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Build.cs @@ -0,0 +1,83 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleInlineArtifactSizeTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task BuildAsync_InlineArtifactUnderLimit_StaysInline() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "inline-ok"); + var content = new byte[BundleSizeValidator.MaxInlineBlobSize - 8]; + var request = new BundleBuildRequest( + "inline-ok", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: new[] + { + new BundleArtifactBuildConfig + { + Type = "sbom", + ContentType = "application/json", + Content = content + } + }); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Artifacts.Should().HaveCount(1); + manifest.Artifacts[0].Path.Should().BeNull(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task BuildAsync_InlineArtifactOverLimit_ExternalizesToArtifactsDir() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "inline-over"); + var warnings = new List(); + var content = new byte[BundleSizeValidator.MaxInlineBlobSize + 1]; + var request = new BundleBuildRequest( + "inline-over", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: new[] + { + new BundleArtifactBuildConfig + { + Type = "sbom", + ContentType = "application/json", + Content = content, + FileName = "sbom.json" + } + }, + WarningSink: warnings); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Artifacts.Should().HaveCount(1); + var artifact = manifest.Artifacts[0]; + artifact.Path.Should().NotBeNullOrEmpty(); + artifact.Path.Should().StartWith("artifacts/"); + warnings.Should().ContainSingle(); + + var artifactPath = Path.Combine(outputPath, artifact.Path!.Replace('/', Path.DirectorySeparatorChar)); + File.Exists(artifactPath).Should().BeTrue(); + new FileInfo(artifactPath).Length.Should().Be(content.Length); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Strict.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Strict.cs new file mode 100644 index 000000000..4f9276077 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.Strict.cs @@ -0,0 +1,40 @@ +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleInlineArtifactSizeTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task 
BuildAsync_InlineArtifactOverLimit_StrictModeThrows() + { + var builder = new BundleBuilder(); + var outputPath = Path.Combine(_tempRoot, "inline-strict"); + var content = new byte[BundleSizeValidator.MaxInlineBlobSize + 1]; + var request = new BundleBuildRequest( + "inline-strict", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: new[] + { + new BundleArtifactBuildConfig + { + Type = "sbom", + ContentType = "application/json", + Content = content + } + }, + StrictInlineArtifacts: true); + + await Assert.ThrowsAsync( + () => builder.BuildAsync(request, outputPath)); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.cs index 2c37ca348..b92f7ca88 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleInlineArtifactSizeTests.cs @@ -1,13 +1,8 @@ -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.AirGap.Bundle.Validation; -using StellaOps.TestKit; using Xunit; namespace StellaOps.AirGap.Bundle.Tests; -public sealed class BundleInlineArtifactSizeTests : IAsyncLifetime +public sealed partial class BundleInlineArtifactSizeTests : IAsyncLifetime { private string _tempRoot = null!; @@ -27,106 +22,4 @@ public sealed class BundleInlineArtifactSizeTests : IAsyncLifetime return ValueTask.CompletedTask; } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task BuildAsync_InlineArtifactUnderLimit_StaysInline() - { - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "inline-ok"); - var content = new byte[BundleSizeValidator.MaxInlineBlobSize - 8]; - var request = new BundleBuildRequest( - "inline-ok", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: new[] - { - new BundleArtifactBuildConfig - { - Type = "sbom", - ContentType = "application/json", - Content = content - } - }); - - var manifest = await builder.BuildAsync(request, outputPath); - - manifest.Artifacts.Should().HaveCount(1); - manifest.Artifacts[0].Path.Should().BeNull(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task BuildAsync_InlineArtifactOverLimit_ExternalizesToArtifactsDir() - { - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "inline-over"); - var warnings = new List(); - var content = new byte[BundleSizeValidator.MaxInlineBlobSize + 1]; - var request = new BundleBuildRequest( - "inline-over", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: new[] - { - new BundleArtifactBuildConfig - { - Type = "sbom", - ContentType = "application/json", - Content = content, - FileName = "sbom.json" - } - }, - WarningSink: warnings); - - var manifest = await builder.BuildAsync(request, outputPath); - - manifest.Artifacts.Should().HaveCount(1); - var artifact = manifest.Artifacts[0]; - artifact.Path.Should().NotBeNullOrEmpty(); - artifact.Path.Should().StartWith("artifacts/"); - warnings.Should().ContainSingle(); - - var artifactPath = Path.Combine(outputPath, artifact.Path!.Replace('/', Path.DirectorySeparatorChar)); - File.Exists(artifactPath).Should().BeTrue(); - new FileInfo(artifactPath).Length.Should().Be(content.Length); - } - - 
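// [Editor's sketch: illustrative only, not part of this patch.] The externalization test above
// and the strict-mode test below probe BundleSizeValidator.MaxInlineBlobSize from both sides:
// content under the limit keeps a null Path, content over it lands under artifacts/ with a
// warning, or triggers an exception when StrictInlineArtifacts is set. Assuming the limit acts
// as an inclusive cap on inline storage, the decision they encode reduces to the hypothetical
// helper below:

static bool ShouldExternalize(ReadOnlySpan<byte> content) =>
    content.Length > BundleSizeValidator.MaxInlineBlobSize;

// The behaviour at exactly MaxInlineBlobSize bytes is not pinned by these tests (they use
// MaxInlineBlobSize - 8 and MaxInlineBlobSize + 1), so the comparison above is an assumption.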
[Trait("Category", TestCategories.Unit)] - [Fact] - public async Task BuildAsync_InlineArtifactOverLimit_StrictModeThrows() - { - var builder = new BundleBuilder(); - var outputPath = Path.Combine(_tempRoot, "inline-strict"); - var content = new byte[BundleSizeValidator.MaxInlineBlobSize + 1]; - var request = new BundleBuildRequest( - "inline-strict", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: new[] - { - new BundleArtifactBuildConfig - { - Type = "sbom", - ContentType = "application/json", - Content = content - } - }, - StrictInlineArtifacts: true); - - await Assert.ThrowsAsync( - () => builder.BuildAsync(request, outputPath)); - } } diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V1.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V1.cs new file mode 100644 index 000000000..307f9bdbf --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V1.cs @@ -0,0 +1,45 @@ +using System.Collections.Immutable; +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + private static BundleManifest CreateManifest() + { + return new BundleManifest + { + BundleId = Guid.NewGuid().ToString(), + SchemaVersion = "1.0.0", + Name = "offline-test", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Create(new FeedComponent( + "feed-1", + "nvd", + "v1", + "feeds/nvd.json", + new string('a', 64), + 10, + DateTimeOffset.UtcNow, + FeedFormat.StellaOpsNative)), + Policies = ImmutableArray.Create(new PolicyComponent( + "policy-1", + "default", + "1.0", + "policies/default.rego", + new string('b', 64), + 10, + PolicyType.OpaRego)), + CryptoMaterials = ImmutableArray.Create(new CryptoComponent( + "crypto-1", + "trust-root", + "certs/root.pem", + new string('c', 64), + 10, + CryptoComponentType.TrustRoot, + null)), + TotalSizeBytes = 30 + }; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V2.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V2.cs new file mode 100644 index 000000000..d5c9536d2 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.Helpers.V2.cs @@ -0,0 +1,66 @@ +using StellaOps.AirGap.Bundle.Models; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + private static BundleManifest CreateV2Manifest() + { + return new BundleManifest + { + BundleId = Guid.NewGuid().ToString(), + SchemaVersion = "2.0.0", + Name = "offline-bundle-v2", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = [], + Policies = [], + CryptoMaterials = [], + Image = "registry.example.com/app@sha256:abc123def456", + Artifacts = + [ + new BundleArtifact("sbom.cdx.json", "sbom", "application/vnd.cyclonedx+json", "sha256:aaa", 1024), + new BundleArtifact("sbom.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:bbb", 512) + ], + CanonicalManifestHash = new string('c', 64), + Subject = new BundleSubject + { + Sha256 = new string('a', 64), + Sha512 = new string('b', 128) + }, + Timestamps = + [ + new Rfc3161TimestampEntry + { + TsaChainPaths = ["tsa/chain/root.pem"], + OcspBlobs = ["tsa/ocsp/resp.der"], + CrlBlobs = ["tsa/crl/list.crl"], + TstBase64 = "dGVzdA==" + }, + new EidasQtsTimestampEntry + { 
+ QtsMetaPath = "tsa/eidas/qts.json" + } + ], + RekorProofs = + [ + new RekorProofEntry + { + EntryBodyPath = "rekor/entry.json", + LeafHash = "sha256:leaf", + InclusionProofPath = "rekor/proof.json", + SignedEntryTimestamp = "base64set" + } + ], + Verify = new BundleVerifySection + { + Keys = ["kms://example/key"], + Expectations = new BundleVerifyExpectations + { + PayloadTypes = ["application/vnd.cyclonedx+json;version=1.6"], + RekorRequired = true + } + } + }; + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V1.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V1.cs new file mode 100644 index 000000000..c3bf0134c --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V1.cs @@ -0,0 +1,65 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Serializer_RoundTrip_PreservesFields() + { + var manifest = CreateManifest(); + var json = BundleManifestSerializer.Serialize(manifest); + var deserialized = BundleManifestSerializer.Deserialize(json); + deserialized.Should().BeEquivalentTo(manifest); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Validator_FlagsMissingFeedFile() + { + var manifest = CreateManifest(); + var validator = new BundleValidator(); + var result = await validator.ValidateAsync(manifest, Path.GetTempPath()); + + result.IsValid.Should().BeFalse(); + result.Errors.Should().NotBeEmpty(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Builder_CopiesComponentsAndComputesDigest() + { + var tempRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); + var sourceFile = Path.Combine(tempRoot, "feed.json"); + Directory.CreateDirectory(tempRoot); + await File.WriteAllTextAsync(sourceFile, "feed"); + + var builder = new BundleBuilder(); + var request = new BundleBuildRequest( + "offline-test", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", sourceFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty()); + + var outputPath = Path.Combine(tempRoot, "bundle"); + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.BundleDigest.Should().NotBeNullOrEmpty(); + manifest.CanonicalManifestHash.Should().NotBeNullOrEmpty(); + File.Exists(Path.Combine(outputPath, "feeds", "nvd.json")).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Basic.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Basic.cs new file mode 100644 index 000000000..d5fae693d --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Basic.cs @@ -0,0 +1,78 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestV2_DefaultSchemaVersion_Is200() + { + var manifest = new BundleManifest + { + 
BundleId = "test", + Name = "test", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = [], + Policies = [], + CryptoMaterials = [] + }; + + manifest.SchemaVersion.Should().Be("2.0.0"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestV2_WithImage_SetsImageReference() + { + var manifest = new BundleManifest + { + BundleId = "test", + Name = "test", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = [], + Policies = [], + CryptoMaterials = [], + Image = "registry.example.com/app@sha256:abc123" + }; + + manifest.Image.Should().Be("registry.example.com/app@sha256:abc123"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestV2_WithArtifacts_ContainsExpectedEntries() + { + var manifest = new BundleManifest + { + BundleId = "test", + Name = "test", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = [], + Policies = [], + CryptoMaterials = [], + Image = "registry.example.com/app@sha256:abc123", + Artifacts = + [ + new BundleArtifact("sbom.cdx.json", "sbom", "application/vnd.cyclonedx+json", "sha256:def", 1024), + new BundleArtifact("sbom.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:ghi", 512), + new BundleArtifact("vex.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:jkl", 256), + new BundleArtifact("rekor.proof.json", "rekor-proof", "application/json", "sha256:mno", 128), + new BundleArtifact("oci.referrers.json", "oci-referrers", + "application/vnd.oci.image.index.v1+json", "sha256:pqr", 64) + ] + }; + + manifest.Artifacts.Should().HaveCount(5); + manifest.Artifacts.Should().Contain(a => a.Path == "sbom.cdx.json"); + manifest.Artifacts.Should().Contain(a => a.Type == "dsse"); + manifest.Artifacts.Should().Contain(a => a.Type == "rekor-proof"); + manifest.Artifacts.Should().Contain(a => a.Type == "oci-referrers"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Serialization.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Serialization.cs new file mode 100644 index 000000000..a296dafde --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Serialization.cs @@ -0,0 +1,30 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Serialization; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestV2_Serialization_RoundTrip() + { + var manifest = CreateV2Manifest(); + var json = BundleManifestSerializer.Serialize(manifest); + var deserialized = BundleManifestSerializer.Deserialize(json); + + deserialized.SchemaVersion.Should().Be("2.0.0"); + deserialized.Image.Should().Be(manifest.Image); + deserialized.Artifacts.Should().HaveCount(manifest.Artifacts.Length); + deserialized.Verify.Should().NotBeNull(); + deserialized.Verify!.Keys.Should().BeEquivalentTo(manifest.Verify!.Keys); + deserialized.Subject.Should().NotBeNull(); + deserialized.Subject!.Sha256.Should().Be(manifest.Subject!.Sha256); + deserialized.Timestamps.Should().HaveCount(2); + deserialized.Timestamps[0].Should().BeOfType(); + deserialized.RekorProofs.Should().HaveCount(1); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Verify.cs 
b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Verify.cs new file mode 100644 index 000000000..ce2a7de43 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.V2.Verify.cs @@ -0,0 +1,78 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleManifestTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestV2_WithVerifySection_ContainsKeysAndExpectations() + { + var manifest = new BundleManifest + { + BundleId = "test", + Name = "test", + Version = "1.0.0", + CreatedAt = DateTimeOffset.UtcNow, + Feeds = [], + Policies = [], + CryptoMaterials = [], + Image = "registry.example.com/app@sha256:abc123", + CanonicalManifestHash = new string('c', 64), + Subject = new BundleSubject + { + Sha256 = new string('a', 64), + Sha512 = new string('b', 128) + }, + Timestamps = + [ + new Rfc3161TimestampEntry + { + TsaChainPaths = ["tsa/chain/root.pem"], + OcspBlobs = ["tsa/ocsp/resp.der"], + CrlBlobs = ["tsa/crl/list.crl"], + TstBase64 = "dGVzdA==" + }, + new EidasQtsTimestampEntry + { + QtsMetaPath = "tsa/eidas/qts.json" + } + ], + RekorProofs = + [ + new RekorProofEntry + { + EntryBodyPath = "rekor/entry.json", + LeafHash = "sha256:leaf", + InclusionProofPath = "rekor/proof.json", + SignedEntryTimestamp = "base64set" + } + ], + Verify = new BundleVerifySection + { + Keys = ["kms://projects/test/locations/global/keyRings/ring/cryptoKeys/key"], + TrustRoot = "trust-root.pem", + RekorCheckpointPath = "rekor-checkpoint.json", + Expectations = new BundleVerifyExpectations + { + PayloadTypes = ["application/vnd.cyclonedx+json;version=1.6", "application/vnd.openvex+json"], + RekorRequired = true, + MinSignatures = 1, + RequiredArtifacts = ["sbom.cdx.json", "sbom.statement.dsse.json"], + VerifyChecksums = true + } + } + }; + + manifest.Verify.Should().NotBeNull(); + manifest.Verify!.Keys.Should().HaveCount(1); + manifest.Verify.Keys[0].Should().StartWith("kms://"); + manifest.Verify.Expectations.Should().NotBeNull(); + manifest.Verify.Expectations!.PayloadTypes.Should().HaveCount(2); + manifest.Verify.Expectations.RekorRequired.Should().BeTrue(); + manifest.Verify.Expectations.RequiredArtifacts.Should().HaveCount(2); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.cs index e52129454..edce34b4a 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleManifestTests.cs @@ -1,322 +1,5 @@ -using System.Collections.Immutable; -using FluentAssertions; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Serialization; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.AirGap.Bundle.Validation; -using Xunit; - -using StellaOps.TestKit; namespace StellaOps.AirGap.Bundle.Tests; -public class BundleManifestTests +public sealed partial class BundleManifestTests { - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Serializer_RoundTrip_PreservesFields() - { - var manifest = CreateManifest(); - var json = BundleManifestSerializer.Serialize(manifest); - var deserialized = BundleManifestSerializer.Deserialize(json); - deserialized.Should().BeEquivalentTo(manifest); - } - - [Trait("Category", 
TestCategories.Unit)] - [Fact] - public async Task Validator_FlagsMissingFeedFile() - { - var manifest = CreateManifest(); - var validator = new BundleValidator(); - var result = await validator.ValidateAsync(manifest, Path.GetTempPath()); - - result.IsValid.Should().BeFalse(); - result.Errors.Should().NotBeEmpty(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Builder_CopiesComponentsAndComputesDigest() - { - var tempRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); - var sourceFile = Path.Combine(tempRoot, "feed.json"); - Directory.CreateDirectory(tempRoot); - await File.WriteAllTextAsync(sourceFile, "feed"); - - var builder = new BundleBuilder(); - var request = new BundleBuildRequest( - "offline-test", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", sourceFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty()); - - var outputPath = Path.Combine(tempRoot, "bundle"); - var manifest = await builder.BuildAsync(request, outputPath); - - manifest.BundleDigest.Should().NotBeNullOrEmpty(); - manifest.CanonicalManifestHash.Should().NotBeNullOrEmpty(); - File.Exists(Path.Combine(outputPath, "feeds", "nvd.json")).Should().BeTrue(); - } - - private static BundleManifest CreateManifest() - { - return new BundleManifest - { - BundleId = Guid.NewGuid().ToString(), - SchemaVersion = "1.0.0", - Name = "offline-test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Create(new FeedComponent( - "feed-1", - "nvd", - "v1", - "feeds/nvd.json", - new string('a', 64), - 10, - DateTimeOffset.UtcNow, - FeedFormat.StellaOpsNative)), - Policies = ImmutableArray.Create(new PolicyComponent( - "policy-1", - "default", - "1.0", - "policies/default.rego", - new string('b', 64), - 10, - PolicyType.OpaRego)), - CryptoMaterials = ImmutableArray.Create(new CryptoComponent( - "crypto-1", - "trust-root", - "certs/root.pem", - new string('c', 64), - 10, - CryptoComponentType.TrustRoot, - null)), - TotalSizeBytes = 30 - }; - } - - // ------------------------------------------------------------------------- - // v2.0.0 Tests - Sprint: SPRINT_20260118_018 (TASK-018-001) - // ------------------------------------------------------------------------- - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestV2_DefaultSchemaVersion_Is200() - { - var manifest = new BundleManifest - { - BundleId = "test", - Name = "test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = [], - Policies = [], - CryptoMaterials = [] - }; - - manifest.SchemaVersion.Should().Be("2.0.0"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestV2_WithImage_SetsImageReference() - { - var manifest = new BundleManifest - { - BundleId = "test", - Name = "test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = [], - Policies = [], - CryptoMaterials = [], - Image = "registry.example.com/app@sha256:abc123" - }; - - manifest.Image.Should().Be("registry.example.com/app@sha256:abc123"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestV2_WithArtifacts_ContainsExpectedEntries() - { - var manifest = new BundleManifest - { - BundleId = "test", - Name = "test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = [], - Policies = [], - CryptoMaterials = [], - Image = "registry.example.com/app@sha256:abc123", - Artifacts = - [ - new BundleArtifact("sbom.cdx.json", 
"sbom", "application/vnd.cyclonedx+json", "sha256:def", 1024), - new BundleArtifact("sbom.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:ghi", 512), - new BundleArtifact("vex.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:jkl", 256), - new BundleArtifact("rekor.proof.json", "rekor-proof", "application/json", "sha256:mno", 128), - new BundleArtifact("oci.referrers.json", "oci-referrers", "application/vnd.oci.image.index.v1+json", "sha256:pqr", 64) - ] - }; - - manifest.Artifacts.Should().HaveCount(5); - manifest.Artifacts.Should().Contain(a => a.Path == "sbom.cdx.json"); - manifest.Artifacts.Should().Contain(a => a.Type == "dsse"); - manifest.Artifacts.Should().Contain(a => a.Type == "rekor-proof"); - manifest.Artifacts.Should().Contain(a => a.Type == "oci-referrers"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestV2_WithVerifySection_ContainsKeysAndExpectations() - { - var manifest = new BundleManifest - { - BundleId = "test", - Name = "test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = [], - Policies = [], - CryptoMaterials = [], - Image = "registry.example.com/app@sha256:abc123", - CanonicalManifestHash = new string('c', 64), - Subject = new BundleSubject - { - Sha256 = new string('a', 64), - Sha512 = new string('b', 128) - }, - Timestamps = - [ - new Rfc3161TimestampEntry - { - TsaChainPaths = ["tsa/chain/root.pem"], - OcspBlobs = ["tsa/ocsp/resp.der"], - CrlBlobs = ["tsa/crl/list.crl"], - TstBase64 = "dGVzdA==" - }, - new EidasQtsTimestampEntry - { - QtsMetaPath = "tsa/eidas/qts.json" - } - ], - RekorProofs = - [ - new RekorProofEntry - { - EntryBodyPath = "rekor/entry.json", - LeafHash = "sha256:leaf", - InclusionProofPath = "rekor/proof.json", - SignedEntryTimestamp = "base64set" - } - ], - Verify = new BundleVerifySection - { - Keys = ["kms://projects/test/locations/global/keyRings/ring/cryptoKeys/key"], - TrustRoot = "trust-root.pem", - RekorCheckpointPath = "rekor-checkpoint.json", - Expectations = new BundleVerifyExpectations - { - PayloadTypes = ["application/vnd.cyclonedx+json;version=1.6", "application/vnd.openvex+json"], - RekorRequired = true, - MinSignatures = 1, - RequiredArtifacts = ["sbom.cdx.json", "sbom.statement.dsse.json"], - VerifyChecksums = true - } - } - }; - - manifest.Verify.Should().NotBeNull(); - manifest.Verify!.Keys.Should().HaveCount(1); - manifest.Verify.Keys[0].Should().StartWith("kms://"); - manifest.Verify.Expectations.Should().NotBeNull(); - manifest.Verify.Expectations!.PayloadTypes.Should().HaveCount(2); - manifest.Verify.Expectations.RekorRequired.Should().BeTrue(); - manifest.Verify.Expectations.RequiredArtifacts.Should().HaveCount(2); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestV2_Serialization_RoundTrip() - { - var manifest = CreateV2Manifest(); - var json = BundleManifestSerializer.Serialize(manifest); - var deserialized = BundleManifestSerializer.Deserialize(json); - - deserialized.SchemaVersion.Should().Be("2.0.0"); - deserialized.Image.Should().Be(manifest.Image); - deserialized.Artifacts.Should().HaveCount(manifest.Artifacts.Length); - deserialized.Verify.Should().NotBeNull(); - deserialized.Verify!.Keys.Should().BeEquivalentTo(manifest.Verify!.Keys); - deserialized.Subject.Should().NotBeNull(); - deserialized.Subject!.Sha256.Should().Be(manifest.Subject!.Sha256); - deserialized.Timestamps.Should().HaveCount(2); - deserialized.Timestamps[0].Should().BeOfType(); - 
deserialized.RekorProofs.Should().HaveCount(1); - } - - private static BundleManifest CreateV2Manifest() - { - return new BundleManifest - { - BundleId = Guid.NewGuid().ToString(), - SchemaVersion = "2.0.0", - Name = "offline-bundle-v2", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = [], - Policies = [], - CryptoMaterials = [], - Image = "registry.example.com/app@sha256:abc123def456", - Artifacts = - [ - new BundleArtifact("sbom.cdx.json", "sbom", "application/vnd.cyclonedx+json", "sha256:aaa", 1024), - new BundleArtifact("sbom.statement.dsse.json", "dsse", "application/vnd.dsse+json", "sha256:bbb", 512) - ], - CanonicalManifestHash = new string('c', 64), - Subject = new BundleSubject - { - Sha256 = new string('a', 64), - Sha512 = new string('b', 128) - }, - Timestamps = - [ - new Rfc3161TimestampEntry - { - TsaChainPaths = ["tsa/chain/root.pem"], - OcspBlobs = ["tsa/ocsp/resp.der"], - CrlBlobs = ["tsa/crl/list.crl"], - TstBase64 = "dGVzdA==" - }, - new EidasQtsTimestampEntry - { - QtsMetaPath = "tsa/eidas/qts.json" - } - ], - RekorProofs = - [ - new RekorProofEntry - { - EntryBodyPath = "rekor/entry.json", - LeafHash = "sha256:leaf", - InclusionProofPath = "rekor/proof.json", - SignedEntryTimestamp = "base64set" - } - ], - Verify = new BundleVerifySection - { - Keys = ["kms://example/key"], - Expectations = new BundleVerifyExpectations - { - PayloadTypes = ["application/vnd.cyclonedx+json;version=1.6"], - RekorRequired = true - } - } - }; - } } - diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Revocation.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Revocation.cs new file mode 100644 index 000000000..5a709e2a9 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Revocation.cs @@ -0,0 +1,76 @@ +using System.Formats.Asn1; +using System.Security.Cryptography.X509Certificates; +using StellaOps.AirGap.Bundle.Services; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleTimestampOfflineVerificationTests +{ + private static byte[] CreateOcspSuccessResponse() + { + var writer = new AsnWriter(AsnEncodingRules.DER); + writer.PushSequence(); + writer.WriteEnumeratedValue(OcspResponseStatus.Successful); + writer.PopSequence(); + return writer.Encode(); + } + + private enum OcspResponseStatus + { + Successful = 0, + MalformedRequest = 1, + InternalError = 2, + TryLater = 3, + SigRequired = 5, + Unauthorized = 6 + } + + private static byte[] CreateCrlPlaceholder() + { + var writer = new AsnWriter(AsnEncodingRules.DER); + writer.PushSequence(); + writer.WriteInteger(1); + writer.PopSequence(); + return writer.Encode(); + } + + private sealed class FixedOcspFetcher : IOcspResponseFetcher + { + private readonly byte[] _response; + + public FixedOcspFetcher(byte[] response) + { + _response = response; + } + + public Task> FetchAsync( + IReadOnlyList certificateChain, + CancellationToken ct = default) + { + var blobs = certificateChain + .Select((_, index) => new TsaRevocationBlob(index, _response, "memory://ocsp")) + .ToList(); + return Task.FromResult>(blobs); + } + } + + private sealed class FixedCrlFetcher : ICrlFetcher + { + private readonly byte[] _response; + + public FixedCrlFetcher(byte[] response) + { + _response = response; + } + + public Task> FetchAsync( + IReadOnlyList certificateChain, + CancellationToken ct = default) + { + var 
blobs = certificateChain + .Select((_, index) => new TsaRevocationBlob(index, _response, "memory://crl")) + .ToList(); + return Task.FromResult>(blobs); + } + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Token.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Token.cs new file mode 100644 index 000000000..336f7ae6b --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Helpers.Token.cs @@ -0,0 +1,61 @@ +using System.Security.Cryptography; +using System.Security.Cryptography.Pkcs; +using System.Security.Cryptography.X509Certificates; +using System.Text; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleTimestampOfflineVerificationTests +{ + private static (X509Certificate2 RootCert, byte[] TokenBytes, DateTimeOffset SigningTime) CreateSignedToken() + { + var signingTime = DateTimeOffset.UtcNow; + + using var rootKey = RSA.Create(2048); + var rootRequest = new CertificateRequest( + "CN=Test TSA Root", + rootKey, + HashAlgorithmName.SHA256, + RSASignaturePadding.Pkcs1); + rootRequest.CertificateExtensions.Add( + new X509BasicConstraintsExtension(true, false, 0, true)); + rootRequest.CertificateExtensions.Add( + new X509SubjectKeyIdentifierExtension(rootRequest.PublicKey, false)); + + var rootCert = rootRequest.CreateSelfSigned( + signingTime.AddDays(-1), + signingTime.AddYears(1)); + + using var leafKey = RSA.Create(2048); + var leafRequest = new CertificateRequest( + "CN=Test TSA Leaf", + leafKey, + HashAlgorithmName.SHA256, + RSASignaturePadding.Pkcs1); + leafRequest.CertificateExtensions.Add( + new X509BasicConstraintsExtension(false, false, 0, true)); + leafRequest.CertificateExtensions.Add( + new X509KeyUsageExtension(X509KeyUsageFlags.DigitalSignature, true)); + leafRequest.CertificateExtensions.Add( + new X509SubjectKeyIdentifierExtension(leafRequest.PublicKey, false)); + + var leafCert = leafRequest.Create( + rootCert, + signingTime.AddDays(-1), + signingTime.AddMonths(6), + Guid.NewGuid().ToByteArray()); + var leafWithKey = leafCert.CopyWithPrivateKey(leafKey); + + var content = new ContentInfo(Encoding.UTF8.GetBytes("timestamp-test")); + var signedCms = new SignedCms(content, detached: false); + var signer = new CmsSigner(leafWithKey) + { + IncludeOption = X509IncludeOption.WholeChain + }; + signer.Certificates.Add(rootCert); + signer.SignedAttributes.Add(new Pkcs9SigningTime(signingTime.UtcDateTime)); + signedCms.ComputeSignature(signer); + + return (rootCert, signedCms.Encode(), signingTime); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Tests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Tests.cs new file mode 100644 index 000000000..507ec4ea5 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.Tests.cs @@ -0,0 +1,82 @@ +using System.Security.Cryptography.X509Certificates; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class BundleTimestampOfflineVerificationTests +{ + [Trait("Category", TestCategories.Integration)] + [Fact] + 
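Aside: CreateSignedToken above returns a plain SignedCms blob (a stand-in for a real RFC 3161 token). As a rough, illustrative sketch using only framework APIs (none of this belongs to Rfc3161Verifier), the blob can be decoded, its signature checked, and the attached signing time read back:

```csharp
using System;
using System.Security.Cryptography.Pkcs;

// Decodes the CMS blob produced by CreateSignedToken, verifies the signature
// cryptographically (no chain or revocation checks), and returns the value of
// the Pkcs9SigningTime attribute the test attached.
static DateTime ReadSigningTime(byte[] tokenBytes)
{
    var cms = new SignedCms();
    cms.Decode(tokenBytes);
    cms.CheckSignature(verifySignatureOnly: true);

    foreach (var attribute in cms.SignerInfos[0].SignedAttributes)
    {
        if (attribute.Oid.Value == "1.2.840.113549.1.9.5") // id-signingTime
        {
            return new Pkcs9SigningTime(attribute.Values[0].RawData).SigningTime;
        }
    }

    throw new InvalidOperationException("signing-time attribute not found");
}
```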
public async Task BundleBuilder_BundlesTimestampEvidence_And_VerifiesOffline() + { + var (rootCert, tokenBytes, signingTime) = CreateSignedToken(); + var ocsp = CreateOcspSuccessResponse(); + var crl = CreateCrlPlaceholder(); + + var builder = new BundleBuilder( + TimeProvider.System, + SystemGuidProvider.Instance, + new TsaChainBundler(), + new FixedOcspFetcher(ocsp), + new FixedCrlFetcher(crl)); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var request = new BundleBuildRequest( + "timestamp-bundle", + "1.0.0", + null, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Array.Empty(), + new TimestampBuildConfig[] + { + new Rfc3161TimestampBuildConfig(tokenBytes) + }); + + var manifest = await builder.BuildAsync(request, outputPath); + + var entry = Assert.Single(manifest.Timestamps.OfType()); + Assert.NotEmpty(entry.TsaChainPaths); + Assert.NotEmpty(entry.OcspBlobs); + Assert.NotEmpty(entry.CrlBlobs); + + var chain = entry.TsaChainPaths + .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) + .Select(path => X509Certificate2.CreateFromPem(File.ReadAllText(path))) + .ToList(); + var ocspResponses = entry.OcspBlobs + .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) + .Select(File.ReadAllBytes) + .ToList(); + var crlSnapshots = entry.CrlBlobs + .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) + .Select(File.ReadAllBytes) + .ToList(); + + var trustRoots = new[] + { + new TimeTrustRoot("tsa-root", rootCert.Export(X509ContentType.Cert), "rsa") + }; + + var verifier = new Rfc3161Verifier(); + var options = new TimeTokenVerificationOptions + { + Offline = true, + CertificateChain = chain, + OcspResponses = ocspResponses, + Crls = crlSnapshots, + VerificationTime = signingTime + }; + + var result = verifier.Verify(tokenBytes, trustRoots, out var anchor, options); + + Assert.True(result.IsValid, result.Reason); + Assert.NotEqual(TimeAnchor.Unknown, anchor); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.cs index 40d670649..dc6af3ed2 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/BundleTimestampOfflineVerificationTests.cs @@ -1,18 +1,8 @@ -using System.Formats.Asn1; -using System.Security.Cryptography; -using System.Security.Cryptography.Pkcs; -using System.Security.Cryptography.X509Certificates; -using System.Text; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.AirGap.Time.Models; -using StellaOps.AirGap.Time.Services; -using StellaOps.TestKit; using Xunit; namespace StellaOps.AirGap.Bundle.Tests; -public sealed class BundleTimestampOfflineVerificationTests : IAsyncLifetime +public sealed partial class BundleTimestampOfflineVerificationTests : IAsyncLifetime { private string _tempRoot = null!; @@ -32,195 +22,4 @@ public sealed class BundleTimestampOfflineVerificationTests : IAsyncLifetime return ValueTask.CompletedTask; } - - [Trait("Category", TestCategories.Integration)] - [Fact] - public async Task BundleBuilder_BundlesTimestampEvidence_And_VerifiesOffline() - { - var (rootCert, tokenBytes, signingTime) = CreateSignedToken(); - var ocsp = CreateOcspSuccessResponse(); - var crl = CreateCrlPlaceholder(); - - 
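Aside: CreateOcspSuccessResponse (used above and defined in the Helpers.Revocation partial) encodes only the outer OCSPResponse SEQUENCE with responseStatus = successful(0) and no responseBytes. A minimal, illustrative way to read that stub back with System.Formats.Asn1 (not part of the production verifier):

```csharp
using System.Formats.Asn1;

// Returns the responseStatus value from the minimal OCSP stub (0 = successful).
// A real OCSP response would additionally carry an EXPLICIT [0] responseBytes
// element after the status.
static int ReadOcspResponseStatus(byte[] der)
{
    var reader = new AsnReader(der, AsnEncodingRules.DER);
    var sequence = reader.ReadSequence();
    var status = sequence.ReadEnumeratedBytes();
    return status.Span[0];
}
```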
var builder = new BundleBuilder( - TimeProvider.System, - SystemGuidProvider.Instance, - new TsaChainBundler(), - new FixedOcspFetcher(ocsp), - new FixedCrlFetcher(crl)); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var request = new BundleBuildRequest( - "timestamp-bundle", - "1.0.0", - null, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Array.Empty(), - new TimestampBuildConfig[] - { - new Rfc3161TimestampBuildConfig(tokenBytes) - }); - - var manifest = await builder.BuildAsync(request, outputPath); - - var entry = Assert.Single(manifest.Timestamps.OfType()); - Assert.NotEmpty(entry.TsaChainPaths); - Assert.NotEmpty(entry.OcspBlobs); - Assert.NotEmpty(entry.CrlBlobs); - - var chain = entry.TsaChainPaths - .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) - .Select(path => X509Certificate2.CreateFromPem(File.ReadAllText(path))) - .ToList(); - var ocspResponses = entry.OcspBlobs - .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) - .Select(File.ReadAllBytes) - .ToList(); - var crlSnapshots = entry.CrlBlobs - .Select(path => Path.Combine(outputPath, path.Replace('/', Path.DirectorySeparatorChar))) - .Select(File.ReadAllBytes) - .ToList(); - - var trustRoots = new[] - { - new TimeTrustRoot("tsa-root", rootCert.Export(X509ContentType.Cert), "rsa") - }; - - var verifier = new Rfc3161Verifier(); - var options = new TimeTokenVerificationOptions - { - Offline = true, - CertificateChain = chain, - OcspResponses = ocspResponses, - Crls = crlSnapshots, - VerificationTime = signingTime - }; - - var result = verifier.Verify(tokenBytes, trustRoots, out var anchor, options); - - Assert.True(result.IsValid, result.Reason); - Assert.NotEqual(TimeAnchor.Unknown, anchor); - } - - private static (X509Certificate2 RootCert, byte[] TokenBytes, DateTimeOffset SigningTime) CreateSignedToken() - { - var signingTime = DateTimeOffset.UtcNow; - - using var rootKey = RSA.Create(2048); - var rootRequest = new CertificateRequest( - "CN=Test TSA Root", - rootKey, - HashAlgorithmName.SHA256, - RSASignaturePadding.Pkcs1); - rootRequest.CertificateExtensions.Add( - new X509BasicConstraintsExtension(true, false, 0, true)); - rootRequest.CertificateExtensions.Add( - new X509SubjectKeyIdentifierExtension(rootRequest.PublicKey, false)); - - var rootCert = rootRequest.CreateSelfSigned( - signingTime.AddDays(-1), - signingTime.AddYears(1)); - - using var leafKey = RSA.Create(2048); - var leafRequest = new CertificateRequest( - "CN=Test TSA Leaf", - leafKey, - HashAlgorithmName.SHA256, - RSASignaturePadding.Pkcs1); - leafRequest.CertificateExtensions.Add( - new X509BasicConstraintsExtension(false, false, 0, true)); - leafRequest.CertificateExtensions.Add( - new X509KeyUsageExtension(X509KeyUsageFlags.DigitalSignature, true)); - leafRequest.CertificateExtensions.Add( - new X509SubjectKeyIdentifierExtension(leafRequest.PublicKey, false)); - - var leafCert = leafRequest.Create( - rootCert, - signingTime.AddDays(-1), - signingTime.AddMonths(6), - Guid.NewGuid().ToByteArray()); - var leafWithKey = leafCert.CopyWithPrivateKey(leafKey); - - var content = new ContentInfo(Encoding.UTF8.GetBytes("timestamp-test")); - var signedCms = new SignedCms(content, detached: false); - var signer = new CmsSigner(leafWithKey) - { - IncludeOption = X509IncludeOption.WholeChain - }; - signer.Certificates.Add(rootCert); - signer.SignedAttributes.Add(new Pkcs9SigningTime(signingTime.UtcDateTime)); - signedCms.ComputeSignature(signer); - - return (rootCert, 
signedCms.Encode(), signingTime); - } - - private static byte[] CreateOcspSuccessResponse() - { - var writer = new AsnWriter(AsnEncodingRules.DER); - writer.PushSequence(); - // OCSP response status: 0 = successful - writer.WriteEnumeratedValue(OcspResponseStatus.Successful); - writer.PopSequence(); - return writer.Encode(); - } - - private enum OcspResponseStatus - { - Successful = 0, - MalformedRequest = 1, - InternalError = 2, - TryLater = 3, - SigRequired = 5, - Unauthorized = 6 - } - - private static byte[] CreateCrlPlaceholder() - { - var writer = new AsnWriter(AsnEncodingRules.DER); - writer.PushSequence(); - writer.WriteInteger(1); - writer.PopSequence(); - return writer.Encode(); - } - - private sealed class FixedOcspFetcher : IOcspResponseFetcher - { - private readonly byte[] _response; - - public FixedOcspFetcher(byte[] response) - { - _response = response; - } - - public Task> FetchAsync( - IReadOnlyList certificateChain, - CancellationToken ct = default) - { - var blobs = certificateChain - .Select((_, index) => new TsaRevocationBlob(index, _response, "memory://ocsp")) - .ToList(); - return Task.FromResult>(blobs); - } - } - - private sealed class FixedCrlFetcher : ICrlFetcher - { - private readonly byte[] _response; - - public FixedCrlFetcher(byte[] response) - { - _response = response; - } - - public Task> FetchAsync( - IReadOnlyList certificateChain, - CancellationToken ct = default) - { - var blobs = certificateChain - .Select((_, index) => new TsaRevocationBlob(index, _response, "memory://crl")) - .ToList(); - return Task.FromResult>(blobs); - } - } } diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleArtifactType.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleArtifactType.cs new file mode 100644 index 000000000..fa494a7b7 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleArtifactType.cs @@ -0,0 +1,32 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "BundleArtifactType has FunctionMap value")] + public void BundleArtifactType_HasFunctionMap() + { + BundleArtifactType.FunctionMap.Should().BeDefined(); + } + + [Fact(DisplayName = "BundleArtifactType has FunctionMapDsse value")] + public void BundleArtifactType_HasFunctionMapDsse() + { + BundleArtifactType.FunctionMapDsse.Should().BeDefined(); + } + + [Fact(DisplayName = "BundleArtifactType has Observations value")] + public void BundleArtifactType_HasObservations() + { + BundleArtifactType.Observations.Should().BeDefined(); + } + + [Fact(DisplayName = "BundleArtifactType has VerificationReport value")] + public void BundleArtifactType_HasVerificationReport() + { + BundleArtifactType.VerificationReport.Should().BeDefined(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.FunctionMap.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.FunctionMap.cs new file mode 100644 index 000000000..ff3f6ab78 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.FunctionMap.cs @@ -0,0 +1,54 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using 
StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "BundleBuilder packages function-map artifact")] + public async Task BundleBuilder_PackagesFunctionMapArtifact() + { + var sourceDir = Path.Combine(_tempRoot, "source"); + Directory.CreateDirectory(sourceDir); + + var feedFile = Path.Combine(sourceDir, "feed.json"); + await File.WriteAllTextAsync(feedFile, "{}"); + + var fmFile = Path.Combine(sourceDir, "function-map.json"); + await File.WriteAllTextAsync(fmFile, "{\"_type\":\"https://stella.ops/predicates/function-map/v1\"}"); + + var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); + + var request = new BundleBuildRequest( + "test-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: new[] { fmConfig }); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var builder = new BundleBuilder(); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Artifacts.Should().ContainSingle(); + var artifact = manifest.Artifacts[0]; + artifact.Type.Should().Be("function-map"); + artifact.Path.Should().Be("function-maps/testservice-function-map.json"); + artifact.Digest.Should().StartWith("sha256:"); + artifact.SizeBytes.Should().BeGreaterThan(0); + + var bundledFile = Path.Combine(outputPath, "function-maps", "testservice-function-map.json"); + File.Exists(bundledFile).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Multiple.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Multiple.cs new file mode 100644 index 000000000..d38dd8654 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Multiple.cs @@ -0,0 +1,60 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "BundleBuilder packages multiple function-map artifacts")] + public async Task BundleBuilder_PackagesMultipleArtifacts() + { + var sourceDir = Path.Combine(_tempRoot, "source"); + Directory.CreateDirectory(sourceDir); + + var feedFile = Path.Combine(sourceDir, "feed.json"); + await File.WriteAllTextAsync(feedFile, "{}"); + + var fmFile = Path.Combine(sourceDir, "function-map.json"); + await File.WriteAllTextAsync(fmFile, "{\"predicate\":{}}"); + + var obsFile = Path.Combine(sourceDir, "obs.ndjson"); + await File.WriteAllTextAsync(obsFile, "{\"symbol\":\"SSL_connect\"}\n"); + + var reportFile = Path.Combine(sourceDir, "report.json"); + await File.WriteAllTextAsync(reportFile, "{\"verified\":true}"); + + var artifacts = new[] + { + FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "myservice"), + FunctionMapBundleIntegration.CreateObservationsConfig(obsFile, "2026-01-22"), + FunctionMapBundleIntegration.CreateVerificationReportConfig(reportFile) + }; + + var request = new BundleBuildRequest( + "test-bundle", + "1.0.0", + null, + new[] + { + new 
FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: artifacts); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var builder = new BundleBuilder(); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Artifacts.Should().HaveCount(3); + manifest.Artifacts.Select(a => a.Type).Should().Contain("function-map"); + manifest.Artifacts.Select(a => a.Type).Should().Contain("observations"); + manifest.Artifacts.Select(a => a.Type).Should().Contain("verification-report"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Observations.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Observations.cs new file mode 100644 index 000000000..b87625d32 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleBuilder.Observations.cs @@ -0,0 +1,53 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "BundleBuilder packages observations artifact")] + public async Task BundleBuilder_PackagesObservationsArtifact() + { + var sourceDir = Path.Combine(_tempRoot, "source"); + Directory.CreateDirectory(sourceDir); + + var feedFile = Path.Combine(sourceDir, "feed.json"); + await File.WriteAllTextAsync(feedFile, "{}"); + + var obsFile = Path.Combine(sourceDir, "obs.ndjson"); + await File.WriteAllTextAsync(obsFile, "{\"symbol\":\"SSL_connect\"}\n{\"symbol\":\"SSL_read\"}\n"); + + var obsConfig = FunctionMapBundleIntegration.CreateObservationsConfig(obsFile, "2026-01-22"); + + var request = new BundleBuildRequest( + "test-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + Array.Empty(), + Array.Empty(), + Artifacts: new[] { obsConfig }); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var builder = new BundleBuilder(); + + var manifest = await builder.BuildAsync(request, outputPath); + + manifest.Artifacts.Should().ContainSingle(); + var artifact = manifest.Artifacts[0]; + artifact.Type.Should().Be("observations"); + artifact.Path.Should().Be("observations/observations-2026-01-22.ndjson"); + artifact.ContentType.Should().Be("application/x-ndjson"); + + var bundledFile = Path.Combine(outputPath, "observations", "observations-2026-01-22.ndjson"); + File.Exists(bundledFile).Should().BeTrue(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.DigestMismatch.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.DigestMismatch.cs new file mode 100644 index 000000000..4bdfd1de9 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.DigestMismatch.cs @@ -0,0 +1,60 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using 
StellaOps.AirGap.Bundle.Validation; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "Validator fails when artifact digest mismatches")] + public async Task Validator_FailsWhenArtifactDigestMismatches() + { + var sourceDir = Path.Combine(_tempRoot, "source"); + Directory.CreateDirectory(sourceDir); + + var feedFile = Path.Combine(sourceDir, "feed.json"); + await File.WriteAllTextAsync(feedFile, "{}"); + + var fmFile = Path.Combine(sourceDir, "function-map.json"); + await File.WriteAllTextAsync(fmFile, "{\"_type\":\"function-map\"}"); + + var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); + var cryptoFile = Path.Combine(sourceDir, "root.pem"); + await File.WriteAllTextAsync(cryptoFile, "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"); + + var request = new BundleBuildRequest( + "test-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + new[] + { + new CryptoBuildConfig("crypto-1", "root", cryptoFile, "crypto/root.pem", + CryptoComponentType.TrustRoot, null) + }, + Array.Empty(), + Artifacts: new[] { fmConfig }); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var builder = new BundleBuilder(); + var manifest = await builder.BuildAsync(request, outputPath); + + var bundledFile = Path.Combine(outputPath, "function-maps", "testservice-function-map.json"); + await File.WriteAllTextAsync(bundledFile, "{\"tampered\":true}"); + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, outputPath); + + result.Errors.Should().Contain(e => + e.Component == "Artifacts" && e.Message.Contains("digest mismatch")); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.NoDigest.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.NoDigest.cs new file mode 100644 index 000000000..ce5c64cf3 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.NoDigest.cs @@ -0,0 +1,63 @@ +using System.Collections.Immutable; +using System.Text; +using FluentAssertions; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Validation; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "Validator warns when artifact has no digest")] + public async Task Validator_WarnsWhenArtifactHasNoDigest() + { + var outputPath = Path.Combine(_tempRoot, "bundle"); + Directory.CreateDirectory(Path.Combine(outputPath, "function-maps")); + + var fmPath = Path.Combine(outputPath, "function-maps", "test-function-map.json"); + await File.WriteAllTextAsync(fmPath, "{}"); + + var feedDir = Path.Combine(outputPath, "feeds"); + Directory.CreateDirectory(feedDir); + var feedPath = Path.Combine(feedDir, "nvd.json"); + await File.WriteAllTextAsync(feedPath, "{}"); + + var cryptoDir = Path.Combine(outputPath, "crypto"); + Directory.CreateDirectory(cryptoDir); + var cryptoPath = Path.Combine(cryptoDir, "root.pem"); + await File.WriteAllTextAsync(cryptoPath, "cert"); + + var manifest = new BundleManifest + { + BundleId = "test", + Name = "test", + Version = "1.0.0", + CreatedAt = 
DateTimeOffset.UtcNow, + Feeds = ImmutableArray.Create(new FeedComponent( + "feed-1", "nvd", "v1", "feeds/nvd.json", + System.Security.Cryptography.SHA256.HashData(Encoding.UTF8.GetBytes("{}")) + .Select(b => b.ToString("x2")).Aggregate((a, b) => a + b), + 2, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)), + Policies = ImmutableArray.Empty, + CryptoMaterials = ImmutableArray.Create(new CryptoComponent( + "crypto-1", "root", "crypto/root.pem", + System.Security.Cryptography.SHA256.HashData(Encoding.UTF8.GetBytes("cert")) + .Select(b => b.ToString("x2")).Aggregate((a, b) => a + b), + 4, CryptoComponentType.TrustRoot, null)), + Artifacts = ImmutableArray.Create(new BundleArtifact( + "function-maps/test-function-map.json", + "function-map", + "application/vnd.stella.function-map+json", + null, + 2)) + }; + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, outputPath); + + result.Warnings.Should().Contain(w => + w.Component == "Artifacts" && w.Message.Contains("no digest")); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.Pass.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.Pass.cs new file mode 100644 index 000000000..02f3a16c4 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.BundleValidator.Pass.cs @@ -0,0 +1,57 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using StellaOps.AirGap.Bundle.Models; +using StellaOps.AirGap.Bundle.Services; +using StellaOps.AirGap.Bundle.Validation; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "Validator passes when artifact digests match")] + public async Task Validator_PassesWhenArtifactDigestsMatch() + { + var sourceDir = Path.Combine(_tempRoot, "source"); + Directory.CreateDirectory(sourceDir); + + var feedFile = Path.Combine(sourceDir, "feed.json"); + await File.WriteAllTextAsync(feedFile, "{}"); + + var fmFile = Path.Combine(sourceDir, "function-map.json"); + var fmContent = "{\"_type\":\"function-map\"}"; + await File.WriteAllTextAsync(fmFile, fmContent); + + var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); + var cryptoFile = Path.Combine(sourceDir, "root.pem"); + await File.WriteAllTextAsync(cryptoFile, "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"); + + var request = new BundleBuildRequest( + "test-bundle", + "1.0.0", + null, + new[] + { + new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", + DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) + }, + Array.Empty(), + new[] + { + new CryptoBuildConfig("crypto-1", "root", cryptoFile, "crypto/root.pem", + CryptoComponentType.TrustRoot, null) + }, + Array.Empty(), + Artifacts: new[] { fmConfig }); + + var outputPath = Path.Combine(_tempRoot, "bundle"); + var builder = new BundleBuilder(); + var manifest = await builder.BuildAsync(request, outputPath); + + var validator = new BundleValidator(); + + var result = await validator.ValidateAsync(manifest, outputPath); + + result.Errors.Where(e => e.Component == "Artifacts").Should().BeEmpty(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Constants.cs 
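Aside: the Select/Aggregate chain above just builds a lowercase-hex SHA-256 digest; an equivalent, more direct expression (assuming the validator compares lowercase hex, as these fixtures imply) is:

```csharp
using System.Security.Cryptography;
using System.Text;

// Same lowercase-hex digest as the Select(...).Aggregate(...) chain in the test.
var digest = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes("{}"))).ToLowerInvariant();
```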
b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Constants.cs new file mode 100644 index 000000000..9c0ca950e --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Constants.cs @@ -0,0 +1,35 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "ArtifactTypes constants have correct values")] + public void ArtifactTypes_CorrectValues() + { + FunctionMapBundleIntegration.ArtifactTypes.FunctionMap.Should().Be("function-map"); + FunctionMapBundleIntegration.ArtifactTypes.FunctionMapDsse.Should().Be("function-map.dsse"); + FunctionMapBundleIntegration.ArtifactTypes.Observations.Should().Be("observations"); + FunctionMapBundleIntegration.ArtifactTypes.VerificationReport.Should().Be("verification-report"); + FunctionMapBundleIntegration.ArtifactTypes.VerificationReportDsse.Should().Be("verification-report.dsse"); + } + + [Fact(DisplayName = "MediaTypes constants have correct values")] + public void MediaTypes_CorrectValues() + { + FunctionMapBundleIntegration.MediaTypes.FunctionMap.Should().Be("application/vnd.stella.function-map+json"); + FunctionMapBundleIntegration.MediaTypes.FunctionMapDsse.Should().Be("application/vnd.dsse+json"); + FunctionMapBundleIntegration.MediaTypes.Observations.Should().Be("application/x-ndjson"); + FunctionMapBundleIntegration.MediaTypes.VerificationReport.Should().Be("application/vnd.stella.verification-report+json"); + } + + [Fact(DisplayName = "BundlePaths constants have correct values")] + public void BundlePaths_CorrectValues() + { + FunctionMapBundleIntegration.BundlePaths.FunctionMapsDir.Should().Be("function-maps"); + FunctionMapBundleIntegration.BundlePaths.ObservationsDir.Should().Be("observations"); + FunctionMapBundleIntegration.BundlePaths.VerificationDir.Should().Be("verification"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Config.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Config.cs new file mode 100644 index 000000000..7999e8196 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Config.cs @@ -0,0 +1,73 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "CreateFunctionMapConfig produces correct config")] + public void CreateFunctionMapConfig_ProducesCorrectConfig() + { + var sourcePath = Path.Combine(_tempRoot, "fm.json"); + + var config = FunctionMapBundleIntegration.CreateFunctionMapConfig(sourcePath, "myservice"); + + config.Type.Should().Be("function-map"); + config.ContentType.Should().Be("application/vnd.stella.function-map+json"); + config.SourcePath.Should().Be(sourcePath); + config.RelativePath.Should().Be("function-maps/myservice-function-map.json"); + } + + [Fact(DisplayName = "CreateFunctionMapDsseConfig produces correct config")] + public void CreateFunctionMapDsseConfig_ProducesCorrectConfig() + { + var sourcePath = Path.Combine(_tempRoot, "fm.dsse.json"); + + var config = FunctionMapBundleIntegration.CreateFunctionMapDsseConfig(sourcePath, "myservice"); + + 
config.Type.Should().Be("function-map.dsse"); + config.ContentType.Should().Be("application/vnd.dsse+json"); + config.SourcePath.Should().Be(sourcePath); + config.RelativePath.Should().Be("function-maps/myservice-function-map.dsse.json"); + } + + [Fact(DisplayName = "CreateObservationsConfig produces correct config")] + public void CreateObservationsConfig_ProducesCorrectConfig() + { + var sourcePath = Path.Combine(_tempRoot, "obs.ndjson"); + + var config = FunctionMapBundleIntegration.CreateObservationsConfig(sourcePath, "2026-01-22"); + + config.Type.Should().Be("observations"); + config.ContentType.Should().Be("application/x-ndjson"); + config.SourcePath.Should().Be(sourcePath); + config.RelativePath.Should().Be("observations/observations-2026-01-22.ndjson"); + } + + [Fact(DisplayName = "CreateVerificationReportConfig produces correct config")] + public void CreateVerificationReportConfig_ProducesCorrectConfig() + { + var sourcePath = Path.Combine(_tempRoot, "report.json"); + + var config = FunctionMapBundleIntegration.CreateVerificationReportConfig(sourcePath); + + config.Type.Should().Be("verification-report"); + config.ContentType.Should().Be("application/vnd.stella.verification-report+json"); + config.SourcePath.Should().Be(sourcePath); + config.RelativePath.Should().Be("verification/verification-report.json"); + } + + [Fact(DisplayName = "CreateVerificationReportDsseConfig produces correct config")] + public void CreateVerificationReportDsseConfig_ProducesCorrectConfig() + { + var sourcePath = Path.Combine(_tempRoot, "report.dsse.json"); + + var config = FunctionMapBundleIntegration.CreateVerificationReportDsseConfig(sourcePath); + + config.Type.Should().Be("verification-report.dsse"); + config.ContentType.Should().Be("application/vnd.dsse+json"); + config.SourcePath.Should().Be(sourcePath); + config.RelativePath.Should().Be("verification/verification-report.dsse.json"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Content.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Content.cs new file mode 100644 index 000000000..90cc17e77 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Factory.Content.cs @@ -0,0 +1,46 @@ +using System.Text; +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Fact(DisplayName = "CreateFunctionMapFromContent produces correct config")] + public void CreateFunctionMapFromContent_ProducesCorrectConfig() + { + var content = Encoding.UTF8.GetBytes("{\"schema\":\"v1\"}"); + + var config = FunctionMapBundleIntegration.CreateFunctionMapFromContent(content, "myservice"); + + config.Type.Should().Be("function-map"); + config.ContentType.Should().Be("application/vnd.stella.function-map+json"); + config.Content.Should().BeEquivalentTo(content); + config.SourcePath.Should().BeNull(); + config.RelativePath.Should().Be("function-maps/myservice-function-map.json"); + } + + [Fact(DisplayName = "CreateObservationsFromContent produces correct config")] + public void CreateObservationsFromContent_ProducesCorrectConfig() + { + var content = Encoding.UTF8.GetBytes("{\"obs\":1}\n{\"obs\":2}\n"); + + var config = FunctionMapBundleIntegration.CreateObservationsFromContent(content, "2026-01-22"); + + config.Type.Should().Be("observations"); + 
config.ContentType.Should().Be("application/x-ndjson"); + config.Content.Should().BeEquivalentTo(content); + config.RelativePath.Should().Be("observations/observations-2026-01-22.ndjson"); + } + + [Fact(DisplayName = "CreateFunctionMapConfig sanitizes service name")] + public void CreateFunctionMapConfig_SanitizesServiceName() + { + var sourcePath = Path.Combine(_tempRoot, "fm.json"); + + var config = FunctionMapBundleIntegration.CreateFunctionMapConfig(sourcePath, "my/service:v1"); + + config.RelativePath.Should().Be("function-maps/my-service-v1-function-map.json"); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Predicates.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Predicates.cs new file mode 100644 index 000000000..fdc372078 --- /dev/null +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.Predicates.cs @@ -0,0 +1,48 @@ +using FluentAssertions; +using StellaOps.AirGap.Bundle.FunctionMap; +using Xunit; + +namespace StellaOps.AirGap.Bundle.Tests; + +public sealed partial class FunctionMapBundleIntegrationTests +{ + [Theory(DisplayName = "IsFunctionMapArtifact returns true for function-map types")] + [InlineData("function-map")] + [InlineData("function-map.dsse")] + [InlineData("observations")] + [InlineData("verification-report")] + [InlineData("verification-report.dsse")] + public void IsFunctionMapArtifact_TrueForKnownTypes(string type) + { + FunctionMapBundleIntegration.IsFunctionMapArtifact(type).Should().BeTrue(); + } + + [Theory(DisplayName = "IsFunctionMapArtifact returns false for non-function-map types")] + [InlineData("sbom")] + [InlineData("vex")] + [InlineData("rekor.proof")] + [InlineData("other")] + [InlineData(null)] + public void IsFunctionMapArtifact_FalseForOtherTypes(string? type) + { + FunctionMapBundleIntegration.IsFunctionMapArtifact(type).Should().BeFalse(); + } + + [Theory(DisplayName = "IsDsseArtifact returns true for DSSE types")] + [InlineData("function-map.dsse")] + [InlineData("verification-report.dsse")] + public void IsDsseArtifact_TrueForDsseTypes(string type) + { + FunctionMapBundleIntegration.IsDsseArtifact(type).Should().BeTrue(); + } + + [Theory(DisplayName = "IsDsseArtifact returns false for non-DSSE types")] + [InlineData("function-map")] + [InlineData("observations")] + [InlineData("verification-report")] + [InlineData(null)] + public void IsDsseArtifact_FalseForNonDsseTypes(string? 
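Aside: CreateFunctionMapConfig_SanitizesServiceName above expects "my/service:v1" to map to "my-service-v1" in the bundled path. The production sanitizer is not shown in this diff; a sketch consistent with that expectation (the exact allowed character set is an assumption) could be:

```csharp
// Illustrative only; FunctionMapBundleIntegration's real sanitization rules may differ.
// Replaces any character outside letters, digits, '.', '_' and '-' with '-' so the
// service name is safe to embed in a bundle-relative file name.
static string SanitizeServiceName(string serviceName)
{
    var buffer = new char[serviceName.Length];
    for (var i = 0; i < serviceName.Length; i++)
    {
        var c = serviceName[i];
        buffer[i] = char.IsLetterOrDigit(c) || c is '.' or '_' or '-' ? c : '-';
    }

    return new string(buffer);
}

// SanitizeServiceName("my/service:v1") == "my-service-v1"
```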
type) + { + FunctionMapBundleIntegration.IsDsseArtifact(type).Should().BeFalse(); + } +} diff --git a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.cs b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.cs index 24ba96410..b9f8457c4 100644 --- a/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.cs +++ b/src/AirGap/__Libraries/__Tests/StellaOps.AirGap.Bundle.Tests/FunctionMapBundleIntegrationTests.cs @@ -3,13 +3,6 @@ // Sprint: SPRINT_20260122_039_Scanner_runtime_linkage_verification // Task: RLV-011 - Bundle Integration: function_map Artifact Type -using System.Collections.Immutable; -using System.Text; -using FluentAssertions; -using StellaOps.AirGap.Bundle.FunctionMap; -using StellaOps.AirGap.Bundle.Models; -using StellaOps.AirGap.Bundle.Services; -using StellaOps.AirGap.Bundle.Validation; using StellaOps.TestKit; using Xunit; @@ -17,7 +10,7 @@ namespace StellaOps.AirGap.Bundle.Tests; [Trait("Category", TestCategories.Unit)] [Trait("Sprint", "039")] -public sealed class FunctionMapBundleIntegrationTests : IDisposable +public sealed partial class FunctionMapBundleIntegrationTests : IDisposable { private readonly string _tempRoot; @@ -34,494 +27,4 @@ public sealed class FunctionMapBundleIntegrationTests : IDisposable Directory.Delete(_tempRoot, recursive: true); } } - - #region Artifact Type Constants Tests - - [Fact(DisplayName = "ArtifactTypes constants have correct values")] - public void ArtifactTypes_CorrectValues() - { - FunctionMapBundleIntegration.ArtifactTypes.FunctionMap.Should().Be("function-map"); - FunctionMapBundleIntegration.ArtifactTypes.FunctionMapDsse.Should().Be("function-map.dsse"); - FunctionMapBundleIntegration.ArtifactTypes.Observations.Should().Be("observations"); - FunctionMapBundleIntegration.ArtifactTypes.VerificationReport.Should().Be("verification-report"); - FunctionMapBundleIntegration.ArtifactTypes.VerificationReportDsse.Should().Be("verification-report.dsse"); - } - - [Fact(DisplayName = "MediaTypes constants have correct values")] - public void MediaTypes_CorrectValues() - { - FunctionMapBundleIntegration.MediaTypes.FunctionMap.Should().Be("application/vnd.stella.function-map+json"); - FunctionMapBundleIntegration.MediaTypes.FunctionMapDsse.Should().Be("application/vnd.dsse+json"); - FunctionMapBundleIntegration.MediaTypes.Observations.Should().Be("application/x-ndjson"); - FunctionMapBundleIntegration.MediaTypes.VerificationReport.Should().Be("application/vnd.stella.verification-report+json"); - } - - [Fact(DisplayName = "BundlePaths constants have correct values")] - public void BundlePaths_CorrectValues() - { - FunctionMapBundleIntegration.BundlePaths.FunctionMapsDir.Should().Be("function-maps"); - FunctionMapBundleIntegration.BundlePaths.ObservationsDir.Should().Be("observations"); - FunctionMapBundleIntegration.BundlePaths.VerificationDir.Should().Be("verification"); - } - - #endregion - - #region Factory Method Tests - - [Fact(DisplayName = "CreateFunctionMapConfig produces correct config")] - public void CreateFunctionMapConfig_ProducesCorrectConfig() - { - var sourcePath = Path.Combine(_tempRoot, "fm.json"); - - var config = FunctionMapBundleIntegration.CreateFunctionMapConfig(sourcePath, "myservice"); - - config.Type.Should().Be("function-map"); - config.ContentType.Should().Be("application/vnd.stella.function-map+json"); - config.SourcePath.Should().Be(sourcePath); - 
config.RelativePath.Should().Be("function-maps/myservice-function-map.json"); - } - - [Fact(DisplayName = "CreateFunctionMapDsseConfig produces correct config")] - public void CreateFunctionMapDsseConfig_ProducesCorrectConfig() - { - var sourcePath = Path.Combine(_tempRoot, "fm.dsse.json"); - - var config = FunctionMapBundleIntegration.CreateFunctionMapDsseConfig(sourcePath, "myservice"); - - config.Type.Should().Be("function-map.dsse"); - config.ContentType.Should().Be("application/vnd.dsse+json"); - config.SourcePath.Should().Be(sourcePath); - config.RelativePath.Should().Be("function-maps/myservice-function-map.dsse.json"); - } - - [Fact(DisplayName = "CreateObservationsConfig produces correct config")] - public void CreateObservationsConfig_ProducesCorrectConfig() - { - var sourcePath = Path.Combine(_tempRoot, "obs.ndjson"); - - var config = FunctionMapBundleIntegration.CreateObservationsConfig(sourcePath, "2026-01-22"); - - config.Type.Should().Be("observations"); - config.ContentType.Should().Be("application/x-ndjson"); - config.SourcePath.Should().Be(sourcePath); - config.RelativePath.Should().Be("observations/observations-2026-01-22.ndjson"); - } - - [Fact(DisplayName = "CreateVerificationReportConfig produces correct config")] - public void CreateVerificationReportConfig_ProducesCorrectConfig() - { - var sourcePath = Path.Combine(_tempRoot, "report.json"); - - var config = FunctionMapBundleIntegration.CreateVerificationReportConfig(sourcePath); - - config.Type.Should().Be("verification-report"); - config.ContentType.Should().Be("application/vnd.stella.verification-report+json"); - config.SourcePath.Should().Be(sourcePath); - config.RelativePath.Should().Be("verification/verification-report.json"); - } - - [Fact(DisplayName = "CreateVerificationReportDsseConfig produces correct config")] - public void CreateVerificationReportDsseConfig_ProducesCorrectConfig() - { - var sourcePath = Path.Combine(_tempRoot, "report.dsse.json"); - - var config = FunctionMapBundleIntegration.CreateVerificationReportDsseConfig(sourcePath); - - config.Type.Should().Be("verification-report.dsse"); - config.ContentType.Should().Be("application/vnd.dsse+json"); - config.SourcePath.Should().Be(sourcePath); - config.RelativePath.Should().Be("verification/verification-report.dsse.json"); - } - - [Fact(DisplayName = "CreateFunctionMapFromContent produces correct config")] - public void CreateFunctionMapFromContent_ProducesCorrectConfig() - { - var content = Encoding.UTF8.GetBytes("{\"schema\":\"v1\"}"); - - var config = FunctionMapBundleIntegration.CreateFunctionMapFromContent(content, "myservice"); - - config.Type.Should().Be("function-map"); - config.ContentType.Should().Be("application/vnd.stella.function-map+json"); - config.Content.Should().BeEquivalentTo(content); - config.SourcePath.Should().BeNull(); - config.RelativePath.Should().Be("function-maps/myservice-function-map.json"); - } - - [Fact(DisplayName = "CreateObservationsFromContent produces correct config")] - public void CreateObservationsFromContent_ProducesCorrectConfig() - { - var content = Encoding.UTF8.GetBytes("{\"obs\":1}\n{\"obs\":2}\n"); - - var config = FunctionMapBundleIntegration.CreateObservationsFromContent(content, "2026-01-22"); - - config.Type.Should().Be("observations"); - config.ContentType.Should().Be("application/x-ndjson"); - config.Content.Should().BeEquivalentTo(content); - config.RelativePath.Should().Be("observations/observations-2026-01-22.ndjson"); - } - - [Fact(DisplayName = "CreateFunctionMapConfig sanitizes service 
name")] - public void CreateFunctionMapConfig_SanitizesServiceName() - { - var sourcePath = Path.Combine(_tempRoot, "fm.json"); - - var config = FunctionMapBundleIntegration.CreateFunctionMapConfig(sourcePath, "my/service:v1"); - - config.RelativePath.Should().Be("function-maps/my-service-v1-function-map.json"); - } - - #endregion - - #region Predicate Tests - - [Theory(DisplayName = "IsFunctionMapArtifact returns true for function-map types")] - [InlineData("function-map")] - [InlineData("function-map.dsse")] - [InlineData("observations")] - [InlineData("verification-report")] - [InlineData("verification-report.dsse")] - public void IsFunctionMapArtifact_TrueForKnownTypes(string type) - { - FunctionMapBundleIntegration.IsFunctionMapArtifact(type).Should().BeTrue(); - } - - [Theory(DisplayName = "IsFunctionMapArtifact returns false for non-function-map types")] - [InlineData("sbom")] - [InlineData("vex")] - [InlineData("rekor.proof")] - [InlineData("other")] - [InlineData(null)] - public void IsFunctionMapArtifact_FalseForOtherTypes(string? type) - { - FunctionMapBundleIntegration.IsFunctionMapArtifact(type).Should().BeFalse(); - } - - [Theory(DisplayName = "IsDsseArtifact returns true for DSSE types")] - [InlineData("function-map.dsse")] - [InlineData("verification-report.dsse")] - public void IsDsseArtifact_TrueForDsseTypes(string type) - { - FunctionMapBundleIntegration.IsDsseArtifact(type).Should().BeTrue(); - } - - [Theory(DisplayName = "IsDsseArtifact returns false for non-DSSE types")] - [InlineData("function-map")] - [InlineData("observations")] - [InlineData("verification-report")] - [InlineData(null)] - public void IsDsseArtifact_FalseForNonDsseTypes(string? type) - { - FunctionMapBundleIntegration.IsDsseArtifact(type).Should().BeFalse(); - } - - #endregion - - #region BundleBuilder Integration Tests - - [Fact(DisplayName = "BundleBuilder packages function-map artifact")] - public async Task BundleBuilder_PackagesFunctionMapArtifact() - { - // Arrange - var sourceDir = Path.Combine(_tempRoot, "source"); - Directory.CreateDirectory(sourceDir); - - var feedFile = Path.Combine(sourceDir, "feed.json"); - await File.WriteAllTextAsync(feedFile, "{}"); - - var fmFile = Path.Combine(sourceDir, "function-map.json"); - await File.WriteAllTextAsync(fmFile, "{\"_type\":\"https://stella.ops/predicates/function-map/v1\"}"); - - var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); - - var request = new BundleBuildRequest( - "test-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: new[] { fmConfig }); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var builder = new BundleBuilder(); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Artifacts.Should().ContainSingle(); - var artifact = manifest.Artifacts[0]; - artifact.Type.Should().Be("function-map"); - artifact.Path.Should().Be("function-maps/testservice-function-map.json"); - artifact.Digest.Should().StartWith("sha256:"); - artifact.SizeBytes.Should().BeGreaterThan(0); - - var bundledFile = Path.Combine(outputPath, "function-maps", "testservice-function-map.json"); - File.Exists(bundledFile).Should().BeTrue(); - } - - [Fact(DisplayName = "BundleBuilder packages observations artifact")] - public async Task BundleBuilder_PackagesObservationsArtifact() - { - // Arrange - var 
sourceDir = Path.Combine(_tempRoot, "source"); - Directory.CreateDirectory(sourceDir); - - var feedFile = Path.Combine(sourceDir, "feed.json"); - await File.WriteAllTextAsync(feedFile, "{}"); - - var obsFile = Path.Combine(sourceDir, "obs.ndjson"); - await File.WriteAllTextAsync(obsFile, "{\"symbol\":\"SSL_connect\"}\n{\"symbol\":\"SSL_read\"}\n"); - - var obsConfig = FunctionMapBundleIntegration.CreateObservationsConfig(obsFile, "2026-01-22"); - - var request = new BundleBuildRequest( - "test-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: new[] { obsConfig }); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var builder = new BundleBuilder(); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Artifacts.Should().ContainSingle(); - var artifact = manifest.Artifacts[0]; - artifact.Type.Should().Be("observations"); - artifact.Path.Should().Be("observations/observations-2026-01-22.ndjson"); - artifact.ContentType.Should().Be("application/x-ndjson"); - - var bundledFile = Path.Combine(outputPath, "observations", "observations-2026-01-22.ndjson"); - File.Exists(bundledFile).Should().BeTrue(); - } - - [Fact(DisplayName = "BundleBuilder packages multiple function-map artifacts")] - public async Task BundleBuilder_PackagesMultipleArtifacts() - { - // Arrange - var sourceDir = Path.Combine(_tempRoot, "source"); - Directory.CreateDirectory(sourceDir); - - var feedFile = Path.Combine(sourceDir, "feed.json"); - await File.WriteAllTextAsync(feedFile, "{}"); - - var fmFile = Path.Combine(sourceDir, "function-map.json"); - await File.WriteAllTextAsync(fmFile, "{\"predicate\":{}}"); - - var obsFile = Path.Combine(sourceDir, "obs.ndjson"); - await File.WriteAllTextAsync(obsFile, "{\"symbol\":\"SSL_connect\"}\n"); - - var reportFile = Path.Combine(sourceDir, "report.json"); - await File.WriteAllTextAsync(reportFile, "{\"verified\":true}"); - - var artifacts = new[] - { - FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "myservice"), - FunctionMapBundleIntegration.CreateObservationsConfig(obsFile, "2026-01-22"), - FunctionMapBundleIntegration.CreateVerificationReportConfig(reportFile) - }; - - var request = new BundleBuildRequest( - "test-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - Array.Empty(), - Array.Empty(), - Artifacts: artifacts); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var builder = new BundleBuilder(); - - // Act - var manifest = await builder.BuildAsync(request, outputPath); - - // Assert - manifest.Artifacts.Should().HaveCount(3); - manifest.Artifacts.Select(a => a.Type).Should().Contain("function-map"); - manifest.Artifacts.Select(a => a.Type).Should().Contain("observations"); - manifest.Artifacts.Select(a => a.Type).Should().Contain("verification-report"); - } - - #endregion - - #region BundleValidator Integration Tests - - [Fact(DisplayName = "Validator passes when artifact digests match")] - public async Task Validator_PassesWhenArtifactDigestsMatch() - { - // Arrange - build a bundle with function-map artifact - var sourceDir = Path.Combine(_tempRoot, "source"); - Directory.CreateDirectory(sourceDir); - - var feedFile = Path.Combine(sourceDir, "feed.json"); - await 
File.WriteAllTextAsync(feedFile, "{}"); - - var fmFile = Path.Combine(sourceDir, "function-map.json"); - var fmContent = "{\"_type\":\"function-map\"}"; - await File.WriteAllTextAsync(fmFile, fmContent); - - var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); - var cryptoFile = Path.Combine(sourceDir, "root.pem"); - await File.WriteAllTextAsync(cryptoFile, "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"); - - var request = new BundleBuildRequest( - "test-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - new[] { new CryptoBuildConfig("crypto-1", "root", cryptoFile, "crypto/root.pem", CryptoComponentType.TrustRoot, null) }, - Array.Empty(), - Artifacts: new[] { fmConfig }); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var builder = new BundleBuilder(); - var manifest = await builder.BuildAsync(request, outputPath); - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, outputPath); - - // Assert - result.Errors.Where(e => e.Component == "Artifacts").Should().BeEmpty(); - } - - [Fact(DisplayName = "Validator fails when artifact digest mismatches")] - public async Task Validator_FailsWhenArtifactDigestMismatches() - { - // Arrange - build a bundle, then tamper with the artifact - var sourceDir = Path.Combine(_tempRoot, "source"); - Directory.CreateDirectory(sourceDir); - - var feedFile = Path.Combine(sourceDir, "feed.json"); - await File.WriteAllTextAsync(feedFile, "{}"); - - var fmFile = Path.Combine(sourceDir, "function-map.json"); - await File.WriteAllTextAsync(fmFile, "{\"_type\":\"function-map\"}"); - - var fmConfig = FunctionMapBundleIntegration.CreateFunctionMapConfig(fmFile, "testservice"); - var cryptoFile = Path.Combine(sourceDir, "root.pem"); - await File.WriteAllTextAsync(cryptoFile, "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"); - - var request = new BundleBuildRequest( - "test-bundle", - "1.0.0", - null, - new[] { new FeedBuildConfig("feed-1", "nvd", "v1", feedFile, "feeds/nvd.json", DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative) }, - Array.Empty(), - new[] { new CryptoBuildConfig("crypto-1", "root", cryptoFile, "crypto/root.pem", CryptoComponentType.TrustRoot, null) }, - Array.Empty(), - Artifacts: new[] { fmConfig }); - - var outputPath = Path.Combine(_tempRoot, "bundle"); - var builder = new BundleBuilder(); - var manifest = await builder.BuildAsync(request, outputPath); - - // Tamper with the function-map file - var bundledFile = Path.Combine(outputPath, "function-maps", "testservice-function-map.json"); - await File.WriteAllTextAsync(bundledFile, "{\"tampered\":true}"); - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, outputPath); - - // Assert - result.Errors.Should().Contain(e => - e.Component == "Artifacts" && e.Message.Contains("digest mismatch")); - } - - [Fact(DisplayName = "Validator warns when artifact has no digest")] - public async Task Validator_WarnsWhenArtifactHasNoDigest() - { - // Arrange - create a manifest with an artifact that has no digest - var outputPath = Path.Combine(_tempRoot, "bundle"); - Directory.CreateDirectory(Path.Combine(outputPath, "function-maps")); - - var fmPath = Path.Combine(outputPath, "function-maps", "test-function-map.json"); - await File.WriteAllTextAsync(fmPath, "{}"); - - var feedDir = 
Path.Combine(outputPath, "feeds"); - Directory.CreateDirectory(feedDir); - var feedPath = Path.Combine(feedDir, "nvd.json"); - await File.WriteAllTextAsync(feedPath, "{}"); - - var cryptoDir = Path.Combine(outputPath, "crypto"); - Directory.CreateDirectory(cryptoDir); - var cryptoPath = Path.Combine(cryptoDir, "root.pem"); - await File.WriteAllTextAsync(cryptoPath, "cert"); - - var manifest = new BundleManifest - { - BundleId = "test", - Name = "test", - Version = "1.0.0", - CreatedAt = DateTimeOffset.UtcNow, - Feeds = ImmutableArray.Create(new FeedComponent( - "feed-1", "nvd", "v1", "feeds/nvd.json", - System.Security.Cryptography.SHA256.HashData(Encoding.UTF8.GetBytes("{}")).Select(b => b.ToString("x2")).Aggregate((a, b) => a + b), - 2, DateTimeOffset.UtcNow, FeedFormat.StellaOpsNative)), - Policies = ImmutableArray.Empty, - CryptoMaterials = ImmutableArray.Create(new CryptoComponent( - "crypto-1", "root", "crypto/root.pem", - System.Security.Cryptography.SHA256.HashData(Encoding.UTF8.GetBytes("cert")).Select(b => b.ToString("x2")).Aggregate((a, b) => a + b), - 4, CryptoComponentType.TrustRoot, null)), - Artifacts = ImmutableArray.Create(new BundleArtifact( - "function-maps/test-function-map.json", - "function-map", - "application/vnd.stella.function-map+json", - null, // No digest - 2)) - }; - - var validator = new BundleValidator(); - - // Act - var result = await validator.ValidateAsync(manifest, outputPath); - - // Assert - result.Warnings.Should().Contain(w => - w.Component == "Artifacts" && w.Message.Contains("no digest")); - } - - #endregion - - #region BundleArtifactType Enum Tests - - [Fact(DisplayName = "BundleArtifactType has FunctionMap value")] - public void BundleArtifactType_HasFunctionMap() - { - BundleArtifactType.FunctionMap.Should().BeDefined(); - } - - [Fact(DisplayName = "BundleArtifactType has FunctionMapDsse value")] - public void BundleArtifactType_HasFunctionMapDsse() - { - BundleArtifactType.FunctionMapDsse.Should().BeDefined(); - } - - [Fact(DisplayName = "BundleArtifactType has Observations value")] - public void BundleArtifactType_HasObservations() - { - BundleArtifactType.Observations.Should().BeDefined(); - } - - [Fact(DisplayName = "BundleArtifactType has VerificationReport value")] - public void BundleArtifactType_HasVerificationReport() - { - BundleArtifactType.VerificationReport.Should().BeDefined(); - } - - #endregion } diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPersistenceExtensionsTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPersistenceExtensionsTests.cs new file mode 100644 index 000000000..d887c61ab --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPersistenceExtensionsTests.cs @@ -0,0 +1,46 @@ +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using StellaOps.AirGap.Controller.Stores; +using StellaOps.AirGap.Importer.Versioning; +using StellaOps.AirGap.Persistence.Extensions; +using StellaOps.AirGap.Persistence.Postgres; +using StellaOps.AirGap.Persistence.Postgres.Repositories; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed class AirGapPersistenceExtensionsTests +{ + [Fact] + [Intent(TestIntents.Operational, "Ensures AirGap persistence services register expected types.")] + [Trait("Category", TestCategories.Unit)] + public void 
AddAirGapPersistence_RegistersServicesFromOptions()
+    {
+        var services = new TestServiceCollection();
+
+        services.AddAirGapPersistence(options =>
+        {
+            options.ConnectionString = "Host=localhost;Database=airgap";
+            options.SchemaName = "airgap";
+        });
+
+        services.Any(sd => sd.ServiceType == typeof(AirGapDataSource)).Should().BeTrue();
+        services.Any(sd => sd.ServiceType == typeof(IHostedService))
+            .Should().BeTrue();
+        services.Any(sd => sd.ServiceType == typeof(IAirGapStateStore) &&
+                sd.ImplementationType == typeof(PostgresAirGapStateStore))
+            .Should().BeTrue();
+        services.Any(sd => sd.ServiceType == typeof(IBundleVersionStore) &&
+                sd.ImplementationType == typeof(PostgresBundleVersionStore))
+            .Should().BeTrue();
+    }
+
+    private sealed class TestServiceCollection : List<ServiceDescriptor>, IServiceCollection
+    {
+    }
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresCollection.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresCollection.cs
new file mode 100644
index 000000000..4004e26df
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresCollection.cs
@@ -0,0 +1,13 @@
+using Xunit;
+
+namespace StellaOps.AirGap.Persistence.Tests;
+
+/// <summary>
+/// Collection definition for AirGap PostgreSQL integration tests.
+/// Tests in this collection share a single PostgreSQL container instance.
+/// </summary>
+[CollectionDefinition(Name)]
+public sealed class AirGapPostgresCollection : ICollectionFixture<AirGapPostgresFixture>
+{
+    public const string Name = "AirGapPostgres";
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Indexes.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Indexes.cs
new file mode 100644
index 000000000..0bca48a24
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Indexes.cs
@@ -0,0 +1,55 @@
+using System.Collections.Generic;
+using System.Threading;
+using Npgsql;
+
+namespace StellaOps.AirGap.Persistence.Tests;
+
+public sealed partial class AirGapPostgresFixture
+{
+    /// <summary>
+    /// Gets all index names for a specific table in the test schema.
+    /// </summary>
+    public async Task<IReadOnlyList<string>> GetIndexNamesAsync(
+        string tableName,
+        CancellationToken cancellationToken = default)
+    {
+        await using var connection = new NpgsqlConnection(ConnectionString);
+        await connection.OpenAsync(cancellationToken).ConfigureAwait(false);
+
+        await using var cmd = new NpgsqlCommand(
+            """
+            SELECT indexname FROM pg_indexes
+            WHERE schemaname = @schema AND tablename = @table;
+            """,
+            connection);
+        cmd.Parameters.AddWithValue("schema", SchemaName);
+        cmd.Parameters.AddWithValue("table", tableName);
+
+        var indexes = new List<string>();
+        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
+        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
+        {
+            indexes.Add(reader.GetString(0));
+        }
+
+        return indexes;
+    }
+
+    /// <summary>
+    /// Ensures migrations have been run. This is idempotent and safe to call multiple times.
+    /// </summary>
+    public async Task EnsureMigrationsRunAsync(CancellationToken cancellationToken = default)
+    {
+        var migrationAssembly = GetMigrationAssembly();
+        if (migrationAssembly is null)
+        {
+            return;
+        }
+
+        await Fixture.RunMigrationsFromAssemblyAsync(
+            migrationAssembly,
+            GetModuleName(),
+            GetResourcePrefix(),
+            cancellationToken).ConfigureAwait(false);
+    }
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Tables.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Tables.cs
new file mode 100644
index 000000000..3da0fd77c
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.Tables.cs
@@ -0,0 +1,63 @@
+using System.Collections.Generic;
+using System.Threading;
+using Npgsql;
+
+namespace StellaOps.AirGap.Persistence.Tests;
+
+public sealed partial class AirGapPostgresFixture
+{
+    /// <summary>
+    /// Gets all table names in the test schema.
+    /// </summary>
+    public async Task<IReadOnlyList<string>> GetTableNamesAsync(CancellationToken cancellationToken = default)
+    {
+        await using var connection = new NpgsqlConnection(ConnectionString);
+        await connection.OpenAsync(cancellationToken).ConfigureAwait(false);
+
+        await using var cmd = new NpgsqlCommand(
+            """
+            SELECT table_name FROM information_schema.tables
+            WHERE table_schema = @schema AND table_type = 'BASE TABLE';
+            """,
+            connection);
+        cmd.Parameters.AddWithValue("schema", SchemaName);
+
+        var tables = new List<string>();
+        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
+        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
+        {
+            tables.Add(reader.GetString(0));
+        }
+
+        return tables;
+    }
+
+    /// <summary>
+    /// Gets all column names for a specific table in the test schema.
+    /// </summary>
+    public async Task<IReadOnlyList<string>> GetColumnNamesAsync(
+        string tableName,
+        CancellationToken cancellationToken = default)
+    {
+        await using var connection = new NpgsqlConnection(ConnectionString);
+        await connection.OpenAsync(cancellationToken).ConfigureAwait(false);
+
+        await using var cmd = new NpgsqlCommand(
+            """
+            SELECT column_name FROM information_schema.columns
+            WHERE table_schema = @schema AND table_name = @table;
+            """,
+            connection);
+        cmd.Parameters.AddWithValue("schema", SchemaName);
+        cmd.Parameters.AddWithValue("table", tableName);
+
+        var columns = new List<string>();
+        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
+        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
+        {
+            columns.Add(reader.GetString(0));
+        }
+
+        return columns;
+    }
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.cs
index c1ab5bd57..728c042ae 100644
--- a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.cs
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapPostgresFixture.cs
@@ -1,5 +1,4 @@
 using System.Reflection;
-using Npgsql;
 using StellaOps.AirGap.Persistence.Postgres;
 using StellaOps.Infrastructure.Postgres.Testing;
 using Xunit;
@@ -10,118 +9,11 @@ namespace StellaOps.AirGap.Persistence.Tests;
 ///
 /// PostgreSQL integration test fixture for the AirGap module.
 /// Runs migrations from embedded resources and provides test isolation.
/// -public sealed class AirGapPostgresFixture : PostgresIntegrationFixture, ICollectionFixture +public sealed partial class AirGapPostgresFixture : PostgresIntegrationFixture, ICollectionFixture { - protected override Assembly? GetMigrationAssembly() - => typeof(AirGapDataSource).Assembly; + protected override Assembly? GetMigrationAssembly() => typeof(AirGapDataSource).Assembly; protected override string GetModuleName() => "AirGap"; protected override string? GetResourcePrefix() => null; - - /// - /// Gets all table names in the test schema. - /// - public async Task> GetTableNamesAsync(CancellationToken cancellationToken = default) - { - await using var connection = new NpgsqlConnection(ConnectionString); - await connection.OpenAsync(cancellationToken).ConfigureAwait(false); - - await using var cmd = new NpgsqlCommand( - """ - SELECT table_name FROM information_schema.tables - WHERE table_schema = @schema AND table_type = 'BASE TABLE'; - """, - connection); - cmd.Parameters.AddWithValue("schema", SchemaName); - - var tables = new List(); - await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - tables.Add(reader.GetString(0)); - } - - return tables; - } - - /// - /// Gets all column names for a specific table in the test schema. - /// - public async Task> GetColumnNamesAsync(string tableName, CancellationToken cancellationToken = default) - { - await using var connection = new NpgsqlConnection(ConnectionString); - await connection.OpenAsync(cancellationToken).ConfigureAwait(false); - - await using var cmd = new NpgsqlCommand( - """ - SELECT column_name FROM information_schema.columns - WHERE table_schema = @schema AND table_name = @table; - """, - connection); - cmd.Parameters.AddWithValue("schema", SchemaName); - cmd.Parameters.AddWithValue("table", tableName); - - var columns = new List(); - await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - columns.Add(reader.GetString(0)); - } - - return columns; - } - - /// - /// Gets all index names for a specific table in the test schema. - /// - public async Task> GetIndexNamesAsync(string tableName, CancellationToken cancellationToken = default) - { - await using var connection = new NpgsqlConnection(ConnectionString); - await connection.OpenAsync(cancellationToken).ConfigureAwait(false); - - await using var cmd = new NpgsqlCommand( - """ - SELECT indexname FROM pg_indexes - WHERE schemaname = @schema AND tablename = @table; - """, - connection); - cmd.Parameters.AddWithValue("schema", SchemaName); - cmd.Parameters.AddWithValue("table", tableName); - - var indexes = new List(); - await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - indexes.Add(reader.GetString(0)); - } - - return indexes; - } - - /// - /// Ensures migrations have been run. This is idempotent and safe to call multiple times. 
-    ///
-    public async Task EnsureMigrationsRunAsync(CancellationToken cancellationToken = default)
-    {
-        var migrationAssembly = GetMigrationAssembly();
-        if (migrationAssembly != null)
-        {
-            await Fixture.RunMigrationsFromAssemblyAsync(
-                migrationAssembly,
-                GetModuleName(),
-                GetResourcePrefix(),
-                cancellationToken).ConfigureAwait(false);
-        }
-    }
-}
-
-///
-/// Collection definition for AirGap PostgreSQL integration tests.
-/// Tests in this collection share a single PostgreSQL container instance.
-///
-[CollectionDefinition(Name)]
-public sealed class AirGapPostgresCollection : ICollectionFixture
-{
-    public const string Name = "AirGapPostgres";
-}
 }
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.Content.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.Content.cs
new file mode 100644
index 000000000..3a279061d
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.Content.cs
@@ -0,0 +1,61 @@
+using System.Collections.Generic;
+using System.Linq;
+using FluentAssertions;
+using StellaOps.AirGap.Time.Models;
+using StellaOps.TestKit;
+using StellaOps.TestKit.Traits;
+using Xunit;
+
+namespace StellaOps.AirGap.Persistence.Tests;
+
+public sealed partial class AirGapStorageIntegrationTests
+{
+    [Fact]
+    [Intent(TestIntents.Operational, "Ensures content budgets are returned with stable keys.")]
+    [Trait("Category", TestCategories.QueryDeterminism)]
+    public async Task QueryDeterminism_ContentBudgets_ReturnInConsistentOrderAsync()
+    {
+        var tenantId = "tenant-det-02";
+        var state = CreateTestState(tenantId, "state-det-02", contentBudgets: new Dictionary<string, StalenessBudget>
+        {
+            ["zebra"] = new StalenessBudget(100, 200),
+            ["alpha"] = new StalenessBudget(300, 400),
+            ["middle"] = new StalenessBudget(500, 600)
+        });
+        await _store.SetAsync(state);
+
+        var results = new List<IReadOnlyDictionary<string, StalenessBudget>>();
+        for (var i = 0; i < 5; i++)
+        {
+            var fetched = await _store.GetAsync(tenantId);
+            results.Add(fetched.ContentBudgets);
+        }
+
+        var keys1 = results[0].Keys.OrderBy(k => k).ToList();
+        foreach (var result in results.Skip(1))
+        {
+            var keys = result.Keys.OrderBy(k => k).ToList();
+            keys.Should().BeEquivalentTo(keys1, options => options.WithStrictOrdering());
+        }
+    }
+
+    [Fact]
+    [Intent(TestIntents.Operational, "Confirms tenant isolation for AirGap state reads.")]
+    [Trait("Category", TestCategories.QueryDeterminism)]
+    public async Task QueryDeterminism_MultipleTenants_IsolatedResultsAsync()
+    {
+        var tenant1 = "tenant-det-04a";
+        var tenant2 = "tenant-det-04b";
+
+        await _store.SetAsync(CreateTestState(tenant1, "state-det-04a", sealed_: true, policyHash: "sha256:tenant1"));
+        await _store.SetAsync(CreateTestState(tenant2, "state-det-04b", sealed_: false, policyHash: "sha256:tenant2"));
+
+        var result1 = await _store.GetAsync(tenant1);
+        var result2 = await _store.GetAsync(tenant2);
+
+        result1.Sealed.Should().BeTrue();
+        result1.PolicyHash.Should().Be("sha256:tenant1");
+        result2.Sealed.Should().BeFalse();
+        result2.PolicyHash.Should().Be("sha256:tenant2");
+    }
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.State.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.State.cs
new file mode 100644
index 000000000..8da2a854d
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Determinism.State.cs
@@ -0,0 +1,50
@@ +using FluentAssertions; +using StellaOps.AirGap.Time.Models; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class AirGapStorageIntegrationTests +{ + [Fact] + [Intent(TestIntents.Operational, "Verifies repeated reads return consistent state.")] + [Trait("Category", TestCategories.QueryDeterminism)] + public async Task QueryDeterminism_SameInput_SameOutputAsync() + { + var tenantId = "tenant-det-01"; + var state = CreateTestState(tenantId, "state-det-01"); + await _store.SetAsync(state); + + var result1 = await _store.GetAsync(tenantId); + var result2 = await _store.GetAsync(tenantId); + var result3 = await _store.GetAsync(tenantId); + + result1.Should().BeEquivalentTo(result2); + result2.Should().BeEquivalentTo(result3); + } + + [Fact] + [Intent(TestIntents.Operational, "Verifies time anchor fields round trip correctly.")] + [Trait("Category", TestCategories.QueryDeterminism)] + public async Task QueryDeterminism_TimeAnchor_PreservesAllFieldsAsync() + { + var tenantId = "tenant-det-03"; + var anchorTime = new DateTimeOffset(2025, 6, 15, 12, 0, 0, TimeSpan.Zero); + var state = CreateTestState(tenantId, "state-det-03", timeAnchor: new TimeAnchor( + anchorTime, + "tsa.example.com", + "RFC3161", + "sha256:fingerprint", + "sha256:tokendigest")); + await _store.SetAsync(state); + + var fetched1 = await _store.GetAsync(tenantId); + var fetched2 = await _store.GetAsync(tenantId); + + fetched1.TimeAnchor.Should().BeEquivalentTo(fetched2.TimeAnchor); + fetched1.TimeAnchor.AnchorTime.Should().Be(anchorTime); + fetched1.TimeAnchor.Source.Should().Be("tsa.example.com"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Idempotency.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Idempotency.cs new file mode 100644 index 000000000..726fa39d2 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Idempotency.cs @@ -0,0 +1,81 @@ +using System.Collections.Generic; +using FluentAssertions; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class AirGapStorageIntegrationTests +{ + [Fact] + [Intent(TestIntents.Operational, "Confirms repeated writes do not fail for the same tenant.")] + [Trait("Category", TestCategories.StorageIdempotency)] + public async Task Idempotency_SetStateTwice_NoExceptionAsync() + { + var tenantId = "tenant-idem-01"; + var state = CreateTestState(tenantId, "state-idem-01"); + + await _store.SetAsync(state); + var act = async () => await _store.SetAsync(state); + + await act.Should().NotThrowAsync("Setting state twice should be idempotent"); + } + + [Fact] + [Intent(TestIntents.Operational, "Ensures the latest write wins without creating duplicates.")] + [Trait("Category", TestCategories.StorageIdempotency)] + public async Task Idempotency_SetStateTwice_SingleRecordAsync() + { + var tenantId = "tenant-idem-02"; + var state1 = CreateTestState(tenantId, "state-idem-02a", sealed_: true, policyHash: "sha256:policy-v1"); + var state2 = CreateTestState(tenantId, "state-idem-02b", sealed_: true, policyHash: "sha256:policy-v2"); + + await _store.SetAsync(state1); + await _store.SetAsync(state2); + var fetched = await _store.GetAsync(tenantId); + + fetched.PolicyHash.Should().Be("sha256:policy-v2", "Second set should update, not duplicate"); + } + + [Fact] + 
[Intent(TestIntents.Operational, "Validates concurrent writes keep state consistent.")] + [Trait("Category", TestCategories.StorageConcurrency)] + [Trait("Category", TestCategories.StorageIdempotency)] + public async Task Idempotency_ConcurrentSets_NoDataCorruptionAsync() + { + var tenantId = "tenant-idem-03"; + var tasks = new List(); + + for (var i = 0; i < 10; i++) + { + var state = CreateTestState( + tenantId, + $"state-idem-03-{i}", + sealed_: i % 2 == 0, + policyHash: $"sha256:policy-{i}"); + tasks.Add(_store.SetAsync(state)); + } + + await Task.WhenAll(tasks); + + var fetched = await _store.GetAsync(tenantId); + fetched.Should().NotBeNull(); + fetched.TenantId.Should().Be(tenantId); + fetched.PolicyHash.Should().StartWith("sha256:policy-"); + } + + [Fact] + [Intent(TestIntents.Operational, "Ensures repeat imports do not fail for identical state payloads.")] + [Trait("Category", TestCategories.StorageIdempotency)] + public async Task Idempotency_SetSameStateTwice_NoExceptionAsync() + { + var tenantId = "tenant-idem-04"; + var state = CreateTestState(tenantId, "state-idem-04", sealed_: true); + + await _store.SetAsync(state); + var act = async () => await _store.SetAsync(state); + + await act.Should().NotThrowAsync("Importing identical state twice should be idempotent"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Migrations.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Migrations.cs new file mode 100644 index 000000000..2b2443c0c --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.Migrations.cs @@ -0,0 +1,67 @@ +using FluentAssertions; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class AirGapStorageIntegrationTests +{ + [Fact] + [Intent(TestIntents.Operational, "Validates AirGap storage migrations create required tables.")] + [Trait("Category", TestCategories.StorageMigration)] + public async Task Migration_SchemaContainsRequiredTablesAsync() + { + var expectedTables = new[] + { + "state", + "bundle_versions", + "bundle_version_history" + }; + + var tables = await _fixture.GetTableNamesAsync(); + + foreach (var expectedTable in expectedTables) + { + tables.Should().Contain(t => t.Contains(expectedTable, StringComparison.OrdinalIgnoreCase), + $"Table '{expectedTable}' should exist in schema"); + } + } + + [Fact] + [Intent(TestIntents.Operational, "Verifies required columns exist after AirGap migrations.")] + [Trait("Category", TestCategories.StorageMigration)] + public async Task Migration_AirGapStateHasRequiredColumnsAsync() + { + var expectedColumns = new[] { "tenant_id", "sealed", "policy_hash", "time_anchor", "created_at", "updated_at" }; + + var columns = await _fixture.GetColumnNamesAsync("state"); + + foreach (var expectedColumn in expectedColumns) + { + columns.Should().Contain(c => c.Contains(expectedColumn, StringComparison.OrdinalIgnoreCase), + $"Column '{expectedColumn}' should exist in airgap_state"); + } + } + + [Fact] + [Intent(TestIntents.Operational, "Ensures AirGap migrations are idempotent.")] + [Trait("Category", TestCategories.StorageMigration)] + public async Task Migration_IsIdempotentAsync() + { + var act = async () => await _fixture.EnsureMigrationsRunAsync(); + + await act.Should().NotThrowAsync("Running migrations multiple times should be idempotent"); + } + + [Fact] + [Intent(TestIntents.Operational, 
"Ensures tenant lookup indexes exist for AirGap state.")] + [Trait("Category", TestCategories.StorageMigration)] + public async Task Migration_HasTenantIndexAsync() + { + var indexes = await _fixture.GetIndexNamesAsync("state"); + + indexes.Should().Contain(i => i.Contains("tenant", StringComparison.OrdinalIgnoreCase), + "airgap_state should have tenant index for multi-tenant queries"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.cs index 126fc7efb..393a2cc5e 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/AirGapStorageIntegrationTests.cs @@ -1,11 +1,5 @@ -// ----------------------------------------------------------------------------- -// AirGapStorageIntegrationTests.cs -// Sprint: SPRINT_5100_0010_0004_airgap_tests -// Tasks: AIRGAP-5100-007, AIRGAP-5100-008, AIRGAP-5100-009 -// Description: S1 Storage tests - migrations, idempotency, query determinism -// ----------------------------------------------------------------------------- - -using FluentAssertions; +using System; +using System.Collections.Generic; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; using StellaOps.AirGap.Controller.Domain; @@ -21,12 +15,20 @@ namespace StellaOps.AirGap.Persistence.Tests; /// /// S1 Storage Layer Tests for AirGap /// Task AIRGAP-5100-007: Migration tests (apply from scratch, apply from N-1) -/// Task AIRGAP-5100-008: Idempotency tests (same bundle imported twice → no duplicates) +/// Task AIRGAP-5100-008: Idempotency tests (same bundle imported twice -> no duplicates) /// Task AIRGAP-5100-009: Query determinism tests (explicit ORDER BY checks) /// [Collection(AirGapPostgresCollection.Name)] -public sealed class AirGapStorageIntegrationTests : IAsyncLifetime +[Trait("Category", TestCategories.Integration)] +[Trait("BlastRadius", TestCategories.BlastRadius.Persistence)] +public sealed partial class AirGapStorageIntegrationTests : IAsyncLifetime { + private static readonly DateTimeOffset DefaultTransitionAt = + new(2025, 1, 15, 0, 0, 0, TimeSpan.Zero); + + private static readonly StalenessBudget DefaultStalenessBudget = + new(1800, 3600); + private readonly AirGapPostgresFixture _fixture; private readonly PostgresAirGapStateStore _store; private readonly AirGapDataSource _dataSource; @@ -55,288 +57,26 @@ public sealed class AirGapStorageIntegrationTests : IAsyncLifetime await _dataSource.DisposeAsync(); } - #region AIRGAP-5100-007: Migration Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Migration_SchemaContainsRequiredTables() - { - // Arrange - var expectedTables = new[] - { - "state", - "bundle_versions", - "bundle_version_history" - }; - - // Act - var tables = await _fixture.GetTableNamesAsync(); - - // Assert - foreach (var expectedTable in expectedTables) - { - tables.Should().Contain(t => t.Contains(expectedTable, StringComparison.OrdinalIgnoreCase), - $"Table '{expectedTable}' should exist in schema"); - } - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Migration_AirGapStateHasRequiredColumns() - { - // Arrange - var expectedColumns = new[] { "tenant_id", "sealed", "policy_hash", "time_anchor", "created_at", "updated_at" }; - - // Act - var columns = await _fixture.GetColumnNamesAsync("state"); - - // Assert - foreach (var expectedColumn 
in expectedColumns) - { - columns.Should().Contain(c => c.Contains(expectedColumn, StringComparison.OrdinalIgnoreCase), - $"Column '{expectedColumn}' should exist in airgap_state"); - } - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Migration_IsIdempotent() - { - // Act - Running migrations again should not fail - var act = async () => - { - await _fixture.EnsureMigrationsRunAsync(); - }; - - // Assert - await act.Should().NotThrowAsync("Running migrations multiple times should be idempotent"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Migration_HasTenantIndex() - { - // Act - var indexes = await _fixture.GetIndexNamesAsync("state"); - - // Assert - indexes.Should().Contain(i => i.Contains("tenant", StringComparison.OrdinalIgnoreCase), - "airgap_state should have tenant index for multi-tenant queries"); - } - - #endregion - - #region AIRGAP-5100-008: Idempotency Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Idempotency_SetStateTwice_NoException() - { - // Arrange - var tenantId = $"tenant-idem-{Guid.NewGuid():N}"; - var state = CreateTestState(tenantId); - - // Act - Set state twice - await _store.SetAsync(state); - var act = async () => await _store.SetAsync(state); - - // Assert - await act.Should().NotThrowAsync("Setting state twice should be idempotent"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Idempotency_SetStateTwice_SingleRecord() - { - // Arrange - var tenantId = $"tenant-single-{Guid.NewGuid():N}"; - var state1 = CreateTestState(tenantId, sealed_: true, policyHash: "sha256:policy-v1"); - var state2 = CreateTestState(tenantId, sealed_: true, policyHash: "sha256:policy-v2"); - - // Act - await _store.SetAsync(state1); - await _store.SetAsync(state2); - var fetched = await _store.GetAsync(tenantId); - - // Assert - Should have latest value, not duplicate - fetched.PolicyHash.Should().Be("sha256:policy-v2", "Second set should update, not duplicate"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Idempotency_ConcurrentSets_NoDataCorruption() - { - // Arrange - var tenantId = $"tenant-concurrent-{Guid.NewGuid():N}"; - var tasks = new List(); - - // Act - Concurrent sets - for (int i = 0; i < 10; i++) - { - var iteration = i; - tasks.Add(Task.Run(async () => - { - var state = CreateTestState(tenantId, sealed_: iteration % 2 == 0, policyHash: $"sha256:policy-{iteration}"); - await _store.SetAsync(state); - })); - } - - await Task.WhenAll(tasks); - - // Assert - Should have valid state (no corruption) - var fetched = await _store.GetAsync(tenantId); - fetched.Should().NotBeNull(); - fetched.TenantId.Should().Be(tenantId); - fetched.PolicyHash.Should().StartWith("sha256:policy-"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task Idempotency_SameBundleIdTwice_NoException() - { - // Arrange - var tenantId = $"tenant-bundle-{Guid.NewGuid():N}"; - var bundleId = Guid.NewGuid().ToString("N"); - - // Create state with bundle reference - var state = CreateTestState(tenantId, sealed_: true); - - // Act - Set same state twice (simulating duplicate bundle import) - await _store.SetAsync(state); - var act = async () => await _store.SetAsync(state); - - // Assert - await act.Should().NotThrowAsync("Importing same bundle twice should be idempotent"); - } - - #endregion - - #region AIRGAP-5100-009: Query Determinism Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task 
QueryDeterminism_SameInput_SameOutput() - { - // Arrange - var tenantId = $"tenant-det-{Guid.NewGuid():N}"; - var state = CreateTestState(tenantId); - await _store.SetAsync(state); - - // Act - Query multiple times - var result1 = await _store.GetAsync(tenantId); - var result2 = await _store.GetAsync(tenantId); - var result3 = await _store.GetAsync(tenantId); - - // Assert - All results should be equivalent - result1.Should().BeEquivalentTo(result2); - result2.Should().BeEquivalentTo(result3); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task QueryDeterminism_ContentBudgets_ReturnInConsistentOrder() - { - // Arrange - var tenantId = $"tenant-budgets-{Guid.NewGuid():N}"; - var state = CreateTestState(tenantId) with - { - ContentBudgets = new Dictionary - { - ["zebra"] = new StalenessBudget(100, 200), - ["alpha"] = new StalenessBudget(300, 400), - ["middle"] = new StalenessBudget(500, 600) - } - }; - await _store.SetAsync(state); - - // Act - Query multiple times - var results = new List>(); - for (int i = 0; i < 5; i++) - { - var fetched = await _store.GetAsync(tenantId); - results.Add(fetched.ContentBudgets); - } - - // Assert - All queries should return same keys - var keys1 = results[0].Keys.OrderBy(k => k).ToList(); - foreach (var result in results.Skip(1)) - { - var keys = result.Keys.OrderBy(k => k).ToList(); - keys.Should().BeEquivalentTo(keys1, options => options.WithStrictOrdering()); - } - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task QueryDeterminism_TimeAnchor_PreservesAllFields() - { - // Arrange - var tenantId = $"tenant-anchor-{Guid.NewGuid():N}"; - var anchorTime = DateTimeOffset.Parse("2025-06-15T12:00:00Z"); - var state = CreateTestState(tenantId) with - { - TimeAnchor = new TimeAnchor( - anchorTime, - "tsa.example.com", - "RFC3161", - "sha256:fingerprint", - "sha256:tokendigest") - }; - await _store.SetAsync(state); - - // Act - var fetched1 = await _store.GetAsync(tenantId); - var fetched2 = await _store.GetAsync(tenantId); - - // Assert - fetched1.TimeAnchor.Should().BeEquivalentTo(fetched2.TimeAnchor); - fetched1.TimeAnchor.AnchorTime.Should().Be(anchorTime); - fetched1.TimeAnchor.Source.Should().Be("tsa.example.com"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task QueryDeterminism_MultipleTenants_IsolatedResults() - { - // Arrange - var tenant1 = $"tenant-iso1-{Guid.NewGuid():N}"; - var tenant2 = $"tenant-iso2-{Guid.NewGuid():N}"; - - await _store.SetAsync(CreateTestState(tenant1, sealed_: true, policyHash: "sha256:tenant1-policy")); - await _store.SetAsync(CreateTestState(tenant2, sealed_: false, policyHash: "sha256:tenant2-policy")); - - // Act - var result1 = await _store.GetAsync(tenant1); - var result2 = await _store.GetAsync(tenant2); - - // Assert - result1.Sealed.Should().BeTrue(); - result1.PolicyHash.Should().Be("sha256:tenant1-policy"); - result2.Sealed.Should().BeFalse(); - result2.PolicyHash.Should().Be("sha256:tenant2-policy"); - } - - #endregion - - #region Helpers - - private static AirGapState CreateTestState(string tenantId, bool sealed_ = false, string? policyHash = null) + private static AirGapState CreateTestState( + string tenantId, + string stateId, + bool sealed_ = false, + string? policyHash = null, + IReadOnlyDictionary? contentBudgets = null, + TimeAnchor? 
timeAnchor = null) { return new AirGapState { - Id = Guid.NewGuid().ToString("N"), + Id = stateId, TenantId = tenantId, Sealed = sealed_, PolicyHash = policyHash, - LastTransitionAt = DateTimeOffset.UtcNow, - StalenessBudget = new StalenessBudget(1800, 3600), + TimeAnchor = timeAnchor ?? TimeAnchor.Unknown, + LastTransitionAt = DefaultTransitionAt, + StalenessBudget = DefaultStalenessBudget, DriftBaselineSeconds = 5, - ContentBudgets = new Dictionary() + ContentBudgets = contentBudgets ?? + new Dictionary(StringComparer.OrdinalIgnoreCase) }; } - - #endregion } - - - diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Read.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Read.cs new file mode 100644 index 000000000..d1bc9aa82 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Read.cs @@ -0,0 +1,20 @@ +using FluentAssertions; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresAirGapStateStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Returns default state for tenants with no stored record.")] + public async Task GetAsync_ReturnsDefaultStateForNewTenantAsync() + { + var state = await _store.GetAsync(TenantId); + + state.Should().NotBeNull(); + state.TenantId.Should().Be(TenantId); + state.Sealed.Should().BeFalse(); + state.PolicyHash.Should().BeNull(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Write.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Write.cs new file mode 100644 index 000000000..5ae0f03bb --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.Write.cs @@ -0,0 +1,93 @@ +using System.Collections.Generic; +using FluentAssertions; +using StellaOps.AirGap.Time.Models; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresAirGapStateStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Ensures stored state round trips with all fields intact.")] + public async Task SetAndGet_RoundTripsStateAsync() + { + var timeAnchor = new TimeAnchor( + AnchorTime, + "tsa.example.com", + "RFC3161", + "sha256:fingerprint123", + "sha256:tokendigest456"); + var budgets = new Dictionary + { + ["advisories"] = new StalenessBudget(7200, 14400), + ["vex"] = new StalenessBudget(3600, 7200) + }; + var state = CreateState( + TenantId, + "state-store-01", + sealed_: true, + policyHash: "sha256:policy789", + budgets: budgets, + timeAnchor: timeAnchor); + + await _store.SetAsync(state); + var fetched = await _store.GetAsync(TenantId); + + fetched.Should().NotBeNull(); + fetched.Sealed.Should().BeTrue(); + fetched.PolicyHash.Should().Be("sha256:policy789"); + fetched.TimeAnchor.Source.Should().Be("tsa.example.com"); + fetched.TimeAnchor.Format.Should().Be("RFC3161"); + fetched.StalenessBudget.WarningSeconds.Should().Be(1800); + fetched.StalenessBudget.BreachSeconds.Should().Be(3600); + fetched.DriftBaselineSeconds.Should().Be(5); + fetched.ContentBudgets.Should().HaveCount(2); + fetched.ContentBudgets["advisories"].WarningSeconds.Should().Be(7200); + } + + [Fact] + [Intent(TestIntents.Operational, "Confirms existing state updates rather than duplicating.")] + [Trait("Category", TestCategories.StorageIdempotency)] + public async 
Task SetAsync_UpdatesExistingStateAsync() + { + var state1 = CreateState(TenantId, "state-store-02a"); + var state2 = CreateState( + TenantId, + "state-store-02b", + sealed_: true, + policyHash: "sha256:updated", + timeAnchor: new TimeAnchor(AnchorTime, "updated-source", "rfc3161", "", "")); + + await _store.SetAsync(state1); + await _store.SetAsync(state2); + var fetched = await _store.GetAsync(TenantId); + + fetched.Sealed.Should().BeTrue(); + fetched.PolicyHash.Should().Be("sha256:updated"); + fetched.TimeAnchor.Source.Should().Be("updated-source"); + fetched.StalenessBudget.WarningSeconds.Should().Be(1800); + } + + [Fact] + [Intent(TestIntents.Operational, "Stores and retrieves per-content staleness budgets.")] + public async Task SetAsync_PersistsContentBudgetsAsync() + { + var budgets = new Dictionary + { + ["advisories"] = new StalenessBudget(3600, 7200), + ["vex"] = new StalenessBudget(1800, 3600), + ["policy"] = new StalenessBudget(900, 1800) + }; + var state = CreateState(TenantId, "state-store-03", budgets: budgets); + + await _store.SetAsync(state); + var fetched = await _store.GetAsync(TenantId); + + fetched.ContentBudgets.Should().HaveCount(3); + fetched.ContentBudgets.Should().ContainKey("advisories"); + fetched.ContentBudgets.Should().ContainKey("vex"); + fetched.ContentBudgets.Should().ContainKey("policy"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.cs index aafd1bae9..6692b23ae 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresAirGapStateStoreTests.cs @@ -1,4 +1,5 @@ -using FluentAssertions; +using System; +using System.Collections.Generic; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; using StellaOps.AirGap.Controller.Domain; @@ -6,18 +7,27 @@ using StellaOps.AirGap.Persistence.Postgres; using StellaOps.AirGap.Persistence.Postgres.Repositories; using StellaOps.AirGap.Time.Models; using StellaOps.Infrastructure.Postgres.Options; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.AirGap.Persistence.Tests; [Collection(AirGapPostgresCollection.Name)] -public sealed class PostgresAirGapStateStoreTests : IAsyncLifetime +[Trait("Category", TestCategories.Integration)] +[Trait("BlastRadius", TestCategories.BlastRadius.Persistence)] +public sealed partial class PostgresAirGapStateStoreTests : IAsyncLifetime { + private static readonly DateTimeOffset AnchorTime = + new(2025, 5, 1, 8, 30, 0, TimeSpan.Zero); + + private static readonly DateTimeOffset TransitionTime = + new(2025, 5, 1, 8, 45, 0, TimeSpan.Zero); + private readonly AirGapPostgresFixture _fixture; private readonly PostgresAirGapStateStore _store; private readonly AirGapDataSource _dataSource; - private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8]; + + private const string TenantId = "tenant-store-01"; public PostgresAirGapStateStoreTests(AirGapPostgresFixture fixture) { @@ -43,133 +53,26 @@ public sealed class PostgresAirGapStateStoreTests : IAsyncLifetime await _dataSource.DisposeAsync(); } - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task GetAsync_ReturnsDefaultStateForNewTenant() + private static AirGapState CreateState( + string tenantId, + string stateId, + bool sealed_ = false, + string? policyHash = null, + IReadOnlyDictionary? 
budgets = null, + TimeAnchor? timeAnchor = null) { - // Act - var state = await _store.GetAsync(_tenantId); - - // Assert - state.Should().NotBeNull(); - state.TenantId.Should().Be(_tenantId); - state.Sealed.Should().BeFalse(); - state.PolicyHash.Should().BeNull(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SetAndGet_RoundTripsState() - { - // Arrange - var timeAnchor = new TimeAnchor( - DateTimeOffset.UtcNow, - "tsa.example.com", - "RFC3161", - "sha256:fingerprint123", - "sha256:tokendigest456"); - - var state = new AirGapState + return new AirGapState { - Id = Guid.NewGuid().ToString("N"), - TenantId = _tenantId, - Sealed = true, - PolicyHash = "sha256:policy789", - TimeAnchor = timeAnchor, - LastTransitionAt = DateTimeOffset.UtcNow, + Id = stateId, + TenantId = tenantId, + Sealed = sealed_, + PolicyHash = policyHash, + TimeAnchor = timeAnchor ?? TimeAnchor.Unknown, + LastTransitionAt = TransitionTime, StalenessBudget = new StalenessBudget(1800, 3600), DriftBaselineSeconds = 5, - ContentBudgets = new Dictionary - { - ["advisories"] = new StalenessBudget(7200, 14400), - ["vex"] = new StalenessBudget(3600, 7200) - } + ContentBudgets = budgets ?? + new Dictionary(StringComparer.OrdinalIgnoreCase) }; - - // Act - await _store.SetAsync(state); - var fetched = await _store.GetAsync(_tenantId); - - // Assert - fetched.Should().NotBeNull(); - fetched.Sealed.Should().BeTrue(); - fetched.PolicyHash.Should().Be("sha256:policy789"); - fetched.TimeAnchor.Source.Should().Be("tsa.example.com"); - fetched.TimeAnchor.Format.Should().Be("RFC3161"); - fetched.StalenessBudget.WarningSeconds.Should().Be(1800); - fetched.StalenessBudget.BreachSeconds.Should().Be(3600); - fetched.DriftBaselineSeconds.Should().Be(5); - fetched.ContentBudgets.Should().HaveCount(2); - fetched.ContentBudgets["advisories"].WarningSeconds.Should().Be(7200); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SetAsync_UpdatesExistingState() - { - // Arrange - var state1 = new AirGapState - { - Id = Guid.NewGuid().ToString("N"), - TenantId = _tenantId, - Sealed = false, - TimeAnchor = TimeAnchor.Unknown, - StalenessBudget = StalenessBudget.Default - }; - - var state2 = new AirGapState - { - Id = state1.Id, - TenantId = _tenantId, - Sealed = true, - PolicyHash = "sha256:updated", - TimeAnchor = new TimeAnchor(DateTimeOffset.UtcNow, "updated-source", "rfc3161", "", ""), - LastTransitionAt = DateTimeOffset.UtcNow, - StalenessBudget = new StalenessBudget(600, 1200) - }; - - // Act - await _store.SetAsync(state1); - await _store.SetAsync(state2); - var fetched = await _store.GetAsync(_tenantId); - - // Assert - fetched.Sealed.Should().BeTrue(); - fetched.PolicyHash.Should().Be("sha256:updated"); - fetched.TimeAnchor.Source.Should().Be("updated-source"); - fetched.StalenessBudget.WarningSeconds.Should().Be(600); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SetAsync_PersistsContentBudgets() - { - // Arrange - var state = new AirGapState - { - Id = Guid.NewGuid().ToString("N"), - TenantId = _tenantId, - TimeAnchor = TimeAnchor.Unknown, - StalenessBudget = StalenessBudget.Default, - ContentBudgets = new Dictionary - { - ["advisories"] = new StalenessBudget(3600, 7200), - ["vex"] = new StalenessBudget(1800, 3600), - ["policy"] = new StalenessBudget(900, 1800) - } - }; - - // Act - await _store.SetAsync(state); - var fetched = await _store.GetAsync(_tenantId); - - // Assert - fetched.ContentBudgets.Should().HaveCount(3); - 
fetched.ContentBudgets.Should().ContainKey("advisories"); - fetched.ContentBudgets.Should().ContainKey("vex"); - fetched.ContentBudgets.Should().ContainKey("policy"); } } - - - diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.ForceActivation.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.ForceActivation.cs new file mode 100644 index 000000000..a5d703021 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.ForceActivation.cs @@ -0,0 +1,43 @@ +using FluentAssertions; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresBundleVersionStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Allows explicit force activation of older versions.")] + public async Task UpsertAsync_AllowsForceActivateOlderVersionAsync() + { + var tenantId = "tenant-bundle-06"; + var current = CreateRecord( + tenantId, + "advisory", + major: 2, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt, + activatedAt: BaseActivatedAt); + await _store.UpsertAsync(current); + + var forced = CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt.AddHours(1), + activatedAt: BaseActivatedAt.AddHours(1), + wasForceActivated: true, + forceActivateReason: "manual-override"); + await _store.UpsertAsync(forced); + + var currentAfter = await _store.GetCurrentAsync(tenantId, "advisory"); + + currentAfter.Should().NotBeNull(); + currentAfter!.VersionString.Should().Be("1.0.0"); + currentAfter.WasForceActivated.Should().BeTrue(); + currentAfter.ForceActivateReason.Should().Be("manual-override"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.History.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.History.cs new file mode 100644 index 000000000..ea17aec5f --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.History.cs @@ -0,0 +1,79 @@ +using FluentAssertions; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresBundleVersionStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Records activation history for bundle versions.")] + [Trait("Category", TestCategories.QueryDeterminism)] + public async Task UpsertAsync_RecordsHistoryAsync() + { + var tenantId = "tenant-bundle-04"; + var v1 = CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt, + activatedAt: BaseActivatedAt); + var v2 = CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 1, + patch: 0, + createdAt: BaseCreatedAt.AddHours(1), + activatedAt: BaseActivatedAt.AddHours(1)); + + await _store.UpsertAsync(v1); + await _store.UpsertAsync(v2); + + var history = await _store.GetHistoryAsync(tenantId, "advisory"); + + history.Should().HaveCount(2); + history[0].VersionString.Should().Be("1.1.0"); + history[1].VersionString.Should().Be("1.0.0"); + } + + [Fact] + [Intent(TestIntents.Operational, "Applies history limits for bundle version queries.")] + public async Task GetHistoryAsync_RespectsLimitAsync() + { + var tenantId = "tenant-bundle-05"; + await _store.UpsertAsync(CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt, + activatedAt: 
BaseActivatedAt)); + await _store.UpsertAsync(CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 1, + patch: 0, + createdAt: BaseCreatedAt.AddHours(1), + activatedAt: BaseActivatedAt.AddHours(1))); + await _store.UpsertAsync(CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 2, + patch: 0, + createdAt: BaseCreatedAt.AddHours(2), + activatedAt: BaseActivatedAt.AddHours(2))); + + var history = await _store.GetHistoryAsync(tenantId, "advisory", limit: 2); + + history.Should().HaveCount(2); + history[0].VersionString.Should().Be("1.2.0"); + history[1].VersionString.Should().Be("1.1.0"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Read.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Read.cs new file mode 100644 index 000000000..62daf126d --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Read.cs @@ -0,0 +1,17 @@ +using FluentAssertions; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresBundleVersionStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Returns null when no bundle version exists for a tenant.")] + public async Task GetCurrentAsync_ReturnsNullWhenMissingAsync() + { + var result = await _store.GetCurrentAsync("tenant-bundle-01", "advisory"); + + result.Should().BeNull(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Upsert.cs b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Upsert.cs new file mode 100644 index 000000000..f64708995 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.Upsert.cs @@ -0,0 +1,60 @@ +using FluentAssertions; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +public sealed partial class PostgresBundleVersionStoreTests +{ + [Fact] + [Intent(TestIntents.Operational, "Persists the current bundle version for a tenant.")] + public async Task UpsertAsync_PersistsCurrentVersionAsync() + { + var tenantId = "tenant-bundle-02"; + var record = CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt, + activatedAt: BaseActivatedAt); + + await _store.UpsertAsync(record); + var current = await _store.GetCurrentAsync(tenantId, "advisory"); + + current.Should().NotBeNull(); + current!.VersionString.Should().Be("1.0.0"); + current.BundleDigest.Should().Be("sha256:bundle-1-0-0"); + current.ActivatedAt.Should().Be(BaseActivatedAt); + } + + [Fact] + [Intent(TestIntents.Operational, "Rejects non-monotonic bundle version updates.")] + public async Task UpsertAsync_RejectsNonMonotonicVersionAsync() + { + var tenantId = "tenant-bundle-03"; + var current = CreateRecord( + tenantId, + "advisory", + major: 2, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt, + activatedAt: BaseActivatedAt); + await _store.UpsertAsync(current); + + var older = CreateRecord( + tenantId, + "advisory", + major: 1, + minor: 0, + patch: 0, + createdAt: BaseCreatedAt.AddHours(1), + activatedAt: BaseActivatedAt.AddHours(1)); + + var act = async () => await _store.UpsertAsync(older); + + await act.Should().ThrowAsync(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.cs 
b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.cs new file mode 100644 index 000000000..aba64ee14 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/PostgresBundleVersionStoreTests.cs @@ -0,0 +1,82 @@ +using System; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Importer.Versioning; +using StellaOps.AirGap.Persistence.Postgres; +using StellaOps.AirGap.Persistence.Postgres.Repositories; +using StellaOps.Infrastructure.Postgres.Options; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AirGap.Persistence.Tests; + +[Collection(AirGapPostgresCollection.Name)] +[Trait("Category", TestCategories.Integration)] +[Trait("BlastRadius", TestCategories.BlastRadius.Persistence)] +public sealed partial class PostgresBundleVersionStoreTests : IAsyncLifetime +{ + private static readonly DateTimeOffset BaseCreatedAt = + new(2025, 2, 1, 0, 0, 0, TimeSpan.Zero); + + private static readonly DateTimeOffset BaseActivatedAt = + new(2025, 2, 1, 0, 10, 0, TimeSpan.Zero); + + private readonly AirGapPostgresFixture _fixture; + private readonly PostgresBundleVersionStore _store; + private readonly AirGapDataSource _dataSource; + + public PostgresBundleVersionStoreTests(AirGapPostgresFixture fixture) + { + _fixture = fixture; + var options = Options.Create(new PostgresOptions + { + ConnectionString = fixture.ConnectionString, + SchemaName = fixture.SchemaName, + AutoMigrate = false + }); + + _dataSource = new AirGapDataSource(options, NullLogger.Instance); + _store = new PostgresBundleVersionStore(_dataSource, NullLogger.Instance); + } + + public async ValueTask InitializeAsync() + { + await _fixture.TruncateAllTablesAsync(); + } + + public async ValueTask DisposeAsync() + { + await _dataSource.DisposeAsync(); + } + + private static BundleVersionRecord CreateRecord( + string tenantId, + string bundleType, + int major, + int minor, + int patch, + DateTimeOffset createdAt, + DateTimeOffset activatedAt, + string? prerelease = null, + bool wasForceActivated = false, + string? forceActivateReason = null) + { + var versionString = prerelease is null + ? $"{major}.{minor}.{patch}" + : $"{major}.{minor}.{patch}-{prerelease}"; + + return new BundleVersionRecord( + TenantId: tenantId, + BundleType: bundleType, + VersionString: versionString, + Major: major, + Minor: minor, + Patch: patch, + Prerelease: prerelease, + BundleCreatedAt: createdAt, + BundleDigest: $"sha256:bundle-{major}-{minor}-{patch}", + ActivatedAt: activatedAt, + WasForceActivated: wasForceActivated, + ForceActivateReason: forceActivateReason); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/TASKS.md b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/TASKS.md index 59e1b393b..6ca39070d 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/TASKS.md +++ b/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/TASKS.md @@ -1,10 +1,8 @@ -# AirGap Persistence Tests Task Board - +# StellaOps.AirGap.Persistence.Tests Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. | Task ID | Status | Notes | | --- | --- | --- | -| AUDIT-0029-M | DONE | Revalidated 2026-01-06; findings recorded in audit report. 
| -| AUDIT-0029-T | DONE | Revalidated 2026-01-06; findings recorded in audit report. | -| AUDIT-0029-A | DONE | Waived (test project; revalidated 2026-01-06). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/__Tests/StellaOps.AirGap.Persistence.Tests/StellaOps.AirGap.Persistence.Tests.md. | +| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Errors.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Errors.cs new file mode 100644 index 000000000..df92fe654 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Errors.cs @@ -0,0 +1,22 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class AirGapBundleDsseSignerTests +{ + [Fact] + public async Task SignAsync_WithMissingSecret_ThrowsInvalidOperationAsync() + { + var signer = CreateSigner("hmac", secretBase64: null); + var bundle = CreateTestBundle(); + + var act = async () => await signer.SignAsync(bundle); + + await act.Should().ThrowAsync<InvalidOperationException>() + .WithMessage("*SecretBase64*"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Sign.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Sign.cs new file mode 100644 index 000000000..bef648063 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Sign.cs @@ -0,0 +1,62 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class AirGapBundleDsseSignerTests +{ + [Fact] + public async Task SignAsync_WhenDisabled_ReturnsNullAsync() + { + var signer = CreateSigner("none"); + var bundle = CreateTestBundle(); + + var result = await signer.SignAsync(bundle); + + result.Should().BeNull(); + signer.IsEnabled.Should().BeFalse(); + } + + [Fact] + public async Task SignAsync_WhenEnabled_ReturnsValidSignatureAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64, "test-key"); + var bundle = CreateTestBundle(); + + var result = await signer.SignAsync(bundle); + + result.Should().NotBeNull(); + result!.KeyId.Should().Be("test-key"); + result.Signature.Should().NotBeEmpty(); + result.SignatureBase64.Should().NotBeNullOrWhiteSpace(); + signer.IsEnabled.Should().BeTrue(); + } + + [Fact] + public async Task SignAsync_DeterministicForSameInputAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle = CreateTestBundle(); + + var result1 = await signer.SignAsync(bundle); + var result2 = await signer.SignAsync(bundle); + + result1!.SignatureBase64.Should().Be(result2!.SignatureBase64); + } + + [Fact] + public async Task SignAsync_DifferentForDifferentManifestAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle1 = CreateTestBundle(manifestDigest: "sha256:aaa"); + var bundle2 = CreateTestBundle(manifestDigest: "sha256:bbb"); + + var result1 = await signer.SignAsync(bundle1); + var result2 = await signer.SignAsync(bundle2); + + result1!.SignatureBase64.Should().NotBe(result2!.SignatureBase64); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Verify.cs 
b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Verify.cs new file mode 100644 index 000000000..bd7fb4715 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.Verify.cs @@ -0,0 +1,76 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Services; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class AirGapBundleDsseSignerTests +{ + [Fact] + public async Task VerifyAsync_WhenDisabled_ReturnsSigningDisabledAsync() + { + var signer = CreateSigner("none"); + var bundle = CreateTestBundle(); + + var result = await signer.VerifyAsync(bundle); + + result.Should().Be(AirGapBundleVerificationResult.SigningDisabled); + } + + [Fact] + public async Task VerifyAsync_WhenNoSignature_ReturnsMissingSignatureAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle = CreateTestBundle(signature: null); + + var result = await signer.VerifyAsync(bundle); + + result.Should().Be(AirGapBundleVerificationResult.MissingSignature); + } + + [Fact] + public async Task VerifyAsync_WithValidSignature_ReturnsValidAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle = CreateTestBundle(); + + var signResult = await signer.SignAsync(bundle); + var signedBundle = bundle with { Signature = signResult!.SignatureBase64, SignedBy = signResult.KeyId }; + + var verifyResult = await signer.VerifyAsync(signedBundle); + + verifyResult.Should().Be(AirGapBundleVerificationResult.Valid); + } + + [Fact] + public async Task VerifyAsync_WithTamperedSignature_ReturnsInvalidAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle = CreateTestBundle(); + + var signResult = await signer.SignAsync(bundle); + var tamperedBundle = bundle with + { + Signature = signResult!.SignatureBase64, + ManifestDigest = "sha256:tampered" + }; + + var verifyResult = await signer.VerifyAsync(tamperedBundle); + + verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature); + } + + [Fact] + public async Task VerifyAsync_WithInvalidBase64Signature_ReturnsInvalidAsync() + { + var signer = CreateSigner("hmac", TestSecretBase64); + var bundle = CreateTestBundle(signature: "not-valid-base64!!!"); + + var verifyResult = await signer.VerifyAsync(bundle); + + verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.cs index 1d0664143..b1571ea51 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapBundleDsseSignerTests.cs @@ -1,227 +1,42 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - -using System.Security.Cryptography; -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Services; using StellaOps.TestKit; +using StellaOps.TestKit.Traits; using Xunit; namespace StellaOps.AirGap.Sync.Tests; -/// -/// Unit tests for . 
-/// [Trait("Category", TestCategories.Unit)] -public sealed class AirGapBundleDsseSignerTests +[Intent(TestIntents.Safety, "Validates deterministic DSSE signing and verification.")] +public sealed partial class AirGapBundleDsseSignerTests { - private static readonly string TestSecretBase64 = Convert.ToBase64String( - RandomNumberGenerator.GetBytes(32)); - - [Fact] - public async Task SignAsync_WhenDisabled_ReturnsNull() + private static readonly byte[] TestSecret = new byte[] { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, + 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20 + }; - // Act - var result = await signer.SignAsync(bundle); + private static readonly string TestSecretBase64 = Convert.ToBase64String(TestSecret); + private static readonly DateTimeOffset DefaultCreatedAt = + new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); - // Assert - result.Should().BeNull(); - signer.IsEnabled.Should().BeFalse(); - } - - [Fact] - public async Task SignAsync_WhenEnabled_ReturnsValidSignature() + private static AirGapBundleDsseSigner CreateSigner(string mode, string? secretBase64 = null, string? keyId = null) { - // Arrange var options = Options.Create(new AirGapBundleDsseOptions { - Mode = "hmac", - SecretBase64 = TestSecretBase64, - KeyId = "test-key" + Mode = mode, + SecretBase64 = secretBase64, + KeyId = keyId ?? "test-key" }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - // Act - var result = await signer.SignAsync(bundle); - - // Assert - result.Should().NotBeNull(); - result!.KeyId.Should().Be("test-key"); - result.Signature.Should().NotBeEmpty(); - result.SignatureBase64.Should().NotBeNullOrWhiteSpace(); - signer.IsEnabled.Should().BeTrue(); - } - - [Fact] - public async Task SignAsync_DeterministicForSameInput() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - - // Act - var result1 = await signer.SignAsync(bundle); - var result2 = await signer.SignAsync(bundle); - - // Assert - result1!.SignatureBase64.Should().Be(result2!.SignatureBase64); - } - - [Fact] - public async Task SignAsync_DifferentForDifferentManifest() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle1 = CreateTestBundle(manifestDigest: "sha256:aaa"); - var bundle2 = CreateTestBundle(manifestDigest: "sha256:bbb"); - - // Act - var result1 = await signer.SignAsync(bundle1); - var result2 = await signer.SignAsync(bundle2); - - // Assert - result1!.SignatureBase64.Should().NotBe(result2!.SignatureBase64); - } - - [Fact] - public async Task VerifyAsync_WhenDisabled_ReturnsSigningDisabled() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions { Mode = "none" }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - - // Act - var result = await signer.VerifyAsync(bundle); - - // Assert - 
result.Should().Be(AirGapBundleVerificationResult.SigningDisabled); - } - - [Fact] - public async Task VerifyAsync_WhenNoSignature_ReturnsMissingSignature() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(signature: null); - - // Act - var result = await signer.VerifyAsync(bundle); - - // Assert - result.Should().Be(AirGapBundleVerificationResult.MissingSignature); - } - - [Fact] - public async Task VerifyAsync_WithValidSignature_ReturnsValid() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - - // Sign the bundle first - var signResult = await signer.SignAsync(bundle); - var signedBundle = bundle with { Signature = signResult!.SignatureBase64, SignedBy = signResult.KeyId }; - - // Act - var verifyResult = await signer.VerifyAsync(signedBundle); - - // Assert - verifyResult.Should().Be(AirGapBundleVerificationResult.Valid); - } - - [Fact] - public async Task VerifyAsync_WithTamperedSignature_ReturnsInvalid() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - - // Sign and then tamper - var signResult = await signer.SignAsync(bundle); - var tamperedBundle = bundle with - { - Signature = signResult!.SignatureBase64, - ManifestDigest = "sha256:tampered" - }; - - // Act - var verifyResult = await signer.VerifyAsync(tamperedBundle); - - // Assert - verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature); - } - - [Fact] - public async Task VerifyAsync_WithInvalidBase64Signature_ReturnsInvalid() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = TestSecretBase64 - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(signature: "not-valid-base64!!!"); - - // Act - var verifyResult = await signer.VerifyAsync(bundle); - - // Assert - verifyResult.Should().Be(AirGapBundleVerificationResult.InvalidSignature); - } - - [Fact] - public void SignAsync_WithMissingSecret_ThrowsInvalidOperation() - { - // Arrange - var options = Options.Create(new AirGapBundleDsseOptions - { - Mode = "hmac", - SecretBase64 = null - }); - var signer = new AirGapBundleDsseSigner(options, NullLogger.Instance); - var bundle = CreateTestBundle(); - - // Act & Assert - var act = async () => await signer.SignAsync(bundle); - act.Should().ThrowAsync() - .WithMessage("*SecretBase64*"); + return new AirGapBundleDsseSigner(options, NullLogger.Instance); } private static AirGapBundle CreateTestBundle( @@ -232,7 +47,7 @@ public sealed class AirGapBundleDsseSignerTests { BundleId = Guid.Parse("11111111-1111-1111-1111-111111111111"), TenantId = "test-tenant", - CreatedAt = DateTimeOffset.Parse("2026-01-07T12:00:00Z"), + CreatedAt = DefaultCreatedAt, CreatedByNodeId = "test-node", JobLogs = new List(), ManifestDigest = manifestDigest ?? 
"sha256:abc123def456", diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapSyncServiceCollectionExtensionsTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapSyncServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000..49e2f8f8a --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/AirGapSyncServiceCollectionExtensionsTests.cs @@ -0,0 +1,45 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.AirGap.Sync.Services; +using StellaOps.AirGap.Sync.Stores; +using StellaOps.Determinism; +using StellaOps.HybridLogicalClock; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +[Trait("Category", TestCategories.Unit)] +[Intent(TestIntents.Operational, "Ensures AirGap sync registrations include core services and time provider.")] +public sealed class AirGapSyncServiceCollectionExtensionsTests +{ + [Fact] + public void AddAirGapSyncServices_RegistersExpectedServices() + { + var services = new TestServiceCollection(); + + services.AddAirGapSyncServices("node-a"); + + services.Any(sd => sd.ServiceType == typeof(TimeProvider)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IHybridLogicalClock)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IHlcStateStore)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IConflictResolver)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IHlcMergeService)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IAirGapBundleImporter)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IAirGapBundleExporter)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IOfflineJobLogStore)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IOfflineHlcManager)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IGuidProvider)).Should().BeTrue(); + services.Any(sd => sd.ServiceType == typeof(IAirGapBundleDsseSigner)).Should().BeTrue(); + } + + private sealed class TestServiceCollection : List, IServiceCollection + { + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Multi.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Multi.cs new file mode 100644 index 000000000..940fe8848 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Multi.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_ThreeEntriesSamePayload_TakesEarliestDropsTwo() + { + var jobId = Guid.Parse("44444444-4444-4444-4444-444444444444"); + var payloadHash = CreatePayloadHash(0xCC); + + var entryA = CreateEntryWithPayloadHash("node-a", 150, 0, jobId, payloadHash); + var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); + var entryC = CreateEntryWithPayloadHash("node-c", 200, 0, jobId, payloadHash); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB), + ("node-c", entryC) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.DuplicateTimestamp); + result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); + result.SelectedEntry.Should().Be(entryB); + result.DroppedEntries.Should().HaveCount(2); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Tiebreakers.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Tiebreakers.cs new file mode 100644 index 000000000..0e909b099 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.Tiebreakers.cs @@ -0,0 +1,53 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_SamePhysicalTime_UsesLogicalCounter() + { + var jobId = Guid.Parse("55555555-5555-5555-5555-555555555555"); + var payloadHash = CreatePayloadHash(0xDD); + + var entryA = CreateEntryWithPayloadHash("node-a", 100, 2, jobId, payloadHash); + var entryB = CreateEntryWithPayloadHash("node-b", 100, 1, jobId, payloadHash); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.SelectedEntry.Should().Be(entryB); + result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA); + } + + [Fact] + public void Resolve_SamePhysicalTimeAndCounter_UsesNodeId() + { + var jobId = Guid.Parse("66666666-6666-6666-6666-666666666666"); + var payloadHash = CreatePayloadHash(0xEE); + + var entryA = CreateEntryWithPayloadHash("alpha-node", 100, 0, jobId, payloadHash); + var entryB = CreateEntryWithPayloadHash("beta-node", 100, 0, jobId, payloadHash); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("beta-node", entryB), + ("alpha-node", entryA) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.SelectedEntry.Should().Be(entryA); + result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.cs new file mode 100644 index 000000000..00f966510 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.DuplicateTimestamp.cs @@ -0,0 +1,57 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_TwoEntriesSamePayload_TakesEarliest() + { + var jobId = Guid.Parse("22222222-2222-2222-2222-222222222222"); + var payloadHash = CreatePayloadHash(0xAA); + + var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash); + var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHash); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.DuplicateTimestamp); + result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); + result.SelectedEntry.Should().Be(entryA); + result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB); + } + + [Fact] + public void Resolve_TwoEntriesSamePayload_TakesEarliest_WhenSecondComesFirst() + { + var jobId = Guid.Parse("33333333-3333-3333-3333-333333333333"); + var payloadHash = CreatePayloadHash(0xBB); + + var entryA = CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash); + var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.DuplicateTimestamp); + result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); + result.SelectedEntry.Should().Be(entryB); + result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.EdgeCases.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.EdgeCases.cs new file mode 100644 index 000000000..208d249ac --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.EdgeCases.cs @@ -0,0 +1,34 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_NullConflicting_ThrowsArgumentNullException() + { + var jobId = Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"); + + var act = () => _sut.Resolve(jobId, null!); + + act.Should().Throw<ArgumentNullException>() + .WithParameterName("conflicting"); + } + + [Fact] + public void Resolve_EmptyConflicting_ThrowsArgumentException() + { + var jobId = Guid.Parse("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"); + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>(); + + var act = () => _sut.Resolve(jobId, conflicting); + + act.Should().Throw<ArgumentException>() + .WithParameterName("conflicting"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.Helpers.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.Helpers.cs new file mode 100644 index 000000000..d5964804e --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.Helpers.cs @@ -0,0 +1,55 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using StellaOps.AirGap.Sync.Models; +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + private static byte[] CreatePayloadHash(byte prefix) + { + var hash = new byte[32]; + hash[0] = prefix; + return hash; + } + + private static OfflineJobLogEntry CreateEntry( + string nodeId, + long physicalTime, + int logicalCounter, + Guid jobId) + { + var payloadHash = new byte[32]; + jobId.ToByteArray().CopyTo(payloadHash, 0); + + return CreateEntryWithPayloadHash(nodeId, physicalTime, logicalCounter, jobId, payloadHash); + } + + private static OfflineJobLogEntry CreateEntryWithPayloadHash( + string nodeId, + long physicalTime, + int logicalCounter, + Guid jobId, + byte[] payloadHash) + { + var hlc = new HlcTimestamp + { + PhysicalTime = physicalTime, + NodeId = nodeId, + LogicalCounter = logicalCounter + }; + + return new OfflineJobLogEntry + { + NodeId = nodeId, + THlc = hlc, + JobId = jobId, + Payload = $"{{\"id\":\"{jobId}\"}}", + PayloadHash = payloadHash, + Link = new byte[32], + EnqueuedAt = FixedEnqueuedAt + }; + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.PayloadMismatch.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.PayloadMismatch.cs new file mode 100644 index 000000000..d751bab58 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.PayloadMismatch.cs @@ -0,0 +1,85 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_DifferentPayloads_ReturnsError() + { + var jobId = Guid.Parse("77777777-7777-7777-7777-777777777777"); + + var payloadHashA = CreatePayloadHash(0x01); + var payloadHashB = CreatePayloadHash(0x02); + + var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA); + var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHashB); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.PayloadMismatch); + result.Resolution.Should().Be(ResolutionStrategy.Error); + result.Error.Should().NotBeNullOrEmpty(); + result.Error.Should().Contain(jobId.ToString()); + result.Error.Should().Contain("conflicting payloads"); + result.SelectedEntry.Should().BeNull(); + result.DroppedEntries.Should().BeNull(); + } + + [Fact] + public void Resolve_ThreeDifferentPayloads_ReturnsError() + { + var jobId = Guid.Parse("88888888-8888-8888-8888-888888888888"); + + var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, CreatePayloadHash(0x01)); + var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, CreatePayloadHash(0x02)); + var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, CreatePayloadHash(0x03)); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB), + ("node-c", entryC) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.PayloadMismatch); + result.Resolution.Should().Be(ResolutionStrategy.Error); + } + + [Fact] + public void Resolve_TwoSameOneUnique_ReturnsError() + { + var jobId = Guid.Parse("99999999-9999-9999-9999-999999999999"); + var sharedPayload = 
CreatePayloadHash(0xAA); + var uniquePayload = CreatePayloadHash(0xBB); + + var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, sharedPayload); + var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, sharedPayload); + var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, uniquePayload); + + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entryA), + ("node-b", entryB), + ("node-c", entryC) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.PayloadMismatch); + result.Resolution.Should().Be(ResolutionStrategy.Error); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.SingleEntry.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.SingleEntry.cs new file mode 100644 index 000000000..637f81029 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.SingleEntry.cs @@ -0,0 +1,30 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class ConflictResolverTests +{ + [Fact] + public void Resolve_SingleEntry_ReturnsDuplicateTimestampWithTakeEarliest() + { + var jobId = Guid.Parse("11111111-1111-1111-1111-111111111111"); + var entry = CreateEntry("node-a", 100, 0, jobId); + var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> + { + ("node-a", entry) + }; + + var result = _sut.Resolve(jobId, conflicting); + + result.Type.Should().Be(ConflictType.DuplicateTimestamp); + result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); + result.SelectedEntry.Should().Be(entry); + result.DroppedEntries.Should().BeEmpty(); + result.Error.Should().BeNull(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.cs index 491c36be1..c4d5823d0 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/ConflictResolverTests.cs @@ -1,342 +1,25 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Services; -using StellaOps.HybridLogicalClock; using StellaOps.TestKit; +using StellaOps.TestKit.Traits; using Xunit; namespace StellaOps.AirGap.Sync.Tests; -/// -/// Unit tests for . 
-/// [Trait("Category", TestCategories.Unit)] -public sealed class ConflictResolverTests +[Intent(TestIntents.Operational, "Validates conflict resolution ordering and payload checks.")] +public sealed partial class ConflictResolverTests { + private static readonly DateTimeOffset FixedEnqueuedAt = + new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); + private readonly ConflictResolver _sut; public ConflictResolverTests() { _sut = new ConflictResolver(NullLogger.Instance); } - - #region Single Entry Tests - - [Fact] - public void Resolve_SingleEntry_ReturnsDuplicateTimestampWithTakeEarliest() - { - // Arrange - var jobId = Guid.Parse("11111111-1111-1111-1111-111111111111"); - var entry = CreateEntry("node-a", 100, 0, jobId); - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entry) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.Type.Should().Be(ConflictType.DuplicateTimestamp); - result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); - result.SelectedEntry.Should().Be(entry); - result.DroppedEntries.Should().BeEmpty(); - result.Error.Should().BeNull(); - } - - #endregion - - #region Duplicate Timestamp Tests (Same Payload) - - [Fact] - public void Resolve_TwoEntriesSamePayload_TakesEarliest() - { - // Arrange - var jobId = Guid.Parse("22222222-2222-2222-2222-222222222222"); - var payloadHash = CreatePayloadHash(0xAA); - - var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash); - var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHash); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.Type.Should().Be(ConflictType.DuplicateTimestamp); - result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); - result.SelectedEntry.Should().Be(entryA); - result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB); - } - - [Fact] - public void Resolve_TwoEntriesSamePayload_TakesEarliest_WhenSecondComesFirst() - { - // Arrange - Earlier entry is second in list - var jobId = Guid.Parse("33333333-3333-3333-3333-333333333333"); - var payloadHash = CreatePayloadHash(0xBB); - - var entryA = CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash); - var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earlier - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - Should take entryB (earlier) - result.Type.Should().Be(ConflictType.DuplicateTimestamp); - result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); - result.SelectedEntry.Should().Be(entryB); - result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA); - } - - [Fact] - public void Resolve_ThreeEntriesSamePayload_TakesEarliestDropsTwo() - { - // Arrange - var jobId = Guid.Parse("44444444-4444-4444-4444-444444444444"); - var payloadHash = CreatePayloadHash(0xCC); - - var entryA = CreateEntryWithPayloadHash("node-a", 150, 0, jobId, payloadHash); - var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earliest - var entryC = CreateEntryWithPayloadHash("node-c", 200, 0, jobId, payloadHash); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB), - ("node-c", entryC) - }; 
- - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.Type.Should().Be(ConflictType.DuplicateTimestamp); - result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest); - result.SelectedEntry.Should().Be(entryB); - result.DroppedEntries.Should().HaveCount(2); - } - - [Fact] - public void Resolve_SamePhysicalTime_UsesLogicalCounter() - { - // Arrange - var jobId = Guid.Parse("55555555-5555-5555-5555-555555555555"); - var payloadHash = CreatePayloadHash(0xDD); - - var entryA = CreateEntryWithPayloadHash("node-a", 100, 2, jobId, payloadHash); // Higher counter - var entryB = CreateEntryWithPayloadHash("node-b", 100, 1, jobId, payloadHash); // Earlier - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.SelectedEntry.Should().Be(entryB); // Lower logical counter - result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA); - } - - [Fact] - public void Resolve_SamePhysicalTimeAndCounter_UsesNodeId() - { - // Arrange - var jobId = Guid.Parse("66666666-6666-6666-6666-666666666666"); - var payloadHash = CreatePayloadHash(0xEE); - - var entryA = CreateEntryWithPayloadHash("alpha-node", 100, 0, jobId, payloadHash); - var entryB = CreateEntryWithPayloadHash("beta-node", 100, 0, jobId, payloadHash); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("beta-node", entryB), - ("alpha-node", entryA) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - "alpha-node" < "beta-node" alphabetically - result.SelectedEntry.Should().Be(entryA); - result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB); - } - - #endregion - - #region Payload Mismatch Tests - - [Fact] - public void Resolve_DifferentPayloads_ReturnsError() - { - // Arrange - var jobId = Guid.Parse("77777777-7777-7777-7777-777777777777"); - - var payloadHashA = CreatePayloadHash(0x01); - var payloadHashB = CreatePayloadHash(0x02); - - var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA); - var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHashB); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.Type.Should().Be(ConflictType.PayloadMismatch); - result.Resolution.Should().Be(ResolutionStrategy.Error); - result.Error.Should().NotBeNullOrEmpty(); - result.Error.Should().Contain(jobId.ToString()); - result.Error.Should().Contain("conflicting payloads"); - result.SelectedEntry.Should().BeNull(); - result.DroppedEntries.Should().BeNull(); - } - - [Fact] - public void Resolve_ThreeDifferentPayloads_ReturnsError() - { - // Arrange - var jobId = Guid.Parse("88888888-8888-8888-8888-888888888888"); - - var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, CreatePayloadHash(0x01)); - var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, CreatePayloadHash(0x02)); - var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, CreatePayloadHash(0x03)); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB), - ("node-c", entryC) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - result.Type.Should().Be(ConflictType.PayloadMismatch); - 
result.Resolution.Should().Be(ResolutionStrategy.Error); - } - - [Fact] - public void Resolve_TwoSameOneUnique_ReturnsError() - { - // Arrange - 2 entries with same payload, 1 with different - var jobId = Guid.Parse("99999999-9999-9999-9999-999999999999"); - var sharedPayload = CreatePayloadHash(0xAA); - var uniquePayload = CreatePayloadHash(0xBB); - - var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, sharedPayload); - var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, sharedPayload); - var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, uniquePayload); - - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)> - { - ("node-a", entryA), - ("node-b", entryB), - ("node-c", entryC) - }; - - // Act - var result = _sut.Resolve(jobId, conflicting); - - // Assert - Should be error due to different payloads - result.Type.Should().Be(ConflictType.PayloadMismatch); - result.Resolution.Should().Be(ResolutionStrategy.Error); - } - - #endregion - - #region Edge Cases - - [Fact] - public void Resolve_NullConflicting_ThrowsArgumentNullException() - { - // Arrange - var jobId = Guid.NewGuid(); - - // Act & Assert - var act = () => _sut.Resolve(jobId, null!); - act.Should().Throw() - .WithParameterName("conflicting"); - } - - [Fact] - public void Resolve_EmptyConflicting_ThrowsArgumentException() - { - // Arrange - var jobId = Guid.NewGuid(); - var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>(); - - // Act & Assert - var act = () => _sut.Resolve(jobId, conflicting); - act.Should().Throw() - .WithParameterName("conflicting"); - } - - #endregion - - #region Helper Methods - - private static byte[] CreatePayloadHash(byte prefix) - { - var hash = new byte[32]; - hash[0] = prefix; - return hash; - } - - private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId) - { - var payloadHash = new byte[32]; - jobId.ToByteArray().CopyTo(payloadHash, 0); - - return CreateEntryWithPayloadHash(nodeId, physicalTime, logicalCounter, jobId, payloadHash); - } - - private static OfflineJobLogEntry CreateEntryWithPayloadHash( - string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash) - { - var hlc = new HlcTimestamp - { - PhysicalTime = physicalTime, - NodeId = nodeId, - LogicalCounter = logicalCounter - }; - - return new OfflineJobLogEntry - { - NodeId = nodeId, - THlc = hlc, - JobId = jobId, - Payload = $"{{\"id\":\"{jobId}\"}}", - PayloadHash = payloadHash, - Link = new byte[32], - EnqueuedAt = DateTimeOffset.UtcNow - }; - } - - #endregion } diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.List.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.List.cs new file mode 100644 index 000000000..d9da4aa3a --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.List.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class FileBasedJobSyncTransportTests +{ + [Fact] + public async Task ListAvailableBundlesAsync_ReturnsBundlesOrderedByCreatedAt() + { + using var outputRoot = new TempDirectory("airgap-sync-out"); + using var inputRoot = new TempDirectory("airgap-sync-in"); + var exporter = new StubBundleExporter(); + var importer = new StubBundleImporter(); + var sut = CreateTransport(outputRoot, inputRoot, exporter, importer); + var firstId = Guid.Parse("55555555-5555-5555-5555-555555555555"); + var secondId = Guid.Parse("44444444-4444-4444-4444-444444444444"); + var firstCreated = new DateTimeOffset(2026, 1, 6, 12, 0, 0, TimeSpan.Zero); + var secondCreated = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); + + WriteBundleJson(inputRoot.Path, firstId, firstCreated, 2); + WriteBundleJson(inputRoot.Path, secondId, secondCreated, 1); + + var result = await sut.ListAvailableBundlesAsync(inputRoot.Path); + + result.Should().HaveCount(2); + result[0].BundleId.Should().Be(secondId); + result[0].EntryCount.Should().Be(1); + result[1].BundleId.Should().Be(firstId); + result[1].EntryCount.Should().Be(2); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.ReceiveRejectsEscaping.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.ReceiveRejectsEscaping.cs new file mode 100644 index 000000000..20d7ef5d3 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.ReceiveRejectsEscaping.cs @@ -0,0 +1,28 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class FileBasedJobSyncTransportTests +{ + [Fact] + public async Task ReceiveBundleAsync_RejectsPathOutsideRoot() + { + using var outputRoot = new TempDirectory("airgap-sync-out"); + using var inputRoot = new TempDirectory("airgap-sync-in"); + var exporter = new StubBundleExporter(); + var importer = new StubBundleImporter(); + var sut = CreateTransport(outputRoot, inputRoot, exporter, importer); + var source = $"..{Path.DirectorySeparatorChar}escape"; + + var result = await sut.ReceiveBundleAsync(source); + + result.Should().BeNull(); + importer.ImportFromFileCalls.Should().Be(0); + importer.LastPath.Should().BeNull(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Send.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Send.cs new file mode 100644 index 000000000..d5a237140 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Send.cs @@ -0,0 +1,35 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class FileBasedJobSyncTransportTests +{ + [Fact] + public async Task SendBundleAsync_WritesBundleUnderOutputRoot() + { + using var outputRoot = new TempDirectory("airgap-sync-out"); + using var inputRoot = new TempDirectory("airgap-sync-in"); + var exporter = new StubBundleExporter { Payload = "{\"ok\":true}" }; + var importer = new StubBundleImporter(); + var bundleId = Guid.Parse("77777777-7777-7777-7777-777777777777"); + var bundle = CreateBundle(bundleId); + var now = new DateTimeOffset(2026, 1, 7, 12, 30, 0, TimeSpan.Zero); + var sut = CreateTransport(outputRoot, inputRoot, exporter, importer, new FixedTimeProvider(now)); + + var result = await sut.SendBundleAsync(bundle, "exports"); + + result.Success.Should().BeTrue(); + result.TransmittedAt.Should().Be(now); + result.Destination.Should().NotBeNullOrWhiteSpace(); + exporter.LastPath.Should().NotBeNull(); + exporter.LastPath!.Should().StartWith(outputRoot.Path); + exporter.LastPath.Should().Contain($"job-sync-{bundleId:N}.json"); + result.Destination.Should().Be(exporter.LastPath); + result.SizeBytes.Should().Be(new FileInfo(exporter.LastPath).Length); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.SendRejectsEscaping.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.SendRejectsEscaping.cs new file mode 100644 index 000000000..cefb3d51d --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.SendRejectsEscaping.cs @@ -0,0 +1,32 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class FileBasedJobSyncTransportTests +{ + [Fact] + public async Task SendBundleAsync_RejectsPathOutsideRoot() + { + using var outputRoot = new TempDirectory("airgap-sync-out"); + using var inputRoot = new TempDirectory("airgap-sync-in"); + var exporter = new StubBundleExporter(); + var importer = new StubBundleImporter(); + var bundle = CreateBundle(Guid.Parse("66666666-6666-6666-6666-666666666666")); + var now = new DateTimeOffset(2026, 1, 7, 12, 45, 0, TimeSpan.Zero); + var sut = CreateTransport(outputRoot, inputRoot, exporter, importer, new FixedTimeProvider(now)); + var destination = $"..{Path.DirectorySeparatorChar}escape"; + + var result = await sut.SendBundleAsync(bundle, destination); + + result.Success.Should().BeFalse(); + result.Error.Should().Contain("escapes configured root"); + result.TransmittedAt.Should().Be(now); + exporter.ExportToFileCalls.Should().Be(0); + exporter.LastPath.Should().BeNull(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Stubs.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Stubs.cs new file mode 100644 index 000000000..fff802cb2 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.Stubs.cs @@ -0,0 +1,88 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Services; + +namespace StellaOps.AirGap.Sync.Tests; + +internal sealed class StubBundleExporter : IAirGapBundleExporter +{ + public string? LastPath { get; private set; } + public AirGapBundle? 
LastBundle { get; private set; } + public int ExportToFileCalls { get; private set; } + public string Payload { get; set; } = "{}"; + + public Task ExportAsync( + string tenantId, + IReadOnlyList? nodeIds = null, + CancellationToken cancellationToken = default) + { + return Task.FromResult(new AirGapBundle + { + BundleId = Guid.Parse("99999999-9999-9999-9999-999999999999"), + TenantId = tenantId, + CreatedAt = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero), + CreatedByNodeId = "node-a", + JobLogs = Array.Empty(), + ManifestDigest = "sha256:stub-export" + }); + } + + public Task ExportToFileAsync( + AirGapBundle bundle, + string outputPath, + CancellationToken cancellationToken = default) + { + ExportToFileCalls++; + LastBundle = bundle; + LastPath = outputPath; + Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!); + File.WriteAllText(outputPath, Payload); + return Task.CompletedTask; + } + + public Task ExportToStringAsync( + AirGapBundle bundle, + CancellationToken cancellationToken = default) + { + LastBundle = bundle; + return Task.FromResult(Payload); + } +} + +internal sealed class StubBundleImporter : IAirGapBundleImporter +{ + public string? LastPath { get; private set; } + public int ImportFromFileCalls { get; private set; } + public AirGapBundle Result { get; set; } = new AirGapBundle + { + BundleId = Guid.Parse("88888888-8888-8888-8888-888888888888"), + TenantId = "test-tenant", + CreatedAt = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero), + CreatedByNodeId = "node-a", + JobLogs = Array.Empty(), + ManifestDigest = "sha256:stub-import" + }; + + public Task ImportFromFileAsync( + string inputPath, + CancellationToken cancellationToken = default) + { + ImportFromFileCalls++; + LastPath = inputPath; + return Task.FromResult(Result); + } + + public BundleValidationResult Validate(AirGapBundle bundle) + => new() + { + IsValid = true, + Issues = Array.Empty() + }; + + public Task ImportFromStringAsync( + string json, + CancellationToken cancellationToken = default) + => Task.FromResult(Result); +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.cs new file mode 100644 index 000000000..7959feafd --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/FileBasedJobSyncTransportTests.cs @@ -0,0 +1,71 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using StellaOps.AirGap.Sync.Transport; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +[Trait("Category", TestCategories.Unit)] +[Intent(TestIntents.Operational, "Validates file-based transport path resolution and timestamps.")] +public sealed partial class FileBasedJobSyncTransportTests +{ + private static readonly DateTimeOffset FixedNow = + new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); + + private static AirGapBundle CreateBundle(Guid bundleId) + { + return new AirGapBundle + { + BundleId = bundleId, + TenantId = "test-tenant", + CreatedAt = FixedNow, + CreatedByNodeId = "node-a", + JobLogs = Array.Empty(), + ManifestDigest = "sha256:test-bundle" + }; + } + + private static string WriteBundleJson(string directory, Guid bundleId, DateTimeOffset createdAt, int entryCount) + { + var entries = entryCount switch + { + 0 => "[]", + 1 => "[{}]", + 2 => "[{},{}]", + _ => throw new ArgumentOutOfRangeException(nameof(entryCount)) + }; + + var json = $"{{\"bundleId\":\"{bundleId:D}\",\"tenantId\":\"test-tenant\",\"createdByNodeId\":\"node-a\",\"createdAt\":\"{createdAt:O}\",\"jobLogs\":[{{\"entries\":{entries}}}]}}"; + var path = Path.Combine(directory, $"job-sync-{bundleId:N}.json"); + File.WriteAllText(path, json); + return path; + } + + private static FileBasedJobSyncTransport CreateTransport( + TempDirectory outputRoot, + TempDirectory inputRoot, + StubBundleExporter exporter, + StubBundleImporter importer, + TimeProvider? timeProvider = null) + { + var options = Options.Create(new FileBasedJobSyncTransportOptions + { + OutputDirectory = outputRoot.Path, + InputDirectory = inputRoot.Path + }); + + return new FileBasedJobSyncTransport( + exporter, + importer, + options, + timeProvider ?? new FixedTimeProvider(FixedNow), + NullLogger.Instance); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Determinism.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Determinism.cs new file mode 100644 index 000000000..0c728077b --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Determinism.cs @@ -0,0 +1,58 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class HlcMergeServiceTests +{ + [Fact] + public async Task MergeAsync_SameInput_ProducesSameOutputAsync() + { + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), + CreateEntry("node-a", 300, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000")) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), + CreateEntry("node-b", 400, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000")) + }); + + var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB }); + var result2 = await _sut.MergeAsync(new[] { nodeA, nodeB }); + + result1.MergedEntries.Should().HaveCount(result2.MergedEntries.Count); + for (var i = 0; i < result1.MergedEntries.Count; i++) + { + result1.MergedEntries[i].JobId.Should().Be(result2.MergedEntries[i].JobId); + result1.MergedEntries[i].THlc.Should().Be(result2.MergedEntries[i].THlc); + result1.MergedEntries[i].MergedLink.Should().BeEquivalentTo(result2.MergedEntries[i].MergedLink); + } + result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead); + } + + [Fact] + public async Task MergeAsync_InputOrderIndependent_ProducesSameOutputAsync() + { + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")) + }); + + var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB }); + var result2 = await _sut.MergeAsync(new[] { nodeB, nodeA }); + + result1.MergedEntries.Select(e => e.JobId).Should() + .BeEquivalentTo(result2.MergedEntries.Select(e => e.JobId)); + result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Duplicates.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Duplicates.cs new file mode 100644 index 000000000..3ecee0799 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Duplicates.cs @@ -0,0 +1,90 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class HlcMergeServiceTests +{ + [Fact] + public async Task MergeAsync_DuplicateJobId_SamePayload_TakesEarliestAsync() + { + var jobId = Guid.Parse("dddddddd-dddd-dddd-dddd-dddddddddddd"); + var payloadHash = new byte[32]; + payloadHash[0] = 0xAA; + + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHash) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); + + result.MergedEntries.Should().ContainSingle(); + result.MergedEntries[0].SourceNodeId.Should().Be("node-a"); + result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); + result.Duplicates.Should().ContainSingle(); + result.Duplicates[0].JobId.Should().Be(jobId); + result.Duplicates[0].NodeId.Should().Be("node-b"); + result.Duplicates[0].THlc.PhysicalTime.Should().Be(105); + } + + [Fact] + public async Task MergeAsync_TriplicateJobId_SamePayload_TakesEarliestAsync() + { + var jobId = Guid.Parse("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee"); + var payloadHash = new byte[32]; + payloadHash[0] = 0xBB; + + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash) + }); + var nodeC = CreateNodeLog("node-c", new[] + { + CreateEntryWithPayloadHash("node-c", 150, 0, jobId, payloadHash) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC }); + + result.MergedEntries.Should().ContainSingle(); + result.MergedEntries[0].SourceNodeId.Should().Be("node-b"); + result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); + result.Duplicates.Should().HaveCount(2); + } + + [Fact] + public async Task MergeAsync_DuplicateJobId_DifferentPayload_ThrowsErrorAsync() + { + var jobId = Guid.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff"); + var payloadHashA = new byte[32]; + payloadHashA[0] = 0x01; + var payloadHashB = new byte[32]; + payloadHashB[0] = 0x02; + + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHashB) + }); + + var act = () => _sut.MergeAsync(new[] { nodeA, nodeB }); + + await act.Should().ThrowAsync() + .WithMessage("*conflicting payloads*"); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Helpers.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Helpers.cs new file mode 100644 index 000000000..79a48c1bd --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.Helpers.cs @@ -0,0 +1,87 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class HlcMergeServiceTests +{ + private static NodeJobLog CreateNodeLog(string nodeId, IEnumerable entries) + { + var entryList = entries.ToList(); + var lastEntry = entryList.LastOrDefault(); + + return new NodeJobLog + { + NodeId = nodeId, + Entries = entryList, + LastHlc = lastEntry?.THlc ?? 
new HlcTimestamp + { + PhysicalTime = 0, + NodeId = nodeId, + LogicalCounter = 0 + }, + ChainHead = lastEntry?.Link ?? new byte[32] + }; + } + + private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId) + { + var payloadHash = new byte[32]; + jobId.ToByteArray().CopyTo(payloadHash, 0); + + var hlc = new HlcTimestamp + { + PhysicalTime = physicalTime, + NodeId = nodeId, + LogicalCounter = logicalCounter + }; + + return new OfflineJobLogEntry + { + NodeId = nodeId, + THlc = hlc, + JobId = jobId, + Payload = $"{{\"id\":\"{jobId}\"}}", + PayloadHash = payloadHash, + Link = new byte[32], + EnqueuedAt = FixedEnqueuedAt + }; + } + + private static OfflineJobLogEntry CreateEntryWithPayloadHash( + string nodeId, + long physicalTime, + int logicalCounter, + Guid jobId, + byte[] payloadHash) + { + var hlc = new HlcTimestamp + { + PhysicalTime = physicalTime, + NodeId = nodeId, + LogicalCounter = logicalCounter + }; + + return new OfflineJobLogEntry + { + NodeId = nodeId, + THlc = hlc, + JobId = jobId, + Payload = $"{{\"id\":\"{jobId}\"}}", + PayloadHash = payloadHash, + Link = new byte[32], + EnqueuedAt = FixedEnqueuedAt + }; + } + + private static Guid CreateDeterministicGuid(int nodeIndex, int entryIndex) + { + var bytes = new byte[16]; + bytes[0] = (byte)nodeIndex; + bytes[1] = (byte)entryIndex; + return new Guid(bytes); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeBasics.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeBasics.cs new file mode 100644 index 000000000..087efe157 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeBasics.cs @@ -0,0 +1,44 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+//
+using FluentAssertions;
+using StellaOps.AirGap.Sync.Models;
+using Xunit;
+
+namespace StellaOps.AirGap.Sync.Tests;
+
+public sealed partial class HlcMergeServiceTests
+{
+    [Fact]
+    public async Task MergeAsync_EmptyInput_ReturnsEmptyResultAsync()
+    {
+        var nodeLogs = new List<NodeJobLog>();
+
+        var result = await _sut.MergeAsync(nodeLogs);
+
+        result.MergedEntries.Should().BeEmpty();
+        result.Duplicates.Should().BeEmpty();
+        result.SourceNodes.Should().BeEmpty();
+        result.MergedChainHead.Should().BeNull();
+    }
+
+    [Fact]
+    public async Task MergeAsync_SingleNode_PreservesOrderAsync()
+    {
+        var nodeLog = CreateNodeLog("node-a", new[]
+        {
+            CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")),
+            CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")),
+            CreateEntry("node-a", 300, 0, Guid.Parse("33333333-3333-3333-3333-333333333333"))
+        });
+
+        var result = await _sut.MergeAsync(new[] { nodeLog });
+
+        result.MergedEntries.Should().HaveCount(3);
+        result.MergedEntries[0].JobId.Should().Be(Guid.Parse("11111111-1111-1111-1111-111111111111"));
+        result.MergedEntries[1].JobId.Should().Be(Guid.Parse("22222222-2222-2222-2222-222222222222"));
+        result.MergedEntries[2].JobId.Should().Be(Guid.Parse("33333333-3333-3333-3333-333333333333"));
+        result.Duplicates.Should().BeEmpty();
+        result.SourceNodes.Should().ContainSingle().Which.Should().Be("node-a");
+    }
+}
diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeOrdering.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeOrdering.cs
new file mode 100644
index 000000000..adb35a911
--- /dev/null
+++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MergeOrdering.cs
@@ -0,0 +1,96 @@
+//
+// Copyright (c) StellaOps. Licensed under BUSL-1.1.
+// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class HlcMergeServiceTests +{ + [Fact] + public async Task MergeAsync_TwoNodes_MergesByHlcOrderAsync() + { + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), + CreateEntry("node-a", 102, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000")) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntry("node-b", 101, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), + CreateEntry("node-b", 103, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000")) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); + + result.MergedEntries.Should().HaveCount(4); + result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); + result.MergedEntries[1].THlc.PhysicalTime.Should().Be(101); + result.MergedEntries[2].THlc.PhysicalTime.Should().Be(102); + result.MergedEntries[3].THlc.PhysicalTime.Should().Be(103); + result.SourceNodes.Should().HaveCount(2); + } + + [Fact] + public async Task MergeAsync_SamePhysicalTime_OrdersByLogicalCounterAsync() + { + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")), + CreateEntry("node-a", 100, 2, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000003")) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntry("node-b", 100, 1, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")), + CreateEntry("node-b", 100, 3, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000004")) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); + + result.MergedEntries.Should().HaveCount(4); + result.MergedEntries[0].THlc.LogicalCounter.Should().Be(0); + result.MergedEntries[1].THlc.LogicalCounter.Should().Be(1); + result.MergedEntries[2].THlc.LogicalCounter.Should().Be(2); + result.MergedEntries[3].THlc.LogicalCounter.Should().Be(3); + } + + [Fact] + public async Task MergeAsync_SameTimeAndCounter_OrdersByNodeIdAsync() + { + var nodeA = CreateNodeLog("alpha-node", new[] + { + CreateEntry("alpha-node", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")) + }); + var nodeB = CreateNodeLog("beta-node", new[] + { + CreateEntry("beta-node", 100, 0, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); + + result.MergedEntries.Should().HaveCount(2); + result.MergedEntries[0].SourceNodeId.Should().Be("alpha-node"); + result.MergedEntries[1].SourceNodeId.Should().Be("beta-node"); + } + + [Fact] + public async Task MergeAsync_RecomputesUnifiedChainAsync() + { + var nodeLog = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")), + CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")) + }); + + var result = await _sut.MergeAsync(new[] { nodeLog }); + + result.MergedEntries.Should().HaveCount(2); + result.MergedEntries[0].MergedLink.Should().NotBeNull(); + result.MergedEntries[1].MergedLink.Should().NotBeNull(); + result.MergedChainHead.Should().NotBeNull(); + result.MergedEntries[0].MergedLink.Should().HaveCount(32); + result.MergedChainHead.Should().BeEquivalentTo(result.MergedEntries[1].MergedLink); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MultiNode.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MultiNode.cs new file mode 
100644 index 000000000..f0811908a --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.MultiNode.cs @@ -0,0 +1,59 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using StellaOps.AirGap.Sync.Models; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +public sealed partial class HlcMergeServiceTests +{ + [Fact] + public async Task MergeAsync_ThreeNodes_MergesCorrectlyAsync() + { + var nodeA = CreateNodeLog("node-a", new[] + { + CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), + CreateEntry("node-a", 400, 0, Guid.Parse("aaaaaaaa-0007-0000-0000-000000000000")) + }); + var nodeB = CreateNodeLog("node-b", new[] + { + CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), + CreateEntry("node-b", 500, 0, Guid.Parse("bbbbbbbb-0008-0000-0000-000000000000")) + }); + var nodeC = CreateNodeLog("node-c", new[] + { + CreateEntry("node-c", 300, 0, Guid.Parse("cccccccc-0003-0000-0000-000000000000")), + CreateEntry("node-c", 600, 0, Guid.Parse("cccccccc-0009-0000-0000-000000000000")) + }); + + var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC }); + + result.MergedEntries.Should().HaveCount(6); + result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should().BeInAscendingOrder(); + result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should() + .ContainInOrder(100L, 200L, 300L, 400L, 500L, 600L); + result.SourceNodes.Should().HaveCount(3); + } + + [Fact] + public async Task MergeAsync_ManyNodes_PreservesTotalOrderAsync() + { + var nodes = new List(); + for (var i = 0; i < 5; i++) + { + var nodeId = $"node-{i:D2}"; + nodes.Add(CreateNodeLog(nodeId, new[] + { + CreateEntry(nodeId, 100 + i * 10, 0, CreateDeterministicGuid(i, 0)), + CreateEntry(nodeId, 150 + i * 10, 0, CreateDeterministicGuid(i, 1)) + })); + } + + var result = await _sut.MergeAsync(nodes); + + result.MergedEntries.Should().HaveCount(10); + result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should().BeInAscendingOrder(); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.cs index e65dad226..3d71e08d6 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/HlcMergeServiceTests.cs @@ -1,23 +1,21 @@ -// +// // Copyright (c) StellaOps. Licensed under BUSL-1.1. // - -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.AirGap.Sync.Models; using StellaOps.AirGap.Sync.Services; -using StellaOps.HybridLogicalClock; using StellaOps.TestKit; +using StellaOps.TestKit.Traits; using Xunit; namespace StellaOps.AirGap.Sync.Tests; -/// -/// Unit tests for . 
-/// [Trait("Category", TestCategories.Unit)] -public sealed class HlcMergeServiceTests +[Intent(TestIntents.Operational, "Validates HLC merge ordering, duplicates, and determinism.")] +public sealed partial class HlcMergeServiceTests { + private static readonly DateTimeOffset FixedEnqueuedAt = + new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); + private readonly HlcMergeService _sut; private readonly ConflictResolver _conflictResolver; @@ -26,426 +24,4 @@ public sealed class HlcMergeServiceTests _conflictResolver = new ConflictResolver(NullLogger.Instance); _sut = new HlcMergeService(_conflictResolver, NullLogger.Instance); } - - #region OMP-014: Merge Algorithm Correctness - - [Fact] - public async Task MergeAsync_EmptyInput_ReturnsEmptyResult() - { - // Arrange - var nodeLogs = new List(); - - // Act - var result = await _sut.MergeAsync(nodeLogs); - - // Assert - result.MergedEntries.Should().BeEmpty(); - result.Duplicates.Should().BeEmpty(); - result.SourceNodes.Should().BeEmpty(); - result.MergedChainHead.Should().BeNull(); - } - - [Fact] - public async Task MergeAsync_SingleNode_PreservesOrder() - { - // Arrange - var nodeLog = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")), - CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")), - CreateEntry("node-a", 300, 0, Guid.Parse("33333333-3333-3333-3333-333333333333")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeLog }); - - // Assert - result.MergedEntries.Should().HaveCount(3); - result.MergedEntries[0].JobId.Should().Be(Guid.Parse("11111111-1111-1111-1111-111111111111")); - result.MergedEntries[1].JobId.Should().Be(Guid.Parse("22222222-2222-2222-2222-222222222222")); - result.MergedEntries[2].JobId.Should().Be(Guid.Parse("33333333-3333-3333-3333-333333333333")); - result.Duplicates.Should().BeEmpty(); - result.SourceNodes.Should().ContainSingle().Which.Should().Be("node-a"); - } - - [Fact] - public async Task MergeAsync_TwoNodes_MergesByHlcOrder() - { - // Arrange - Two nodes with interleaved HLC timestamps - // Node A: T=100, T=102 - // Node B: T=101, T=103 - // Expected order: 100, 101, 102, 103 - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), - CreateEntry("node-a", 102, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000")) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntry("node-b", 101, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), - CreateEntry("node-b", 103, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); - - // Assert - result.MergedEntries.Should().HaveCount(4); - result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); - result.MergedEntries[1].THlc.PhysicalTime.Should().Be(101); - result.MergedEntries[2].THlc.PhysicalTime.Should().Be(102); - result.MergedEntries[3].THlc.PhysicalTime.Should().Be(103); - result.SourceNodes.Should().HaveCount(2); - } - - [Fact] - public async Task MergeAsync_SamePhysicalTime_OrdersByLogicalCounter() - { - // Arrange - Same physical time, different logical counters - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")), - CreateEntry("node-a", 100, 2, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000003")) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntry("node-b", 100, 1, 
Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")), - CreateEntry("node-b", 100, 3, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000004")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); - - // Assert - result.MergedEntries.Should().HaveCount(4); - result.MergedEntries[0].THlc.LogicalCounter.Should().Be(0); - result.MergedEntries[1].THlc.LogicalCounter.Should().Be(1); - result.MergedEntries[2].THlc.LogicalCounter.Should().Be(2); - result.MergedEntries[3].THlc.LogicalCounter.Should().Be(3); - } - - [Fact] - public async Task MergeAsync_SameTimeAndCounter_OrdersByNodeId() - { - // Arrange - Same physical time and counter, different node IDs - var nodeA = CreateNodeLog("alpha-node", new[] - { - CreateEntry("alpha-node", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")) - }); - var nodeB = CreateNodeLog("beta-node", new[] - { - CreateEntry("beta-node", 100, 0, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); - - // Assert - "alpha-node" < "beta-node" alphabetically - result.MergedEntries.Should().HaveCount(2); - result.MergedEntries[0].SourceNodeId.Should().Be("alpha-node"); - result.MergedEntries[1].SourceNodeId.Should().Be("beta-node"); - } - - [Fact] - public async Task MergeAsync_RecomputesUnifiedChain() - { - // Arrange - var nodeLog = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")), - CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeLog }); - - // Assert - Chain should be recomputed - result.MergedEntries.Should().HaveCount(2); - result.MergedEntries[0].MergedLink.Should().NotBeNull(); - result.MergedEntries[1].MergedLink.Should().NotBeNull(); - result.MergedChainHead.Should().NotBeNull(); - - // First entry's link should be computed from null prev_link - result.MergedEntries[0].MergedLink.Should().HaveCount(32); - - // Chain head should equal last entry's merged link - result.MergedChainHead.Should().BeEquivalentTo(result.MergedEntries[1].MergedLink); - } - - #endregion - - #region OMP-015: Duplicate Detection - - [Fact] - public async Task MergeAsync_DuplicateJobId_SamePayload_TakesEarliest() - { - // Arrange - Same job ID (same payload hash) from two nodes - var jobId = Guid.Parse("dddddddd-dddd-dddd-dddd-dddddddddddd"); - var payloadHash = new byte[32]; - payloadHash[0] = 0xAA; - - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHash) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB }); - - // Assert - Should take earliest (T=100 from node-a) - result.MergedEntries.Should().ContainSingle(); - result.MergedEntries[0].SourceNodeId.Should().Be("node-a"); - result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); - - // Should report duplicate - result.Duplicates.Should().ContainSingle(); - result.Duplicates[0].JobId.Should().Be(jobId); - result.Duplicates[0].NodeId.Should().Be("node-b"); - result.Duplicates[0].THlc.PhysicalTime.Should().Be(105); - } - - [Fact] - public async Task MergeAsync_TriplicateJobId_SamePayload_TakesEarliest() - { - // Arrange - Same job ID from three nodes - var jobId = Guid.Parse("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee"); - var payloadHash = new byte[32]; - payloadHash[0] = 
0xBB; - - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash) // Earliest - }); - var nodeC = CreateNodeLog("node-c", new[] - { - CreateEntryWithPayloadHash("node-c", 150, 0, jobId, payloadHash) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC }); - - // Assert - Should take earliest (T=100 from node-b) - result.MergedEntries.Should().ContainSingle(); - result.MergedEntries[0].SourceNodeId.Should().Be("node-b"); - result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100); - - // Should report two duplicates - result.Duplicates.Should().HaveCount(2); - } - - [Fact] - public async Task MergeAsync_DuplicateJobId_DifferentPayload_ThrowsError() - { - // Arrange - Same job ID but different payload hashes (indicates bug) - var jobId = Guid.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff"); - var payloadHashA = new byte[32]; - payloadHashA[0] = 0x01; - var payloadHashB = new byte[32]; - payloadHashB[0] = 0x02; - - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHashB) - }); - - // Act & Assert - Should throw because payloads differ - var act = () => _sut.MergeAsync(new[] { nodeA, nodeB }); - await act.Should().ThrowAsync() - .WithMessage("*conflicting payloads*"); - } - - #endregion - - #region OMP-018: Multi-Node Merge - - [Fact] - public async Task MergeAsync_ThreeNodes_MergesCorrectly() - { - // Arrange - Three nodes with various timestamps - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), - CreateEntry("node-a", 400, 0, Guid.Parse("aaaaaaaa-0007-0000-0000-000000000000")) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), - CreateEntry("node-b", 500, 0, Guid.Parse("bbbbbbbb-0008-0000-0000-000000000000")) - }); - var nodeC = CreateNodeLog("node-c", new[] - { - CreateEntry("node-c", 300, 0, Guid.Parse("cccccccc-0003-0000-0000-000000000000")), - CreateEntry("node-c", 600, 0, Guid.Parse("cccccccc-0009-0000-0000-000000000000")) - }); - - // Act - var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC }); - - // Assert - result.MergedEntries.Should().HaveCount(6); - result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should() - .BeInAscendingOrder(); - result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should() - .ContainInOrder(100L, 200L, 300L, 400L, 500L, 600L); - result.SourceNodes.Should().HaveCount(3); - } - - [Fact] - public async Task MergeAsync_ManyNodes_PreservesTotalOrder() - { - // Arrange - 5 nodes with 2 entries each - var nodes = new List(); - for (int i = 0; i < 5; i++) - { - var nodeId = $"node-{i:D2}"; - nodes.Add(CreateNodeLog(nodeId, new[] - { - CreateEntry(nodeId, 100 + i * 10, 0, Guid.NewGuid()), - CreateEntry(nodeId, 150 + i * 10, 0, Guid.NewGuid()) - })); - } - - // Act - var result = await _sut.MergeAsync(nodes); - - // Assert - result.MergedEntries.Should().HaveCount(10); - result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should() - .BeInAscendingOrder(); - } - - #endregion - - #region OMP-019: Determinism Tests - - [Fact] - public async Task MergeAsync_SameInput_ProducesSameOutput() - { - // Arrange - 
var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")), - CreateEntry("node-a", 300, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000")) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")), - CreateEntry("node-b", 400, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000")) - }); - - // Act - Run merge twice - var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB }); - var result2 = await _sut.MergeAsync(new[] { nodeA, nodeB }); - - // Assert - Results should be identical - result1.MergedEntries.Should().HaveCount(result2.MergedEntries.Count); - for (int i = 0; i < result1.MergedEntries.Count; i++) - { - result1.MergedEntries[i].JobId.Should().Be(result2.MergedEntries[i].JobId); - result1.MergedEntries[i].THlc.Should().Be(result2.MergedEntries[i].THlc); - result1.MergedEntries[i].MergedLink.Should().BeEquivalentTo(result2.MergedEntries[i].MergedLink); - } - result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead); - } - - [Fact] - public async Task MergeAsync_InputOrderIndependent_ProducesSameOutput() - { - // Arrange - var nodeA = CreateNodeLog("node-a", new[] - { - CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")) - }); - var nodeB = CreateNodeLog("node-b", new[] - { - CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")) - }); - - // Act - Merge in different orders - var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB }); - var result2 = await _sut.MergeAsync(new[] { nodeB, nodeA }); - - // Assert - Results should be identical regardless of input order - result1.MergedEntries.Select(e => e.JobId).Should() - .BeEquivalentTo(result2.MergedEntries.Select(e => e.JobId)); - result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead); - } - - #endregion - - #region Helper Methods - - private static NodeJobLog CreateNodeLog(string nodeId, IEnumerable entries) - { - var entryList = entries.ToList(); - var lastEntry = entryList.LastOrDefault(); - - return new NodeJobLog - { - NodeId = nodeId, - Entries = entryList, - LastHlc = lastEntry?.THlc ?? new HlcTimestamp { PhysicalTime = 0, NodeId = nodeId, LogicalCounter = 0 }, - ChainHead = lastEntry?.Link ?? 
new byte[32] - }; - } - - private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId) - { - var payloadHash = new byte[32]; - jobId.ToByteArray().CopyTo(payloadHash, 0); - - var hlc = new HlcTimestamp - { - PhysicalTime = physicalTime, - NodeId = nodeId, - LogicalCounter = logicalCounter - }; - - return new OfflineJobLogEntry - { - NodeId = nodeId, - THlc = hlc, - JobId = jobId, - Payload = $"{{\"id\":\"{jobId}\"}}", - PayloadHash = payloadHash, - Link = new byte[32], - EnqueuedAt = DateTimeOffset.UtcNow - }; - } - - private static OfflineJobLogEntry CreateEntryWithPayloadHash( - string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash) - { - var hlc = new HlcTimestamp - { - PhysicalTime = physicalTime, - NodeId = nodeId, - LogicalCounter = logicalCounter - }; - - return new OfflineJobLogEntry - { - NodeId = nodeId, - THlc = hlc, - JobId = jobId, - Payload = $"{{\"id\":\"{jobId}\"}}", - PayloadHash = payloadHash, - Link = new byte[32], - EnqueuedAt = DateTimeOffset.UtcNow - }; - } - - #endregion } diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/OfflineHlcManagerTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/OfflineHlcManagerTests.cs new file mode 100644 index 000000000..5e31a468c --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/OfflineHlcManagerTests.cs @@ -0,0 +1,68 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Services; +using StellaOps.AirGap.Sync.Tests.TestUtilities; +using StellaOps.Determinism; +using StellaOps.HybridLogicalClock; +using StellaOps.TestKit; +using StellaOps.TestKit.Traits; +using System.Security.Cryptography; +using System.Text; +using Xunit; + +namespace StellaOps.AirGap.Sync.Tests; + +[Trait("Category", TestCategories.Unit)] +[Intent(TestIntents.Operational, "Ensures offline enqueue uses deterministic IDs and time provider.")] +public sealed class OfflineHlcManagerTests +{ + private static readonly DateTimeOffset FixedNow = + new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero); + + [Fact] + public async Task EnqueueOfflineAsync_UsesTimeProviderAndDeterministicJobIdAsync() + { + var hlcTimestamp = new HlcTimestamp + { + PhysicalTime = 100, + NodeId = "node-a", + LogicalCounter = 1 + }; + var hlc = new TestHybridLogicalClock("node-a", hlcTimestamp); + var store = new InMemoryOfflineJobLogStore(); + var timeProvider = new FixedTimeProvider(FixedNow); + + var manager = new OfflineHlcManager( + hlc, + store, + SystemGuidProvider.Instance, + timeProvider, + NullLogger.Instance); + + var result = await manager.EnqueueOfflineAsync( + new { Name = "payload" }, + "job-key"); + + var expectedJobId = ComputeExpectedJobId("job-key"); + + result.JobId.Should().Be(expectedJobId); + result.NodeId.Should().Be("node-a"); + result.Link.Should().NotBeNull(); + result.THlc.Should().Be(hlcTimestamp); + + var stored = await store.GetEntriesAsync("node-a"); + stored.Should().ContainSingle(); + stored[0].EnqueuedAt.Should().Be(FixedNow); + stored[0].JobId.Should().Be(expectedJobId); + } + + private static Guid ComputeExpectedJobId(string idempotencyKey) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(idempotencyKey)); + return new Guid(hash.AsSpan(0, 16)); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TASKS.md b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TASKS.md index 6bc05ea07..8658065fe 
100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TASKS.md +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TASKS.md @@ -1,10 +1,9 @@ # AirGap Sync Tests Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. | Task ID | Status | Notes | | --- | --- | --- | -| AUDIT-0793-M | DONE | Revalidated 2026-01-07. | -| AUDIT-0793-T | DONE | Revalidated 2026-01-07. | -| AUDIT-0793-A | DONE | Waived (test project; revalidated 2026-01-07). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/StellaOps.AirGap.Sync.Tests.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. | diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/FixedTimeProvider.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/FixedTimeProvider.cs new file mode 100644 index 000000000..d7e0952d3 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/FixedTimeProvider.cs @@ -0,0 +1,16 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Tests.TestUtilities; + +internal sealed class FixedTimeProvider : TimeProvider +{ + private readonly DateTimeOffset _utcNow; + + public FixedTimeProvider(DateTimeOffset utcNow) + { + _utcNow = utcNow; + } + + public override DateTimeOffset GetUtcNow() => _utcNow; +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/InMemoryOfflineJobLogStore.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/InMemoryOfflineJobLogStore.cs new file mode 100644 index 000000000..e18e8d923 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/InMemoryOfflineJobLogStore.cs @@ -0,0 +1,89 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +using StellaOps.AirGap.Sync.Models; +using StellaOps.AirGap.Sync.Stores; + +namespace StellaOps.AirGap.Sync.Tests.TestUtilities; + +internal sealed class InMemoryOfflineJobLogStore : IOfflineJobLogStore +{ + private readonly Dictionary> _entries = + new(StringComparer.Ordinal); + + public Task AppendAsync(OfflineJobLogEntry entry, CancellationToken cancellationToken = default) + { + if (!_entries.TryGetValue(entry.NodeId, out var list)) + { + list = new List(); + _entries[entry.NodeId] = list; + } + + list.Add(entry); + return Task.CompletedTask; + } + + public Task> GetEntriesAsync( + string nodeId, + CancellationToken cancellationToken = default) + { + if (!_entries.TryGetValue(nodeId, out var list)) + { + return Task.FromResult>(Array.Empty()); + } + + return Task.FromResult>( + list.OrderBy(e => e.THlc).ToList()); + } + + public async Task GetLastLinkAsync(string nodeId, CancellationToken cancellationToken = default) + { + var entries = await GetEntriesAsync(nodeId, cancellationToken); + return entries.Count > 0 ? 
entries[^1].Link : null; + } + + public async Task GetNodeJobLogAsync(string nodeId, CancellationToken cancellationToken = default) + { + var entries = await GetEntriesAsync(nodeId, cancellationToken); + if (entries.Count == 0) + { + return null; + } + + var lastEntry = entries[^1]; + return new NodeJobLog + { + NodeId = nodeId, + LastHlc = lastEntry.THlc, + ChainHead = lastEntry.Link, + Entries = entries + }; + } + + public async Task ClearEntriesAsync( + string nodeId, + string upToHlc, + CancellationToken cancellationToken = default) + { + if (!_entries.TryGetValue(nodeId, out var list)) + { + return 0; + } + + var remaining = list + .Where(e => string.CompareOrdinal(e.THlc.ToSortableString(), upToHlc) > 0) + .ToList(); + var cleared = list.Count - remaining.Count; + + if (remaining.Count == 0) + { + _entries.Remove(nodeId); + } + else + { + _entries[nodeId] = remaining; + } + + return await Task.FromResult(cleared); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TempDirectory.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TempDirectory.cs new file mode 100644 index 000000000..ca7e51d90 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TempDirectory.cs @@ -0,0 +1,36 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. +// +namespace StellaOps.AirGap.Sync.Tests.TestUtilities; + +internal sealed class TempDirectory : IDisposable +{ + private static int _counter; + + public TempDirectory(string? prefix = null) + { + var id = Interlocked.Increment(ref _counter); + var name = $"{prefix ?? "airgap-sync-test"}-{id:D4}"; + Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), name); + Directory.CreateDirectory(Path); + } + + public string Path { get; } + + public void Dispose() + { + try + { + if (Directory.Exists(Path)) + { + Directory.Delete(Path, true); + } + } + catch (IOException) + { + } + catch (UnauthorizedAccessException) + { + } + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TestHybridLogicalClock.cs b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TestHybridLogicalClock.cs new file mode 100644 index 000000000..f36d77b95 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Sync.Tests/TestUtilities/TestHybridLogicalClock.cs @@ -0,0 +1,33 @@ +// +// Copyright (c) StellaOps. Licensed under BUSL-1.1. 
+// +using StellaOps.HybridLogicalClock; + +namespace StellaOps.AirGap.Sync.Tests.TestUtilities; + +internal sealed class TestHybridLogicalClock : IHybridLogicalClock +{ + private HlcTimestamp _timestamp; + private readonly string _nodeId; + + public TestHybridLogicalClock(string nodeId, HlcTimestamp timestamp) + { + _nodeId = nodeId; + _timestamp = timestamp; + } + + public List Received { get; } = new(); + + public HlcTimestamp Current => _timestamp; + + public string NodeId => _nodeId; + + public HlcTimestamp Tick() => _timestamp; + + public HlcTimestamp Receive(HlcTimestamp remote) + { + Received.Add(remote); + _timestamp = remote; + return remote; + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Errors.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Errors.cs new file mode 100644 index 000000000..f5e6477b6 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Errors.cs @@ -0,0 +1,36 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class Rfc3161VerifierTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_HandlesExceptionsGracefully() + { + var token = new byte[256]; + new Random(42).NextBytes(token); + var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; + + var result = _verifier.Verify(token, trust, out var anchor); + + Assert.False(result.IsValid); + Assert.Contains("rfc3161-", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReportsDecodeErrorForMalformedCms() + { + var token = new byte[] { 0x30, 0x82, 0x00, 0x10, 0x06, 0x09 }; + var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.NotNull(result.Reason); + Assert.Contains("rfc3161-", result.Reason); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Guards.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Guards.cs new file mode 100644 index 000000000..1ccf342bc --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.Guards.cs @@ -0,0 +1,60 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class Rfc3161VerifierTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenTrustRootsEmpty() + { + var token = new byte[] { 0x01, 0x02, 0x03 }; + + var result = _verifier.Verify(token, Array.Empty(), out var anchor); + + Assert.False(result.IsValid); + Assert.Equal("rfc3161-trust-roots-required", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenTokenEmpty() + { + var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; + + var result = _verifier.Verify(ReadOnlySpan.Empty, trust, out var anchor); + + Assert.False(result.IsValid); + Assert.Equal("rfc3161-token-empty", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenInvalidAsn1Structure() + { + var token = new byte[] { 0x01, 0x02, 0x03 }; + var trust = new[] { new TimeTrustRoot("tsa-root", new 
byte[] { 0x01 }, "rsa") }; + + var result = _verifier.Verify(token, trust, out var anchor); + + Assert.False(result.IsValid); + Assert.Contains("rfc3161-", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ProducesTokenDigest() + { + var token = new byte[] { 0x30, 0x00 }; + var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.Contains("rfc3161-", result.Reason); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.cs index 6c2833fbd..e4fa6bc4a 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/Rfc3161VerifierTests.cs @@ -1,101 +1,13 @@ -using StellaOps.AirGap.Time.Models; using StellaOps.AirGap.Time.Services; - using StellaOps.TestKit; + namespace StellaOps.AirGap.Time.Tests; /// /// Tests for Rfc3161Verifier with real SignedCms verification. /// Per AIRGAP-TIME-57-001: Trusted time-anchor service. /// -public class Rfc3161VerifierTests +public sealed partial class Rfc3161VerifierTests { private readonly Rfc3161Verifier _verifier = new(); - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenTrustRootsEmpty() - { - var token = new byte[] { 0x01, 0x02, 0x03 }; - - var result = _verifier.Verify(token, Array.Empty(), out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("rfc3161-trust-roots-required", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenTokenEmpty() - { - var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; - - var result = _verifier.Verify(ReadOnlySpan.Empty, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("rfc3161-token-empty", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenInvalidAsn1Structure() - { - var token = new byte[] { 0x01, 0x02, 0x03 }; // Invalid ASN.1 - var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; - - var result = _verifier.Verify(token, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Contains("rfc3161-", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ProducesTokenDigest() - { - var token = new byte[] { 0x30, 0x00 }; // Empty SEQUENCE (minimal valid ASN.1) - var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; - - var result = _verifier.Verify(token, trust, out _); - - // Should fail on CMS decode but attempt was made - Assert.False(result.IsValid); - Assert.Contains("rfc3161-", result.Reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_HandlesExceptionsGracefully() - { - // Create bytes that might cause internal exceptions - var token = new byte[256]; - new Random(42).NextBytes(token); - var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; - - var result = _verifier.Verify(token, trust, out var anchor); - - // Should not throw, should return failure result - Assert.False(result.IsValid); - 
Assert.Contains("rfc3161-", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReportsDecodeErrorForMalformedCms() - { - // Create something that looks like CMS but isn't valid - var token = new byte[] { 0x30, 0x82, 0x00, 0x10, 0x06, 0x09 }; - var trust = new[] { new TimeTrustRoot("tsa-root", new byte[] { 0x01 }, "rsa") }; - - var result = _verifier.Verify(token, trust, out _); - - Assert.False(result.IsValid); - // Should report either decode or error - Assert.NotNull(result.Reason); - Assert.Contains("rfc3161-", result.Reason); - } } diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Guards.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Guards.cs new file mode 100644 index 000000000..6833d5802 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Guards.cs @@ -0,0 +1,98 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class RoughtimeVerifierTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenTrustRootsEmpty() + { + var token = new byte[] { 0x01, 0x02, 0x03, 0x04 }; + + var result = _verifier.Verify(token, Array.Empty(), out var anchor); + + Assert.False(result.IsValid); + Assert.Equal("roughtime-trust-roots-required", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenTokenEmpty() + { + var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; + + var result = _verifier.Verify(ReadOnlySpan.Empty, trust, out var anchor); + + Assert.False(result.IsValid); + Assert.Equal("roughtime-token-empty", result.Reason); + Assert.Equal(TimeAnchor.Unknown, anchor); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenTokenTooShort() + { + var token = new byte[] { 0x01, 0x02, 0x03 }; + var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.Equal("roughtime-message-too-short", result.Reason); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenInvalidTagCount() + { + var token = new byte[8]; + BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)0); + var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.Equal("roughtime-invalid-tag-count", result.Reason); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenNonEd25519Algorithm() + { + var token = CreateMinimalRoughtimeToken(); + var trust = new[] { new TimeTrustRoot("root1", new byte[32], "rsa") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.Contains("roughtime-", result.Reason); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Verify_ReturnsFailure_WhenKeyLengthWrong() + { + var token = CreateMinimalRoughtimeToken(); + var trust = new[] { new TimeTrustRoot("root1", new byte[16], "ed25519") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + Assert.Contains("roughtime-", result.Reason); + } + + [Trait("Category", 
TestCategories.Unit)] + [Fact] + public void Verify_ProducesTokenDigest() + { + var token = new byte[] { 0xAA, 0xBB, 0xCC, 0xDD }; + var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; + + var result = _verifier.Verify(token, trust, out _); + + Assert.False(result.IsValid); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Helpers.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Helpers.cs new file mode 100644 index 000000000..a80889e00 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.Helpers.cs @@ -0,0 +1,39 @@ +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class RoughtimeVerifierTests +{ + private static byte[] CreateMinimalRoughtimeToken() + { + const uint tagSig = 0x00474953; + const uint tagSrep = 0x50455253; + + var sigValue = new byte[64]; + var srepValue = CreateMinimalSrep(); + + var headerSize = 4 + 4 + 8; + var token = new byte[headerSize + sigValue.Length + srepValue.Length]; + + BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)2); + BitConverter.TryWriteBytes(token.AsSpan(4, 4), (uint)64); + BitConverter.TryWriteBytes(token.AsSpan(8, 4), tagSig); + BitConverter.TryWriteBytes(token.AsSpan(12, 4), tagSrep); + sigValue.CopyTo(token.AsSpan(16)); + srepValue.CopyTo(token.AsSpan(16 + 64)); + + return token; + } + + private static byte[] CreateMinimalSrep() + { + const uint tagMidp = 0x5044494D; + + var headerSize = 4 + 4; + var srepValue = new byte[headerSize + 8]; + + BitConverter.TryWriteBytes(srepValue.AsSpan(0, 4), (uint)1); + BitConverter.TryWriteBytes(srepValue.AsSpan(4, 4), tagMidp); + BitConverter.TryWriteBytes(srepValue.AsSpan(8, 8), 1735689600000000L); + + return srepValue; + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.cs index f7c10beae..e1c5b5b75 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/RoughtimeVerifierTests.cs @@ -1,158 +1,13 @@ -using StellaOps.AirGap.Time.Models; using StellaOps.AirGap.Time.Services; - using StellaOps.TestKit; + namespace StellaOps.AirGap.Time.Tests; /// /// Tests for RoughtimeVerifier with real Ed25519 signature verification. /// Per AIRGAP-TIME-57-001: Trusted time-anchor service. 
/// -public class RoughtimeVerifierTests +public sealed partial class RoughtimeVerifierTests { private readonly RoughtimeVerifier _verifier = new(); - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenTrustRootsEmpty() - { - var token = new byte[] { 0x01, 0x02, 0x03, 0x04 }; - - var result = _verifier.Verify(token, Array.Empty(), out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("roughtime-trust-roots-required", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenTokenEmpty() - { - var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; - - var result = _verifier.Verify(ReadOnlySpan.Empty, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("roughtime-token-empty", result.Reason); - Assert.Equal(TimeAnchor.Unknown, anchor); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenTokenTooShort() - { - var token = new byte[] { 0x01, 0x02, 0x03 }; - var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; - - var result = _verifier.Verify(token, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("roughtime-message-too-short", result.Reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenInvalidTagCount() - { - // Create a minimal wire format with invalid tag count - var token = new byte[8]; - // Set num_tags to 0 (invalid) - BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)0); - - var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; - - var result = _verifier.Verify(token, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Equal("roughtime-invalid-tag-count", result.Reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenNonEd25519Algorithm() - { - // Create a minimal valid-looking wire format - var token = CreateMinimalRoughtimeToken(); - var trust = new[] { new TimeTrustRoot("root1", new byte[32], "rsa") }; // Wrong algorithm - - var result = _verifier.Verify(token, trust, out var anchor); - - Assert.False(result.IsValid); - // Should fail either on parsing or signature verification - Assert.Contains("roughtime-", result.Reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ReturnsFailure_WhenKeyLengthWrong() - { - var token = CreateMinimalRoughtimeToken(); - var trust = new[] { new TimeTrustRoot("root1", new byte[16], "ed25519") }; // Wrong key length - - var result = _verifier.Verify(token, trust, out var anchor); - - Assert.False(result.IsValid); - Assert.Contains("roughtime-", result.Reason); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Verify_ProducesTokenDigest() - { - var token = new byte[] { 0xAA, 0xBB, 0xCC, 0xDD }; - var trust = new[] { new TimeTrustRoot("root1", new byte[32], "ed25519") }; - - var result = _verifier.Verify(token, trust, out _); - - // Even on failure, we should get a deterministic result - Assert.False(result.IsValid); - } - - /// - /// Creates a minimal Roughtime wire format token for testing parsing paths. - /// Note: This will fail signature verification but tests the parsing logic. - /// - private static byte[] CreateMinimalRoughtimeToken() - { - // Roughtime wire format: - // [num_tags:u32] [offsets:u32[n-1]] [tags:u32[n]] [values...] 
- // We'll create 2 tags: SIG and SREP - - const uint TagSig = 0x00474953; // "SIG\0" - const uint TagSrep = 0x50455253; // "SREP" - - var sigValue = new byte[64]; // Ed25519 signature - var srepValue = CreateMinimalSrep(); - - // Header: num_tags=2, offset[0]=64 (sig length), tags=[SIG, SREP] - var headerSize = 4 + 4 + 8; // num_tags + 1 offset + 2 tags = 16 bytes - var token = new byte[headerSize + sigValue.Length + srepValue.Length]; - - BitConverter.TryWriteBytes(token.AsSpan(0, 4), (uint)2); // num_tags = 2 - BitConverter.TryWriteBytes(token.AsSpan(4, 4), (uint)64); // offset[0] = 64 (sig length) - BitConverter.TryWriteBytes(token.AsSpan(8, 4), TagSig); - BitConverter.TryWriteBytes(token.AsSpan(12, 4), TagSrep); - sigValue.CopyTo(token.AsSpan(16)); - srepValue.CopyTo(token.AsSpan(16 + 64)); - - return token; - } - - private static byte[] CreateMinimalSrep() - { - // SREP with MIDP tag containing 8-byte timestamp - const uint TagMidp = 0x5044494D; // "MIDP" - - // Header: num_tags=1, tags=[MIDP] - var headerSize = 4 + 4; // num_tags + 1 tag = 8 bytes - var srepValue = new byte[headerSize + 8]; // + 8 bytes for MIDP value - - BitConverter.TryWriteBytes(srepValue.AsSpan(0, 4), (uint)1); // num_tags = 1 - BitConverter.TryWriteBytes(srepValue.AsSpan(4, 4), TagMidp); - // MIDP value: microseconds since Unix epoch (example: 2025-01-01 00:00:00 UTC) - BitConverter.TryWriteBytes(srepValue.AsSpan(8, 8), 1735689600000000L); - - return srepValue; - } } diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupHostedServiceTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupHostedServiceTests.cs new file mode 100644 index 000000000..2620de345 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupHostedServiceTests.cs @@ -0,0 +1,62 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Time.Hooks; +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.AirGap.Time.Stores; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed class SealedStartupHostedServiceTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task StartAsync_ThrowsWhenAnchorMissingAsync() + { + var service = Build(out _, DateTimeOffset.UnixEpoch); + + await Assert.ThrowsAsync(() => service.StartAsync(default)); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task StartAsync_CompletesWhenAnchorFreshAsync() + { + var now = DateTimeOffset.UnixEpoch.AddSeconds(5); + var service = Build(out var statusService, now); + + var anchor = new TimeAnchor(now, "src", "fmt", "fp", "digest"); + await statusService.SetAnchorAsync("t1", anchor, new StalenessBudget(10, 20)); + + await service.StartAsync(default); + } + + private static SealedStartupHostedService Build(out TimeStatusService statusService, DateTimeOffset now) + { + var store = new InMemoryTimeAnchorStore(); + statusService = new TimeStatusService( + store, + new StalenessCalculator(), + new TimeTelemetry(), + new TestOptionsMonitor(new AirGapOptions())); + + var validator = new SealedStartupValidator(statusService, new FixedTimeProvider(now)); + var options = Options.Create(new AirGapOptions + { + TenantId = "t1", + Staleness = new StalenessOptions { WarningSeconds = 10, BreachSeconds = 20 } + }); + + return new SealedStartupHostedService(validator, options, NullLogger.Instance); + } + + private sealed class FixedTimeProvider : TimeProvider + { + private 
readonly DateTimeOffset _now; + + public FixedTimeProvider(DateTimeOffset now) => _now = now; + + public override DateTimeOffset GetUtcNow() => _now; + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupValidatorTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupValidatorTests.cs index 8946e8d91..a7458ec78 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupValidatorTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/SealedStartupValidatorTests.cs @@ -9,7 +9,7 @@ public class SealedStartupValidatorTests { [Trait("Category", TestCategories.Unit)] [Fact] - public async Task FailsWhenAnchorMissing() + public async Task FailsWhenAnchorMissingAsync() { var validator = Build(out var statusService, DateTimeOffset.UnixEpoch); var result = await validator.ValidateAsync("t1", StalenessBudget.Default, default); @@ -19,7 +19,7 @@ public class SealedStartupValidatorTests [Trait("Category", TestCategories.Unit)] [Fact] - public async Task FailsWhenBreach() + public async Task FailsWhenBreachAsync() { var now = DateTimeOffset.UnixEpoch.AddSeconds(25); var validator = Build(out var statusService, now); @@ -35,7 +35,7 @@ public class SealedStartupValidatorTests [Trait("Category", TestCategories.Unit)] [Fact] - public async Task SucceedsWhenFresh() + public async Task SucceedsWhenFreshAsync() { var now = DateTimeOffset.UnixEpoch.AddSeconds(5); var validator = Build(out var statusService, now); @@ -47,7 +47,7 @@ public class SealedStartupValidatorTests [Trait("Category", TestCategories.Unit)] [Fact] - public async Task FailsOnBudgetMismatch() + public async Task FailsOnBudgetMismatchAsync() { var now = DateTimeOffset.UnixEpoch.AddSeconds(5); var validator = Build(out var statusService, now); diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TASKS.md b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TASKS.md index ab3ee1e17..abf6f39d7 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TASKS.md +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TASKS.md @@ -1,10 +1,12 @@ # AirGap Time Tests Task Board This board mirrors active sprint tasks for this module. -Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md` and `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`. | Task ID | Status | Notes | | --- | --- | --- | | AUDIT-0035-M | DONE | Revalidated maintainability for StellaOps.AirGap.Time.Tests (2026-01-06). | | AUDIT-0035-T | DONE | Revalidated test coverage for StellaOps.AirGap.Time.Tests (2026-01-06). | | AUDIT-0035-A | DONE | Waived (test project). | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/StellaOps.AirGap.Time.Tests.md. | +| REMED-06 | DONE | SOLID review notes updated for SPRINT_20260130_002. 
| diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Bundle.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Bundle.cs new file mode 100644 index 000000000..0a8281dcd --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Bundle.cs @@ -0,0 +1,58 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class TimeAnchorPolicyServiceTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task EnforceBundleImportPolicyAsync_AllowsImport_WhenAnchorValidAsync() + { + var service = CreateService(); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow().AddMinutes(-30), + "test-source", + "Roughtime", + "fingerprint", + "digest123"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var result = await service.EnforceBundleImportPolicyAsync( + "tenant-1", + "bundle-123", + _fixedTimeProvider.GetUtcNow().AddMinutes(-15)); + + Assert.True(result.Allowed); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task EnforceBundleImportPolicyAsync_BlocksImport_WhenDriftExceededAsync() + { + var options = new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 }; + var service = CreateService(options); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow().AddMinutes(-30), + "test-source", + "Roughtime", + "fingerprint", + "digest123"); + var budget = new StalenessBudget(86400, 172800); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var bundleTimestamp = _fixedTimeProvider.GetUtcNow().AddDays(-2); + + var result = await service.EnforceBundleImportPolicyAsync( + "tenant-1", + "bundle-123", + bundleTimestamp); + + Assert.False(result.Allowed); + Assert.Equal(TimeAnchorPolicyErrorCodes.DriftExceeded, result.ErrorCode); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Drift.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Drift.cs new file mode 100644 index 000000000..0a2c781cd --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Drift.cs @@ -0,0 +1,63 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class TimeAnchorPolicyServiceTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task CalculateDriftAsync_ReturnsNoDrift_WhenNoAnchorAsync() + { + var service = CreateService(); + + var result = await service.CalculateDriftAsync("tenant-1", _fixedTimeProvider.GetUtcNow()); + + Assert.False(result.HasAnchor); + Assert.Equal(TimeSpan.Zero, result.Drift); + Assert.Null(result.AnchorTime); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task CalculateDriftAsync_ReturnsDrift_WhenAnchorExistsAsync() + { + var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 }); + var anchorTime = _fixedTimeProvider.GetUtcNow().AddMinutes(-30); + var anchor = new TimeAnchor(anchorTime, "test", "Roughtime", "fp", "digest"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(15); + var result = 
await service.CalculateDriftAsync("tenant-1", targetTime); + + Assert.True(result.HasAnchor); + Assert.Equal(anchorTime, result.AnchorTime); + Assert.Equal(45, (int)result.Drift.TotalMinutes); + Assert.False(result.DriftExceedsThreshold); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task CalculateDriftAsync_DetectsExcessiveDriftAsync() + { + var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 60 }); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow(), + "test", + "Roughtime", + "fp", + "digest"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(5); + var result = await service.CalculateDriftAsync("tenant-1", targetTime); + + Assert.True(result.HasAnchor); + Assert.True(result.DriftExceedsThreshold); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Fixtures.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Fixtures.cs new file mode 100644 index 000000000..2c5c4be06 --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Fixtures.cs @@ -0,0 +1,54 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.AirGap.Time.Stores; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +/// +/// Tests for TimeAnchorPolicyService. +/// Per AIRGAP-TIME-57-001: Time-anchor policy enforcement. +/// +public sealed partial class TimeAnchorPolicyServiceTests +{ + private readonly TimeProvider _fixedTimeProvider; + private readonly InMemoryTimeAnchorStore _store; + private readonly StalenessCalculator _calculator; + private readonly TimeTelemetry _telemetry; + private readonly TimeStatusService _statusService; + private readonly AirGapOptions _airGapOptions; + + public TimeAnchorPolicyServiceTests() + { + _fixedTimeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero)); + _store = new InMemoryTimeAnchorStore(); + _calculator = new StalenessCalculator(); + _telemetry = new TimeTelemetry(); + _airGapOptions = new AirGapOptions + { + Staleness = new StalenessOptions { WarningSeconds = 3600, BreachSeconds = 7200 }, + ContentBudgets = new Dictionary() + }; + _statusService = new TimeStatusService(_store, _calculator, _telemetry, new TestOptionsMonitor(_airGapOptions)); + } + + private TimeAnchorPolicyService CreateService(TimeAnchorPolicyOptions? options = null) + { + return new TimeAnchorPolicyService( + _statusService, + Options.Create(options ?? 
new TimeAnchorPolicyOptions()), + NullLogger.Instance, + _fixedTimeProvider); + } + + private sealed class FakeTimeProvider : TimeProvider + { + private readonly DateTimeOffset _now; + + public FakeTimeProvider(DateTimeOffset now) => _now = now; + + public override DateTimeOffset GetUtcNow() => _now; + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Operation.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Operation.cs new file mode 100644 index 000000000..2d315310a --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Operation.cs @@ -0,0 +1,39 @@ +using StellaOps.AirGap.Time.Services; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class TimeAnchorPolicyServiceTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task EnforceOperationPolicyAsync_BlocksStrictOperations_WhenNoAnchorAsync() + { + var options = new TimeAnchorPolicyOptions + { + StrictOperations = new[] { "attestation.sign" } + }; + var service = CreateService(options); + + var result = await service.EnforceOperationPolicyAsync("tenant-1", "attestation.sign"); + + Assert.False(result.Allowed); + Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task EnforceOperationPolicyAsync_AllowsNonStrictOperations_InNonStrictModeAsync() + { + var options = new TimeAnchorPolicyOptions + { + StrictEnforcement = false, + StrictOperations = new[] { "attestation.sign" } + }; + var service = CreateService(options); + + var result = await service.EnforceOperationPolicyAsync("tenant-1", "some.other.operation"); + + Assert.True(result.Allowed); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Validation.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Validation.cs new file mode 100644 index 000000000..022f63aae --- /dev/null +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.Validation.cs @@ -0,0 +1,90 @@ +using StellaOps.AirGap.Time.Models; +using StellaOps.AirGap.Time.Services; +using StellaOps.TestKit; + +namespace StellaOps.AirGap.Time.Tests; + +public sealed partial class TimeAnchorPolicyServiceTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenNoAnchorAsync() + { + var service = CreateService(); + + var result = await service.ValidateTimeAnchorAsync("tenant-1"); + + Assert.False(result.Allowed); + Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode); + Assert.NotNull(result.Remediation); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task ValidateTimeAnchorAsync_ReturnsSuccess_WhenAnchorValidAsync() + { + var service = CreateService(); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow().AddMinutes(-30), + "test-source", + "Roughtime", + "fingerprint", + "digest123"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var result = await service.ValidateTimeAnchorAsync("tenant-1"); + + Assert.True(result.Allowed); + Assert.Null(result.ErrorCode); + Assert.NotNull(result.Staleness); + Assert.False(result.Staleness.IsBreach); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task 
ValidateTimeAnchorAsync_ReturnsWarning_WhenAnchorStaleAsync() + { + var service = CreateService(); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow().AddSeconds(-5000), + "test-source", + "Roughtime", + "fingerprint", + "digest123"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var result = await service.ValidateTimeAnchorAsync("tenant-1"); + + Assert.True(result.Allowed); + Assert.NotNull(result.Staleness); + Assert.True(result.Staleness.IsWarning); + Assert.Contains("warning", result.Reason, StringComparison.OrdinalIgnoreCase); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenAnchorBreachedAsync() + { + var service = CreateService(); + var anchor = new TimeAnchor( + _fixedTimeProvider.GetUtcNow().AddSeconds(-8000), + "test-source", + "Roughtime", + "fingerprint", + "digest123"); + var budget = new StalenessBudget(3600, 7200); + + await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); + + var result = await service.ValidateTimeAnchorAsync("tenant-1"); + + Assert.False(result.Allowed); + Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorBreached, result.ErrorCode); + Assert.NotNull(result.Staleness); + Assert.True(result.Staleness.IsBreach); + } +} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.cs deleted file mode 100644 index 13087f951..000000000 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeAnchorPolicyServiceTests.cs +++ /dev/null @@ -1,273 +0,0 @@ -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.AirGap.Time.Models; -using StellaOps.AirGap.Time.Services; -using StellaOps.AirGap.Time.Stores; - -using StellaOps.TestKit; -namespace StellaOps.AirGap.Time.Tests; - -/// -/// Tests for TimeAnchorPolicyService. -/// Per AIRGAP-TIME-57-001: Time-anchor policy enforcement. -/// -public class TimeAnchorPolicyServiceTests -{ - private readonly TimeProvider _fixedTimeProvider; - private readonly InMemoryTimeAnchorStore _store; - private readonly StalenessCalculator _calculator; - private readonly TimeTelemetry _telemetry; - private readonly TimeStatusService _statusService; - private readonly AirGapOptions _airGapOptions; - - public TimeAnchorPolicyServiceTests() - { - _fixedTimeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero)); - _store = new InMemoryTimeAnchorStore(); - _calculator = new StalenessCalculator(); - _telemetry = new TimeTelemetry(); - _airGapOptions = new AirGapOptions - { - Staleness = new StalenessOptions { WarningSeconds = 3600, BreachSeconds = 7200 }, - ContentBudgets = new Dictionary() - }; - _statusService = new TimeStatusService(_store, _calculator, _telemetry, new TestOptionsMonitor(_airGapOptions)); - } - - private TimeAnchorPolicyService CreateService(TimeAnchorPolicyOptions? options = null) - { - return new TimeAnchorPolicyService( - _statusService, - Options.Create(options ?? 
new TimeAnchorPolicyOptions()), - NullLogger.Instance, - _fixedTimeProvider); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenNoAnchor() - { - var service = CreateService(); - - var result = await service.ValidateTimeAnchorAsync("tenant-1"); - - Assert.False(result.Allowed); - Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode); - Assert.NotNull(result.Remediation); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task ValidateTimeAnchorAsync_ReturnsSuccess_WhenAnchorValid() - { - var service = CreateService(); - var anchor = new TimeAnchor( - _fixedTimeProvider.GetUtcNow().AddMinutes(-30), - "test-source", - "Roughtime", - "fingerprint", - "digest123"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var result = await service.ValidateTimeAnchorAsync("tenant-1"); - - Assert.True(result.Allowed); - Assert.Null(result.ErrorCode); - Assert.NotNull(result.Staleness); - Assert.False(result.Staleness.IsBreach); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task ValidateTimeAnchorAsync_ReturnsWarning_WhenAnchorStale() - { - var service = CreateService(); - var anchor = new TimeAnchor( - _fixedTimeProvider.GetUtcNow().AddSeconds(-5000), // Past warning threshold - "test-source", - "Roughtime", - "fingerprint", - "digest123"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var result = await service.ValidateTimeAnchorAsync("tenant-1"); - - Assert.True(result.Allowed); // Allowed but with warning - Assert.NotNull(result.Staleness); - Assert.True(result.Staleness.IsWarning); - Assert.Contains("warning", result.Reason, StringComparison.OrdinalIgnoreCase); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task ValidateTimeAnchorAsync_ReturnsFailure_WhenAnchorBreached() - { - var service = CreateService(); - var anchor = new TimeAnchor( - _fixedTimeProvider.GetUtcNow().AddSeconds(-8000), // Past breach threshold - "test-source", - "Roughtime", - "fingerprint", - "digest123"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var result = await service.ValidateTimeAnchorAsync("tenant-1"); - - Assert.False(result.Allowed); - Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorBreached, result.ErrorCode); - Assert.NotNull(result.Staleness); - Assert.True(result.Staleness.IsBreach); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task EnforceBundleImportPolicyAsync_AllowsImport_WhenAnchorValid() - { - var service = CreateService(); - var anchor = new TimeAnchor( - _fixedTimeProvider.GetUtcNow().AddMinutes(-30), - "test-source", - "Roughtime", - "fingerprint", - "digest123"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var result = await service.EnforceBundleImportPolicyAsync( - "tenant-1", - "bundle-123", - _fixedTimeProvider.GetUtcNow().AddMinutes(-15)); - - Assert.True(result.Allowed); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task EnforceBundleImportPolicyAsync_BlocksImport_WhenDriftExceeded() - { - var options = new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 }; // 1 hour max - var service = CreateService(options); - var anchor = new TimeAnchor( - 
_fixedTimeProvider.GetUtcNow().AddMinutes(-30), - "test-source", - "Roughtime", - "fingerprint", - "digest123"); - var budget = new StalenessBudget(86400, 172800); // Large budget - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var bundleTimestamp = _fixedTimeProvider.GetUtcNow().AddDays(-2); // 2 days ago - - var result = await service.EnforceBundleImportPolicyAsync( - "tenant-1", - "bundle-123", - bundleTimestamp); - - Assert.False(result.Allowed); - Assert.Equal(TimeAnchorPolicyErrorCodes.DriftExceeded, result.ErrorCode); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task EnforceOperationPolicyAsync_BlocksStrictOperations_WhenNoAnchor() - { - var options = new TimeAnchorPolicyOptions - { - StrictOperations = new[] { "attestation.sign" } - }; - var service = CreateService(options); - - var result = await service.EnforceOperationPolicyAsync("tenant-1", "attestation.sign"); - - Assert.False(result.Allowed); - Assert.Equal(TimeAnchorPolicyErrorCodes.AnchorMissing, result.ErrorCode); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task EnforceOperationPolicyAsync_AllowsNonStrictOperations_InNonStrictMode() - { - var options = new TimeAnchorPolicyOptions - { - StrictEnforcement = false, - StrictOperations = new[] { "attestation.sign" } - }; - var service = CreateService(options); - - var result = await service.EnforceOperationPolicyAsync("tenant-1", "some.other.operation"); - - Assert.True(result.Allowed); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task CalculateDriftAsync_ReturnsNoDrift_WhenNoAnchor() - { - var service = CreateService(); - - var result = await service.CalculateDriftAsync("tenant-1", _fixedTimeProvider.GetUtcNow()); - - Assert.False(result.HasAnchor); - Assert.Equal(TimeSpan.Zero, result.Drift); - Assert.Null(result.AnchorTime); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task CalculateDriftAsync_ReturnsDrift_WhenAnchorExists() - { - var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 3600 }); - var anchorTime = _fixedTimeProvider.GetUtcNow().AddMinutes(-30); - var anchor = new TimeAnchor(anchorTime, "test", "Roughtime", "fp", "digest"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(15); - var result = await service.CalculateDriftAsync("tenant-1", targetTime); - - Assert.True(result.HasAnchor); - Assert.Equal(anchorTime, result.AnchorTime); - Assert.Equal(45, (int)result.Drift.TotalMinutes); // 30 min + 15 min - Assert.False(result.DriftExceedsThreshold); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task CalculateDriftAsync_DetectsExcessiveDrift() - { - var service = CreateService(new TimeAnchorPolicyOptions { MaxDriftSeconds = 60 }); // 1 minute max - var anchor = new TimeAnchor( - _fixedTimeProvider.GetUtcNow(), - "test", - "Roughtime", - "fp", - "digest"); - var budget = new StalenessBudget(3600, 7200); - - await _store.SetAsync("tenant-1", anchor, budget, CancellationToken.None); - - var targetTime = _fixedTimeProvider.GetUtcNow().AddMinutes(5); // 5 minutes drift - var result = await service.CalculateDriftAsync("tenant-1", targetTime); - - Assert.True(result.HasAnchor); - Assert.True(result.DriftExceedsThreshold); - } - - private sealed class FakeTimeProvider : TimeProvider - { - private readonly DateTimeOffset _now; - - public 
FakeTimeProvider(DateTimeOffset now) => _now = now; - - public override DateTimeOffset GetUtcNow() => _now; - } -} diff --git a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeStatusServiceTests.cs b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeStatusServiceTests.cs index 1ef827853..344f5d83c 100644 --- a/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeStatusServiceTests.cs +++ b/src/AirGap/__Tests/StellaOps.AirGap.Time.Tests/TimeStatusServiceTests.cs @@ -9,7 +9,7 @@ public class TimeStatusServiceTests { [Trait("Category", TestCategories.Unit)] [Fact] - public async Task ReturnsUnknownWhenNoAnchor() + public async Task ReturnsUnknownWhenNoAnchorAsync() { var svc = Build(out var telemetry); var status = await svc.GetStatusAsync("t1", DateTimeOffset.UnixEpoch); @@ -20,7 +20,7 @@ public class TimeStatusServiceTests [Trait("Category", TestCategories.Unit)] [Fact] - public async Task PersistsAnchorAndBudget() + public async Task PersistsAnchorAndBudgetAsync() { var svc = Build(out var telemetry); var anchor = new TimeAnchor(DateTimeOffset.UnixEpoch, "source", "fmt", "fp", "digest"); diff --git a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.Helpers.cs b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.Helpers.cs new file mode 100644 index 000000000..f0bd11193 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.Helpers.cs @@ -0,0 +1,57 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; + +namespace StellaOps.Aoc.AspNetCore.Routing; + +public sealed partial class AocGuardEndpointFilter +{ + private AocGuardOptions ResolveOptions() + { + if (_guardOptions is not null) + { + return _guardOptions; + } + + return _options?.Value ?? 
AocGuardOptions.Default; + } + + private static bool TryGetArgument(EndpointFilterInvocationContext context, out TRequest argument) + { + for (var i = 0; i < context.Arguments.Count; i++) + { + if (context.Arguments[i] is TRequest typedArgument) + { + argument = typedArgument; + return true; + } + } + + argument = default!; + return false; + } + + private void ValidatePayload(object payload, IAocGuard guard, AocGuardOptions options) + { + if (payload is JsonElement jsonElement) + { + guard.ValidateOrThrow(jsonElement, options); + return; + } + + if (payload is JsonDocument jsonDocument) + { + using (jsonDocument) + { + guard.ValidateOrThrow(jsonDocument.RootElement, options); + } + return; + } + + var element = JsonSerializer.SerializeToElement(payload, _serializerOptions); + guard.ValidateOrThrow(element, options); + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.cs b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.cs index 19d9ad49a..470a8228f 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilter.cs @@ -1,32 +1,38 @@ - -using HttpResults = Microsoft.AspNetCore.Http.Results; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Routing; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Aoc; -using StellaOps.Aoc.AspNetCore.Results; using System; using System.Collections.Generic; using System.Text.Json; using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; +using StellaOps.Aoc.AspNetCore.Results; +using HttpResults = Microsoft.AspNetCore.Http.Results; namespace StellaOps.Aoc.AspNetCore.Routing; -public sealed class AocGuardEndpointFilter : IEndpointFilter +public sealed partial class AocGuardEndpointFilter : IEndpointFilter { + private readonly IAocGuard _guard; + private readonly ILogger> _logger; private readonly Func> _payloadSelector; private readonly JsonSerializerOptions _serializerOptions; + private readonly IOptions? _options; private readonly AocGuardOptions? _guardOptions; public AocGuardEndpointFilter( + IAocGuard guard, + ILogger> logger, Func> payloadSelector, JsonSerializerOptions? serializerOptions, + IOptions? options, AocGuardOptions? guardOptions) { + _guard = guard ?? throw new ArgumentNullException(nameof(guard)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _payloadSelector = payloadSelector ?? throw new ArgumentNullException(nameof(payloadSelector)); _serializerOptions = serializerOptions ?? 
new JsonSerializerOptions(JsonSerializerDefaults.Web); + _options = options; _guardOptions = guardOptions; } @@ -39,8 +45,7 @@ public sealed class AocGuardEndpointFilter : IEndpointFilter if (!TryGetArgument(context, out var request)) { - var logger = context.HttpContext.RequestServices.GetService>>(); - logger?.LogWarning("AOC guard filter did not find request argument of type {RequestType}.", typeof(TRequest).FullName); + _logger.LogWarning("AOC guard filter did not find request argument of type {RequestType}.", typeof(TRequest).FullName); return HttpResults.Problem( statusCode: StatusCodes.Status400BadRequest, title: "AOC guard payload missing", @@ -54,16 +59,14 @@ public sealed class AocGuardEndpointFilter : IEndpointFilter } catch (Exception ex) { - var logger = context.HttpContext.RequestServices.GetService>>(); - logger?.LogError(ex, "AOC guard payload selector failed for {RequestType}.", typeof(TRequest).FullName); + _logger.LogError(ex, "AOC guard payload selector failed for {RequestType}.", typeof(TRequest).FullName); return HttpResults.Problem( statusCode: StatusCodes.Status400BadRequest, title: "AOC guard payload selector failed", detail: "Request payload could not be extracted for validation."); } - var guard = context.HttpContext.RequestServices.GetRequiredService(); - var options = ResolveOptions(context.HttpContext.RequestServices); + var options = ResolveOptions(); foreach (var payload in payloads) { @@ -74,7 +77,7 @@ public sealed class AocGuardEndpointFilter : IEndpointFilter try { - ValidatePayload(payload, guard, options); + ValidatePayload(payload, _guard, options); } catch (AocGuardException exception) { @@ -82,8 +85,7 @@ public sealed class AocGuardEndpointFilter : IEndpointFilter } catch (Exception ex) { - var logger = context.HttpContext.RequestServices.GetService>>(); - logger?.LogError(ex, "AOC guard payload validation failed for {RequestType}.", typeof(TRequest).FullName); + _logger.LogError(ex, "AOC guard payload validation failed for {RequestType}.", typeof(TRequest).FullName); return HttpResults.Problem( statusCode: StatusCodes.Status400BadRequest, title: "AOC guard payload invalid", @@ -93,51 +95,4 @@ public sealed class AocGuardEndpointFilter : IEndpointFilter return await next(context).ConfigureAwait(false); } - - private AocGuardOptions ResolveOptions(IServiceProvider services) - { - if (_guardOptions is not null) - { - return _guardOptions; - } - - var options = services.GetService>(); - return options?.Value ?? 
AocGuardOptions.Default; - } - - private static bool TryGetArgument(EndpointFilterInvocationContext context, out TRequest argument) - { - for (var i = 0; i < context.Arguments.Count; i++) - { - if (context.Arguments[i] is TRequest typedArgument) - { - argument = typedArgument; - return true; - } - } - - argument = default!; - return false; - } - - private void ValidatePayload(object payload, IAocGuard guard, AocGuardOptions options) - { - if (payload is JsonElement jsonElement) - { - guard.ValidateOrThrow(jsonElement, options); - return; - } - - if (payload is JsonDocument jsonDocument) - { - using (jsonDocument) - { - guard.ValidateOrThrow(jsonDocument.RootElement, options); - } - return; - } - - var element = JsonSerializer.SerializeToElement(payload, _serializerOptions); - guard.ValidateOrThrow(element, options); - } } diff --git a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilterExtensions.cs b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilterExtensions.cs index 5ff3e292a..376721677 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilterExtensions.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/Routing/AocGuardEndpointFilterExtensions.cs @@ -1,8 +1,11 @@ - -using Microsoft.AspNetCore.Builder; using System; using System.Collections.Generic; using System.Text.Json; +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; namespace StellaOps.Aoc.AspNetCore.Routing; @@ -28,7 +31,17 @@ public static class AocGuardEndpointFilterExtensions { endpointBuilder.FilterFactories.Add((routeContext, next) => { - var filter = new AocGuardEndpointFilter(payloadSelector, serializerOptions, guardOptions); + var services = routeContext.ApplicationServices; + var guard = services.GetRequiredService(); + var logger = services.GetRequiredService>>(); + var options = services.GetService>(); + var filter = new AocGuardEndpointFilter( + guard, + logger, + payloadSelector, + serializerOptions, + options, + guardOptions); return invocationContext => filter.InvokeAsync(invocationContext, next); }); }); diff --git a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/TASKS.md b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/TASKS.md index 2672ff378..568743ec1 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/TASKS.md +++ b/src/Aoc/__Libraries/StellaOps.Aoc.AspNetCore/TASKS.md @@ -8,3 +8,5 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0039-M | DONE | Revalidated maintainability for StellaOps.Aoc.AspNetCore (2026-01-06). | | AUDIT-0039-T | DONE | Revalidated test coverage for StellaOps.Aoc.AspNetCore (2026-01-06). | | AUDIT-0039-A | DONE | Hardened guard filter error handling and added tests. | +| REMED-06 | DONE | SOLID review notes refreshed 2026-02-04. | +| REMED-08 | DONE | AocGuardEndpointFilter uses constructor injection with helper partials; service locator removed; `dotnet test src/Aoc/__Tests/StellaOps.Aoc.AspNetCore.Tests/StellaOps.Aoc.AspNetCore.Tests.csproj` passed (8 tests) 2026-02-04. 
| diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocError.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocError.cs index 1fee5f7ae..941dcba16 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/AocError.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocError.cs @@ -40,6 +40,6 @@ public sealed record AocError( throw new ArgumentNullException(nameof(exception)); } - return FromResult(exception.Result, message); + return FromResult(new AocGuardResult(false, exception.Violations), message); } } diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs index 2cbc799ea..59eeb0a8c 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs @@ -4,7 +4,7 @@ namespace StellaOps.Aoc; public static class AocForbiddenKeys { - private static readonly ImmutableHashSet<string> ForbiddenTopLevel = new[] + private static readonly ImmutableHashSet<string> _forbiddenTopLevel = new[] { "severity", "cvss", @@ -18,7 +18,7 @@ public static class AocForbiddenKeys // handled separately by IsDerivedField() and produce ERR_AOC_006 }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); - public static bool IsForbiddenTopLevel(string propertyName) => ForbiddenTopLevel.Contains(propertyName); + public static bool IsForbiddenTopLevel(string propertyName) => _forbiddenTopLevel.Contains(propertyName); public static bool IsDerivedField(string propertyName) => propertyName.StartsWith("effective_", StringComparison.OrdinalIgnoreCase); diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs index a25ce47c7..b54f094d4 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs @@ -5,7 +5,7 @@ namespace StellaOps.Aoc; public sealed record AocGuardOptions { - private static readonly ImmutableHashSet<string> DefaultRequiredTopLevel = new[] + private static readonly ImmutableHashSet<string> _defaultRequiredTopLevel = new[] { "tenant", "source", @@ -14,7 +14,7 @@ public sealed record AocGuardOptions "linkset", }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); - private static readonly ImmutableHashSet<string> DefaultAllowedTopLevel = DefaultRequiredTopLevel + private static readonly ImmutableHashSet<string> _defaultAllowedTopLevel = _defaultRequiredTopLevel .Union(new[] { "_id", @@ -33,12 +33,12 @@ public sealed record AocGuardOptions public static AocGuardOptions Default { get; } = new(); - public ImmutableHashSet<string> RequiredTopLevelFields { get; init; } = DefaultRequiredTopLevel; + public ImmutableHashSet<string> RequiredTopLevelFields { get; init; } = _defaultRequiredTopLevel; /// /// Optional allowlist for top-level fields. Unknown fields trigger ERR_AOC_007. /// - public ImmutableHashSet<string> AllowedTopLevelFields { get; init; } = DefaultAllowedTopLevel; + public ImmutableHashSet<string> AllowedTopLevelFields { get; init; } = _defaultAllowedTopLevel; /// /// When true, signature metadata is required under upstream.signature.
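For orientation, here is a minimal usage sketch of the refactored guard; it is not part of the patch. It relies only on members visible in this diff (IAocGuard, AocWriteGuard, AocGuardOptions.Default, ValidateOrThrow, AocGuardException.Violations) and reuses the document shape exercised by the endpoint-filter tests; the console output and the assumption that ValidateOrThrow raises AocGuardException on any violation are illustrative rather than authoritative.

using System;
using System.Text.Json;
using StellaOps.Aoc;

internal static class AocGuardUsageSketch
{
    public static void Main()
    {
        // Smallest document satisfying the default required top-level fields
        // (tenant, source, upstream, content, linkset) plus upstream provenance.
        using var doc = JsonDocument.Parse(
            """{"tenant":"default","source":{},"upstream":{"content_hash":"sha256:abc","signature":{"present":false}},"content":{"raw":{}},"linkset":{}}""");

        IAocGuard guard = new AocWriteGuard();

        try
        {
            // Validates against AocGuardOptions.Default; assumed to throw
            // AocGuardException when the write guard reports violations.
            guard.ValidateOrThrow(doc.RootElement, AocGuardOptions.Default);
            Console.WriteLine("document is AOC-compliant");
        }
        catch (AocGuardException exception)
        {
            foreach (var violation in exception.Violations)
            {
                // Each AocViolation carries the code/path/message produced via AocViolation.Create;
                // codes map to stable ERR_AOC_xxx identifiers through ToErrorCode().
                Console.WriteLine(violation);
            }
        }
    }
}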
diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs index d2404a5c5..24dec89cb 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs @@ -14,21 +14,3 @@ public enum AocViolationCode InvalidTenant, InvalidSignatureMetadata, } - -public static class AocViolationCodeExtensions -{ - public static string ToErrorCode(this AocViolationCode code) => code switch - { - AocViolationCode.ForbiddenField => "ERR_AOC_001", - AocViolationCode.MergeAttempt => "ERR_AOC_002", - AocViolationCode.IdempotencyViolation => "ERR_AOC_003", - AocViolationCode.MissingProvenance => "ERR_AOC_004", - AocViolationCode.SignatureInvalid => "ERR_AOC_005", - AocViolationCode.DerivedFindingDetected => "ERR_AOC_006", - AocViolationCode.UnknownField => "ERR_AOC_007", - AocViolationCode.MissingRequiredField => "ERR_AOC_008", - AocViolationCode.InvalidTenant => "ERR_AOC_009", - AocViolationCode.InvalidSignatureMetadata => "ERR_AOC_010", - _ => "ERR_AOC_000", - }; -} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCodeExtensions.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCodeExtensions.cs new file mode 100644 index 000000000..5a5648610 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCodeExtensions.cs @@ -0,0 +1,19 @@ +namespace StellaOps.Aoc; + +public static class AocViolationCodeExtensions +{ + public static string ToErrorCode(this AocViolationCode code) => code switch + { + AocViolationCode.ForbiddenField => "ERR_AOC_001", + AocViolationCode.MergeAttempt => "ERR_AOC_002", + AocViolationCode.IdempotencyViolation => "ERR_AOC_003", + AocViolationCode.MissingProvenance => "ERR_AOC_004", + AocViolationCode.SignatureInvalid => "ERR_AOC_005", + AocViolationCode.DerivedFindingDetected => "ERR_AOC_006", + AocViolationCode.UnknownField => "ERR_AOC_007", + AocViolationCode.MissingRequiredField => "ERR_AOC_008", + AocViolationCode.InvalidTenant => "ERR_AOC_009", + AocViolationCode.InvalidSignatureMetadata => "ERR_AOC_010", + _ => "ERR_AOC_000", + }; +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Base64.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Base64.cs new file mode 100644 index 000000000..4b2730728 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Base64.cs @@ -0,0 +1,45 @@ +using System; + +namespace StellaOps.Aoc; + +public sealed partial class AocWriteGuard +{ + private static bool IsBase64Payload(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + if (TryDecodeBase64(value)) + { + return true; + } + + var normalized = value.Replace('-', '+').Replace('_', '/'); + switch (normalized.Length % 4) + { + case 2: + normalized += "=="; + break; + case 3: + normalized += "="; + break; + } + + return TryDecodeBase64(normalized); + } + + private static bool TryDecodeBase64(string value) + { + try + { + Convert.FromBase64String(value); + return true; + } + catch (FormatException) + { + return false; + } + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Content.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Content.cs new file mode 100644 index 000000000..59d39ae74 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Content.cs @@ -0,0 +1,43 @@ +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Aoc; + +public sealed partial class AocWriteGuard +{ + private static void ValidateContent( + JsonElement document, + 
ImmutableArray<AocViolation>.Builder violations) + { + if (document.TryGetProperty("content", out var content) && content.ValueKind == JsonValueKind.Object) + { + if (!content.TryGetProperty("raw", out var raw) || raw.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingProvenance, + "/content/raw", + "Raw upstream payload must be preserved.")); + } + } + else + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingRequiredField, + "/content", + "Content metadata is required.")); + } + } + + private static void ValidateLinkset( + JsonElement document, + ImmutableArray<AocViolation>.Builder violations) + { + if (!document.TryGetProperty("linkset", out var linkset) || linkset.ValueKind != JsonValueKind.Object) + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingRequiredField, + "/linkset", + "Linkset metadata is required.")); + } + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Signature.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Signature.cs new file mode 100644 index 000000000..1387d5506 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Signature.cs @@ -0,0 +1,79 @@ +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Aoc; + +public sealed partial class AocWriteGuard +{ + private static void ValidateSignature( + JsonElement signature, + ImmutableArray<AocViolation>.Builder violations, + AocGuardOptions options) + { + if (!signature.TryGetProperty("present", out var presentElement) || + presentElement.ValueKind is not (JsonValueKind.True or JsonValueKind.False)) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidSignatureMetadata, + "/upstream/signature/present", + "Signature metadata must include 'present' boolean.")); + return; + } + + var signaturePresent = presentElement.GetBoolean(); + + if (!signaturePresent) + { + return; + } + + if (!signature.TryGetProperty("format", out var formatElement) || + formatElement.ValueKind != JsonValueKind.String || + string.IsNullOrWhiteSpace(formatElement.GetString())) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidSignatureMetadata, + "/upstream/signature/format", + "Signature format is required when signature is present.")); + } + else + { + var format = formatElement.GetString()!.Trim(); + if (options.AllowedSignatureFormats.Count > 0 && + !options.AllowedSignatureFormats.Contains(format)) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidSignatureMetadata, + "/upstream/signature/format", + $"Signature format '{format}' is not permitted.")); + } + } + + if (!signature.TryGetProperty("sig", out var sigElement) || + sigElement.ValueKind != JsonValueKind.String || + string.IsNullOrWhiteSpace(sigElement.GetString())) + { + violations.Add(AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/signature/sig", + "Signature payload is required when signature is present.")); + } + else if (!IsBase64Payload(sigElement.GetString()!)) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidSignatureMetadata, + "/upstream/signature/sig", + "Signature payload must be base64 or base64url encoded.")); + } + + if (!signature.TryGetProperty("key_id", out var keyIdElement) || + keyIdElement.ValueKind != JsonValueKind.String || + string.IsNullOrWhiteSpace(keyIdElement.GetString())) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidSignatureMetadata, + "/upstream/signature/key_id", + "Signature key identifier is required when 
signature is present.")); + } + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.TopLevel.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.TopLevel.cs new file mode 100644 index 000000000..63500c109 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.TopLevel.cs @@ -0,0 +1,87 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; + +namespace StellaOps.Aoc; + +public sealed partial class AocWriteGuard +{ + private static void ValidateTopLevelFields( + JsonElement document, + IEnumerable<string> allowedTopLevelFields, + ImmutableArray<AocViolation>.Builder violations) + { + foreach (var property in document.EnumerateObject()) + { + if (AocForbiddenKeys.IsForbiddenTopLevel(property.Name)) + { + violations.Add(AocViolation.Create( + AocViolationCode.ForbiddenField, + $"/{property.Name}", + $"Field '{property.Name}' is forbidden in AOC documents.")); + continue; + } + + if (AocForbiddenKeys.IsDerivedField(property.Name)) + { + violations.Add(AocViolation.Create( + AocViolationCode.DerivedFindingDetected, + $"/{property.Name}", + $"Derived field '{property.Name}' must not be written during ingestion.")); + } + + if (!allowedTopLevelFields.Contains(property.Name)) + { + violations.Add(AocViolation.Create( + AocViolationCode.UnknownField, + $"/{property.Name}", + $"Field '{property.Name}' is not allowed in AOC documents.")); + } + } + } + + private static void ValidateRequiredFields( + JsonElement document, + IEnumerable<string> requiredTopLevelFields, + AocGuardOptions options, + ImmutableArray<AocViolation>.Builder violations) + { + foreach (var required in requiredTopLevelFields.OrderBy(name => name, StringComparer.OrdinalIgnoreCase)) + { + if (options.RequireTenant && string.Equals(required, "tenant", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingRequiredField, + $"/{required}", + $"Required field '{required}' is missing.")); + } + } + } + + private static void ValidateTenant( + JsonElement document, + AocGuardOptions options, + ImmutableArray<AocViolation>.Builder violations) + { + if (!options.RequireTenant) + { + return; + } + + if (!document.TryGetProperty("tenant", out var tenantElement) || + tenantElement.ValueKind != JsonValueKind.String || + string.IsNullOrWhiteSpace(tenantElement.GetString())) + { + violations.Add(AocViolation.Create( + AocViolationCode.InvalidTenant, + "/tenant", + "Tenant must be a non-empty string.")); + } + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Upstream.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Upstream.cs new file mode 100644 index 000000000..5385d9f98 --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.Upstream.cs @@ -0,0 +1,48 @@ +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Aoc; + +public sealed partial class AocWriteGuard +{ + private static void ValidateUpstream( + JsonElement document, + AocGuardOptions options, + ImmutableArray<AocViolation>.Builder violations) + { + if (document.TryGetProperty("upstream", out var upstream) && upstream.ValueKind == JsonValueKind.Object) + { + if (!upstream.TryGetProperty("content_hash", out var contentHash) || + contentHash.ValueKind != JsonValueKind.String || + string.IsNullOrWhiteSpace(contentHash.GetString())) + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingProvenance, 
"/upstream/content_hash", + "Upstream content hash is required.")); + } + + if (!upstream.TryGetProperty("signature", out var signature) || signature.ValueKind != JsonValueKind.Object) + { + if (options.RequireSignatureMetadata) + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingProvenance, + "/upstream/signature", + "Signature metadata is required.")); + } + } + else if (options.RequireSignatureMetadata) + { + ValidateSignature(signature, violations, options); + } + } + else + { + violations.Add(AocViolation.Create( + AocViolationCode.MissingRequiredField, + "/upstream", + "Upstream metadata is required.")); + } + } +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs index d247a1cec..30c5a5480 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs @@ -1,16 +1,10 @@ -using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using System.Text.Json; namespace StellaOps.Aoc; -public interface IAocGuard -{ - AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null); -} - -public sealed class AocWriteGuard : IAocGuard +public sealed partial class AocWriteGuard : IAocGuard { public AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null) { @@ -20,174 +14,13 @@ public sealed class AocWriteGuard : IAocGuard var allowedTopLevelFields = (options.AllowedTopLevelFields ?? AocGuardOptions.Default.AllowedTopLevelFields) .Union(requiredTopLevelFields); - foreach (var property in document.EnumerateObject()) - { - if (AocForbiddenKeys.IsForbiddenTopLevel(property.Name)) - { - violations.Add(AocViolation.Create(AocViolationCode.ForbiddenField, $"/{property.Name}", $"Field '{property.Name}' is forbidden in AOC documents.")); - continue; - } - - if (AocForbiddenKeys.IsDerivedField(property.Name)) - { - violations.Add(AocViolation.Create(AocViolationCode.DerivedFindingDetected, $"/{property.Name}", $"Derived field '{property.Name}' must not be written during ingestion.")); - } - - if (!allowedTopLevelFields.Contains(property.Name)) - { - violations.Add(AocViolation.Create(AocViolationCode.UnknownField, $"/{property.Name}", $"Field '{property.Name}' is not allowed in AOC documents.")); - continue; - } - } - - foreach (var required in requiredTopLevelFields.OrderBy(name => name, StringComparer.OrdinalIgnoreCase)) - { - if (options.RequireTenant && string.Equals(required, "tenant", StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, $"/{required}", $"Required field '{required}' is missing.")); - continue; - } - } - - if (options.RequireTenant) - { - if (!document.TryGetProperty("tenant", out var tenantElement) || - tenantElement.ValueKind != JsonValueKind.String || - string.IsNullOrWhiteSpace(tenantElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidTenant, "/tenant", "Tenant must be a non-empty string.")); - } - } - - if (document.TryGetProperty("upstream", out var upstream) && upstream.ValueKind == JsonValueKind.Object) - { - if (!upstream.TryGetProperty("content_hash", out var contentHash) || contentHash.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(contentHash.GetString())) - { - 
violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/content_hash", "Upstream content hash is required.")); - } - - if (!upstream.TryGetProperty("signature", out var signature) || signature.ValueKind != JsonValueKind.Object) - { - if (options.RequireSignatureMetadata) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/signature", "Signature metadata is required.")); - } - } - else if (options.RequireSignatureMetadata) - { - ValidateSignature(signature, violations, options); - } - } - else - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/upstream", "Upstream metadata is required.")); - } - - if (document.TryGetProperty("content", out var content) && content.ValueKind == JsonValueKind.Object) - { - if (!content.TryGetProperty("raw", out var raw) || raw.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/content/raw", "Raw upstream payload must be preserved.")); - } - } - else - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/content", "Content metadata is required.")); - } - - if (!document.TryGetProperty("linkset", out var linkset) || linkset.ValueKind != JsonValueKind.Object) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/linkset", "Linkset metadata is required.")); - } + ValidateTopLevelFields(document, allowedTopLevelFields, violations); + ValidateRequiredFields(document, requiredTopLevelFields, options, violations); + ValidateTenant(document, options, violations); + ValidateUpstream(document, options, violations); + ValidateContent(document, violations); + ValidateLinkset(document, violations); return AocGuardResult.FromViolations(violations); } - - private static void ValidateSignature(JsonElement signature, ImmutableArray.Builder violations, AocGuardOptions options) - { - if (!signature.TryGetProperty("present", out var presentElement) || presentElement.ValueKind is not (JsonValueKind.True or JsonValueKind.False)) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/present", "Signature metadata must include 'present' boolean.")); - return; - } - - var signaturePresent = presentElement.GetBoolean(); - - if (!signaturePresent) - { - return; - } - - if (!signature.TryGetProperty("format", out var formatElement) || formatElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(formatElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/format", "Signature format is required when signature is present.")); - } - else - { - var format = formatElement.GetString()!.Trim(); - if (options.AllowedSignatureFormats.Count > 0 && - !options.AllowedSignatureFormats.Contains(format)) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/format", $"Signature format '{format}' is not permitted.")); - } - } - - if (!signature.TryGetProperty("sig", out var sigElement) || sigElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(sigElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.SignatureInvalid, "/upstream/signature/sig", "Signature payload is required when signature is present.")); - } - else if (!IsBase64Payload(sigElement.GetString()!)) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, 
"/upstream/signature/sig", "Signature payload must be base64 or base64url encoded.")); - } - - if (!signature.TryGetProperty("key_id", out var keyIdElement) || keyIdElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(keyIdElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/key_id", "Signature key identifier is required when signature is present.")); - } - } - - private static bool IsBase64Payload(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - if (TryDecodeBase64(value)) - { - return true; - } - - var normalized = value.Replace('-', '+').Replace('_', '/'); - switch (normalized.Length % 4) - { - case 2: - normalized += "=="; - break; - case 3: - normalized += "="; - break; - } - - return TryDecodeBase64(normalized); - } - - private static bool TryDecodeBase64(string value) - { - try - { - Convert.FromBase64String(value); - return true; - } - catch (FormatException) - { - return false; - } - } } diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/IAocGuard.cs b/src/Aoc/__Libraries/StellaOps.Aoc/IAocGuard.cs new file mode 100644 index 000000000..4d8086d0a --- /dev/null +++ b/src/Aoc/__Libraries/StellaOps.Aoc/IAocGuard.cs @@ -0,0 +1,8 @@ +using System.Text.Json; + +namespace StellaOps.Aoc; + +public interface IAocGuard +{ + AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null); +} diff --git a/src/Aoc/__Libraries/StellaOps.Aoc/TASKS.md b/src/Aoc/__Libraries/StellaOps.Aoc/TASKS.md index 9312a6846..6d773907d 100644 --- a/src/Aoc/__Libraries/StellaOps.Aoc/TASKS.md +++ b/src/Aoc/__Libraries/StellaOps.Aoc/TASKS.md @@ -8,3 +8,5 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0036-M | DONE | Revalidated maintainability for StellaOps.Aoc (2026-01-06). | | AUDIT-0036-T | DONE | Revalidated test coverage for StellaOps.Aoc (2026-01-06). | | AUDIT-0036-A | DONE | Applied error code fixes, deterministic ordering, and guard validation hardening. | +| REMED-06 | DONE | SOLID review notes refreshed 2026-02-04. | +| REMED-08 | DONE | Private field naming fixed; blocking async removed; IAocGuard extracted; AocWriteGuard and violation code mapping split into partials/files; `dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj` passed (11 tests) 2026-02-04. 
| diff --git a/src/Aoc/__Tests/StellaOps.Aoc.AspNetCore.Tests/AocGuardEndpointFilterTests.cs b/src/Aoc/__Tests/StellaOps.Aoc.AspNetCore.Tests/AocGuardEndpointFilterTests.cs index c5cea0f94..498d7eda4 100644 --- a/src/Aoc/__Tests/StellaOps.Aoc.AspNetCore.Tests/AocGuardEndpointFilterTests.cs +++ b/src/Aoc/__Tests/StellaOps.Aoc.AspNetCore.Tests/AocGuardEndpointFilterTests.cs @@ -4,7 +4,7 @@ using System.IO; using System.Text.Json; using System.Threading.Tasks; using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; using StellaOps.Aoc; using StellaOps.Aoc.AspNetCore.Routing; using StellaOps.TestKit; @@ -14,11 +14,18 @@ namespace StellaOps.Aoc.AspNetCore.Tests; public sealed class AocGuardEndpointFilterTests { [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public async Task ReturnsProblem_WhenRequestMissing() { - var httpContext = BuildHttpContext(new TestAocGuard()); - var filter = new AocGuardEndpointFilter(_ => Array.Empty(), null, null); + var guard = new TestAocGuard(); + var httpContext = BuildHttpContext(); + var filter = new AocGuardEndpointFilter( + guard, + NullLogger>.Instance, + _ => Array.Empty(), + null, + null, + null); var context = new TestEndpointFilterInvocationContext(httpContext, Array.Empty()); var result = await filter.InvokeAsync(context, _ => new ValueTask(TypedResults.Ok())); @@ -28,11 +35,18 @@ public sealed class AocGuardEndpointFilterTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public async Task ReturnsProblem_WhenPayloadSelectorThrows() { - var httpContext = BuildHttpContext(new TestAocGuard()); - var filter = new AocGuardEndpointFilter(_ => throw new InvalidOperationException("boom"), null, null); + var guard = new TestAocGuard(); + var httpContext = BuildHttpContext(); + var filter = new AocGuardEndpointFilter( + guard, + NullLogger>.Instance, + _ => throw new InvalidOperationException("boom"), + null, + null, + null); var context = new TestEndpointFilterInvocationContext(httpContext, new object?[] { new GuardPayload(new JsonElement()) }); var result = await filter.InvokeAsync(context, _ => new ValueTask(TypedResults.Ok())); @@ -42,11 +56,18 @@ public sealed class AocGuardEndpointFilterTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public async Task ReturnsProblem_WhenSerializationFails() { - var httpContext = BuildHttpContext(new TestAocGuard()); - var filter = new AocGuardEndpointFilter(_ => new object?[] { new SelfReferencingPayload() }, null, null); + var guard = new TestAocGuard(); + var httpContext = BuildHttpContext(); + var filter = new AocGuardEndpointFilter( + guard, + NullLogger>.Instance, + _ => new object?[] { new SelfReferencingPayload() }, + null, + null, + null); var context = new TestEndpointFilterInvocationContext(httpContext, new object?[] { new GuardPayload(new JsonElement()) }); var result = await filter.InvokeAsync(context, _ => new ValueTask(TypedResults.Ok())); @@ -56,16 +77,22 @@ public sealed class AocGuardEndpointFilterTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public async Task ValidatesJsonDocumentPayloads() { var guard = new TestAocGuard(); - var httpContext = BuildHttpContext(guard); - var filter = new AocGuardEndpointFilter(_ => - { - using var doc = JsonDocument.Parse("""{"tenant":"default","source":{},"upstream":{"content_hash":"sha256:abc","signature":{"present":false}},"content":{"raw":{}},"linkset":{}}"""); - return new object?[] { 
JsonDocument.Parse(doc.RootElement.GetRawText()) }; - }, null, null); + var httpContext = BuildHttpContext(); + var filter = new AocGuardEndpointFilter( + guard, + NullLogger>.Instance, + _ => + { + using var doc = JsonDocument.Parse("""{"tenant":"default","source":{},"upstream":{"content_hash":"sha256:abc","signature":{"present":false}},"content":{"raw":{}},"linkset":{}}"""); + return new object?[] { JsonDocument.Parse(doc.RootElement.GetRawText()) }; + }, + null, + null, + null); var context = new TestEndpointFilterInvocationContext(httpContext, new object?[] { new GuardPayload(new JsonElement()) }); var result = await filter.InvokeAsync(context, _ => new ValueTask(TypedResults.Ok())); @@ -75,18 +102,19 @@ public sealed class AocGuardEndpointFilterTests Assert.True(guard.WasValidated); } - private static DefaultHttpContext BuildHttpContext(IAocGuard guard) + private static DefaultHttpContext BuildHttpContext() { - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(guard); - var provider = services.BuildServiceProvider(); - - return new DefaultHttpContext { RequestServices = provider, Response = { Body = new MemoryStream() } }; + return new DefaultHttpContext { Response = { Body = new MemoryStream() } }; } private static async Task ExecuteAsync(object? result, HttpContext context) { + if (result is IStatusCodeHttpResult statusResult) + { + context.Response.StatusCode = statusResult.StatusCode ?? StatusCodes.Status200OK; + return context.Response.StatusCode; + } + if (result is IResult httpResult) { await httpResult.ExecuteAsync(context); diff --git a/src/Attestor/StellaOps.Attestor.TileProxy/Endpoints/TileEndpoints.cs b/src/Attestor/StellaOps.Attestor.TileProxy/Endpoints/TileEndpoints.cs index 1dc8b6173..4e30cdd1c 100644 --- a/src/Attestor/StellaOps.Attestor.TileProxy/Endpoints/TileEndpoints.cs +++ b/src/Attestor/StellaOps.Attestor.TileProxy/Endpoints/TileEndpoints.cs @@ -188,8 +188,9 @@ public static class TileEndpoints private static IResult TriggerSync( [FromServices] IServiceProvider services, - [FromServices] ILogger logger) + [FromServices] ILoggerFactory loggerFactory) { + var logger = loggerFactory.CreateLogger("TileEndpoints"); // TODO: Trigger background sync job logger.LogInformation("Manual sync triggered"); @@ -281,7 +282,3 @@ public sealed record HealthResponse public DateTimeOffset Timestamp { get; init; } } -// Logger class for endpoint logging -file static class TileEndpoints -{ -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/TASKS.md b/src/Authority/__Libraries/StellaOps.Authority.Core/TASKS.md index 1e3396ac1..63ce2187c 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/TASKS.md +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/TASKS.md @@ -8,4 +8,5 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0086-M | DONE | Revalidated 2026-01-06. | | AUDIT-0086-T | DONE | Revalidated 2026-01-06 (coverage reviewed). | | AUDIT-0086-A | TODO | Reopened 2026-01-06: remove Guid.NewGuid default and switch digest to canonical JSON. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Authority/__Libraries/StellaOps.Authority.Core/StellaOps.Authority.Core.md. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. 
| diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictEvaluator.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictEvaluator.cs new file mode 100644 index 000000000..dd6e5bbad --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictEvaluator.cs @@ -0,0 +1,27 @@ +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// Provides verdict evaluation capability for replay verification. +/// +public interface IVerdictEvaluator +{ + /// + /// Evaluate a verdict using the specified inputs and policy context. + /// + /// Tenant identifier. + /// Asset being evaluated. + /// Vulnerability being evaluated. + /// Pinned inputs for evaluation. + /// Policy hash to use. + /// Lattice version to use. + /// Cancellation token. + /// Verdict result. + Task EvaluateAsync( + string tenant, + string assetDigest, + string vulnerabilityId, + VerdictInputs inputs, + string policyHash, + string latticeVersion, + CancellationToken ct = default); +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictManifestStore.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictManifestStore.cs index 10d21199f..48477b2ea 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictManifestStore.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictManifestStore.cs @@ -1,5 +1,3 @@ -using System.Collections.Immutable; - namespace StellaOps.Authority.Core.Verdicts; /// @@ -85,18 +83,3 @@ public interface IVerdictManifestStore /// True if deleted, false if not found. Task DeleteAsync(string tenant, string manifestId, CancellationToken ct = default); } - -/// -/// Paginated result for manifest list queries. -/// -public sealed record VerdictManifestPage -{ - /// Manifests in this page. - public required ImmutableArray Manifests { get; init; } - - /// Token for retrieving the next page, or null if no more pages. - public string? NextPageToken { get; init; } - - /// Total count if available. - public int? TotalCount { get; init; } -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictReplayVerifier.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictReplayVerifier.cs new file mode 100644 index 000000000..f7e0fa8f7 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/IVerdictReplayVerifier.cs @@ -0,0 +1,23 @@ +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// Interface for replaying verdicts to verify determinism. +/// +public interface IVerdictReplayVerifier +{ + /// + /// Verify that a verdict can be replayed to produce identical results. + /// + /// Manifest ID to verify. + /// Cancellation token. + /// Verification result with differences if any. + Task VerifyAsync(string manifestId, CancellationToken ct = default); + + /// + /// Verify that a verdict can be replayed to produce identical results. + /// + /// Manifest to verify. + /// Cancellation token. + /// Verification result with differences if any. 
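// Illustrative stub (not part of the diff): the kind of IVerdictEvaluator a replay test might supply.
// It assumes EvaluateAsync returns Task<VerdictResult> (the generic argument is not visible in this
// hunk) and ignores the pinned inputs, replaying a pre-baked result.
internal sealed class FixedVerdictEvaluator : IVerdictEvaluator
{
    private readonly VerdictResult _result;

    public FixedVerdictEvaluator(VerdictResult result) => _result = result;

    public Task<VerdictResult> EvaluateAsync(
        string tenant,
        string assetDigest,
        string vulnerabilityId,
        VerdictInputs inputs,
        string policyHash,
        string latticeVersion,
        CancellationToken ct = default)
        => Task.FromResult(_result);
}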
+ Task VerifyAsync(VerdictManifest manifest, CancellationToken ct = default); +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Diagnostics.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Diagnostics.cs new file mode 100644 index 000000000..5f38873e8 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Diagnostics.cs @@ -0,0 +1,23 @@ +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class InMemoryVerdictManifestStore +{ + public Task DeleteAsync(string tenant, string manifestId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentException.ThrowIfNullOrWhiteSpace(manifestId); + + var key = (tenant, manifestId); + return Task.FromResult(_manifests.TryRemove(key, out _)); + } + + /// + /// Clear all stored manifests (for testing). + /// + public void Clear() => _manifests.Clear(); + + /// + /// Get count of stored manifests (for testing). + /// + public int Count => _manifests.Count; +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Paging.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Paging.cs new file mode 100644 index 000000000..3ac5b3b81 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.Paging.cs @@ -0,0 +1,78 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class InMemoryVerdictManifestStore +{ + public Task ListByPolicyAsync( + string tenant, + string policyHash, + string latticeVersion, + int limit = 100, + string? pageToken = null, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentException.ThrowIfNullOrWhiteSpace(policyHash); + ArgumentException.ThrowIfNullOrWhiteSpace(latticeVersion); + + var offset = 0; + if (!string.IsNullOrWhiteSpace(pageToken) && int.TryParse(pageToken, out var parsed)) + { + offset = parsed; + } + + var query = _manifests.Values + .Where(m => m.Tenant == tenant + && m.PolicyHash == policyHash + && m.LatticeVersion == latticeVersion) + .OrderByDescending(m => m.EvaluatedAt) + .ThenBy(m => m.ManifestId, StringComparer.Ordinal) + .Skip(offset) + .Take(limit + 1) + .ToList(); + + var hasMore = query.Count > limit; + var manifests = query.Take(limit).ToImmutableArray(); + + return Task.FromResult(new VerdictManifestPage + { + Manifests = manifests, + NextPageToken = hasMore ? (offset + limit).ToString() : null, + }); + } + + public Task ListByAssetAsync( + string tenant, + string assetDigest, + int limit = 100, + string? pageToken = null, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest); + + var offset = 0; + if (!string.IsNullOrWhiteSpace(pageToken) && int.TryParse(pageToken, out var parsed)) + { + offset = parsed; + } + + var query = _manifests.Values + .Where(m => m.Tenant == tenant && m.AssetDigest == assetDigest) + .OrderByDescending(m => m.EvaluatedAt) + .ThenBy(m => m.ManifestId, StringComparer.Ordinal) + .Skip(offset) + .Take(limit + 1) + .ToList(); + + var hasMore = query.Count > limit; + var manifests = query.Take(limit).ToImmutableArray(); + + return Task.FromResult(new VerdictManifestPage + { + Manifests = manifests, + NextPageToken = hasMore ? 
(offset + limit).ToString() : null, + }); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.cs index f80d05e52..c4c629e16 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/InMemoryVerdictManifestStore.cs @@ -1,12 +1,11 @@ using System.Collections.Concurrent; -using System.Collections.Immutable; namespace StellaOps.Authority.Core.Verdicts; /// /// In-memory implementation of verdict manifest store for testing and development. /// -public sealed class InMemoryVerdictManifestStore : IVerdictManifestStore +public sealed partial class InMemoryVerdictManifestStore : IVerdictManifestStore { private readonly ConcurrentDictionary<(string Tenant, string ManifestId), VerdictManifest> _manifests = new(); @@ -19,7 +18,10 @@ public sealed class InMemoryVerdictManifestStore : IVerdictManifestStore return Task.FromResult(manifest); } - public Task GetByIdAsync(string tenant, string manifestId, CancellationToken ct = default) + public Task GetByIdAsync( + string tenant, + string manifestId, + CancellationToken ct = default) { ArgumentException.ThrowIfNullOrWhiteSpace(tenant); ArgumentException.ThrowIfNullOrWhiteSpace(manifestId); @@ -61,95 +63,4 @@ public sealed class InMemoryVerdictManifestStore : IVerdictManifestStore return Task.FromResult(latest); } - - public Task ListByPolicyAsync( - string tenant, - string policyHash, - string latticeVersion, - int limit = 100, - string? pageToken = null, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentException.ThrowIfNullOrWhiteSpace(policyHash); - ArgumentException.ThrowIfNullOrWhiteSpace(latticeVersion); - - var offset = 0; - if (!string.IsNullOrWhiteSpace(pageToken) && int.TryParse(pageToken, out var parsed)) - { - offset = parsed; - } - - var query = _manifests.Values - .Where(m => m.Tenant == tenant - && m.PolicyHash == policyHash - && m.LatticeVersion == latticeVersion) - .OrderByDescending(m => m.EvaluatedAt) - .ThenBy(m => m.ManifestId, StringComparer.Ordinal) - .Skip(offset) - .Take(limit + 1) - .ToList(); - - var hasMore = query.Count > limit; - var manifests = query.Take(limit).ToImmutableArray(); - - return Task.FromResult(new VerdictManifestPage - { - Manifests = manifests, - NextPageToken = hasMore ? (offset + limit).ToString() : null, - }); - } - - public Task ListByAssetAsync( - string tenant, - string assetDigest, - int limit = 100, - string? pageToken = null, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest); - - var offset = 0; - if (!string.IsNullOrWhiteSpace(pageToken) && int.TryParse(pageToken, out var parsed)) - { - offset = parsed; - } - - var query = _manifests.Values - .Where(m => m.Tenant == tenant && m.AssetDigest == assetDigest) - .OrderByDescending(m => m.EvaluatedAt) - .ThenBy(m => m.ManifestId, StringComparer.Ordinal) - .Skip(offset) - .Take(limit + 1) - .ToList(); - - var hasMore = query.Count > limit; - var manifests = query.Take(limit).ToImmutableArray(); - - return Task.FromResult(new VerdictManifestPage - { - Manifests = manifests, - NextPageToken = hasMore ? 
(offset + limit).ToString() : null, - }); - } - - public Task DeleteAsync(string tenant, string manifestId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentException.ThrowIfNullOrWhiteSpace(manifestId); - - var key = (tenant, manifestId); - return Task.FromResult(_manifests.TryRemove(key, out _)); - } - - /// - /// Clear all stored manifests (for testing). - /// - public void Clear() => _manifests.Clear(); - - /// - /// Get count of stored manifests (for testing). - /// - public int Count => _manifests.Count; } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/ReplayVerificationResult.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/ReplayVerificationResult.cs new file mode 100644 index 000000000..df9a10185 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/ReplayVerificationResult.cs @@ -0,0 +1,30 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// Result of replay verification. +/// +public sealed record ReplayVerificationResult +{ + /// True if replay produced identical results. + public required bool Success { get; init; } + + /// The original manifest being verified. + public required VerdictManifest OriginalManifest { get; init; } + + /// The manifest produced by replay (if successful). + public VerdictManifest? ReplayedManifest { get; init; } + + /// List of differences between original and replayed manifests. + public ImmutableArray? Differences { get; init; } + + /// True if signature verification passed. + public bool SignatureValid { get; init; } + + /// Error message if replay failed. + public string? Error { get; init; } + + /// Duration of the replay operation. + public TimeSpan? ReplayDuration { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictExplanation.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictExplanation.cs new file mode 100644 index 000000000..0888a435b --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictExplanation.cs @@ -0,0 +1,37 @@ +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// Explanation of how a single VEX source contributed to the verdict. +/// +public sealed record VerdictExplanation +{ + /// Identifier of the VEX source. + public required string SourceId { get; init; } + + /// Human-readable reason for this contribution. + public required string Reason { get; init; } + + /// Provenance score component [0, 1]. + public required double ProvenanceScore { get; init; } + + /// Coverage score component [0, 1]. + public required double CoverageScore { get; init; } + + /// Replayability score component [0, 1]. + public required double ReplayabilityScore { get; init; } + + /// Claim strength multiplier. + public required double StrengthMultiplier { get; init; } + + /// Freshness decay multiplier. + public required double FreshnessMultiplier { get; init; } + + /// Final computed claim score. + public required double ClaimScore { get; init; } + + /// VEX status this source asserted. + public required VexStatus AssertedStatus { get; init; } + + /// True if this source's claim was accepted as the winner. 
+ public bool Accepted { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictInputs.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictInputs.cs new file mode 100644 index 000000000..1f23e244a --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictInputs.cs @@ -0,0 +1,24 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// All inputs required to replay a verdict deterministically. +/// +public sealed record VerdictInputs +{ + /// SBOM digests used in evaluation. + public required ImmutableArray SbomDigests { get; init; } + + /// Vulnerability feed snapshot identifiers. + public required ImmutableArray VulnFeedSnapshotIds { get; init; } + + /// VEX document digests considered. + public required ImmutableArray VexDocumentDigests { get; init; } + + /// Reachability graph IDs if reachability analysis was used. + public required ImmutableArray ReachabilityGraphIds { get; init; } + + /// Clock cutoff for deterministic time-based evaluation. + public required DateTimeOffset ClockCutoff { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifest.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifest.cs index 9d841b3f3..c0abb580e 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifest.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifest.cs @@ -1,29 +1,5 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; - namespace StellaOps.Authority.Core.Verdicts; -/// -/// VEX verdict status enumeration per OpenVEX specification. -/// -public enum VexStatus -{ - [JsonPropertyName("affected")] - Affected, - - [JsonPropertyName("not_affected")] - NotAffected, - - [JsonPropertyName("fixed")] - Fixed, - - [JsonPropertyName("under_investigation")] - UnderInvestigation, -} - /// /// Captures all inputs and outputs of a VEX verdict for deterministic replay. /// @@ -65,135 +41,3 @@ public sealed record VerdictManifest /// Optional Rekor transparency log ID. public string? RekorLogId { get; init; } } - -/// -/// All inputs required to replay a verdict deterministically. -/// -public sealed record VerdictInputs -{ - /// SBOM digests used in evaluation. - public required ImmutableArray SbomDigests { get; init; } - - /// Vulnerability feed snapshot identifiers. - public required ImmutableArray VulnFeedSnapshotIds { get; init; } - - /// VEX document digests considered. - public required ImmutableArray VexDocumentDigests { get; init; } - - /// Reachability graph IDs if reachability analysis was used. - public required ImmutableArray ReachabilityGraphIds { get; init; } - - /// Clock cutoff for deterministic time-based evaluation. - public required DateTimeOffset ClockCutoff { get; init; } -} - -/// -/// The computed verdict result with confidence and explanations. -/// -public sealed record VerdictResult -{ - /// Final VEX status determination. - public required VexStatus Status { get; init; } - - /// Confidence score [0, 1]. - public required double Confidence { get; init; } - - /// Detailed explanations from contributing VEX sources. - public required ImmutableArray Explanations { get; init; } - - /// References to supporting evidence. 
- public required ImmutableArray EvidenceRefs { get; init; } - - /// True if conflicting claims were detected. - public bool HasConflicts { get; init; } - - /// True if reachability proof was required and present. - public bool RequiresReplayProof { get; init; } -} - -/// -/// Explanation of how a single VEX source contributed to the verdict. -/// -public sealed record VerdictExplanation -{ - /// Identifier of the VEX source. - public required string SourceId { get; init; } - - /// Human-readable reason for this contribution. - public required string Reason { get; init; } - - /// Provenance score component [0, 1]. - public required double ProvenanceScore { get; init; } - - /// Coverage score component [0, 1]. - public required double CoverageScore { get; init; } - - /// Replayability score component [0, 1]. - public required double ReplayabilityScore { get; init; } - - /// Claim strength multiplier. - public required double StrengthMultiplier { get; init; } - - /// Freshness decay multiplier. - public required double FreshnessMultiplier { get; init; } - - /// Final computed claim score. - public required double ClaimScore { get; init; } - - /// VEX status this source asserted. - public required VexStatus AssertedStatus { get; init; } - - /// True if this source's claim was accepted as the winner. - public bool Accepted { get; init; } -} - -/// -/// Serialization helper for canonical JSON output. -/// -public static class VerdictManifestSerializer -{ - private static readonly JsonSerializerOptions s_options = new() - { - PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, - WriteIndented = false, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, - }; - - /// - /// Serialize manifest to deterministic JSON (stable naming policy, no indentation). - /// - public static string Serialize(VerdictManifest manifest) - { - ArgumentNullException.ThrowIfNull(manifest); - return JsonSerializer.Serialize(manifest, s_options); - } - - /// - /// Deserialize from JSON. - /// - public static VerdictManifest? Deserialize(string json) - { - if (string.IsNullOrWhiteSpace(json)) - { - return null; - } - - return JsonSerializer.Deserialize(json, s_options); - } - - /// - /// Compute SHA256 digest of the canonical JSON representation. 
- /// - public static string ComputeDigest(VerdictManifest manifest) - { - ArgumentNullException.ThrowIfNull(manifest); - - // Create a copy without the digest field for hashing - var forHashing = manifest with { ManifestDigest = string.Empty, SignatureBase64 = null, RekorLogId = null }; - var json = Serialize(forHashing); - var bytes = Encoding.UTF8.GetBytes(json); - var hash = SHA256.HashData(bytes); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Build.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Build.cs new file mode 100644 index 000000000..e70aa3701 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Build.cs @@ -0,0 +1,72 @@ +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictManifestBuilder +{ + public VerdictManifest Build() + { + Validate(); + + var manifestId = _idGenerator(); + var manifest = new VerdictManifest + { + ManifestId = manifestId, + Tenant = _tenant!, + AssetDigest = _assetDigest!, + VulnerabilityId = _vulnerabilityId!, + Inputs = _inputs!, + Result = _result!, + PolicyHash = _policyHash!, + LatticeVersion = _latticeVersion!, + EvaluatedAt = _evaluatedAt, + ManifestDigest = string.Empty, // Will be computed + }; + + var digest = VerdictManifestSerializer.ComputeDigest(manifest); + return manifest with { ManifestDigest = digest }; + } + + private void Validate() + { + var errors = new List(); + + if (string.IsNullOrWhiteSpace(_tenant)) + { + errors.Add("Tenant is required."); + } + + if (string.IsNullOrWhiteSpace(_assetDigest)) + { + errors.Add("Asset digest is required."); + } + + if (string.IsNullOrWhiteSpace(_vulnerabilityId)) + { + errors.Add("Vulnerability ID is required."); + } + + if (_inputs is null) + { + errors.Add("Inputs are required."); + } + + if (_result is null) + { + errors.Add("Result is required."); + } + + if (string.IsNullOrWhiteSpace(_policyHash)) + { + errors.Add("Policy hash is required."); + } + + if (string.IsNullOrWhiteSpace(_latticeVersion)) + { + errors.Add("Lattice version is required."); + } + + if (errors.Count > 0) + { + throw new InvalidOperationException($"VerdictManifest validation failed: {string.Join("; ", errors)}"); + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Core.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Core.cs new file mode 100644 index 000000000..fbe92c437 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Core.cs @@ -0,0 +1,38 @@ +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictManifestBuilder +{ + public VerdictManifestBuilder WithTenant(string tenant) + { + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new ArgumentException("Tenant must be provided.", nameof(tenant)); + } + + _tenant = tenant.Trim(); + return this; + } + + public VerdictManifestBuilder WithAsset(string assetDigest, string vulnerabilityId) + { + if (string.IsNullOrWhiteSpace(assetDigest)) + { + throw new ArgumentException("Asset digest must be provided.", nameof(assetDigest)); + } + + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId)); + } + + _assetDigest = assetDigest.Trim(); + _vulnerabilityId = 
vulnerabilityId.Trim().ToUpperInvariant(); + return this; + } + + public VerdictManifestBuilder WithInputs(VerdictInputs inputs) + { + _inputs = inputs ?? throw new ArgumentNullException(nameof(inputs)); + return this; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Inputs.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Inputs.cs new file mode 100644 index 000000000..b4fb53535 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Inputs.cs @@ -0,0 +1,32 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictManifestBuilder +{ + public VerdictManifestBuilder WithInputs( + IEnumerable sbomDigests, + IEnumerable vulnFeedSnapshotIds, + IEnumerable vexDocumentDigests, + IEnumerable? reachabilityGraphIds = null, + DateTimeOffset? clockCutoff = null) + { + _inputs = new VerdictInputs + { + SbomDigests = SortedImmutable(sbomDigests), + VulnFeedSnapshotIds = SortedImmutable(vulnFeedSnapshotIds), + VexDocumentDigests = SortedImmutable(vexDocumentDigests), + ReachabilityGraphIds = SortedImmutable(reachabilityGraphIds ?? Enumerable.Empty()), + ClockCutoff = clockCutoff ?? _timeProvider.GetUtcNow(), + }; + return this; + } + + private static ImmutableArray SortedImmutable(IEnumerable items) + => items + .Where(s => !string.IsNullOrWhiteSpace(s)) + .Select(s => s.Trim()) + .OrderBy(s => s, StringComparer.Ordinal) + .Distinct(StringComparer.Ordinal) + .ToImmutableArray(); +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Policy.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Policy.cs new file mode 100644 index 000000000..4ec8c6aa9 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Policy.cs @@ -0,0 +1,27 @@ +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictManifestBuilder +{ + public VerdictManifestBuilder WithPolicy(string policyHash, string latticeVersion) + { + if (string.IsNullOrWhiteSpace(policyHash)) + { + throw new ArgumentException("Policy hash must be provided.", nameof(policyHash)); + } + + if (string.IsNullOrWhiteSpace(latticeVersion)) + { + throw new ArgumentException("Lattice version must be provided.", nameof(latticeVersion)); + } + + _policyHash = policyHash.Trim(); + _latticeVersion = latticeVersion.Trim(); + return this; + } + + public VerdictManifestBuilder WithClock(DateTimeOffset evaluatedAt) + { + _evaluatedAt = evaluatedAt.ToUniversalTime(); + return this; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Result.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Result.cs new file mode 100644 index 000000000..191468acd --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.Result.cs @@ -0,0 +1,43 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictManifestBuilder +{ + public VerdictManifestBuilder WithResult(VerdictResult result) + { + _result = result ?? throw new ArgumentNullException(nameof(result)); + return this; + } + + public VerdictManifestBuilder WithResult( + VexStatus status, + double confidence, + IEnumerable explanations, + IEnumerable? 
evidenceRefs = null, + bool hasConflicts = false, + bool requiresReplayProof = false) + { + if (confidence < 0 || confidence > 1) + { + throw new ArgumentOutOfRangeException(nameof(confidence), "Confidence must be between 0 and 1."); + } + + var sortedExplanations = explanations + .OrderByDescending(e => e.ClaimScore) + .ThenByDescending(e => e.ProvenanceScore) + .ThenBy(e => e.SourceId, StringComparer.Ordinal) + .ToImmutableArray(); + + _result = new VerdictResult + { + Status = status, + Confidence = confidence, + Explanations = sortedExplanations, + EvidenceRefs = SortedImmutable(evidenceRefs ?? Enumerable.Empty()), + HasConflicts = hasConflicts, + RequiresReplayProof = requiresReplayProof, + }; + return this; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.cs index b423873cf..61cca6e37 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestBuilder.cs @@ -1,11 +1,9 @@ -using System.Collections.Immutable; - namespace StellaOps.Authority.Core.Verdicts; /// /// Fluent builder for constructing VerdictManifest instances with deterministic ordering. /// -public sealed class VerdictManifestBuilder +public sealed partial class VerdictManifestBuilder { private string? _tenant; private string? _assetDigest; @@ -34,194 +32,4 @@ public sealed class VerdictManifestBuilder _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _evaluatedAt = _timeProvider.GetUtcNow(); } - - public VerdictManifestBuilder WithTenant(string tenant) - { - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new ArgumentException("Tenant must be provided.", nameof(tenant)); - } - - _tenant = tenant.Trim(); - return this; - } - - public VerdictManifestBuilder WithAsset(string assetDigest, string vulnerabilityId) - { - if (string.IsNullOrWhiteSpace(assetDigest)) - { - throw new ArgumentException("Asset digest must be provided.", nameof(assetDigest)); - } - - if (string.IsNullOrWhiteSpace(vulnerabilityId)) - { - throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId)); - } - - _assetDigest = assetDigest.Trim(); - _vulnerabilityId = vulnerabilityId.Trim().ToUpperInvariant(); - return this; - } - - public VerdictManifestBuilder WithInputs(VerdictInputs inputs) - { - _inputs = inputs ?? throw new ArgumentNullException(nameof(inputs)); - return this; - } - - public VerdictManifestBuilder WithInputs( - IEnumerable sbomDigests, - IEnumerable vulnFeedSnapshotIds, - IEnumerable vexDocumentDigests, - IEnumerable? reachabilityGraphIds = null, - DateTimeOffset? clockCutoff = null) - { - _inputs = new VerdictInputs - { - SbomDigests = SortedImmutable(sbomDigests), - VulnFeedSnapshotIds = SortedImmutable(vulnFeedSnapshotIds), - VexDocumentDigests = SortedImmutable(vexDocumentDigests), - ReachabilityGraphIds = SortedImmutable(reachabilityGraphIds ?? Enumerable.Empty()), - ClockCutoff = clockCutoff ?? _timeProvider.GetUtcNow(), - }; - return this; - } - - public VerdictManifestBuilder WithResult(VerdictResult result) - { - _result = result ?? throw new ArgumentNullException(nameof(result)); - return this; - } - - public VerdictManifestBuilder WithResult( - VexStatus status, - double confidence, - IEnumerable explanations, - IEnumerable? 
evidenceRefs = null, - bool hasConflicts = false, - bool requiresReplayProof = false) - { - if (confidence < 0 || confidence > 1) - { - throw new ArgumentOutOfRangeException(nameof(confidence), "Confidence must be between 0 and 1."); - } - - // Sort explanations deterministically by source ID - var sortedExplanations = explanations - .OrderByDescending(e => e.ClaimScore) - .ThenByDescending(e => e.ProvenanceScore) - .ThenBy(e => e.SourceId, StringComparer.Ordinal) - .ToImmutableArray(); - - _result = new VerdictResult - { - Status = status, - Confidence = confidence, - Explanations = sortedExplanations, - EvidenceRefs = SortedImmutable(evidenceRefs ?? Enumerable.Empty()), - HasConflicts = hasConflicts, - RequiresReplayProof = requiresReplayProof, - }; - return this; - } - - public VerdictManifestBuilder WithPolicy(string policyHash, string latticeVersion) - { - if (string.IsNullOrWhiteSpace(policyHash)) - { - throw new ArgumentException("Policy hash must be provided.", nameof(policyHash)); - } - - if (string.IsNullOrWhiteSpace(latticeVersion)) - { - throw new ArgumentException("Lattice version must be provided.", nameof(latticeVersion)); - } - - _policyHash = policyHash.Trim(); - _latticeVersion = latticeVersion.Trim(); - return this; - } - - public VerdictManifestBuilder WithClock(DateTimeOffset evaluatedAt) - { - _evaluatedAt = evaluatedAt.ToUniversalTime(); - return this; - } - - public VerdictManifest Build() - { - Validate(); - - var manifestId = _idGenerator(); - var manifest = new VerdictManifest - { - ManifestId = manifestId, - Tenant = _tenant!, - AssetDigest = _assetDigest!, - VulnerabilityId = _vulnerabilityId!, - Inputs = _inputs!, - Result = _result!, - PolicyHash = _policyHash!, - LatticeVersion = _latticeVersion!, - EvaluatedAt = _evaluatedAt, - ManifestDigest = string.Empty, // Will be computed - }; - - // Compute digest over the complete manifest - var digest = VerdictManifestSerializer.ComputeDigest(manifest); - return manifest with { ManifestDigest = digest }; - } - - private void Validate() - { - var errors = new List(); - - if (string.IsNullOrWhiteSpace(_tenant)) - { - errors.Add("Tenant is required."); - } - - if (string.IsNullOrWhiteSpace(_assetDigest)) - { - errors.Add("Asset digest is required."); - } - - if (string.IsNullOrWhiteSpace(_vulnerabilityId)) - { - errors.Add("Vulnerability ID is required."); - } - - if (_inputs is null) - { - errors.Add("Inputs are required."); - } - - if (_result is null) - { - errors.Add("Result is required."); - } - - if (string.IsNullOrWhiteSpace(_policyHash)) - { - errors.Add("Policy hash is required."); - } - - if (string.IsNullOrWhiteSpace(_latticeVersion)) - { - errors.Add("Lattice version is required."); - } - - if (errors.Count > 0) - { - throw new InvalidOperationException($"VerdictManifest validation failed: {string.Join("; ", errors)}"); - } - } - - private static ImmutableArray SortedImmutable(IEnumerable items) - => items - .Where(s => !string.IsNullOrWhiteSpace(s)) - .Select(s => s.Trim()) - .OrderBy(s => s, StringComparer.Ordinal) - .Distinct(StringComparer.Ordinal) - .ToImmutableArray(); } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestPage.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestPage.cs new file mode 100644 index 000000000..4cbc98dad --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestPage.cs @@ -0,0 +1,18 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + 
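// Usage sketch (illustrative, not part of the diff): composing a manifest with the builder partials
// above. The Func<string> ID-generator constructor is taken from the verifier's usage; all values
// here are placeholders.
var manifest = new VerdictManifestBuilder(() => "vm-0001")
    .WithTenant("tenant-a")
    .WithAsset("sha256:asset-digest", "cve-2026-0001")   // WithAsset upper-cases the vulnerability ID
    .WithInputs(
        sbomDigests: new[] { "sha256:sbom-1" },
        vulnFeedSnapshotIds: new[] { "feed-2026-02-01" },
        vexDocumentDigests: new[] { "sha256:vex-1" },
        clockCutoff: DateTimeOffset.Parse("2026-02-01T00:00:00Z"))
    .WithResult(VexStatus.NotAffected, confidence: 0.9, explanations: Array.Empty<VerdictExplanation>())
    .WithPolicy("sha256:policy-hash", "lattice-v1")
    .Build();                                            // Build() validates and fills ManifestDigest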
+/// +/// Paginated result for manifest list queries. +/// +public sealed record VerdictManifestPage +{ + /// Manifests in this page. + public required ImmutableArray Manifests { get; init; } + + /// Token for retrieving the next page, or null if no more pages. + public string? NextPageToken { get; init; } + + /// Total count if available. + public int? TotalCount { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestSerializer.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestSerializer.cs new file mode 100644 index 000000000..d2d5ef40c --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictManifestSerializer.cs @@ -0,0 +1,57 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// Serialization helper for canonical JSON output. +/// +public static class VerdictManifestSerializer +{ + private static readonly JsonSerializerOptions _options = new() + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, + }; + + /// + /// Serialize manifest to deterministic JSON (stable naming policy, no indentation). + /// + public static string Serialize(VerdictManifest manifest) + { + ArgumentNullException.ThrowIfNull(manifest); + return JsonSerializer.Serialize(manifest, _options); + } + + /// + /// Deserialize from JSON. + /// + public static VerdictManifest? Deserialize(string json) + { + if (string.IsNullOrWhiteSpace(json)) + { + return null; + } + + return JsonSerializer.Deserialize(json, _options); + } + + /// + /// Compute SHA256 digest of the canonical JSON representation. 
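// Determinism sketch (illustrative, not part of the diff): Serialize uses a fixed snake_case policy
// with no indentation, so round-tripping a manifest and re-hashing it should reproduce the recorded
// ManifestDigest. Deserialize is assumed to return VerdictManifest?; 'manifest' is assumed to come
// from VerdictManifestBuilder.Build().
var roundTripped = VerdictManifestSerializer.Deserialize(VerdictManifestSerializer.Serialize(manifest));
var recomputed = VerdictManifestSerializer.ComputeDigest(roundTripped!);
if (!string.Equals(recomputed, manifest.ManifestDigest, StringComparison.Ordinal))
{
    throw new InvalidOperationException("Canonical digest drifted between serialization round-trips.");
}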
+ /// + public static string ComputeDigest(VerdictManifest manifest) + { + ArgumentNullException.ThrowIfNull(manifest); + + // Create a copy without the digest field for hashing + var forHashing = manifest with { ManifestDigest = string.Empty, SignatureBase64 = null, RekorLogId = null }; + var json = Serialize(forHashing); + var bytes = Encoding.UTF8.GetBytes(json); + var hash = SHA256.HashData(bytes); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Compare.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Compare.cs new file mode 100644 index 000000000..4521c6a1b --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Compare.cs @@ -0,0 +1,56 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictReplayVerifier +{ + private static ImmutableArray CompareManifests(VerdictManifest original, VerdictManifest replayed) + { + var diffs = new List(); + + if (original.Result.Status != replayed.Result.Status) + { + diffs.Add($"Status: {original.Result.Status} vs {replayed.Result.Status}"); + } + + if (Math.Abs(original.Result.Confidence - replayed.Result.Confidence) > 0.0001) + { + diffs.Add($"Confidence: {original.Result.Confidence:F4} vs {replayed.Result.Confidence:F4}"); + } + + if (original.Result.HasConflicts != replayed.Result.HasConflicts) + { + diffs.Add($"HasConflicts: {original.Result.HasConflicts} vs {replayed.Result.HasConflicts}"); + } + + if (original.Result.Explanations.Length != replayed.Result.Explanations.Length) + { + diffs.Add($"Explanations count: {original.Result.Explanations.Length} vs {replayed.Result.Explanations.Length}"); + } + else + { + for (var i = 0; i < original.Result.Explanations.Length; i++) + { + var origExp = original.Result.Explanations[i]; + var repExp = replayed.Result.Explanations[i]; + + if (origExp.SourceId != repExp.SourceId) + { + diffs.Add($"Explanation[{i}].SourceId: {origExp.SourceId} vs {repExp.SourceId}"); + } + + if (Math.Abs(origExp.ClaimScore - repExp.ClaimScore) > 0.0001) + { + diffs.Add($"Explanation[{i}].ClaimScore: {origExp.ClaimScore:F4} vs {repExp.ClaimScore:F4}"); + } + } + } + + if (original.ManifestDigest != replayed.ManifestDigest) + { + diffs.Add($"ManifestDigest: {original.ManifestDigest} vs {replayed.ManifestDigest}"); + } + + return diffs.ToImmutableArray(); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Verify.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Verify.cs new file mode 100644 index 000000000..00d6867d0 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.Verify.cs @@ -0,0 +1,90 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +public sealed partial class VerdictReplayVerifier +{ + public Task VerifyAsync(string manifestId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(manifestId); + + return Task.FromException(new InvalidOperationException( + "Verdict replay requires a full manifest or tenant context; use VerifyAsync(VerdictManifest) instead.")); + } + + public async Task VerifyAsync( + VerdictManifest manifest, + CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(manifest); + + var stopwatch = 
System.Diagnostics.Stopwatch.StartNew(); + + try + { + var signatureValid = true; + if (!string.IsNullOrWhiteSpace(manifest.SignatureBase64)) + { + var sigResult = await _signer.VerifyAsync(manifest, ct).ConfigureAwait(false); + signatureValid = sigResult.Valid; + if (!signatureValid) + { + return new ReplayVerificationResult + { + Success = false, + OriginalManifest = manifest, + SignatureValid = false, + Error = $"Signature verification failed: {sigResult.Error}", + ReplayDuration = stopwatch.Elapsed, + }; + } + } + + var replayedResult = await _evaluator.EvaluateAsync( + manifest.Tenant, + manifest.AssetDigest, + manifest.VulnerabilityId, + manifest.Inputs, + manifest.PolicyHash, + manifest.LatticeVersion, + ct) + .ConfigureAwait(false); + + var replayedManifest = new VerdictManifestBuilder(() => manifest.ManifestId) + .WithTenant(manifest.Tenant) + .WithAsset(manifest.AssetDigest, manifest.VulnerabilityId) + .WithInputs(manifest.Inputs) + .WithResult(replayedResult) + .WithPolicy(manifest.PolicyHash, manifest.LatticeVersion) + .WithClock(manifest.Inputs.ClockCutoff) + .Build(); + + var differences = CompareManifests(manifest, replayedManifest); + var success = differences.Length == 0; + + stopwatch.Stop(); + + return new ReplayVerificationResult + { + Success = success, + OriginalManifest = manifest, + ReplayedManifest = replayedManifest, + Differences = differences, + SignatureValid = signatureValid, + Error = success ? null : "Replay produced different results", + ReplayDuration = stopwatch.Elapsed, + }; + } + catch (Exception ex) + { + stopwatch.Stop(); + return new ReplayVerificationResult + { + Success = false, + OriginalManifest = manifest, + Error = $"Replay failed: {ex.Message}", + ReplayDuration = stopwatch.Elapsed, + }; + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.cs index 36a17777e..37656a07d 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictReplayVerifier.cs @@ -1,86 +1,9 @@ -using System.Collections.Immutable; - namespace StellaOps.Authority.Core.Verdicts; -/// -/// Result of replay verification. -/// -public sealed record ReplayVerificationResult -{ - /// True if replay produced identical results. - public required bool Success { get; init; } - - /// The original manifest being verified. - public required VerdictManifest OriginalManifest { get; init; } - - /// The manifest produced by replay (if successful). - public VerdictManifest? ReplayedManifest { get; init; } - - /// List of differences between original and replayed manifests. - public ImmutableArray? Differences { get; init; } - - /// True if signature verification passed. - public bool SignatureValid { get; init; } - - /// Error message if replay failed. - public string? Error { get; init; } - - /// Duration of the replay operation. - public TimeSpan? ReplayDuration { get; init; } -} - -/// -/// Interface for replaying verdicts to verify determinism. -/// -public interface IVerdictReplayVerifier -{ - /// - /// Verify that a verdict can be replayed to produce identical results. - /// - /// Manifest ID to verify. - /// Cancellation token. - /// Verification result with differences if any. - Task VerifyAsync(string manifestId, CancellationToken ct = default); - - /// - /// Verify that a verdict can be replayed to produce identical results. 
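// Wiring sketch (illustrative, not part of the diff): replaying a stored manifest end to end. The
// constructor order (store, signer, evaluator) is inferred from the null checks in the constructor
// body and may differ; 'signer' is any IVerdictManifestSigner, FixedVerdictEvaluator is the stub
// sketched earlier, and Differences is assumed to hold strings.
var verifier = new VerdictReplayVerifier(
    new InMemoryVerdictManifestStore(),
    signer,
    new FixedVerdictEvaluator(manifest.Result));

var verification = await verifier.VerifyAsync(manifest);
Console.WriteLine(verification.Success
    ? $"Replay matched in {verification.ReplayDuration}"
    : $"Replay diverged: {string.Join("; ", verification.Differences ?? ImmutableArray<string>.Empty)}");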
- /// - /// Manifest to verify. - /// Cancellation token. - /// Verification result with differences if any. - Task VerifyAsync(VerdictManifest manifest, CancellationToken ct = default); -} - -/// -/// Provides verdict evaluation capability for replay verification. -/// -public interface IVerdictEvaluator -{ - /// - /// Evaluate a verdict using the specified inputs and policy context. - /// - /// Tenant identifier. - /// Asset being evaluated. - /// Vulnerability being evaluated. - /// Pinned inputs for evaluation. - /// Policy hash to use. - /// Lattice version to use. - /// Cancellation token. - /// Verdict result. - Task EvaluateAsync( - string tenant, - string assetDigest, - string vulnerabilityId, - VerdictInputs inputs, - string policyHash, - string latticeVersion, - CancellationToken ct = default); -} - /// /// Default implementation of verdict replay verifier. /// -public sealed class VerdictReplayVerifier : IVerdictReplayVerifier +public sealed partial class VerdictReplayVerifier : IVerdictReplayVerifier { private readonly IVerdictManifestStore _store; private readonly IVerdictManifestSigner _signer; @@ -95,140 +18,4 @@ public sealed class VerdictReplayVerifier : IVerdictReplayVerifier _signer = signer ?? throw new ArgumentNullException(nameof(signer)); _evaluator = evaluator ?? throw new ArgumentNullException(nameof(evaluator)); } - - public async Task VerifyAsync(string manifestId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(manifestId); - - throw new InvalidOperationException( - "Verdict replay requires a full manifest or tenant context; use VerifyAsync(VerdictManifest) instead."); - } - - public async Task VerifyAsync(VerdictManifest manifest, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(manifest); - - var stopwatch = System.Diagnostics.Stopwatch.StartNew(); - - try - { - // Verify signature first if present - var signatureValid = true; - if (!string.IsNullOrWhiteSpace(manifest.SignatureBase64)) - { - var sigResult = await _signer.VerifyAsync(manifest, ct).ConfigureAwait(false); - signatureValid = sigResult.Valid; - if (!signatureValid) - { - return new ReplayVerificationResult - { - Success = false, - OriginalManifest = manifest, - SignatureValid = false, - Error = $"Signature verification failed: {sigResult.Error}", - ReplayDuration = stopwatch.Elapsed, - }; - } - } - - // Re-evaluate using pinned inputs - var replayedResult = await _evaluator.EvaluateAsync( - manifest.Tenant, - manifest.AssetDigest, - manifest.VulnerabilityId, - manifest.Inputs, - manifest.PolicyHash, - manifest.LatticeVersion, - ct).ConfigureAwait(false); - - // Build replayed manifest - var replayedManifest = new VerdictManifestBuilder(() => manifest.ManifestId) - .WithTenant(manifest.Tenant) - .WithAsset(manifest.AssetDigest, manifest.VulnerabilityId) - .WithInputs(manifest.Inputs) - .WithResult(replayedResult) - .WithPolicy(manifest.PolicyHash, manifest.LatticeVersion) - .WithClock(manifest.Inputs.ClockCutoff) - .Build(); - - // Compare results - var differences = CompareManifests(manifest, replayedManifest); - var success = differences.Length == 0; - - stopwatch.Stop(); - - return new ReplayVerificationResult - { - Success = success, - OriginalManifest = manifest, - ReplayedManifest = replayedManifest, - Differences = differences, - SignatureValid = signatureValid, - Error = success ? 
null : "Replay produced different results", - ReplayDuration = stopwatch.Elapsed, - }; - } - catch (Exception ex) - { - stopwatch.Stop(); - return new ReplayVerificationResult - { - Success = false, - OriginalManifest = manifest, - Error = $"Replay failed: {ex.Message}", - ReplayDuration = stopwatch.Elapsed, - }; - } - } - - private static ImmutableArray CompareManifests(VerdictManifest original, VerdictManifest replayed) - { - var diffs = new List(); - - if (original.Result.Status != replayed.Result.Status) - { - diffs.Add($"Status: {original.Result.Status} vs {replayed.Result.Status}"); - } - - if (Math.Abs(original.Result.Confidence - replayed.Result.Confidence) > 0.0001) - { - diffs.Add($"Confidence: {original.Result.Confidence:F4} vs {replayed.Result.Confidence:F4}"); - } - - if (original.Result.HasConflicts != replayed.Result.HasConflicts) - { - diffs.Add($"HasConflicts: {original.Result.HasConflicts} vs {replayed.Result.HasConflicts}"); - } - - if (original.Result.Explanations.Length != replayed.Result.Explanations.Length) - { - diffs.Add($"Explanations count: {original.Result.Explanations.Length} vs {replayed.Result.Explanations.Length}"); - } - else - { - for (var i = 0; i < original.Result.Explanations.Length; i++) - { - var origExp = original.Result.Explanations[i]; - var repExp = replayed.Result.Explanations[i]; - - if (origExp.SourceId != repExp.SourceId) - { - diffs.Add($"Explanation[{i}].SourceId: {origExp.SourceId} vs {repExp.SourceId}"); - } - - if (Math.Abs(origExp.ClaimScore - repExp.ClaimScore) > 0.0001) - { - diffs.Add($"Explanation[{i}].ClaimScore: {origExp.ClaimScore:F4} vs {repExp.ClaimScore:F4}"); - } - } - } - - // Compare manifest digest (computed from result) - if (original.ManifestDigest != replayed.ManifestDigest) - { - diffs.Add($"ManifestDigest: {original.ManifestDigest} vs {replayed.ManifestDigest}"); - } - - return diffs.ToImmutableArray(); - } } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictResult.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictResult.cs new file mode 100644 index 000000000..63ecf431b --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VerdictResult.cs @@ -0,0 +1,27 @@ +using System.Collections.Immutable; + +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// The computed verdict result with confidence and explanations. +/// +public sealed record VerdictResult +{ + /// Final VEX status determination. + public required VexStatus Status { get; init; } + + /// Confidence score [0, 1]. + public required double Confidence { get; init; } + + /// Detailed explanations from contributing VEX sources. + public required ImmutableArray Explanations { get; init; } + + /// References to supporting evidence. + public required ImmutableArray EvidenceRefs { get; init; } + + /// True if conflicting claims were detected. + public bool HasConflicts { get; init; } + + /// True if reachability proof was required and present. + public bool RequiresReplayProof { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VexStatus.cs b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VexStatus.cs new file mode 100644 index 000000000..0d9f857c1 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Core/Verdicts/VexStatus.cs @@ -0,0 +1,21 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Authority.Core.Verdicts; + +/// +/// VEX verdict status enumeration per OpenVEX specification. 
+/// +public enum VexStatus +{ + [JsonPropertyName("affected")] + Affected, + + [JsonPropertyName("not_affected")] + NotAffected, + + [JsonPropertyName("fixed")] + Fixed, + + [JsonPropertyName("under_investigation")] + UnderInvestigation, +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/FailoverStrategy.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/FailoverStrategy.cs new file mode 100644 index 000000000..e701b6a9b --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/FailoverStrategy.cs @@ -0,0 +1,33 @@ +// ----------------------------------------------------------------------------- +// FailoverStrategy.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Strategy for handling multiple TSA providers. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Strategy for handling multiple TSA providers. +/// +public enum FailoverStrategy +{ + /// + /// Try providers in priority order until one succeeds. + /// + Priority, + + /// + /// Try providers in round-robin fashion. + /// + RoundRobin, + + /// + /// Use the provider with lowest latency from recent requests. + /// + LowestLatency, + + /// + /// Randomly select a provider. + /// + Random +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiFailureInfo.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiFailureInfo.cs new file mode 100644 index 000000000..585c395c9 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiFailureInfo.cs @@ -0,0 +1,54 @@ +// ----------------------------------------------------------------------------- +// PkiFailureInfo.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: RFC 3161 PKIFailureInfo flags. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// RFC 3161 PKIFailureInfo bit flags. +/// +[Flags] +public enum PkiFailureInfo +{ + /// + /// Unrecognized or unsupported algorithm. + /// + BadAlg = 1 << 0, + + /// + /// The request was badly formed. + /// + BadRequest = 1 << 2, + + /// + /// The data format is incorrect. + /// + BadDataFormat = 1 << 5, + + /// + /// The time source is not available. + /// + TimeNotAvailable = 1 << 14, + + /// + /// The requested policy is not supported. + /// + UnacceptedPolicy = 1 << 15, + + /// + /// The requested extension is not supported. + /// + UnacceptedExtension = 1 << 16, + + /// + /// Additional information is required. + /// + AddInfoNotAvailable = 1 << 17, + + /// + /// A system failure occurred. + /// + SystemFailure = 1 << 25 +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiStatus.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiStatus.cs new file mode 100644 index 000000000..ceef83f42 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/PkiStatus.cs @@ -0,0 +1,43 @@ +// ----------------------------------------------------------------------------- +// PkiStatus.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: RFC 3161 PKIStatus values. 
+// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// RFC 3161 PKIStatus values. +/// +public enum PkiStatus +{ + /// + /// The request was granted. + /// + Granted = 0, + + /// + /// The request was granted with modifications. + /// + GrantedWithMods = 1, + + /// + /// The request was rejected. + /// + Rejection = 2, + + /// + /// The request is being processed (async). + /// + Waiting = 3, + + /// + /// A revocation warning was issued. + /// + RevocationWarning = 4, + + /// + /// A revocation notification was issued. + /// + RevocationNotification = 5 +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TASKS.md b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TASKS.md index c43bd70e4..db96f6210 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TASKS.md +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/StellaOps.Authority.Timestamping.Abstractions.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/StellaOps.Authority.Timestamping.Abstractions.md (2026-02-04). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampExtension.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampExtension.cs new file mode 100644 index 000000000..ae8da8a97 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampExtension.cs @@ -0,0 +1,18 @@ +// ----------------------------------------------------------------------------- +// TimeStampExtension.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: RFC 3161 TimeStampReq extension wrapper. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Represents an extension in a timestamp request. +/// +/// The extension OID. +/// Whether the extension is critical. +/// The extension value. +public sealed record TimeStampExtension( + string Oid, + bool Critical, + ReadOnlyMemory Value); diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.Factory.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.Factory.cs new file mode 100644 index 000000000..5b46906af --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.Factory.cs @@ -0,0 +1,73 @@ +// ----------------------------------------------------------------------------- +// TimeStampRequest.Factory.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Factory helpers for RFC 3161 TimeStampReq. 
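For the two status enums above, a small helper makes TSA rejections easier to log; TsaDiagnostics/DescribeFailure are illustrative names only, and wiring this to an actual TimeStampResponse is left out.

using System;
using System.Linq;
using StellaOps.Authority.Timestamping.Abstractions;

internal static class TsaDiagnostics
{
    // Expands the PKIFailureInfo bit flags of a rejected response into a readable string.
    public static string DescribeFailure(PkiStatus status, PkiFailureInfo failInfo)
    {
        if (status is PkiStatus.Granted or PkiStatus.GrantedWithMods)
        {
            return "granted";
        }

        var flags = Enum.GetValues<PkiFailureInfo>()
            .Where(flag => failInfo.HasFlag(flag))
            .Select(flag => flag.ToString());

        return $"{status}: {string.Join(", ", flags)}";
    }
}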
+// ----------------------------------------------------------------------------- +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Abstractions; + +public sealed partial record TimeStampRequest +{ + /// + /// Creates a new TimeStampRequest for the given data. + /// + /// The data to timestamp. + /// The hash algorithm to use. + /// Whether to include a random nonce. + /// A new TimeStampRequest. + public static TimeStampRequest Create( + ReadOnlySpan data, + HashAlgorithmName hashAlgorithm, + bool includeNonce = true) + { + var hash = ComputeHash(data, hashAlgorithm); + return new TimeStampRequest + { + HashAlgorithm = hashAlgorithm, + MessageImprint = hash, + Nonce = includeNonce ? GenerateNonce() : (ReadOnlyMemory?)null + }; + } + + /// + /// Creates a new TimeStampRequest for a pre-computed hash. + /// + /// The pre-computed hash. + /// The hash algorithm used. + /// Whether to include a random nonce. + /// A new TimeStampRequest. + public static TimeStampRequest CreateFromHash( + ReadOnlyMemory hash, + HashAlgorithmName hashAlgorithm, + bool includeNonce = true) + { + return new TimeStampRequest + { + HashAlgorithm = hashAlgorithm, + MessageImprint = hash, + Nonce = includeNonce ? GenerateNonce() : (ReadOnlyMemory?)null + }; + } + + private static byte[] ComputeHash(ReadOnlySpan data, HashAlgorithmName algorithm) + { + using var hasher = algorithm.Name switch + { + "SHA256" => SHA256.Create() as HashAlgorithm, + "SHA384" => SHA384.Create(), + "SHA512" => SHA512.Create(), + "SHA1" => SHA1.Create(), // Legacy support + _ => throw new ArgumentException($"Unsupported hash algorithm: {algorithm.Name}", nameof(algorithm)) + }; + return hasher!.ComputeHash(data.ToArray()); + } + + private static byte[] GenerateNonce() + { + var nonce = new byte[8]; + RandomNumberGenerator.Fill(nonce); + return nonce; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.cs index 77a6ee013..d0b40bb55 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampRequest.cs @@ -13,7 +13,7 @@ namespace StellaOps.Authority.Timestamping.Abstractions; /// /// Represents an RFC 3161 TimeStampReq for requesting a timestamp from a TSA. /// -public sealed record TimeStampRequest +public sealed partial record TimeStampRequest { /// /// Gets the version number (always 1 for RFC 3161). @@ -50,75 +50,4 @@ public sealed record TimeStampRequest /// public IReadOnlyList? Extensions { get; init; } - /// - /// Creates a new TimeStampRequest for the given data. - /// - /// The data to timestamp. - /// The hash algorithm to use. - /// Whether to include a random nonce. - /// A new TimeStampRequest. - public static TimeStampRequest Create( - ReadOnlySpan data, - HashAlgorithmName hashAlgorithm, - bool includeNonce = true) - { - var hash = ComputeHash(data, hashAlgorithm); - return new TimeStampRequest - { - HashAlgorithm = hashAlgorithm, - MessageImprint = hash, - Nonce = includeNonce ? GenerateNonce() : null - }; - } - - /// - /// Creates a new TimeStampRequest for a pre-computed hash. - /// - /// The pre-computed hash. - /// The hash algorithm used. - /// Whether to include a random nonce. - /// A new TimeStampRequest. 
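The factory above can be driven with raw data or with a digest computed elsewhere. A minimal sketch, assuming the elided generic parameters are ReadOnlySpan<byte>/ReadOnlyMemory<byte>:

using System.Security.Cryptography;
using System.Text;
using StellaOps.Authority.Timestamping.Abstractions;

// Hash the payload inside the factory (nonce included by default)...
var payload = Encoding.UTF8.GetBytes("artifact-to-timestamp");
var fromData = TimeStampRequest.Create(payload, HashAlgorithmName.SHA256);

// ...or hand over a digest that was already computed upstream.
var digest = SHA256.HashData(payload);
var fromHash = TimeStampRequest.CreateFromHash(digest, HashAlgorithmName.SHA256, includeNonce: true);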
- public static TimeStampRequest CreateFromHash( - ReadOnlyMemory hash, - HashAlgorithmName hashAlgorithm, - bool includeNonce = true) - { - return new TimeStampRequest - { - HashAlgorithm = hashAlgorithm, - MessageImprint = hash, - Nonce = includeNonce ? GenerateNonce() : null - }; - } - - private static byte[] ComputeHash(ReadOnlySpan data, HashAlgorithmName algorithm) - { - using var hasher = algorithm.Name switch - { - "SHA256" => SHA256.Create() as HashAlgorithm, - "SHA384" => SHA384.Create(), - "SHA512" => SHA512.Create(), - "SHA1" => SHA1.Create(), // Legacy support - _ => throw new ArgumentException($"Unsupported hash algorithm: {algorithm.Name}", nameof(algorithm)) - }; - return hasher!.ComputeHash(data.ToArray()); - } - - private static byte[] GenerateNonce() - { - var nonce = new byte[8]; - RandomNumberGenerator.Fill(nonce); - return nonce; - } } - -/// -/// Represents an extension in a timestamp request. -/// -/// The extension OID. -/// Whether the extension is critical. -/// The extension value. -public sealed record TimeStampExtension( - string Oid, - bool Critical, - ReadOnlyMemory Value); diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampResponse.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampResponse.cs index 25c531cbf..7c80d1212 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampResponse.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampResponse.cs @@ -70,86 +70,3 @@ public sealed record TimeStampResponse StatusString = statusString }; } - -/// -/// RFC 3161 PKIStatus values. -/// -public enum PkiStatus -{ - /// - /// The request was granted. - /// - Granted = 0, - - /// - /// The request was granted with modifications. - /// - GrantedWithMods = 1, - - /// - /// The request was rejected. - /// - Rejection = 2, - - /// - /// The request is being processed (async). - /// - Waiting = 3, - - /// - /// A revocation warning was issued. - /// - RevocationWarning = 4, - - /// - /// A revocation notification was issued. - /// - RevocationNotification = 5 -} - -/// -/// RFC 3161 PKIFailureInfo bit flags. -/// -[Flags] -public enum PkiFailureInfo -{ - /// - /// Unrecognized or unsupported algorithm. - /// - BadAlg = 1 << 0, - - /// - /// The request was badly formed. - /// - BadRequest = 1 << 2, - - /// - /// The data format is incorrect. - /// - BadDataFormat = 1 << 5, - - /// - /// The time source is not available. - /// - TimeNotAvailable = 1 << 14, - - /// - /// The requested policy is not supported. - /// - UnacceptedPolicy = 1 << 15, - - /// - /// The requested extension is not supported. - /// - UnacceptedExtension = 1 << 16, - - /// - /// Additional information is required. - /// - AddInfoNotAvailable = 1 << 17, - - /// - /// A system failure occurred. - /// - SystemFailure = 1 << 25 -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampToken.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampToken.cs index 06f6569b0..78a373dfb 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampToken.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampToken.cs @@ -52,113 +52,3 @@ public sealed record TimeStampToken } } } - -/// -/// Represents the TSTInfo structure from a TimeStampToken. -/// -public sealed record TstInfo -{ - /// - /// Gets the raw DER-encoded TSTInfo. 
- /// - public required ReadOnlyMemory EncodedTstInfo { get; init; } - - /// - /// Gets the version (always 1). - /// - public int Version { get; init; } = 1; - - /// - /// Gets the TSA policy OID. - /// - public required string PolicyOid { get; init; } - - /// - /// Gets the hash algorithm used for the message imprint. - /// - public required HashAlgorithmName HashAlgorithm { get; init; } - - /// - /// Gets the message imprint hash. - /// - public required ReadOnlyMemory MessageImprint { get; init; } - - /// - /// Gets the serial number assigned by the TSA. - /// - public required ReadOnlyMemory SerialNumber { get; init; } - - /// - /// Gets the generation time of the timestamp. - /// - public required DateTimeOffset GenTime { get; init; } - - /// - /// Gets the accuracy of the timestamp (optional). - /// - public TstAccuracy? Accuracy { get; init; } - - /// - /// Gets whether ordering is guaranteed. - /// - public bool Ordering { get; init; } - - /// - /// Gets the nonce if present. - /// - public ReadOnlyMemory? Nonce { get; init; } - - /// - /// Gets the TSA name if present. - /// - public string? TsaName { get; init; } - - /// - /// Gets any extensions. - /// - public IReadOnlyList? Extensions { get; init; } - - /// - /// Gets the effective time range considering accuracy. - /// - public (DateTimeOffset Earliest, DateTimeOffset Latest) GetTimeRange() - { - if (Accuracy is null) - return (GenTime, GenTime); - - var delta = Accuracy.ToTimeSpan(); - return (GenTime - delta, GenTime + delta); - } -} - -/// -/// Represents the accuracy of a timestamp. -/// -public sealed record TstAccuracy -{ - /// - /// Gets the seconds component. - /// - public int? Seconds { get; init; } - - /// - /// Gets the milliseconds component (0-999). - /// - public int? Millis { get; init; } - - /// - /// Gets the microseconds component (0-999). - /// - public int? Micros { get; init; } - - /// - /// Converts to a TimeSpan. - /// - public TimeSpan ToTimeSpan() - { - var totalMicros = (Seconds ?? 0) * 1_000_000L - + (Millis ?? 0) * 1_000L - + (Micros ?? 0); - return TimeSpan.FromMicroseconds(totalMicros); - } -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampVerificationResult.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampVerificationResult.cs index a36348a6d..04af77a80 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampVerificationResult.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TimeStampVerificationResult.cs @@ -97,151 +97,3 @@ public sealed record TimeStampVerificationResult Error = error }; } - -/// -/// Verification status codes. -/// -public enum VerificationStatus -{ - /// - /// The timestamp is valid. - /// - Valid, - - /// - /// The signature is invalid. - /// - SignatureInvalid, - - /// - /// The message imprint doesn't match. - /// - ImprintMismatch, - - /// - /// The nonce doesn't match. - /// - NonceMismatch, - - /// - /// Certificate validation failed. - /// - CertificateError, - - /// - /// The timestamp is structurally invalid. - /// - Invalid -} - -/// -/// Detailed verification error information. -/// -/// The error code. -/// Human-readable error message. -/// Additional details. -public sealed record VerificationError( - VerificationErrorCode Code, - string Message, - string? Details = null); - -/// -/// Verification error codes. -/// -public enum VerificationErrorCode -{ - /// - /// Unknown error. 
- /// - Unknown, - - /// - /// The token is malformed. - /// - MalformedToken, - - /// - /// The CMS signature is invalid. - /// - SignatureInvalid, - - /// - /// The message imprint doesn't match the original data. - /// - MessageImprintMismatch, - - /// - /// The nonce doesn't match the request. - /// - NonceMismatch, - - /// - /// The signer certificate is expired. - /// - CertificateExpired, - - /// - /// The signer certificate is revoked. - /// - CertificateRevoked, - - /// - /// The certificate chain is invalid. - /// - CertificateChainInvalid, - - /// - /// The ESSCertIDv2 binding is invalid. - /// - EssCertIdMismatch, - - /// - /// The signing certificate is missing. - /// - SignerCertificateMissing, - - /// - /// No trust anchor found for the chain. - /// - NoTrustAnchor -} - -/// -/// Non-fatal warning encountered during verification. -/// -/// The warning code. -/// Human-readable warning message. -public sealed record VerificationWarning( - VerificationWarningCode Code, - string Message); - -/// -/// Verification warning codes. -/// -public enum VerificationWarningCode -{ - /// - /// Revocation check was skipped. - /// - RevocationCheckSkipped, - - /// - /// The timestamp accuracy is large. - /// - LargeAccuracy, - - /// - /// The policy OID is not recognized. - /// - UnknownPolicy, - - /// - /// The certificate is nearing expiration. - /// - CertificateNearingExpiration, - - /// - /// Using weak hash algorithm. - /// - WeakHashAlgorithm -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaClientOptions.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaClientOptions.cs index 141825b0e..262aa218f 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaClientOptions.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaClientOptions.cs @@ -52,91 +52,3 @@ public sealed class TsaClientOptions /// public TimeStampVerificationOptions DefaultVerificationOptions { get; set; } = TimeStampVerificationOptions.Default; } - -/// -/// Configuration options for a single TSA provider. -/// -public sealed class TsaProviderOptions -{ - /// - /// Gets or sets the provider name. - /// - public required string Name { get; set; } - - /// - /// Gets or sets the TSA endpoint URL. - /// - public required Uri Url { get; set; } - - /// - /// Gets or sets the priority (lower = higher priority). - /// - public int Priority { get; set; } = 100; - - /// - /// Gets or sets the request timeout. - /// - public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30); - - /// - /// Gets or sets the number of retry attempts. - /// - public int RetryCount { get; set; } = 3; - - /// - /// Gets or sets the base delay for exponential backoff. - /// - public TimeSpan RetryBaseDelay { get; set; } = TimeSpan.FromSeconds(1); - - /// - /// Gets or sets the policy OID to request (optional). - /// - public string? PolicyOid { get; set; } - - /// - /// Gets or sets client certificate for mutual TLS (optional). - /// - public string? ClientCertificatePath { get; set; } - - /// - /// Gets or sets custom HTTP headers. - /// - public Dictionary Headers { get; set; } = []; - - /// - /// Gets or sets whether this provider is enabled. - /// - public bool Enabled { get; set; } = true; - - /// - /// Gets or sets the TSA certificate for verification (optional). - /// If not set, certificate is extracted from response. - /// - public string? 
TsaCertificatePath { get; set; } -} - -/// -/// Strategy for handling multiple TSA providers. -/// -public enum FailoverStrategy -{ - /// - /// Try providers in priority order until one succeeds. - /// - Priority, - - /// - /// Try providers in round-robin fashion. - /// - RoundRobin, - - /// - /// Use the provider with lowest latency from recent requests. - /// - LowestLatency, - - /// - /// Randomly select a provider. - /// - Random -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaProviderOptions.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaProviderOptions.cs new file mode 100644 index 000000000..f7edc14a4 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TsaProviderOptions.cs @@ -0,0 +1,69 @@ +// ----------------------------------------------------------------------------- +// TsaProviderOptions.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Configuration options for a single TSA provider. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Configuration options for a single TSA provider. +/// +public sealed class TsaProviderOptions +{ + /// + /// Gets or sets the provider name. + /// + public required string Name { get; set; } + + /// + /// Gets or sets the TSA endpoint URL. + /// + public required Uri Url { get; set; } + + /// + /// Gets or sets the priority (lower = higher priority). + /// + public int Priority { get; set; } = 100; + + /// + /// Gets or sets the request timeout. + /// + public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Gets or sets the number of retry attempts. + /// + public int RetryCount { get; set; } = 3; + + /// + /// Gets or sets the base delay for exponential backoff. + /// + public TimeSpan RetryBaseDelay { get; set; } = TimeSpan.FromSeconds(1); + + /// + /// Gets or sets the policy OID to request (optional). + /// + public string? PolicyOid { get; set; } + + /// + /// Gets or sets client certificate for mutual TLS (optional). + /// + public string? ClientCertificatePath { get; set; } + + /// + /// Gets or sets custom HTTP headers. + /// + public Dictionary Headers { get; set; } = []; + + /// + /// Gets or sets whether this provider is enabled. + /// + public bool Enabled { get; set; } = true; + + /// + /// Gets or sets the TSA certificate for verification (optional). + /// If not set, certificate is extracted from response. + /// + public string? TsaCertificatePath { get; set; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstAccuracy.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstAccuracy.cs new file mode 100644 index 000000000..b3f71849c --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstAccuracy.cs @@ -0,0 +1,39 @@ +// ----------------------------------------------------------------------------- +// TstAccuracy.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Accuracy metadata for timestamp tokens. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Represents the accuracy of a timestamp. 
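A provider entry might be configured roughly as below; the endpoints are placeholders, and the retry schedule implied by RetryCount/RetryBaseDelay (for example exponential backoff) lives in the client, which is outside this hunk. Under FailoverStrategy.Priority the entry with the lower Priority value is tried first.

using System;
using StellaOps.Authority.Timestamping.Abstractions;

var primary = new TsaProviderOptions
{
    Name = "primary-tsa",
    Url = new Uri("https://tsa.example.internal/tsr"), // placeholder endpoint
    Priority = 10,
    Timeout = TimeSpan.FromSeconds(10),
    RetryCount = 2,
    RetryBaseDelay = TimeSpan.FromMilliseconds(500),
};

var fallback = new TsaProviderOptions
{
    Name = "fallback-tsa",
    Url = new Uri("https://tsa-backup.example.internal/tsr"), // placeholder endpoint
    Priority = 100,
};

var strategy = FailoverStrategy.Priority; // primary first, fallback only on failure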
+/// +public sealed record TstAccuracy +{ + /// + /// Gets the seconds component. + /// + public int? Seconds { get; init; } + + /// + /// Gets the milliseconds component (0-999). + /// + public int? Millis { get; init; } + + /// + /// Gets the microseconds component (0-999). + /// + public int? Micros { get; init; } + + /// + /// Converts to a TimeSpan. + /// + public TimeSpan ToTimeSpan() + { + var totalMicros = (Seconds ?? 0) * 1_000_000L + + (Millis ?? 0) * 1_000L + + (Micros ?? 0); + return TimeSpan.FromMicroseconds(totalMicros); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstInfo.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstInfo.cs new file mode 100644 index 000000000..1fe4a1fcf --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/TstInfo.cs @@ -0,0 +1,87 @@ +// ----------------------------------------------------------------------------- +// TstInfo.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Parsed TSTInfo metadata for timestamp tokens. +// ----------------------------------------------------------------------------- +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Represents the TSTInfo structure from a TimeStampToken. +/// +public sealed record TstInfo +{ + /// + /// Gets the raw DER-encoded TSTInfo. + /// + public required ReadOnlyMemory EncodedTstInfo { get; init; } + + /// + /// Gets the version (always 1). + /// + public int Version { get; init; } = 1; + + /// + /// Gets the TSA policy OID. + /// + public required string PolicyOid { get; init; } + + /// + /// Gets the hash algorithm used for the message imprint. + /// + public required HashAlgorithmName HashAlgorithm { get; init; } + + /// + /// Gets the message imprint hash. + /// + public required ReadOnlyMemory MessageImprint { get; init; } + + /// + /// Gets the serial number assigned by the TSA. + /// + public required ReadOnlyMemory SerialNumber { get; init; } + + /// + /// Gets the generation time of the timestamp. + /// + public required DateTimeOffset GenTime { get; init; } + + /// + /// Gets the accuracy of the timestamp (optional). + /// + public TstAccuracy? Accuracy { get; init; } + + /// + /// Gets whether ordering is guaranteed. + /// + public bool Ordering { get; init; } + + /// + /// Gets the nonce if present. + /// + public ReadOnlyMemory? Nonce { get; init; } + + /// + /// Gets the TSA name if present. + /// + public string? TsaName { get; init; } + + /// + /// Gets any extensions. + /// + public IReadOnlyList? Extensions { get; init; } + + /// + /// Gets the effective time range considering accuracy. 
+ /// + public (DateTimeOffset Earliest, DateTimeOffset Latest) GetTimeRange() + { + if (Accuracy is null) + return (GenTime, GenTime); + + var delta = Accuracy.ToTimeSpan(); + return (GenTime - delta, GenTime + delta); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationError.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationError.cs new file mode 100644 index 000000000..11331c94d --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationError.cs @@ -0,0 +1,18 @@ +// ----------------------------------------------------------------------------- +// VerificationError.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Verification error model for timestamp validation. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Detailed verification error information. +/// +/// The error code. +/// Human-readable error message. +/// Additional details. +public sealed record VerificationError( + VerificationErrorCode Code, + string Message, + string? Details = null); diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationErrorCode.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationErrorCode.cs new file mode 100644 index 000000000..0af7b7863 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationErrorCode.cs @@ -0,0 +1,68 @@ +// ----------------------------------------------------------------------------- +// VerificationErrorCode.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Verification error codes for timestamp validation. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Verification error codes. +/// +public enum VerificationErrorCode +{ + /// + /// Unknown error. + /// + Unknown, + + /// + /// The token is malformed. + /// + MalformedToken, + + /// + /// The CMS signature is invalid. + /// + SignatureInvalid, + + /// + /// The message imprint doesn't match the original data. + /// + MessageImprintMismatch, + + /// + /// The nonce doesn't match the request. + /// + NonceMismatch, + + /// + /// The signer certificate is expired. + /// + CertificateExpired, + + /// + /// The signer certificate is revoked. + /// + CertificateRevoked, + + /// + /// The certificate chain is invalid. + /// + CertificateChainInvalid, + + /// + /// The ESSCertIDv2 binding is invalid. + /// + EssCertIdMismatch, + + /// + /// The signing certificate is missing. + /// + SignerCertificateMissing, + + /// + /// No trust anchor found for the chain. 
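A worked example of the accuracy maths above: Seconds = 1, Millis = 500 converts to a 1.5-second TimeSpan, so a token carrying that accuracy has a three-second validity window around genTime. Building a full TstInfo needs all of its required members, so this sketch exercises TstAccuracy directly.

using System;
using StellaOps.Authority.Timestamping.Abstractions;

var accuracy = new TstAccuracy { Seconds = 1, Millis = 500 };
var delta = accuracy.ToTimeSpan();                       // 00:00:01.500

var genTime = DateTimeOffset.Parse("2026-01-19T12:00:00Z");
var earliest = genTime - delta;                          // 2026-01-19T11:59:58.500Z
var latest = genTime + delta;                            // 2026-01-19T12:00:01.500Z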
+ /// + NoTrustAnchor +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationStatus.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationStatus.cs new file mode 100644 index 000000000..316aead3d --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationStatus.cs @@ -0,0 +1,43 @@ +// ----------------------------------------------------------------------------- +// VerificationStatus.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Verification status codes for timestamp validation. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Verification status codes. +/// +public enum VerificationStatus +{ + /// + /// The timestamp is valid. + /// + Valid, + + /// + /// The signature is invalid. + /// + SignatureInvalid, + + /// + /// The message imprint doesn't match. + /// + ImprintMismatch, + + /// + /// The nonce doesn't match. + /// + NonceMismatch, + + /// + /// Certificate validation failed. + /// + CertificateError, + + /// + /// The timestamp is structurally invalid. + /// + Invalid +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarning.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarning.cs new file mode 100644 index 000000000..b4e091ee8 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarning.cs @@ -0,0 +1,16 @@ +// ----------------------------------------------------------------------------- +// VerificationWarning.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Verification warning model for timestamp validation. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Non-fatal warning encountered during verification. +/// +/// The warning code. +/// Human-readable warning message. +public sealed record VerificationWarning( + VerificationWarningCode Code, + string Message); diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarningCode.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarningCode.cs new file mode 100644 index 000000000..e5c0a26f4 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping.Abstractions/VerificationWarningCode.cs @@ -0,0 +1,38 @@ +// ----------------------------------------------------------------------------- +// VerificationWarningCode.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-001 - Core Abstractions & Models +// Description: Verification warning codes for timestamp validation. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping.Abstractions; + +/// +/// Verification warning codes. +/// +public enum VerificationWarningCode +{ + /// + /// Revocation check was skipped. + /// + RevocationCheckSkipped, + + /// + /// The timestamp accuracy is large. + /// + LargeAccuracy, + + /// + /// The policy OID is not recognized. + /// + UnknownPolicy, + + /// + /// The certificate is nearing expiration. 
+ /// + CertificateNearingExpiration, + + /// + /// Using weak hash algorithm. + /// + WeakHashAlgorithm +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Algorithms.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Algorithms.cs new file mode 100644 index 000000000..dfb85f6f8 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Algorithms.cs @@ -0,0 +1,76 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampReqEncoder.Algorithms.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Hash algorithm OID mappings. +// ----------------------------------------------------------------------------- +using System.Formats.Asn1; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampReqEncoder +{ + // OID mappings for hash algorithms + private static readonly Dictionary HashAlgorithmOids = new() + { + ["SHA1"] = "1.3.14.3.2.26", + ["SHA256"] = "2.16.840.1.101.3.4.2.1", + ["SHA384"] = "2.16.840.1.101.3.4.2.2", + ["SHA512"] = "2.16.840.1.101.3.4.2.3", + ["SHA3-256"] = "2.16.840.1.101.3.4.2.8", + ["SHA3-384"] = "2.16.840.1.101.3.4.2.9", + ["SHA3-512"] = "2.16.840.1.101.3.4.2.10" + }; + + private static void WriteAlgorithmIdentifier(AsnWriter writer, HashAlgorithmName algorithm) + { + var algorithmName = algorithm.Name ?? throw new ArgumentException("Hash algorithm name is required"); + + if (!HashAlgorithmOids.TryGetValue(algorithmName, out var oid)) + { + throw new ArgumentException($"Unsupported hash algorithm: {algorithmName}"); + } + + // AlgorithmIdentifier ::= SEQUENCE { + // algorithm OBJECT IDENTIFIER, + // parameters ANY DEFINED BY algorithm OPTIONAL + // } + using (writer.PushSequence()) + { + writer.WriteObjectIdentifier(oid); + // SHA-2 family uses NULL parameters + writer.WriteNull(); + } + } + + /// + /// Gets the OID for a hash algorithm. + /// + /// The hash algorithm. + /// The OID string. + public static string GetHashAlgorithmOid(HashAlgorithmName algorithm) + { + var name = algorithm.Name ?? throw new ArgumentException("Hash algorithm name is required"); + return HashAlgorithmOids.TryGetValue(name, out var oid) + ? oid + : throw new ArgumentException($"Unsupported hash algorithm: {name}"); + } + + /// + /// Gets the hash algorithm name from an OID. + /// + /// The OID string. + /// The hash algorithm name. + public static HashAlgorithmName GetHashAlgorithmFromOid(string oid) + { + foreach (var (name, algOid) in HashAlgorithmOids) + { + if (algOid == oid) + { + return new HashAlgorithmName(name); + } + } + throw new ArgumentException($"Unknown hash algorithm OID: {oid}"); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Extensions.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Extensions.cs new file mode 100644 index 000000000..357088a1d --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.Extensions.cs @@ -0,0 +1,38 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampReqEncoder.Extensions.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Extension encoding helpers. 
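The OID table above round-trips, which makes for a quick sanity check (for example in a unit test):

using System.Security.Cryptography;
using StellaOps.Authority.Timestamping.Asn1;

var oid = TimeStampReqEncoder.GetHashAlgorithmOid(HashAlgorithmName.SHA256);
// oid == "2.16.840.1.101.3.4.2.1"

var algorithm = TimeStampReqEncoder.GetHashAlgorithmFromOid(oid);
// algorithm.Name == "SHA256"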
+// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Formats.Asn1; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampReqEncoder +{ + private static void WriteExtensions(AsnWriter writer, IReadOnlyList extensions) + { + // [0] IMPLICIT Extensions + using (writer.PushSequence(new Asn1Tag(TagClass.ContextSpecific, 0))) + { + foreach (var ext in extensions) + { + // Extension ::= SEQUENCE { + // extnID OBJECT IDENTIFIER, + // critical BOOLEAN DEFAULT FALSE, + // extnValue OCTET STRING + // } + using (writer.PushSequence()) + { + writer.WriteObjectIdentifier(ext.Oid); + if (ext.Critical) + { + writer.WriteBoolean(true); + } + writer.WriteOctetString(ext.Value.Span); + } + } + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.MessageImprint.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.MessageImprint.cs new file mode 100644 index 000000000..cb1221d31 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.MessageImprint.cs @@ -0,0 +1,26 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampReqEncoder.MessageImprint.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Message imprint encoding helpers. +// ----------------------------------------------------------------------------- +using System.Formats.Asn1; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampReqEncoder +{ + private static void WriteMessageImprint(AsnWriter writer, HashAlgorithmName algorithm, ReadOnlySpan hash) + { + // MessageImprint ::= SEQUENCE { + // hashAlgorithm AlgorithmIdentifier, + // hashedMessage OCTET STRING + // } + using (writer.PushSequence()) + { + WriteAlgorithmIdentifier(writer, algorithm); + writer.WriteOctetString(hash); + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.cs index 4a096b4e0..ec682546a 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampReqEncoder.cs @@ -8,27 +8,14 @@ using StellaOps.Authority.Timestamping.Abstractions; using System.Formats.Asn1; -using System.Security.Cryptography; namespace StellaOps.Authority.Timestamping.Asn1; /// /// Encodes RFC 3161 TimeStampReq to DER format. /// -public static class TimeStampReqEncoder +public static partial class TimeStampReqEncoder { - // OID mappings for hash algorithms - private static readonly Dictionary HashAlgorithmOids = new() - { - ["SHA1"] = "1.3.14.3.2.26", - ["SHA256"] = "2.16.840.1.101.3.4.2.1", - ["SHA384"] = "2.16.840.1.101.3.4.2.2", - ["SHA512"] = "2.16.840.1.101.3.4.2.3", - ["SHA3-256"] = "2.16.840.1.101.3.4.2.8", - ["SHA3-384"] = "2.16.840.1.101.3.4.2.9", - ["SHA3-512"] = "2.16.840.1.101.3.4.2.10" - }; - /// /// Encodes a TimeStampRequest to DER format. 
/// @@ -74,93 +61,4 @@ public static class TimeStampReqEncoder return writer.Encode(); } - - private static void WriteMessageImprint(AsnWriter writer, HashAlgorithmName algorithm, ReadOnlySpan hash) - { - // MessageImprint ::= SEQUENCE { - // hashAlgorithm AlgorithmIdentifier, - // hashedMessage OCTET STRING - // } - using (writer.PushSequence()) - { - WriteAlgorithmIdentifier(writer, algorithm); - writer.WriteOctetString(hash); - } - } - - private static void WriteAlgorithmIdentifier(AsnWriter writer, HashAlgorithmName algorithm) - { - var algorithmName = algorithm.Name ?? throw new ArgumentException("Hash algorithm name is required"); - - if (!HashAlgorithmOids.TryGetValue(algorithmName, out var oid)) - { - throw new ArgumentException($"Unsupported hash algorithm: {algorithmName}"); - } - - // AlgorithmIdentifier ::= SEQUENCE { - // algorithm OBJECT IDENTIFIER, - // parameters ANY DEFINED BY algorithm OPTIONAL - // } - using (writer.PushSequence()) - { - writer.WriteObjectIdentifier(oid); - // SHA-2 family uses NULL parameters - writer.WriteNull(); - } - } - - private static void WriteExtensions(AsnWriter writer, IReadOnlyList extensions) - { - // [0] IMPLICIT Extensions - using (writer.PushSequence(new Asn1Tag(TagClass.ContextSpecific, 0))) - { - foreach (var ext in extensions) - { - // Extension ::= SEQUENCE { - // extnID OBJECT IDENTIFIER, - // critical BOOLEAN DEFAULT FALSE, - // extnValue OCTET STRING - // } - using (writer.PushSequence()) - { - writer.WriteObjectIdentifier(ext.Oid); - if (ext.Critical) - { - writer.WriteBoolean(true); - } - writer.WriteOctetString(ext.Value.Span); - } - } - } - } - - /// - /// Gets the OID for a hash algorithm. - /// - /// The hash algorithm. - /// The OID string. - public static string GetHashAlgorithmOid(HashAlgorithmName algorithm) - { - var name = algorithm.Name ?? throw new ArgumentException("Hash algorithm name is required"); - return HashAlgorithmOids.TryGetValue(name, out var oid) - ? oid - : throw new ArgumentException($"Unsupported hash algorithm: {name}"); - } - - /// - /// Gets the hash algorithm name from an OID. - /// - /// The OID string. - /// The hash algorithm name. - public static HashAlgorithmName GetHashAlgorithmFromOid(string oid) - { - foreach (var (name, algOid) in HashAlgorithmOids) - { - if (algOid == oid) - { - return new HashAlgorithmName(name); - } - } - throw new ArgumentException($"Unknown hash algorithm OID: {oid}"); - } } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampRespDecoder.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampRespDecoder.cs index 87b2e7611..e0ebf8ca5 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampRespDecoder.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampRespDecoder.cs @@ -4,13 +4,8 @@ // Task: TSA-002 - ASN.1 Parsing & Generation // Description: ASN.1 DER decoder for RFC 3161 TimeStampResp. // ----------------------------------------------------------------------------- - - using StellaOps.Authority.Timestamping.Abstractions; using System.Formats.Asn1; -using System.Numerics; -using System.Security.Cryptography; -using System.Security.Cryptography.X509Certificates; namespace StellaOps.Authority.Timestamping.Asn1; @@ -83,281 +78,3 @@ public static class TimeStampRespDecoder }; } } - -/// -/// Decodes RFC 3161 TimeStampToken from DER format. 
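On the wire, the encoded request is POSTed as application/timestamp-query and a conforming TSA answers with application/timestamp-reply (RFC 3161, section 3.4). The sketch below assumes Encode/Decode as the public entry-point names on the request encoder and response decoder and uses a placeholder TSA URL; the actual HTTP client for this sprint is not part of this hunk.

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Authority.Timestamping.Abstractions;
using StellaOps.Authority.Timestamping.Asn1;

var payload = Encoding.UTF8.GetBytes("artifact-to-timestamp");
var tsq = TimeStampRequest.Create(payload, HashAlgorithmName.SHA256);
var der = TimeStampReqEncoder.Encode(tsq);              // entry-point name assumed

using var http = new HttpClient();
var body = new ByteArrayContent(der);
body.Headers.ContentType = new MediaTypeHeaderValue("application/timestamp-query");

var reply = await http.PostAsync("https://tsa.example.internal/tsr", body); // placeholder URL
var replyBytes = await reply.Content.ReadAsByteArrayAsync();
var tsr = TimeStampRespDecoder.Decode(replyBytes);      // entry-point name assumed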
-/// -public static class TimeStampTokenDecoder -{ - private const string SignedDataOid = "1.2.840.113549.1.7.2"; - private const string TstInfoOid = "1.2.840.113549.1.9.16.1.4"; - - /// - /// Decodes a TimeStampToken from DER-encoded bytes. - /// - /// The DER-encoded TimeStampToken (ContentInfo). - /// The decoded TimeStampToken. - public static TimeStampToken Decode(ReadOnlyMemory encoded) - { - var reader = new AsnReader(encoded, AsnEncodingRules.DER); - - // ContentInfo ::= SEQUENCE { contentType, content [0] EXPLICIT } - var contentInfo = reader.ReadSequence(); - var contentType = contentInfo.ReadObjectIdentifier(); - - if (contentType != SignedDataOid) - { - throw new CryptographicException($"Expected SignedData OID, got: {contentType}"); - } - - // [0] EXPLICIT SignedData - var signedDataTag = contentInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - var signedData = signedDataTag.ReadSequence(); - - // SignedData version - signedData.ReadInteger(); - - // DigestAlgorithmIdentifiers SET - signedData.ReadSetOf(); - - // EncapsulatedContentInfo (contains TSTInfo) - var encapContent = signedData.ReadSequence(); - var encapContentType = encapContent.ReadObjectIdentifier(); - - if (encapContentType != TstInfoOid) - { - throw new CryptographicException($"Expected TSTInfo OID, got: {encapContentType}"); - } - - // [0] EXPLICIT OCTET STRING containing TSTInfo - var tstInfoWrapper = encapContent.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - var tstInfoBytes = tstInfoWrapper.ReadOctetString(); - var tstInfo = DecodeTstInfo(tstInfoBytes); - - // Extract certificates if present - X509Certificate2? signerCert = null; - List? certs = null; - string? signatureAlgorithmOid = null; - - // [0] IMPLICIT CertificateSet OPTIONAL - if (signedData.HasData) - { - var nextTag = signedData.PeekTag(); - if (nextTag.TagClass == TagClass.ContextSpecific && nextTag.TagValue == 0) - { - var certSet = signedData.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 0, true)); - certs = []; - while (certSet.HasData) - { - var certBytes = certSet.PeekEncodedValue().ToArray(); - certSet.ReadSequence(); // consume - try - { - var cert = X509CertificateLoader.LoadCertificate(certBytes); - certs.Add(cert); - } - catch - { - // Skip invalid certificates - } - } - signerCert = certs.FirstOrDefault(); - } - } - - // Skip CRLs [1] if present, then parse SignerInfos - while (signedData.HasData) - { - var tag = signedData.PeekTag(); - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) - { - signedData.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 1, true)); - continue; - } - - // SignerInfos SET OF SignerInfo - if (tag.TagValue == 17) // SET - { - var signerInfos = signedData.ReadSetOf(); - if (signerInfos.HasData) - { - var signerInfo = signerInfos.ReadSequence(); - signerInfo.ReadInteger(); // version - signerInfo.ReadSequence(); // sid (skip) - var digestAlg = signerInfo.ReadSequence(); - digestAlg.ReadObjectIdentifier(); // skip digest alg - - // Skip signed attributes if present [0] - if (signerInfo.HasData && signerInfo.PeekTag().TagClass == TagClass.ContextSpecific) - { - signerInfo.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 0, true)); - } - - if (signerInfo.HasData) - { - var sigAlg = signerInfo.ReadSequence(); - signatureAlgorithmOid = sigAlg.ReadObjectIdentifier(); - } - } - break; - } - break; - } - - return new TimeStampToken - { - EncodedToken = encoded, - TstInfo = tstInfo, - SignerCertificate = signerCert, - Certificates = certs, - SignatureAlgorithmOid = 
signatureAlgorithmOid - }; - } - - private static TstInfo DecodeTstInfo(byte[] encoded) - { - var reader = new AsnReader(encoded, AsnEncodingRules.DER); - var tstInfo = reader.ReadSequence(); - - // version INTEGER - var version = (int)tstInfo.ReadInteger(); - - // policy TSAPolicyId - var policyOid = tstInfo.ReadObjectIdentifier(); - - // messageImprint MessageImprint - var msgImprint = tstInfo.ReadSequence(); - var algId = msgImprint.ReadSequence(); - var hashOid = algId.ReadObjectIdentifier(); - var hashAlgorithm = TimeStampReqEncoder.GetHashAlgorithmFromOid(hashOid); - var imprint = msgImprint.ReadOctetString(); - - // serialNumber INTEGER - var serialNumber = tstInfo.ReadIntegerBytes().ToArray(); - - // genTime GeneralizedTime - var genTime = tstInfo.ReadGeneralizedTime(); - - TstAccuracy? accuracy = null; - bool ordering = false; - byte[]? nonce = null; - string? tsaName = null; - List? extensions = null; - - // Optional fields - while (tstInfo.HasData) - { - var tag = tstInfo.PeekTag(); - - // accuracy Accuracy OPTIONAL - if (tag.TagValue == 16 && tag.TagClass == TagClass.Universal) // SEQUENCE - { - accuracy = DecodeAccuracy(tstInfo.ReadSequence()); - continue; - } - - // ordering BOOLEAN DEFAULT FALSE - if (tag.TagValue == 1 && tag.TagClass == TagClass.Universal) // BOOLEAN - { - ordering = tstInfo.ReadBoolean(); - continue; - } - - // nonce INTEGER OPTIONAL - if (tag.TagValue == 2 && tag.TagClass == TagClass.Universal) // INTEGER - { - nonce = tstInfo.ReadIntegerBytes().ToArray(); - continue; - } - - // tsa [0] GeneralName OPTIONAL - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) - { - var tsaReader = tstInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - // Simplified: just read as string if it's a directoryName or other - tsaName = "(TSA GeneralName present)"; - continue; - } - - // extensions [1] IMPLICIT Extensions OPTIONAL - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) - { - var extSeq = tstInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 1)); - extensions = []; - while (extSeq.HasData) - { - var ext = extSeq.ReadSequence(); - var extOid = ext.ReadObjectIdentifier(); - var critical = false; - if (ext.HasData && ext.PeekTag().TagValue == 1) // BOOLEAN - { - critical = ext.ReadBoolean(); - } - var extValue = ext.ReadOctetString(); - extensions.Add(new TimeStampExtension(extOid, critical, extValue)); - } - continue; - } - - // Unknown, skip - tstInfo.ReadEncodedValue(); - } - - return new TstInfo - { - EncodedTstInfo = encoded, - Version = version, - PolicyOid = policyOid, - HashAlgorithm = hashAlgorithm, - MessageImprint = imprint, - SerialNumber = serialNumber, - GenTime = genTime, - Accuracy = accuracy, - Ordering = ordering, - Nonce = nonce, - TsaName = tsaName, - Extensions = extensions - }; - } - - private static TstAccuracy DecodeAccuracy(AsnReader reader) - { - int? seconds = null; - int? millis = null; - int? 
micros = null; - - while (reader.HasData) - { - var tag = reader.PeekTag(); - - if (tag.TagValue == 2 && tag.TagClass == TagClass.Universal) // INTEGER (seconds) - { - seconds = (int)reader.ReadInteger(); - continue; - } - - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) // [0] millis - { - var millisReader = reader.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); - millis = (int)millisReader.ReadInteger(); - continue; - } - - if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) // [1] micros - { - var microsReader = reader.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 1)); - micros = (int)microsReader.ReadInteger(); - continue; - } - - reader.ReadEncodedValue(); // skip unknown - } - - return new TstAccuracy - { - Seconds = seconds, - Millis = millis, - Micros = micros - }; - } -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Accuracy.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Accuracy.cs new file mode 100644 index 000000000..640bf9cef --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Accuracy.cs @@ -0,0 +1,54 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampTokenDecoder.Accuracy.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Accuracy field decoding. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Formats.Asn1; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampTokenDecoder +{ + private static TstAccuracy DecodeAccuracy(AsnReader reader) + { + int? seconds = null; + int? millis = null; + int? micros = null; + + while (reader.HasData) + { + var tag = reader.PeekTag(); + + if (tag.TagValue == 2 && tag.TagClass == TagClass.Universal) // INTEGER (seconds) + { + seconds = (int)reader.ReadInteger(); + continue; + } + + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) // [0] millis + { + var millisReader = reader.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + millis = (int)millisReader.ReadInteger(); + continue; + } + + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) // [1] micros + { + var microsReader = reader.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 1)); + micros = (int)microsReader.ReadInteger(); + continue; + } + + reader.ReadEncodedValue(); // skip unknown + } + + return new TstAccuracy + { + Seconds = seconds, + Millis = millis, + Micros = micros + }; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Certificates.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Certificates.cs new file mode 100644 index 000000000..5401fbd20 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.Certificates.cs @@ -0,0 +1,93 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampTokenDecoder.Certificates.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Certificate and signer info decoding helpers. 
+// ----------------------------------------------------------------------------- +using System.Formats.Asn1; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampTokenDecoder +{ + private static List? ReadCertificates( + ref AsnReader signedData, + out X509Certificate2? signerCert) + { + signerCert = null; + List? certs = null; + + // [0] IMPLICIT CertificateSet OPTIONAL + if (signedData.HasData) + { + var nextTag = signedData.PeekTag(); + if (nextTag.TagClass == TagClass.ContextSpecific && nextTag.TagValue == 0) + { + var certSet = signedData.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 0, true)); + certs = []; + while (certSet.HasData) + { + var certBytes = certSet.PeekEncodedValue().ToArray(); + certSet.ReadSequence(); // consume + try + { + var cert = X509CertificateLoader.LoadCertificate(certBytes); + certs.Add(cert); + } + catch + { + // Skip invalid certificates + } + } + signerCert = certs.FirstOrDefault(); + } + } + + return certs; + } + + private static string? ReadSignatureAlgorithmOid(ref AsnReader signedData) + { + // Skip CRLs [1] if present, then parse SignerInfos + while (signedData.HasData) + { + var tag = signedData.PeekTag(); + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) + { + signedData.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 1, true)); + continue; + } + + // SignerInfos SET OF SignerInfo + if (tag.TagValue == 17) // SET + { + var signerInfos = signedData.ReadSetOf(); + if (signerInfos.HasData) + { + var signerInfo = signerInfos.ReadSequence(); + signerInfo.ReadInteger(); // version + signerInfo.ReadSequence(); // sid (skip) + var digestAlg = signerInfo.ReadSequence(); + digestAlg.ReadObjectIdentifier(); // skip digest alg + + // Skip signed attributes if present [0] + if (signerInfo.HasData && signerInfo.PeekTag().TagClass == TagClass.ContextSpecific) + { + signerInfo.ReadSetOf(new Asn1Tag(TagClass.ContextSpecific, 0, true)); + } + + if (signerInfo.HasData) + { + var sigAlg = signerInfo.ReadSequence(); + return sigAlg.ReadObjectIdentifier(); + } + } + break; + } + break; + } + + return null; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.SignedData.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.SignedData.cs new file mode 100644 index 000000000..212407d09 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.SignedData.cs @@ -0,0 +1,44 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampTokenDecoder.SignedData.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: SignedData and TSTInfo extraction helpers. 
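The helper above extracts the signer certificate from the CMS CertificateSet via X509CertificateLoader (introduced in .NET 9 as the replacement for the X509Certificate2(byte[]) constructor). How that certificate is then validated belongs to the verification layer, which is not in this hunk; a generic, offline-friendly sketch against a pinned trust anchor could look like this:

using System.Security.Cryptography.X509Certificates;

internal static class TsaChainChecks
{
    // Builds the signer's chain against an explicit anchor without contacting revocation endpoints.
    public static bool ChainsToAnchor(X509Certificate2 signerCert, X509Certificate2 trustAnchor)
    {
        using var chain = new X509Chain();
        chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
        chain.ChainPolicy.CustomTrustStore.Add(trustAnchor);
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
        return chain.Build(signerCert);
    }
}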
+// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Formats.Asn1; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampTokenDecoder +{ + private static AsnReader ReadSignedData(AsnReader contentInfo) + { + // [0] EXPLICIT SignedData + var signedDataTag = contentInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + var signedData = signedDataTag.ReadSequence(); + + // SignedData version and digest algorithms + signedData.ReadInteger(); + signedData.ReadSetOf(); + + return signedData; + } + + private static TstInfo ReadTstInfo(ref AsnReader signedData) + { + // EncapsulatedContentInfo (contains TSTInfo) + var encapContent = signedData.ReadSequence(); + var encapContentType = encapContent.ReadObjectIdentifier(); + + if (encapContentType != TstInfoOid) + { + throw new CryptographicException($"Expected TSTInfo OID, got: {encapContentType}"); + } + + // [0] EXPLICIT OCTET STRING containing TSTInfo + var tstInfoWrapper = encapContent.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + var tstInfoBytes = tstInfoWrapper.ReadOctetString(); + return DecodeTstInfo(tstInfoBytes); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.OptionalFields.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.OptionalFields.cs new file mode 100644 index 000000000..43755f409 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.OptionalFields.cs @@ -0,0 +1,83 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampTokenDecoder.TstInfo.OptionalFields.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: Optional TSTInfo field parsing. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Formats.Asn1; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampTokenDecoder +{ + private static (TstAccuracy? Accuracy, bool Ordering, byte[]? Nonce, string? TsaName, List? Extensions) + ReadOptionalFields(ref AsnReader tstInfo) + { + TstAccuracy? accuracy = null; + var ordering = false; + byte[]? nonce = null; + string? tsaName = null; + List? extensions = null; + + while (tstInfo.HasData) + { + var tag = tstInfo.PeekTag(); + + // accuracy Accuracy OPTIONAL + if (tag.TagValue == 16 && tag.TagClass == TagClass.Universal) // SEQUENCE + { + accuracy = DecodeAccuracy(tstInfo.ReadSequence()); + continue; + } + + // ordering BOOLEAN DEFAULT FALSE + if (tag.TagValue == 1 && tag.TagClass == TagClass.Universal) // BOOLEAN + { + ordering = tstInfo.ReadBoolean(); + continue; + } + + // nonce INTEGER OPTIONAL + if (tag.TagValue == 2 && tag.TagClass == TagClass.Universal) // INTEGER + { + nonce = tstInfo.ReadIntegerBytes().ToArray(); + continue; + } + + // tsa [0] GeneralName OPTIONAL + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 0) + { + _ = tstInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + // Simplified: just record that a TSA GeneralName is present. 
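After decoding, the caller still has to tie the token back to its request. A sketch of that cross-check, assuming the request's MessageImprint/Nonce are byte memories like the TstInfo fields; note the decoder returns the nonce as raw DER INTEGER content octets, so a leading sign-padding byte may need to be normalised before comparing.

using System.Security.Cryptography;
using StellaOps.Authority.Timestamping.Abstractions;

internal static class TokenCrossChecks
{
    // Constant-time comparison of the decoded TSTInfo against the original request.
    public static bool ImprintAndNonceMatch(TimeStampRequest request, TstInfo tstInfo)
    {
        var imprintOk = CryptographicOperations.FixedTimeEquals(
            request.MessageImprint.Span, tstInfo.MessageImprint.Span);

        // Only enforce the nonce when the request actually carried one.
        var nonceOk = request.Nonce is not { } nonce
            || (tstInfo.Nonce is { } echoed
                && CryptographicOperations.FixedTimeEquals(nonce.Span, echoed.Span));

        return imprintOk && nonceOk;
    }
}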
+ tsaName = "(TSA GeneralName present)"; + continue; + } + + // extensions [1] IMPLICIT Extensions OPTIONAL + if (tag.TagClass == TagClass.ContextSpecific && tag.TagValue == 1) + { + var extSeq = tstInfo.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 1)); + extensions = []; + while (extSeq.HasData) + { + var ext = extSeq.ReadSequence(); + var extOid = ext.ReadObjectIdentifier(); + var critical = false; + if (ext.HasData && ext.PeekTag().TagValue == 1) // BOOLEAN + { + critical = ext.ReadBoolean(); + } + var extValue = ext.ReadOctetString(); + extensions.Add(new TimeStampExtension(extOid, critical, extValue)); + } + continue; + } + + // Unknown, skip + tstInfo.ReadEncodedValue(); + } + + return (accuracy, ordering, nonce, tsaName, extensions); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.cs new file mode 100644 index 000000000..9dfb8d96f --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.TstInfo.cs @@ -0,0 +1,63 @@ +// ----------------------------------------------------------------------------- +// Asn1/TimeStampTokenDecoder.TstInfo.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-002 - ASN.1 Parsing & Generation +// Description: TSTInfo decoding helpers. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Formats.Asn1; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Asn1; + +public static partial class TimeStampTokenDecoder +{ + private static TstInfo DecodeTstInfo(byte[] encoded) + { + var reader = new AsnReader(encoded, AsnEncodingRules.DER); + var tstInfo = reader.ReadSequence(); + + // version INTEGER + var version = (int)tstInfo.ReadInteger(); + + // policy TSAPolicyId + var policyOid = tstInfo.ReadObjectIdentifier(); + + // messageImprint MessageImprint + var (hashAlgorithm, imprint) = ReadMessageImprint(tstInfo); + + // serialNumber INTEGER + var serialNumber = tstInfo.ReadIntegerBytes().ToArray(); + + // genTime GeneralizedTime + var genTime = tstInfo.ReadGeneralizedTime(); + + var (accuracy, ordering, nonce, tsaName, extensions) = ReadOptionalFields(ref tstInfo); + + return new TstInfo + { + EncodedTstInfo = encoded, + Version = version, + PolicyOid = policyOid, + HashAlgorithm = hashAlgorithm, + MessageImprint = imprint, + SerialNumber = serialNumber, + GenTime = genTime, + Accuracy = accuracy, + Ordering = ordering, + Nonce = nonce, + TsaName = tsaName, + Extensions = extensions + }; + } + + private static (HashAlgorithmName HashAlgorithm, byte[] Imprint) ReadMessageImprint(AsnReader tstInfo) + { + var msgImprint = tstInfo.ReadSequence(); + var algId = msgImprint.ReadSequence(); + var hashOid = algId.ReadObjectIdentifier(); + var hashAlgorithm = TimeStampReqEncoder.GetHashAlgorithmFromOid(hashOid); + var imprint = msgImprint.ReadOctetString(); + return (hashAlgorithm, imprint); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.cs new file mode 100644 index 000000000..8dda93c02 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Asn1/TimeStampTokenDecoder.cs @@ -0,0 +1,53 @@ +// 
-----------------------------------------------------------------------------
+// Asn1/TimeStampTokenDecoder.cs
+// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client
+// Task: TSA-002 - ASN.1 Parsing & Generation
+// Description: RFC 3161 TimeStampToken decoder entry point.
+// -----------------------------------------------------------------------------
+using StellaOps.Authority.Timestamping.Abstractions;
+using System.Formats.Asn1;
+using System.Security.Cryptography;
+
+namespace StellaOps.Authority.Timestamping.Asn1;
+
+/// <summary>
+/// Decodes RFC 3161 TimeStampToken from DER format.
+/// </summary>
+public static partial class TimeStampTokenDecoder
+{
+    private const string SignedDataOid = "1.2.840.113549.1.7.2";
+    private const string TstInfoOid = "1.2.840.113549.1.9.16.1.4";
+
+    /// <summary>
+    /// Decodes a TimeStampToken from DER-encoded bytes.
+    /// </summary>
+    /// <param name="encoded">The DER-encoded TimeStampToken (ContentInfo).</param>
+    /// <returns>The decoded TimeStampToken.</returns>
+    public static TimeStampToken Decode(ReadOnlyMemory<byte> encoded)
+    {
+        var reader = new AsnReader(encoded, AsnEncodingRules.DER);
+
+        // ContentInfo ::= SEQUENCE { contentType, content [0] EXPLICIT }
+        var contentInfo = reader.ReadSequence();
+        var contentType = contentInfo.ReadObjectIdentifier();
+
+        if (contentType != SignedDataOid)
+        {
+            throw new CryptographicException($"Expected SignedData OID, got: {contentType}");
+        }
+
+        var signedData = ReadSignedData(contentInfo);
+        var tstInfo = ReadTstInfo(ref signedData);
+        var certs = ReadCertificates(ref signedData, out var signerCert);
+        var signatureAlgorithmOid = ReadSignatureAlgorithmOid(ref signedData);
+
+        return new TimeStampToken
+        {
+            EncodedToken = encoded,
+            TstInfo = tstInfo,
+            SignerCertificate = signerCert,
+            Certificates = certs,
+            SignatureAlgorithmOid = signatureAlgorithmOid
+        };
+    }
+}
diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.Helpers.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.Helpers.cs
new file mode 100644
index 000000000..5d31b4f78
--- /dev/null
+++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.Helpers.cs
@@ -0,0 +1,33 @@
+// -----------------------------------------------------------------------------
+// InMemoryTsaCacheStore.Helpers.cs
+// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client
+// Task: TSA-005 - Provider Configuration & Management
+// Description: Cache cleanup helpers.
+// -----------------------------------------------------------------------------
+using StellaOps.Authority.Timestamping.Abstractions;
+
+namespace StellaOps.Authority.Timestamping.Caching;
+
+public sealed partial class InMemoryTsaCacheStore
+{
+    private void CleanupExpired(object? 
state) + { + var now = DateTimeOffset.UtcNow; + var expiredKeys = _cache + .Where(kvp => kvp.Value.ExpiresAt <= now) + .Select(kvp => kvp.Key) + .ToList(); + + foreach (var key in expiredKeys) + { + _cache.TryRemove(key, out _); + } + } + + private static string ToKey(ReadOnlyMemory messageImprint) + { + return Convert.ToHexString(messageImprint.Span); + } + + private sealed record CacheEntry(TimeStampToken Token, DateTimeOffset ExpiresAt); +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.cs index 030a36379..9f2299c9f 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/Caching/InMemoryTsaCacheStore.cs @@ -14,7 +14,7 @@ namespace StellaOps.Authority.Timestamping.Caching; /// /// In-memory implementation of . /// -public sealed class InMemoryTsaCacheStore : ITsaCacheStore, IDisposable +public sealed partial class InMemoryTsaCacheStore : ITsaCacheStore, IDisposable { private readonly ConcurrentDictionary _cache = new(); private readonly Timer _cleanupTimer; @@ -97,25 +97,4 @@ public sealed class InMemoryTsaCacheStore : ITsaCacheStore, IDisposable { _cleanupTimer.Dispose(); } - - private void CleanupExpired(object? state) - { - var now = DateTimeOffset.UtcNow; - var expiredKeys = _cache - .Where(kvp => kvp.Value.ExpiresAt <= now) - .Select(kvp => kvp.Key) - .ToList(); - - foreach (var key in expiredKeys) - { - _cache.TryRemove(key, out _); - } - } - - private static string ToKey(ReadOnlyMemory messageImprint) - { - return Convert.ToHexString(messageImprint.Span); - } - - private sealed record CacheEntry(TimeStampToken Token, DateTimeOffset ExpiresAt); } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.GetTimeStamp.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.GetTimeStamp.cs new file mode 100644 index 000000000..2e215390b --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.GetTimeStamp.cs @@ -0,0 +1,58 @@ +// ----------------------------------------------------------------------------- +// HttpTsaClient.GetTimeStamp.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-003 - HTTP TSA Client +// Description: Timestamp request orchestration. +// ----------------------------------------------------------------------------- +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class HttpTsaClient +{ + /// + public async Task GetTimeStampAsync( + TimeStampRequest request, + CancellationToken cancellationToken = default) + { + var orderedProviders = GetOrderedProviders(); + + foreach (var provider in orderedProviders) + { + try + { + var response = await TryGetTimeStampFromProviderAsync( + provider, request, cancellationToken) + .ConfigureAwait(false); + + if (response.IsSuccess) + { + _logger.LogInformation( + "Timestamp obtained from provider {Provider} in {Duration}ms", + provider.Name, + response.RequestDuration?.TotalMilliseconds ?? 0); + return response; + } + + _logger.LogWarning( + "Provider {Provider} returned status {Status}: {StatusString}", + provider.Name, + response.Status, + response.StatusString ?? 
response.FailureInfo?.ToString());
+            }
+            catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or OperationCanceledException)
+            {
+                _logger.LogWarning(
+                    ex,
+                    "Provider {Provider} failed, trying next",
+                    provider.Name);
+            }
+        }
+
+        return TimeStampResponse.Failure(
+            PkiStatus.Rejection,
+            PkiFailureInfo.SystemFailure,
+            "All TSA providers failed");
+    }
+}
diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderOrdering.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderOrdering.cs
new file mode 100644
index 000000000..ae92e6c21
--- /dev/null
+++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderOrdering.cs
@@ -0,0 +1,40 @@
+// -----------------------------------------------------------------------------
+// HttpTsaClient.ProviderOrdering.cs
+// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client
+// Task: TSA-003 - HTTP TSA Client
+// Description: Provider ordering and failover logic.
+// -----------------------------------------------------------------------------
+using StellaOps.Authority.Timestamping.Abstractions;
+
+namespace StellaOps.Authority.Timestamping;
+
+public sealed partial class HttpTsaClient
+{
+    private IEnumerable<TsaProviderOptions> GetOrderedProviders()
+    {
+        var enabled = _options.Providers.Where(p => p.Enabled).ToList();
+
+        return _options.FailoverStrategy switch
+        {
+            FailoverStrategy.Priority => enabled.OrderBy(p => p.Priority),
+            FailoverStrategy.RoundRobin => GetRoundRobinOrder(enabled),
+            FailoverStrategy.Random => enabled.OrderBy(_ => Random.Shared.Next()),
+            FailoverStrategy.LowestLatency => enabled.OrderBy(p => p.Priority), // TODO: track latency
+            _ => enabled.OrderBy(p => p.Priority)
+        };
+    }
+
+    private IEnumerable<TsaProviderOptions> GetRoundRobinOrder(List<TsaProviderOptions> providers)
+    {
+        // Guard against division by zero when no providers are enabled (mirrors TsaProviderRegistry).
+        if (providers.Count == 0)
+        {
+            yield break;
+        }
+
+        var startIndex = Interlocked.Increment(ref _roundRobinIndex) % providers.Count;
+        for (var i = 0; i < providers.Count; i++)
+        {
+            yield return providers[(startIndex + i) % providers.Count];
+        }
+    }
+}
diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderRequest.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderRequest.cs
new file mode 100644
index 000000000..c1f84fa7f
--- /dev/null
+++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.ProviderRequest.cs
@@ -0,0 +1,95 @@
+// -----------------------------------------------------------------------------
+// HttpTsaClient.ProviderRequest.cs
+// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client
+// Task: TSA-003 - HTTP TSA Client
+// Description: Provider request execution.
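+// Note: retries back off exponentially from RetryBaseDelay, doubling per attempt up to RetryCount retries.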
+// ----------------------------------------------------------------------------- +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Timestamping.Abstractions; +using StellaOps.Authority.Timestamping.Asn1; +using System.Diagnostics; +using System.Net.Http.Headers; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class HttpTsaClient +{ + private async Task TryGetTimeStampFromProviderAsync( + TsaProviderOptions provider, + TimeStampRequest request, + CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient($"TSA_{provider.Name}"); + client.Timeout = provider.Timeout; + + var encodedRequest = TimeStampReqEncoder.Encode(request); + var content = new ByteArrayContent(encodedRequest); + content.Headers.ContentType = new MediaTypeHeaderValue(TimeStampQueryContentType); + + foreach (var (key, value) in provider.Headers) + { + content.Headers.TryAddWithoutValidation(key, value); + } + + var stopwatch = Stopwatch.StartNew(); + var lastException = default(Exception); + + for (var attempt = 0; attempt <= provider.RetryCount; attempt++) + { + if (attempt > 0) + { + var delay = TimeSpan.FromTicks( + provider.RetryBaseDelay.Ticks * (1L << (attempt - 1))); + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + + try + { + var httpResponse = await client.PostAsync( + provider.Url, content, cancellationToken) + .ConfigureAwait(false); + + if (!httpResponse.IsSuccessStatusCode) + { + _logger.LogWarning( + "TSA {Provider} returned HTTP {StatusCode}", + provider.Name, + httpResponse.StatusCode); + continue; + } + + var responseContentType = httpResponse.Content.Headers.ContentType?.MediaType; + if (responseContentType != TimeStampReplyContentType) + { + _logger.LogWarning( + "TSA {Provider} returned unexpected content type: {ContentType}", + provider.Name, + responseContentType); + } + + var responseBytes = await httpResponse.Content.ReadAsByteArrayAsync(cancellationToken) + .ConfigureAwait(false); + stopwatch.Stop(); + + var response = TimeStampRespDecoder.Decode(responseBytes); + return response with + { + ProviderName = provider.Name, + RequestDuration = stopwatch.Elapsed + }; + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + lastException = ex; + _logger.LogDebug( + ex, + "Attempt {Attempt}/{MaxAttempts} to {Provider} failed", + attempt + 1, + provider.RetryCount + 1, + provider.Name); + } + } + + throw lastException ?? new InvalidOperationException("No attempts made"); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.Verification.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.Verification.cs new file mode 100644 index 000000000..cd402c7b3 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.Verification.cs @@ -0,0 +1,34 @@ +// ----------------------------------------------------------------------------- +// HttpTsaClient.Verification.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-003 - HTTP TSA Client +// Description: Verification and parsing helpers. 
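+// Note: VerifyAsync delegates to the configured verifier; ParseToken only decodes and performs no verification.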
+// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using StellaOps.Authority.Timestamping.Asn1; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class HttpTsaClient +{ + /// + public async Task VerifyAsync( + TimeStampToken token, + ReadOnlyMemory originalHash, + TimeStampVerificationOptions? options = null, + CancellationToken cancellationToken = default) + { + return await _verifier.VerifyAsync( + token, + originalHash, + options ?? _options.DefaultVerificationOptions, + cancellationToken) + .ConfigureAwait(false); + } + + /// + public TimeStampToken ParseToken(ReadOnlyMemory encodedToken) + { + return TimeStampTokenDecoder.Decode(encodedToken); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.cs index e331d91e4..82d0b7a1a 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/HttpTsaClient.cs @@ -9,16 +9,13 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Authority.Timestamping.Abstractions; -using StellaOps.Authority.Timestamping.Asn1; -using System.Diagnostics; -using System.Net.Http.Headers; namespace StellaOps.Authority.Timestamping; /// /// HTTP(S) client for RFC 3161 TSA endpoints with multi-provider failover. /// -public sealed class HttpTsaClient : ITimeStampAuthorityClient +public sealed partial class HttpTsaClient : ITimeStampAuthorityClient { private const string TimeStampQueryContentType = "application/timestamp-query"; private const string TimeStampReplyContentType = "application/timestamp-reply"; @@ -54,165 +51,4 @@ public sealed class HttpTsaClient : ITimeStampAuthorityClient /// public IReadOnlyList Providers => _providerInfo; - - /// - public async Task GetTimeStampAsync( - TimeStampRequest request, - CancellationToken cancellationToken = default) - { - var orderedProviders = GetOrderedProviders(); - - foreach (var provider in orderedProviders) - { - try - { - var response = await TryGetTimeStampFromProviderAsync( - provider, request, cancellationToken); - - if (response.IsSuccess) - { - _logger.LogInformation( - "Timestamp obtained from provider {Provider} in {Duration}ms", - provider.Name, - response.RequestDuration?.TotalMilliseconds ?? 0); - return response; - } - - _logger.LogWarning( - "Provider {Provider} returned status {Status}: {StatusString}", - provider.Name, - response.Status, - response.StatusString ?? 
response.FailureInfo?.ToString()); - } - catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or OperationCanceledException) - { - _logger.LogWarning( - ex, - "Provider {Provider} failed, trying next", - provider.Name); - } - } - - return TimeStampResponse.Failure( - PkiStatus.Rejection, - PkiFailureInfo.SystemFailure, - "All TSA providers failed"); - } - - private async Task TryGetTimeStampFromProviderAsync( - TsaProviderOptions provider, - TimeStampRequest request, - CancellationToken cancellationToken) - { - var client = _httpClientFactory.CreateClient($"TSA_{provider.Name}"); - client.Timeout = provider.Timeout; - - var encodedRequest = TimeStampReqEncoder.Encode(request); - var content = new ByteArrayContent(encodedRequest); - content.Headers.ContentType = new MediaTypeHeaderValue(TimeStampQueryContentType); - - foreach (var (key, value) in provider.Headers) - { - content.Headers.TryAddWithoutValidation(key, value); - } - - var stopwatch = Stopwatch.StartNew(); - var lastException = default(Exception); - - for (var attempt = 0; attempt <= provider.RetryCount; attempt++) - { - if (attempt > 0) - { - var delay = TimeSpan.FromTicks( - provider.RetryBaseDelay.Ticks * (1L << (attempt - 1))); - await Task.Delay(delay, cancellationToken); - } - - try - { - var httpResponse = await client.PostAsync( - provider.Url, content, cancellationToken); - - if (!httpResponse.IsSuccessStatusCode) - { - _logger.LogWarning( - "TSA {Provider} returned HTTP {StatusCode}", - provider.Name, - httpResponse.StatusCode); - continue; - } - - var responseContentType = httpResponse.Content.Headers.ContentType?.MediaType; - if (responseContentType != TimeStampReplyContentType) - { - _logger.LogWarning( - "TSA {Provider} returned unexpected content type: {ContentType}", - provider.Name, - responseContentType); - } - - var responseBytes = await httpResponse.Content.ReadAsByteArrayAsync(cancellationToken); - stopwatch.Stop(); - - var response = TimeStampRespDecoder.Decode(responseBytes); - return response with - { - ProviderName = provider.Name, - RequestDuration = stopwatch.Elapsed - }; - } - catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) - { - lastException = ex; - _logger.LogDebug( - ex, - "Attempt {Attempt}/{MaxAttempts} to {Provider} failed", - attempt + 1, - provider.RetryCount + 1, - provider.Name); - } - } - - throw lastException ?? new InvalidOperationException("No attempts made"); - } - - /// - public async Task VerifyAsync( - TimeStampToken token, - ReadOnlyMemory originalHash, - TimeStampVerificationOptions? options = null, - CancellationToken cancellationToken = default) - { - return await _verifier.VerifyAsync( - token, originalHash, options ?? 
_options.DefaultVerificationOptions, cancellationToken); - } - - /// - public TimeStampToken ParseToken(ReadOnlyMemory encodedToken) - { - return TimeStampTokenDecoder.Decode(encodedToken); - } - - private IEnumerable GetOrderedProviders() - { - var enabled = _options.Providers.Where(p => p.Enabled).ToList(); - - return _options.FailoverStrategy switch - { - FailoverStrategy.Priority => enabled.OrderBy(p => p.Priority), - FailoverStrategy.RoundRobin => GetRoundRobinOrder(enabled), - FailoverStrategy.Random => enabled.OrderBy(_ => Random.Shared.Next()), - FailoverStrategy.LowestLatency => enabled.OrderBy(p => p.Priority), // TODO: track latency - _ => enabled.OrderBy(p => p.Priority) - }; - } - - private IEnumerable GetRoundRobinOrder(List providers) - { - var startIndex = Interlocked.Increment(ref _roundRobinIndex) % providers.Count; - for (var i = 0; i < providers.Count; i++) - { - yield return providers[(startIndex + i) % providers.Count]; - } - } } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/ITsaProviderRegistry.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/ITsaProviderRegistry.cs index 541940269..b6a83cf34 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/ITsaProviderRegistry.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/ITsaProviderRegistry.cs @@ -52,168 +52,3 @@ public interface ITsaProviderRegistry /// Cancellation token. Task CheckHealthAsync(string providerName, CancellationToken cancellationToken = default); } - -/// -/// State of a TSA provider including health and statistics. -/// -public sealed record TsaProviderState -{ - /// - /// Gets the provider options. - /// - public required TsaProviderOptions Options { get; init; } - - /// - /// Gets the current health status. - /// - public required TsaProviderHealth Health { get; init; } - - /// - /// Gets the usage statistics. - /// - public required TsaProviderStats Stats { get; init; } -} - -/// -/// Health status of a TSA provider. -/// -public sealed record TsaProviderHealth -{ - /// - /// Gets whether the provider is healthy. - /// - public bool IsHealthy { get; init; } - - /// - /// Gets the health status. - /// - public TsaHealthStatus Status { get; init; } - - /// - /// Gets the last error message if unhealthy. - /// - public string? LastError { get; init; } - - /// - /// Gets when the provider was last checked. - /// - public DateTimeOffset? LastCheckedAt { get; init; } - - /// - /// Gets when the provider became unhealthy. - /// - public DateTimeOffset? UnhealthySince { get; init; } - - /// - /// Gets the consecutive failure count. - /// - public int ConsecutiveFailures { get; init; } - - /// - /// Gets when the provider can be retried (if in backoff). - /// - public DateTimeOffset? RetryAfter { get; init; } - - /// - /// Creates a healthy status. - /// - public static TsaProviderHealth Healthy() => new() - { - IsHealthy = true, - Status = TsaHealthStatus.Healthy, - LastCheckedAt = DateTimeOffset.UtcNow - }; - - /// - /// Creates an unhealthy status. - /// - public static TsaProviderHealth Unhealthy(string error, int failures, DateTimeOffset? retryAfter = null) => new() - { - IsHealthy = false, - Status = retryAfter.HasValue ? TsaHealthStatus.InBackoff : TsaHealthStatus.Unhealthy, - LastError = error, - LastCheckedAt = DateTimeOffset.UtcNow, - UnhealthySince = DateTimeOffset.UtcNow, - ConsecutiveFailures = failures, - RetryAfter = retryAfter - }; -} - -/// -/// Health status enum for TSA providers. 
-/// -public enum TsaHealthStatus -{ - /// - /// Provider is unknown (not yet checked). - /// - Unknown, - - /// - /// Provider is healthy. - /// - Healthy, - - /// - /// Provider is degraded (slow but functional). - /// - Degraded, - - /// - /// Provider is unhealthy (failures detected). - /// - Unhealthy, - - /// - /// Provider is in backoff period after failures. - /// - InBackoff -} - -/// -/// Usage statistics for a TSA provider. -/// -public sealed record TsaProviderStats -{ - /// - /// Gets the total number of requests. - /// - public long TotalRequests { get; init; } - - /// - /// Gets the number of successful requests. - /// - public long SuccessCount { get; init; } - - /// - /// Gets the number of failed requests. - /// - public long FailureCount { get; init; } - - /// - /// Gets the success rate as a percentage. - /// - public double SuccessRate => TotalRequests > 0 - ? (double)SuccessCount / TotalRequests * 100 - : 0; - - /// - /// Gets the average latency in milliseconds. - /// - public double AverageLatencyMs { get; init; } - - /// - /// Gets the P95 latency in milliseconds. - /// - public double P95LatencyMs { get; init; } - - /// - /// Gets the last successful request time. - /// - public DateTimeOffset? LastSuccessAt { get; init; } - - /// - /// Gets the last failed request time. - /// - public DateTimeOffset? LastFailureAt { get; init; } -} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TASKS.md b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TASKS.md index 2614b141c..a53a85490 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TASKS.md +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Authority/__Libraries/StellaOps.Authority.Timestamping/StellaOps.Authority.Timestamping.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Authority/__Libraries/StellaOps.Authority.Timestamping/StellaOps.Authority.Timestamping.md. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.CertificateChain.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.CertificateChain.cs new file mode 100644 index 000000000..17b78300a --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.CertificateChain.cs @@ -0,0 +1,88 @@ +// ----------------------------------------------------------------------------- +// TimeStampTokenVerifier.CertificateChain.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-004 - TST Signature Verification +// Description: Certificate chain validation helper. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TimeStampTokenVerifier +{ + private static VerificationError? TryVerifyCertificateChain( + TimeStampVerificationOptions options, + X509Certificate2? signerCert, + List warnings, + out X509Chain? 
chain) + { + chain = null; + + if (!options.VerifyCertificateChain) + { + return null; + } + + if (signerCert is null) + { + return new VerificationError( + VerificationErrorCode.SignerCertificateMissing, + "No signer certificate found in timestamp token"); + } + + chain = new X509Chain(); + chain.ChainPolicy.RevocationMode = options.CheckRevocation + ? options.RevocationMode + : X509RevocationMode.NoCheck; + chain.ChainPolicy.RevocationFlag = options.RevocationFlag; + + if (options.VerificationTime.HasValue) + { + chain.ChainPolicy.VerificationTime = options.VerificationTime.Value.DateTime; + } + + if (options.TrustAnchors is not null) + { + chain.ChainPolicy.CustomTrustStore.AddRange(options.TrustAnchors); + chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + } + + if (options.IntermediateCertificates is not null) + { + chain.ChainPolicy.ExtraStore.AddRange(options.IntermediateCertificates); + } + + if (!chain.Build(signerCert)) + { + var status = chain.ChainStatus.FirstOrDefault(); + var errorCode = status.Status switch + { + X509ChainStatusFlags.NotTimeValid => VerificationErrorCode.CertificateExpired, + X509ChainStatusFlags.Revoked => VerificationErrorCode.CertificateRevoked, + X509ChainStatusFlags.UntrustedRoot => VerificationErrorCode.NoTrustAnchor, + _ => VerificationErrorCode.CertificateChainInvalid + }; + + return new VerificationError( + errorCode, + $"Certificate chain validation failed: {status.StatusInformation}", + string.Join(", ", chain.ChainStatus.Select(s => s.Status))); + } + + if (options.CheckRevocation && + chain.ChainStatus.Any(s => s.Status == X509ChainStatusFlags.RevocationStatusUnknown)) + { + warnings.Add(new VerificationWarning( + VerificationWarningCode.RevocationCheckSkipped, + "Revocation status could not be determined")); + } + + return null; + } + + private static IReadOnlyList? ExtractChainCertificates(X509Chain? chain) + { + return chain?.ChainElements.Select(e => e.Certificate).ToList(); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Signature.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Signature.cs new file mode 100644 index 000000000..7ca667ae3 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Signature.cs @@ -0,0 +1,44 @@ +// ----------------------------------------------------------------------------- +// TimeStampTokenVerifier.Signature.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-004 - TST Signature Verification +// Description: Signature validation helper. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Security.Cryptography; +using System.Security.Cryptography.Pkcs; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TimeStampTokenVerifier +{ + private static VerificationError? TryVerifySignature( + TimeStampToken token, + TimeStampVerificationOptions options, + out X509Certificate2? 
signerCert) + { + var signedCms = new SignedCms(); + signedCms.Decode(token.EncodedToken.ToArray()); + + signerCert = null; + try + { + if (signedCms.SignerInfos.Count > 0) + { + var signerInfo = signedCms.SignerInfos[0]; + signerCert = signerInfo.Certificate; + signerInfo.CheckSignature(verifySignatureOnly: !options.VerifyCertificateChain); + } + } + catch (CryptographicException ex) + { + return new VerificationError( + VerificationErrorCode.SignatureInvalid, + "CMS signature verification failed", + ex.Message); + } + + return null; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Validation.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Validation.cs new file mode 100644 index 000000000..f713a147f --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Validation.cs @@ -0,0 +1,67 @@ +// ----------------------------------------------------------------------------- +// TimeStampTokenVerifier.Validation.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-004 - TST Signature Verification +// Description: Validation helpers for imprint, nonce, and hash strength. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TimeStampTokenVerifier +{ + private static VerificationError? ValidateMessageImprint( + TimeStampToken token, + ReadOnlyMemory originalHash) + { + if (token.TstInfo.MessageImprint.Span.SequenceEqual(originalHash.Span)) + { + return null; + } + + return new VerificationError( + VerificationErrorCode.MessageImprintMismatch, + "The message imprint in the timestamp does not match the original hash"); + } + + private static VerificationError? ValidateNonce( + TimeStampToken token, + TimeStampVerificationOptions options) + { + if (options.ExpectedNonce is not { Length: > 0 }) + { + return null; + } + + if (token.TstInfo.Nonce is null) + { + return new VerificationError( + VerificationErrorCode.NonceMismatch, + "Expected nonce but timestamp has no nonce"); + } + + if (!token.TstInfo.Nonce.Value.Span.SequenceEqual(options.ExpectedNonce.Value.Span)) + { + return new VerificationError( + VerificationErrorCode.NonceMismatch, + "Timestamp nonce does not match expected nonce"); + } + + return null; + } + + private static void AppendWeakHashWarning( + TimeStampToken token, + TimeStampVerificationOptions options, + List warnings) + { + if (options.AllowWeakHashAlgorithms || token.TstInfo.HashAlgorithm.Name != "SHA1") + { + return; + } + + warnings.Add(new VerificationWarning( + VerificationWarningCode.WeakHashAlgorithm, + "Timestamp uses SHA-1 which is considered weak")); + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Warnings.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Warnings.cs new file mode 100644 index 000000000..fcfd96b63 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.Warnings.cs @@ -0,0 +1,68 @@ +// ----------------------------------------------------------------------------- +// TimeStampTokenVerifier.Warnings.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-004 - TST Signature Verification +// Description: Warning enrichment for policy, accuracy, and expiry. 
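+// Note: these checks only append warnings; they never fail verification on their own.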
+// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TimeStampTokenVerifier +{ + private static void AppendPolicyWarnings( + TimeStampToken token, + TimeStampVerificationOptions options, + List warnings) + { + if (options.AcceptablePolicies is not { Count: > 0 }) + { + return; + } + + if (!options.AcceptablePolicies.Contains(token.TstInfo.PolicyOid)) + { + warnings.Add(new VerificationWarning( + VerificationWarningCode.UnknownPolicy, + $"Timestamp policy {token.TstInfo.PolicyOid} is not in acceptable policies list")); + } + } + + private static void AppendAccuracyWarnings( + TimeStampToken token, + TimeStampVerificationOptions options, + List warnings) + { + if (!options.MaxAccuracySeconds.HasValue || token.TstInfo.Accuracy is null) + { + return; + } + + var accuracySpan = token.TstInfo.Accuracy.ToTimeSpan(); + if (accuracySpan.TotalSeconds > options.MaxAccuracySeconds.Value) + { + warnings.Add(new VerificationWarning( + VerificationWarningCode.LargeAccuracy, + $"Timestamp accuracy ({accuracySpan.TotalSeconds}s) exceeds maximum ({options.MaxAccuracySeconds}s)")); + } + } + + private static void AppendExpiryWarnings( + X509Certificate2? signerCert, + List warnings) + { + if (signerCert is null) + { + return; + } + + var daysUntilExpiry = (signerCert.NotAfter - DateTime.UtcNow).TotalDays; + if (daysUntilExpiry < 30 && daysUntilExpiry > 0) + { + warnings.Add(new VerificationWarning( + VerificationWarningCode.CertificateNearingExpiration, + $"TSA certificate expires in {daysUntilExpiry:F0} days")); + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.cs index e81535657..e596598de 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimeStampTokenVerifier.cs @@ -4,20 +4,15 @@ // Task: TSA-004 - TST Signature Verification // Description: Cryptographic verification of TimeStampToken signatures. // ----------------------------------------------------------------------------- - - using Microsoft.Extensions.Logging; using StellaOps.Authority.Timestamping.Abstractions; -using System.Security.Cryptography; -using System.Security.Cryptography.Pkcs; -using System.Security.Cryptography.X509Certificates; namespace StellaOps.Authority.Timestamping; /// /// Verifies TimeStampToken signatures and certificate chains. 
/// -public sealed class TimeStampTokenVerifier +public sealed partial class TimeStampTokenVerifier { private readonly ILogger _logger; @@ -42,174 +37,46 @@ public sealed class TimeStampTokenVerifier try { - // Step 1: Verify message imprint matches - if (!token.TstInfo.MessageImprint.Span.SequenceEqual(originalHash.Span)) + var error = ValidateMessageImprint(token, originalHash); + if (error is not null) { - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - VerificationErrorCode.MessageImprintMismatch, - "The message imprint in the timestamp does not match the original hash"))); + return Task.FromResult(TimeStampVerificationResult.Failure(error)); } - // Step 2: Verify nonce if expected - if (options.ExpectedNonce is { Length: > 0 }) + error = ValidateNonce(token, options); + if (error is not null) { - if (token.TstInfo.Nonce is null) - { - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - VerificationErrorCode.NonceMismatch, - "Expected nonce but timestamp has no nonce"))); - } - - if (!token.TstInfo.Nonce.Value.Span.SequenceEqual(options.ExpectedNonce.Value.Span)) - { - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - VerificationErrorCode.NonceMismatch, - "Timestamp nonce does not match expected nonce"))); - } + return Task.FromResult(TimeStampVerificationResult.Failure(error)); } - // Step 3: Check hash algorithm strength - if (!options.AllowWeakHashAlgorithms && - token.TstInfo.HashAlgorithm.Name == "SHA1") + AppendWeakHashWarning(token, options, warnings); + + error = TryVerifySignature(token, options, out var signerCert); + if (error is not null) { - warnings.Add(new VerificationWarning( - VerificationWarningCode.WeakHashAlgorithm, - "Timestamp uses SHA-1 which is considered weak")); + return Task.FromResult(TimeStampVerificationResult.Failure(error)); } - // Step 4: Verify CMS signature - var signedCms = new SignedCms(); - signedCms.Decode(token.EncodedToken.ToArray()); - - X509Certificate2? signerCert = null; - try + error = TryVerifyCertificateChain(options, signerCert, warnings, out var chain); + if (error is not null) { - // Try to find signer certificate - if (signedCms.SignerInfos.Count > 0) - { - var signerInfo = signedCms.SignerInfos[0]; - signerCert = signerInfo.Certificate; - - // Verify signature - signerInfo.CheckSignature(verifySignatureOnly: !options.VerifyCertificateChain); - } - } - catch (CryptographicException ex) - { - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - VerificationErrorCode.SignatureInvalid, - "CMS signature verification failed", - ex.Message))); + return Task.FromResult(TimeStampVerificationResult.Failure(error)); } - // Step 5: Verify certificate chain if requested - X509Chain? chain = null; - if (options.VerifyCertificateChain && signerCert is not null) - { - chain = new X509Chain(); - chain.ChainPolicy.RevocationMode = options.CheckRevocation - ? options.RevocationMode - : X509RevocationMode.NoCheck; - chain.ChainPolicy.RevocationFlag = options.RevocationFlag; + AppendPolicyWarnings(token, options, warnings); + AppendAccuracyWarnings(token, options, warnings); + AppendExpiryWarnings(signerCert, warnings); - if (options.VerificationTime.HasValue) - { - chain.ChainPolicy.VerificationTime = options.VerificationTime.Value.DateTime; - } + var chainCertificates = ExtractChainCertificates(chain); + var warningList = warnings.Count > 0 ? 
warnings : null; - if (options.TrustAnchors is not null) - { - chain.ChainPolicy.CustomTrustStore.AddRange(options.TrustAnchors); - chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - } - - if (options.IntermediateCertificates is not null) - { - chain.ChainPolicy.ExtraStore.AddRange(options.IntermediateCertificates); - } - - if (!chain.Build(signerCert)) - { - var status = chain.ChainStatus.FirstOrDefault(); - var errorCode = status.Status switch - { - X509ChainStatusFlags.NotTimeValid => VerificationErrorCode.CertificateExpired, - X509ChainStatusFlags.Revoked => VerificationErrorCode.CertificateRevoked, - X509ChainStatusFlags.UntrustedRoot => VerificationErrorCode.NoTrustAnchor, - _ => VerificationErrorCode.CertificateChainInvalid - }; - - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - errorCode, - $"Certificate chain validation failed: {status.StatusInformation}", - string.Join(", ", chain.ChainStatus.Select(s => s.Status))))); - } - - // Check if revocation check was actually performed - if (options.CheckRevocation && - chain.ChainStatus.Any(s => s.Status == X509ChainStatusFlags.RevocationStatusUnknown)) - { - warnings.Add(new VerificationWarning( - VerificationWarningCode.RevocationCheckSkipped, - "Revocation status could not be determined")); - } - } - else if (options.VerifyCertificateChain && signerCert is null) - { - return Task.FromResult(TimeStampVerificationResult.Failure( - new VerificationError( - VerificationErrorCode.SignerCertificateMissing, - "No signer certificate found in timestamp token"))); - } - - // Step 6: Check policy if required - if (options.AcceptablePolicies is { Count: > 0 }) - { - if (!options.AcceptablePolicies.Contains(token.TstInfo.PolicyOid)) - { - warnings.Add(new VerificationWarning( - VerificationWarningCode.UnknownPolicy, - $"Timestamp policy {token.TstInfo.PolicyOid} is not in acceptable policies list")); - } - } - - // Step 7: Check accuracy if required - if (options.MaxAccuracySeconds.HasValue && token.TstInfo.Accuracy is not null) - { - var accuracySpan = token.TstInfo.Accuracy.ToTimeSpan(); - if (accuracySpan.TotalSeconds > options.MaxAccuracySeconds.Value) - { - warnings.Add(new VerificationWarning( - VerificationWarningCode.LargeAccuracy, - $"Timestamp accuracy ({accuracySpan.TotalSeconds}s) exceeds maximum ({options.MaxAccuracySeconds}s)")); - } - } - - // Step 8: Check certificate expiration warning - if (signerCert is not null) - { - var daysUntilExpiry = (signerCert.NotAfter - DateTime.UtcNow).TotalDays; - if (daysUntilExpiry < 30 && daysUntilExpiry > 0) - { - warnings.Add(new VerificationWarning( - VerificationWarningCode.CertificateNearingExpiration, - $"TSA certificate expires in {daysUntilExpiry:F0} days")); - } - } - - // Success return Task.FromResult(TimeStampVerificationResult.Success( token.TstInfo.GenTime, token.TstInfo.GetTimeRange(), token.TstInfo.PolicyOid, signerCert, - chain?.ChainElements.Select(e => e.Certificate).ToList(), - warnings.Count > 0 ? 
warnings : null)); + chainCertificates, + warningList)); } catch (Exception ex) { diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.CommonProviders.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.CommonProviders.cs new file mode 100644 index 000000000..e18fc1261 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.CommonProviders.cs @@ -0,0 +1,43 @@ +// ----------------------------------------------------------------------------- +// TimestampingServiceCollectionExtensions.CommonProviders.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-007 - DI Integration +// Description: Built-in TSA provider presets. +// ----------------------------------------------------------------------------- +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Authority.Timestamping; + +public static partial class TimestampingServiceCollectionExtensions +{ + /// + /// Adds common free TSA providers. + /// + /// The service collection. + /// The service collection for chaining. + public static IServiceCollection AddCommonTsaProviders(this IServiceCollection services) + { + // FreeTSA.org + services.AddTsaProvider("FreeTSA", "https://freetsa.org/tsr", opts => + { + opts.Priority = 100; + opts.Timeout = TimeSpan.FromSeconds(30); + }); + + // Digicert + services.AddTsaProvider("Digicert", "http://timestamp.digicert.com", opts => + { + opts.Priority = 200; + opts.Timeout = TimeSpan.FromSeconds(30); + }); + + // Sectigo + services.AddTsaProvider("Sectigo", "http://timestamp.sectigo.com", opts => + { + opts.Priority = 300; + opts.Timeout = TimeSpan.FromSeconds(30); + }); + + return services; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.Provider.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.Provider.cs new file mode 100644 index 000000000..668d5f03a --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.Provider.cs @@ -0,0 +1,41 @@ +// ----------------------------------------------------------------------------- +// TimestampingServiceCollectionExtensions.Provider.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-007 - DI Integration +// Description: Provider registration helpers. +// ----------------------------------------------------------------------------- +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping; + +public static partial class TimestampingServiceCollectionExtensions +{ + /// + /// Adds a TSA provider to the configuration. + /// + /// The service collection. + /// Provider name. + /// TSA endpoint URL. + /// Additional configuration. + /// The service collection for chaining. + public static IServiceCollection AddTsaProvider( + this IServiceCollection services, + string name, + string url, + Action? 
configure = null) + { + services.Configure(options => + { + var provider = new TsaProviderOptions + { + Name = name, + Url = new Uri(url) + }; + configure?.Invoke(provider); + options.Providers.Add(provider); + }); + + return services; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.cs index 15749ebad..3922e3c06 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TimestampingServiceCollectionExtensions.cs @@ -15,7 +15,7 @@ namespace StellaOps.Authority.Timestamping; /// /// Extension methods for registering timestamping services. /// -public static class TimestampingServiceCollectionExtensions +public static partial class TimestampingServiceCollectionExtensions { /// /// Adds RFC-3161 timestamping services to the service collection. @@ -45,63 +45,4 @@ public static class TimestampingServiceCollectionExtensions return services; } - - /// - /// Adds a TSA provider to the configuration. - /// - /// The service collection. - /// Provider name. - /// TSA endpoint URL. - /// Additional configuration. - /// The service collection for chaining. - public static IServiceCollection AddTsaProvider( - this IServiceCollection services, - string name, - string url, - Action? configure = null) - { - services.Configure(options => - { - var provider = new TsaProviderOptions - { - Name = name, - Url = new Uri(url) - }; - configure?.Invoke(provider); - options.Providers.Add(provider); - }); - - return services; - } - - /// - /// Adds common free TSA providers. - /// - /// The service collection. - /// The service collection for chaining. - public static IServiceCollection AddCommonTsaProviders(this IServiceCollection services) - { - // FreeTSA.org - services.AddTsaProvider("FreeTSA", "https://freetsa.org/tsr", opts => - { - opts.Priority = 100; - opts.Timeout = TimeSpan.FromSeconds(30); - }); - - // Digicert - services.AddTsaProvider("Digicert", "http://timestamp.digicert.com", opts => - { - opts.Priority = 200; - opts.Timeout = TimeSpan.FromSeconds(30); - }); - - // Sectigo - services.AddTsaProvider("Sectigo", "http://timestamp.sectigo.com", opts => - { - opts.Priority = 300; - opts.Timeout = TimeSpan.FromSeconds(30); - }); - - return services; - } } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaHealthStatus.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaHealthStatus.cs new file mode 100644 index 000000000..b0f6e783a --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaHealthStatus.cs @@ -0,0 +1,38 @@ +// ----------------------------------------------------------------------------- +// TsaHealthStatus.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Provider health status flags. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping; + +/// +/// Health status enum for TSA providers. +/// +public enum TsaHealthStatus +{ + /// + /// Provider is unknown (not yet checked). + /// + Unknown, + + /// + /// Provider is healthy. + /// + Healthy, + + /// + /// Provider is degraded (slow but functional). + /// + Degraded, + + /// + /// Provider is unhealthy (failures detected). 
+ /// + Unhealthy, + + /// + /// Provider is in backoff period after failures. + /// + InBackoff +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderHealth.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderHealth.cs new file mode 100644 index 000000000..4503801d7 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderHealth.cs @@ -0,0 +1,72 @@ +// ----------------------------------------------------------------------------- +// TsaProviderHealth.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Provider health snapshot. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping; + +/// +/// Health status of a TSA provider. +/// +public sealed record TsaProviderHealth +{ + /// + /// Gets whether the provider is healthy. + /// + public bool IsHealthy { get; init; } + + /// + /// Gets the health status. + /// + public TsaHealthStatus Status { get; init; } + + /// + /// Gets the last error message if unhealthy. + /// + public string? LastError { get; init; } + + /// + /// Gets when the provider was last checked. + /// + public DateTimeOffset? LastCheckedAt { get; init; } + + /// + /// Gets when the provider became unhealthy. + /// + public DateTimeOffset? UnhealthySince { get; init; } + + /// + /// Gets the consecutive failure count. + /// + public int ConsecutiveFailures { get; init; } + + /// + /// Gets when the provider can be retried (if in backoff). + /// + public DateTimeOffset? RetryAfter { get; init; } + + /// + /// Creates a healthy status. + /// + public static TsaProviderHealth Healthy() => new() + { + IsHealthy = true, + Status = TsaHealthStatus.Healthy, + LastCheckedAt = DateTimeOffset.UtcNow + }; + + /// + /// Creates an unhealthy status. + /// + public static TsaProviderHealth Unhealthy(string error, int failures, DateTimeOffset? retryAfter = null) => new() + { + IsHealthy = false, + Status = retryAfter.HasValue ? TsaHealthStatus.InBackoff : TsaHealthStatus.Unhealthy, + LastError = error, + LastCheckedAt = DateTimeOffset.UtcNow, + UnhealthySince = DateTimeOffset.UtcNow, + ConsecutiveFailures = failures, + RetryAfter = retryAfter + }; +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.HealthCheck.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.HealthCheck.cs new file mode 100644 index 000000000..32da9d6fa --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.HealthCheck.cs @@ -0,0 +1,60 @@ +// ----------------------------------------------------------------------------- +// TsaProviderRegistry.HealthCheck.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Health probe logic for providers. 
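+// Note: the probe sends an HTTP HEAD request; any response (including 4xx) counts as reachable.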
+// ----------------------------------------------------------------------------- +using System.Net.Http; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TsaProviderRegistry +{ + /// + public async Task CheckHealthAsync( + string providerName, + CancellationToken cancellationToken = default) + { + if (!_states.TryGetValue(providerName, out var state)) + { + return new TsaProviderHealth + { + Status = TsaHealthStatus.Unknown, + LastError = "Provider not found" + }; + } + + try + { + var client = _httpClientFactory.CreateClient($"TSA_{providerName}"); + client.Timeout = TimeSpan.FromSeconds(10); + + // Simple connectivity check - just verify the endpoint is reachable + _ = await client.SendAsync( + new HttpRequestMessage(HttpMethod.Head, state.Options.Url), + cancellationToken) + .ConfigureAwait(false); + + // Most TSAs don't support HEAD, so any response (even 4xx) means it's reachable + var health = TsaProviderHealth.Healthy(); + + lock (state) + { + state.Health = health; + } + + return health; + } + catch (Exception ex) + { + var health = TsaProviderHealth.Unhealthy(ex.Message, state.ConsecutiveFailures + 1); + + lock (state) + { + state.Health = health; + } + + return health; + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.ProviderState.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.ProviderState.cs new file mode 100644 index 000000000..9c1bb6968 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.ProviderState.cs @@ -0,0 +1,26 @@ +// ----------------------------------------------------------------------------- +// TsaProviderRegistry.ProviderState.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Internal provider state. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TsaProviderRegistry +{ + private sealed class ProviderState + { + public required TsaProviderOptions Options { get; init; } + public TsaProviderHealth Health { get; set; } = new() { Status = TsaHealthStatus.Unknown }; + public List Latencies { get; init; } = []; + public long TotalRequests { get; set; } + public long SuccessCount { get; set; } + public long FailureCount { get; set; } + public int ConsecutiveFailures { get; set; } + public string? LastError { get; set; } + public DateTimeOffset? LastSuccessAt { get; set; } + public DateTimeOffset? LastFailureAt { get; set; } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Providers.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Providers.cs new file mode 100644 index 000000000..e19182426 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Providers.cs @@ -0,0 +1,74 @@ +// ----------------------------------------------------------------------------- +// TsaProviderRegistry.Providers.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Provider enumeration and ordering. 
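+// Note: providers in backoff are skipped until RetryAfter elapses; ordering follows FailoverStrategy.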
+// -----------------------------------------------------------------------------
+using StellaOps.Authority.Timestamping.Abstractions;
+
+namespace StellaOps.Authority.Timestamping;
+
+public sealed partial class TsaProviderRegistry
+{
+    /// <inheritdoc />
+    public IReadOnlyList<TsaProviderState> GetProviders()
+    {
+        return _states.Values.Select(s => new TsaProviderState
+        {
+            Options = s.Options,
+            Health = s.Health,
+            Stats = ComputeStats(s)
+        }).ToList();
+    }
+
+    /// <inheritdoc />
+    public IEnumerable<TsaProviderOptions> GetOrderedProviders(bool excludeUnhealthy = true)
+    {
+        var providers = _states.Values
+            .Where(s => s.Options.Enabled)
+            .Where(s => !excludeUnhealthy || IsAvailable(s))
+            .ToList();
+
+        return _options.FailoverStrategy switch
+        {
+            FailoverStrategy.Priority => providers.OrderBy(p => p.Options.Priority).Select(p => p.Options),
+            FailoverStrategy.RoundRobin => GetRoundRobinOrder(providers).Select(p => p.Options),
+            FailoverStrategy.LowestLatency => providers.OrderBy(p => GetAverageLatency(p)).Select(p => p.Options),
+            FailoverStrategy.Random => providers.OrderBy(_ => Random.Shared.Next()).Select(p => p.Options),
+            _ => providers.OrderBy(p => p.Options.Priority).Select(p => p.Options)
+        };
+    }
+
+    private bool IsAvailable(ProviderState state)
+    {
+        if (!state.Health.IsHealthy && state.Health.RetryAfter.HasValue)
+        {
+            return DateTimeOffset.UtcNow >= state.Health.RetryAfter.Value;
+        }
+        return state.Health.Status != TsaHealthStatus.Unhealthy || state.ConsecutiveFailures < 5;
+    }
+
+    private double GetAverageLatency(ProviderState state)
+    {
+        lock (state)
+        {
+            return state.Latencies.Count > 0
+                ? state.Latencies.Average()
+                : double.MaxValue;
+        }
+    }
+
+    private IEnumerable<ProviderState> GetRoundRobinOrder(List<ProviderState> providers)
+    {
+        if (providers.Count == 0)
+        {
+            yield break;
+        }
+
+        var startIndex = Interlocked.Increment(ref _roundRobinIndex) % providers.Count;
+        for (var i = 0; i < providers.Count; i++)
+        {
+            yield return providers[(startIndex + i) % providers.Count];
+        }
+    }
+}
diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Reporting.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Reporting.cs
new file mode 100644
index 000000000..692068cc4
--- /dev/null
+++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Reporting.cs
@@ -0,0 +1,86 @@
+// -----------------------------------------------------------------------------
+// TsaProviderRegistry.Reporting.cs
+// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client
+// Task: TSA-005 - Provider Configuration & Management
+// Description: Success/failure reporting and health lookup.
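+// Note: three or more consecutive failures trigger exponential backoff capped at 300 seconds; a success clears it.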
+// ----------------------------------------------------------------------------- +using Microsoft.Extensions.Logging; + +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TsaProviderRegistry +{ + /// + public void ReportSuccess(string providerName, TimeSpan latency) + { + if (!_states.TryGetValue(providerName, out var state)) + { + return; + } + + lock (state) + { + state.TotalRequests++; + state.SuccessCount++; + state.LastSuccessAt = DateTimeOffset.UtcNow; + state.ConsecutiveFailures = 0; + + // Keep last 100 latencies for stats + state.Latencies.Add(latency.TotalMilliseconds); + if (state.Latencies.Count > 100) + { + state.Latencies.RemoveAt(0); + } + + state.Health = TsaProviderHealth.Healthy(); + } + + _logger.LogDebug( + "TSA {Provider} request succeeded in {Latency}ms", + providerName, + latency.TotalMilliseconds); + } + + /// + public void ReportFailure(string providerName, string error) + { + if (!_states.TryGetValue(providerName, out var state)) + { + return; + } + + lock (state) + { + state.TotalRequests++; + state.FailureCount++; + state.LastFailureAt = DateTimeOffset.UtcNow; + state.ConsecutiveFailures++; + state.LastError = error; + + // Calculate backoff based on consecutive failures + var backoffSeconds = Math.Min(300, Math.Pow(2, state.ConsecutiveFailures)); + var retryAfter = state.ConsecutiveFailures >= 3 + ? DateTimeOffset.UtcNow.AddSeconds(backoffSeconds) + : (DateTimeOffset?)null; + + state.Health = TsaProviderHealth.Unhealthy( + error, + state.ConsecutiveFailures, + retryAfter); + } + + _logger.LogWarning( + "TSA {Provider} request failed: {Error} (consecutive failures: {Failures})", + providerName, + error, + state.ConsecutiveFailures); + } + + /// + public TsaProviderHealth GetHealth(string providerName) + { + return _states.TryGetValue(providerName, out var state) + ? state.Health + : new TsaProviderHealth { Status = TsaHealthStatus.Unknown }; + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Stats.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Stats.cs new file mode 100644 index 000000000..2da1c63b4 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.Stats.cs @@ -0,0 +1,32 @@ +// ----------------------------------------------------------------------------- +// TsaProviderRegistry.Stats.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Stats aggregation for providers. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping; + +public sealed partial class TsaProviderRegistry +{ + private static TsaProviderStats ComputeStats(ProviderState state) + { + lock (state) + { + var sortedLatencies = state.Latencies.OrderBy(l => l).ToList(); + var p95Index = (int)(sortedLatencies.Count * 0.95); + + return new TsaProviderStats + { + TotalRequests = state.TotalRequests, + SuccessCount = state.SuccessCount, + FailureCount = state.FailureCount, + AverageLatencyMs = sortedLatencies.Count > 0 ? sortedLatencies.Average() : 0, + P95LatencyMs = sortedLatencies.Count > 0 + ? 
sortedLatencies[Math.Min(p95Index, sortedLatencies.Count - 1)] + : 0, + LastSuccessAt = state.LastSuccessAt, + LastFailureAt = state.LastFailureAt + }; + } + } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.cs index b052192aa..aa233c399 100644 --- a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.cs +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderRegistry.cs @@ -2,10 +2,8 @@ // TsaProviderRegistry.cs // Sprint: SPRINT_20260119_007 RFC-3161 TSA Client // Task: TSA-005 - Provider Configuration & Management -// Description: Implementation of TSA provider registry with health tracking. +// Description: Provider registry with health tracking. // ----------------------------------------------------------------------------- - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Authority.Timestamping.Abstractions; @@ -16,7 +14,7 @@ namespace StellaOps.Authority.Timestamping; /// /// Implementation of with health tracking and failover. /// -public sealed class TsaProviderRegistry : ITsaProviderRegistry +public sealed partial class TsaProviderRegistry : ITsaProviderRegistry { private readonly TsaClientOptions _options; private readonly IHttpClientFactory _httpClientFactory; @@ -52,212 +50,4 @@ public sealed class TsaProviderRegistry : ITsaProviderRegistry }; } } - - /// - public IReadOnlyList GetProviders() - { - return _states.Values.Select(s => new TsaProviderState - { - Options = s.Options, - Health = s.Health, - Stats = ComputeStats(s) - }).ToList(); - } - - /// - public IEnumerable GetOrderedProviders(bool excludeUnhealthy = true) - { - var providers = _states.Values - .Where(s => s.Options.Enabled) - .Where(s => !excludeUnhealthy || IsAvailable(s)) - .ToList(); - - return _options.FailoverStrategy switch - { - FailoverStrategy.Priority => providers.OrderBy(p => p.Options.Priority).Select(p => p.Options), - FailoverStrategy.RoundRobin => GetRoundRobinOrder(providers).Select(p => p.Options), - FailoverStrategy.LowestLatency => providers.OrderBy(p => GetAverageLatency(p)).Select(p => p.Options), - FailoverStrategy.Random => providers.OrderBy(_ => Random.Shared.Next()).Select(p => p.Options), - _ => providers.OrderBy(p => p.Options.Priority).Select(p => p.Options) - }; - } - - /// - public void ReportSuccess(string providerName, TimeSpan latency) - { - if (!_states.TryGetValue(providerName, out var state)) - return; - - lock (state) - { - state.TotalRequests++; - state.SuccessCount++; - state.LastSuccessAt = DateTimeOffset.UtcNow; - state.ConsecutiveFailures = 0; - - // Keep last 100 latencies for stats - state.Latencies.Add(latency.TotalMilliseconds); - if (state.Latencies.Count > 100) - { - state.Latencies.RemoveAt(0); - } - - state.Health = TsaProviderHealth.Healthy(); - } - - _logger.LogDebug( - "TSA {Provider} request succeeded in {Latency}ms", - providerName, latency.TotalMilliseconds); - } - - /// - public void ReportFailure(string providerName, string error) - { - if (!_states.TryGetValue(providerName, out var state)) - return; - - lock (state) - { - state.TotalRequests++; - state.FailureCount++; - state.LastFailureAt = DateTimeOffset.UtcNow; - state.ConsecutiveFailures++; - state.LastError = error; - - // Calculate backoff based on consecutive failures - var backoffSeconds = Math.Min(300, Math.Pow(2, state.ConsecutiveFailures)); - var retryAfter = 
state.ConsecutiveFailures >= 3 - ? DateTimeOffset.UtcNow.AddSeconds(backoffSeconds) - : (DateTimeOffset?)null; - - state.Health = TsaProviderHealth.Unhealthy( - error, - state.ConsecutiveFailures, - retryAfter); - } - - _logger.LogWarning( - "TSA {Provider} request failed: {Error} (consecutive failures: {Failures})", - providerName, error, state.ConsecutiveFailures); - } - - /// - public TsaProviderHealth GetHealth(string providerName) - { - return _states.TryGetValue(providerName, out var state) - ? state.Health - : new TsaProviderHealth { Status = TsaHealthStatus.Unknown }; - } - - /// - public async Task CheckHealthAsync( - string providerName, - CancellationToken cancellationToken = default) - { - if (!_states.TryGetValue(providerName, out var state)) - { - return new TsaProviderHealth - { - Status = TsaHealthStatus.Unknown, - LastError = "Provider not found" - }; - } - - try - { - var client = _httpClientFactory.CreateClient($"TSA_{providerName}"); - client.Timeout = TimeSpan.FromSeconds(10); - - // Simple connectivity check - just verify the endpoint is reachable - var response = await client.SendAsync( - new HttpRequestMessage(HttpMethod.Head, state.Options.Url), - cancellationToken); - - // Most TSAs don't support HEAD, so any response (even 4xx) means it's reachable - var health = TsaProviderHealth.Healthy(); - - lock (state) - { - state.Health = health; - } - - return health; - } - catch (Exception ex) - { - var health = TsaProviderHealth.Unhealthy(ex.Message, state.ConsecutiveFailures + 1); - - lock (state) - { - state.Health = health; - } - - return health; - } - } - - private bool IsAvailable(ProviderState state) - { - if (!state.Health.IsHealthy && state.Health.RetryAfter.HasValue) - { - return DateTimeOffset.UtcNow >= state.Health.RetryAfter.Value; - } - return state.Health.Status != TsaHealthStatus.Unhealthy || state.ConsecutiveFailures < 5; - } - - private double GetAverageLatency(ProviderState state) - { - lock (state) - { - return state.Latencies.Count > 0 - ? state.Latencies.Average() - : double.MaxValue; - } - } - - private IEnumerable GetRoundRobinOrder(List providers) - { - if (providers.Count == 0) - yield break; - - var startIndex = Interlocked.Increment(ref _roundRobinIndex) % providers.Count; - for (var i = 0; i < providers.Count; i++) - { - yield return providers[(startIndex + i) % providers.Count]; - } - } - - private static TsaProviderStats ComputeStats(ProviderState state) - { - lock (state) - { - var sortedLatencies = state.Latencies.OrderBy(l => l).ToList(); - var p95Index = (int)(sortedLatencies.Count * 0.95); - - return new TsaProviderStats - { - TotalRequests = state.TotalRequests, - SuccessCount = state.SuccessCount, - FailureCount = state.FailureCount, - AverageLatencyMs = sortedLatencies.Count > 0 ? sortedLatencies.Average() : 0, - P95LatencyMs = sortedLatencies.Count > 0 ? sortedLatencies[Math.Min(p95Index, sortedLatencies.Count - 1)] : 0, - LastSuccessAt = state.LastSuccessAt, - LastFailureAt = state.LastFailureAt - }; - } - } - - private sealed class ProviderState - { - public required TsaProviderOptions Options { get; init; } - public TsaProviderHealth Health { get; set; } = new() { Status = TsaHealthStatus.Unknown }; - public List Latencies { get; init; } = []; - public long TotalRequests { get; set; } - public long SuccessCount { get; set; } - public long FailureCount { get; set; } - public int ConsecutiveFailures { get; set; } - public string? LastError { get; set; } - public DateTimeOffset? LastSuccessAt { get; set; } - public DateTimeOffset? 
LastFailureAt { get; set; } - } } diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderState.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderState.cs new file mode 100644 index 000000000..abb72412d --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderState.cs @@ -0,0 +1,30 @@ +// ----------------------------------------------------------------------------- +// TsaProviderState.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Provider state snapshot for registry reporting. +// ----------------------------------------------------------------------------- +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping; + +/// +/// State of a TSA provider including health and statistics. +/// +public sealed record TsaProviderState +{ + /// + /// Gets the provider options. + /// + public required TsaProviderOptions Options { get; init; } + + /// + /// Gets the current health status. + /// + public required TsaProviderHealth Health { get; init; } + + /// + /// Gets the usage statistics. + /// + public required TsaProviderStats Stats { get; init; } +} diff --git a/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderStats.cs b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderStats.cs new file mode 100644 index 000000000..7a08c0954 --- /dev/null +++ b/src/Authority/__Libraries/StellaOps.Authority.Timestamping/TsaProviderStats.cs @@ -0,0 +1,55 @@ +// ----------------------------------------------------------------------------- +// TsaProviderStats.cs +// Sprint: SPRINT_20260119_007 RFC-3161 TSA Client +// Task: TSA-005 - Provider Configuration & Management +// Description: Provider usage statistics snapshot. +// ----------------------------------------------------------------------------- +namespace StellaOps.Authority.Timestamping; + +/// +/// Usage statistics for a TSA provider. +/// +public sealed record TsaProviderStats +{ + /// + /// Gets the total number of requests. + /// + public long TotalRequests { get; init; } + + /// + /// Gets the number of successful requests. + /// + public long SuccessCount { get; init; } + + /// + /// Gets the number of failed requests. + /// + public long FailureCount { get; init; } + + /// + /// Gets the success rate as a percentage. + /// + public double SuccessRate => TotalRequests > 0 + ? (double)SuccessCount / TotalRequests * 100 + : 0; + + /// + /// Gets the average latency in milliseconds. + /// + public double AverageLatencyMs { get; init; } + + /// + /// Gets the P95 latency in milliseconds. + /// + public double P95LatencyMs { get; init; } + + /// + /// Gets the last successful request time. + /// + public DateTimeOffset? LastSuccessAt { get; init; } + + /// + /// Gets the last failed request time. + /// + public DateTimeOffset? LastFailureAt { get; init; } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/TASKS.md b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/TASKS.md index 271cb8723..6fa1ac7d1 100644 --- a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/TASKS.md +++ b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/TASKS.md @@ -8,3 +8,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0087-M | DONE | Revalidated 2026-01-06. | | AUDIT-0087-T | DONE | Revalidated 2026-01-06 (coverage reviewed). 
| | AUDIT-0087-A | DONE | Waived (test project; revalidated 2026-01-06). | +| REMED-05 | DONE | Unit coverage expanded for verdict manifest remediation. | diff --git a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/InMemoryVerdictManifestStoreTests.cs b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/InMemoryVerdictManifestStoreTests.cs index b1714f01d..83ebf9526 100644 --- a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/InMemoryVerdictManifestStoreTests.cs +++ b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/InMemoryVerdictManifestStoreTests.cs @@ -77,6 +77,30 @@ public sealed class InMemoryVerdictManifestStoreTests page3.NextPageToken.Should().BeNull(); } + [Fact] + public async Task ListByAsset_Paginates() + { + var assetDigest = "sha256:asset"; + var first = CreateManifest("m0", "t", assetDigest: assetDigest, evaluatedAt: BaseTime); + var second = CreateManifest("m1", "t", assetDigest: assetDigest, evaluatedAt: BaseTime.AddMinutes(-1)); + var third = CreateManifest("m2", "t", assetDigest: assetDigest, evaluatedAt: BaseTime.AddMinutes(-2)); + + await _store.StoreAsync(first); + await _store.StoreAsync(second); + await _store.StoreAsync(third); + + var page1 = await _store.ListByAssetAsync("t", assetDigest, limit: 2); + page1.Manifests.Should().HaveCount(2); + page1.Manifests[0].ManifestId.Should().Be("m0"); + page1.Manifests[1].ManifestId.Should().Be("m1"); + page1.NextPageToken.Should().NotBeNull(); + + var page2 = await _store.ListByAssetAsync("t", assetDigest, limit: 2, pageToken: page1.NextPageToken); + page2.Manifests.Should().HaveCount(1); + page2.Manifests[0].ManifestId.Should().Be("m2"); + page2.NextPageToken.Should().BeNull(); + } + [Fact] public async Task Delete_RemovesManifest() { diff --git a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictManifestSerializerTests.cs b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictManifestSerializerTests.cs index a9e86b6d8..c63131610 100644 --- a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictManifestSerializerTests.cs +++ b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictManifestSerializerTests.cs @@ -34,6 +34,16 @@ public sealed class VerdictManifestSerializerTests deserialized.Result.Confidence.Should().Be(manifest.Result.Confidence); } + [Theory] + [InlineData("")] + [InlineData(" ")] + public void Deserialize_ReturnsNull_ForEmptyJson(string json) + { + var deserialized = VerdictManifestSerializer.Deserialize(json); + + deserialized.Should().BeNull(); + } + [Fact] public void ComputeDigest_IsDeterministic() { diff --git a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictReplayVerifierTests.cs b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictReplayVerifierTests.cs index 7f886dce0..631a2ae0c 100644 --- a/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictReplayVerifierTests.cs +++ b/src/Authority/__Tests/StellaOps.Authority.Core.Tests/Verdicts/VerdictReplayVerifierTests.cs @@ -33,9 +33,31 @@ public sealed class VerdictReplayVerifierTests result.Error.Should().Contain("Signature verification failed"); } - private static VerdictManifest CreateManifest() + [Fact] + public async Task VerifyAsync_ReturnsDifferences_WhenReplayDiffers() { - return new VerdictManifest + var replayResult = new VerdictResult + { + Status = VexStatus.Affected, + Confidence = 0.2, + Explanations = ImmutableArray.Empty, + EvidenceRefs = ImmutableArray.Empty, + }; + 
+ var verifier = new VerdictReplayVerifier(new NullStore(), new NullVerdictManifestSigner(), new FixedEvaluator(replayResult)); + var manifest = CreateManifest(includeSignature: false); + + var result = await verifier.VerifyAsync(manifest, CancellationToken.None); + + result.Success.Should().BeFalse(); + result.SignatureValid.Should().BeTrue(); + result.Differences.Should().NotBeNull(); + result.Differences!.Should().NotBeEmpty(); + } + + private static VerdictManifest CreateManifest(VerdictResult? result = null, bool includeSignature = true) + { + var manifest = new VerdictManifest { ManifestId = "manifest-1", Tenant = "tenant-a", @@ -49,7 +71,7 @@ public sealed class VerdictReplayVerifierTests ReachabilityGraphIds = ImmutableArray.Empty, ClockCutoff = DateTimeOffset.Parse("2025-01-01T00:00:00Z"), }, - Result = new VerdictResult + Result = result ?? new VerdictResult { Status = VexStatus.NotAffected, Confidence = 0.5, @@ -59,9 +81,12 @@ public sealed class VerdictReplayVerifierTests PolicyHash = "sha256:policy", LatticeVersion = "1.0.0", EvaluatedAt = DateTimeOffset.Parse("2025-01-01T00:00:00Z"), - ManifestDigest = "sha256:manifest", - SignatureBase64 = "invalid" + ManifestDigest = string.Empty, + SignatureBase64 = includeSignature ? "invalid" : null }; + + var digest = VerdictManifestSerializer.ComputeDigest(manifest); + return manifest with { ManifestDigest = digest }; } private sealed class NullStore : IVerdictManifestStore @@ -122,4 +147,21 @@ public sealed class VerdictReplayVerifierTests }); } } + + private sealed class FixedEvaluator(VerdictResult result) : IVerdictEvaluator + { + private readonly VerdictResult _result = result; + + public Task EvaluateAsync( + string tenant, + string assetDigest, + string vulnerabilityId, + VerdictInputs inputs, + string policyHash, + string latticeVersion, + CancellationToken ct = default) + { + return Task.FromResult(_result); + } + } } diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/AGENTS.md b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/AGENTS.md new file mode 100644 index 000000000..4fd6d251e --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/AGENTS.md @@ -0,0 +1,22 @@ +# Authority Timestamping Abstractions Tests AGENTS + +## Purpose & Scope +- Working directory: `src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/`. +- Roles: QA automation, backend engineer. +- Focus: timestamp request/response models, verification options, and deterministic helpers. + +## Required Reading (treat as read before DOING) +- `docs/README.md` +- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` +- `docs/modules/platform/architecture-overview.md` +- `docs/modules/authority/architecture.md` +- Relevant sprint files. + +## Working Agreements +- Keep tests deterministic (fixed inputs, stable ordering). +- Avoid live network calls; use in-memory data only. +- Update `docs/implplan/SPRINT_*.md` and local `TASKS.md` when starting or completing work. + +## Testing +- Use xUnit assertions. +- Cover factory helpers, option defaults, and validation mappings. 
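Context for the new replay-difference test above: the verifier is expected to re-evaluate the manifest inputs with the injected evaluator and report field-level differences against the recorded result. A minimal sketch of that comparison, assuming only the VerdictResult members visible in this diff (Status, Confidence); the helper name and return shape are illustrative, not the library's actual API:

using System.Collections.Generic;

static class VerdictDiffSketch
{
    public static IReadOnlyList<string> DiffVerdicts(VerdictResult recorded, VerdictResult replayed)
    {
        var differences = new List<string>();

        if (recorded.Status != replayed.Status)
        {
            differences.Add($"Status: {recorded.Status} -> {replayed.Status}");
        }

        if (recorded.Confidence != replayed.Confidence)
        {
            differences.Add($"Confidence: {recorded.Confidence} -> {replayed.Confidence}");
        }

        return differences; // non-empty differences => VerifyAsync reports Success = false
    }
}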
diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/StellaOps.Authority.Timestamping.Abstractions.Tests.csproj b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/StellaOps.Authority.Timestamping.Abstractions.Tests.csproj new file mode 100644 index 000000000..5d2dba28a --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/StellaOps.Authority.Timestamping.Abstractions.Tests.csproj @@ -0,0 +1,15 @@ + + + + net10.0 + enable + enable + preview + false + true + StellaOps.Authority.Timestamping.Abstractions.Tests + + + + + diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TASKS.md b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TASKS.md new file mode 100644 index 000000000..de2fea6bf --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TASKS.md @@ -0,0 +1,8 @@ +# Authority Timestamping Abstractions Tests Task Board + +This board mirrors active sprint tasks for this module. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. + +| Task ID | Status | Notes | +| --- | --- | --- | +| REMED-05 | DONE | Unit test coverage added to remediate Timestamping.Abstractions test gap (2026-02-04). | diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampRequestTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampRequestTests.cs new file mode 100644 index 000000000..c8c379014 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampRequestTests.cs @@ -0,0 +1,39 @@ +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TimeStampRequestTests +{ + [Fact] + public void CreateFromHash_UsesProvidedHashAndNonceToggle() + { + var hash = new byte[] { 0x01, 0x02, 0x03 }; + + var request = TimeStampRequest.CreateFromHash(hash, HashAlgorithmName.SHA256, includeNonce: false); + + Assert.Equal(hash, request.MessageImprint.ToArray()); + Assert.Null(request.Nonce); + } + + [Fact] + public void Create_ComputesHashAndNonceWhenRequested() + { + var data = new byte[] { 0x10, 0x20, 0x30 }; + var expectedHash = SHA256.HashData(data); + + var request = TimeStampRequest.Create(data, HashAlgorithmName.SHA256, includeNonce: true); + + Assert.Equal(expectedHash, request.MessageImprint.ToArray()); + Assert.NotNull(request.Nonce); + Assert.Equal(8, request.Nonce!.Value.Length); + } + + [Fact] + public void Create_ThrowsForUnsupportedHashAlgorithm() + { + var act = () => TimeStampRequest.Create([0x01], new HashAlgorithmName("MD5")); + + var exception = Assert.Throws(act); + Assert.Equal("algorithm", exception.ParamName); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampResponseTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampResponseTests.cs new file mode 100644 index 000000000..6b2fc0237 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampResponseTests.cs @@ -0,0 +1,28 @@ +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TimeStampResponseTests +{ + [Fact] + public void Success_PopulatesTokenAndProvider() + { + var token = TimestampingTestData.CreateToken(); + + var response = TimeStampResponse.Success(token, "tsa-A"); 
+ + Assert.True(response.IsSuccess); + Assert.Equal(PkiStatus.Granted, response.Status); + Assert.Same(token, response.Token); + Assert.Equal("tsa-A", response.ProviderName); + } + + [Fact] + public void Failure_PopulatesStatusAndErrorFields() + { + var response = TimeStampResponse.Failure(PkiStatus.Rejection, PkiFailureInfo.BadAlg, "bad algo"); + + Assert.False(response.IsSuccess); + Assert.Equal(PkiStatus.Rejection, response.Status); + Assert.Equal(PkiFailureInfo.BadAlg, response.FailureInfo); + Assert.Equal("bad algo", response.StatusString); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampTokenTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampTokenTests.cs new file mode 100644 index 000000000..0265ec3d8 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampTokenTests.cs @@ -0,0 +1,18 @@ +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TimeStampTokenTests +{ + [Fact] + public void TstInfoDigest_UsesLowercaseSha256() + { + var encoded = new byte[] { 0x10, 0x20, 0x30 }; + var info = TimestampingTestData.CreateTstInfo(encoded: encoded); + var token = TimestampingTestData.CreateToken(info); + + var expected = Convert.ToHexString(SHA256.HashData(encoded)).ToLowerInvariant(); + + Assert.Equal(expected, token.TstInfoDigest); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationOptionsTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationOptionsTests.cs new file mode 100644 index 000000000..8d7c81e8c --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationOptionsTests.cs @@ -0,0 +1,36 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TimeStampVerificationOptionsTests +{ + [Fact] + public void Defaults_AreStable() + { + var options = TimeStampVerificationOptions.Default; + + Assert.True(options.VerifyCertificateChain); + Assert.True(options.CheckRevocation); + Assert.False(options.AllowWeakHashAlgorithms); + } + + [Fact] + public void Strict_EnablesChecks() + { + var options = TimeStampVerificationOptions.Strict; + + Assert.True(options.VerifyCertificateChain); + Assert.True(options.CheckRevocation); + Assert.False(options.AllowWeakHashAlgorithms); + Assert.Equal(60, options.MaxAccuracySeconds); + } + + [Fact] + public void Offline_DisablesRevocation() + { + var options = TimeStampVerificationOptions.Offline; + + Assert.False(options.CheckRevocation); + Assert.Equal(X509RevocationMode.NoCheck, options.RevocationMode); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationResultTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationResultTests.cs new file mode 100644 index 000000000..8c7ead32a --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimeStampVerificationResultTests.cs @@ -0,0 +1,31 @@ +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TimeStampVerificationResultTests +{ + [Fact] + public void Success_SetsValidStatusAndData() + { + var verifiedTime = DateTimeOffset.UnixEpoch; + var timeRange = (verifiedTime, 
verifiedTime.AddSeconds(1)); + + var result = TimeStampVerificationResult.Success(verifiedTime, timeRange, policyOid: "1.2.3"); + + Assert.True(result.IsValid); + Assert.Equal(VerificationStatus.Valid, result.Status); + Assert.Equal(verifiedTime, result.VerifiedTime); + Assert.Equal(timeRange, result.TimeRange); + Assert.Equal("1.2.3", result.PolicyOid); + } + + [Fact] + public void Failure_MapsErrorCodeToStatus() + { + var error = new VerificationError(VerificationErrorCode.SignatureInvalid, "bad signature"); + + var result = TimeStampVerificationResult.Failure(error); + + Assert.Equal(VerificationStatus.SignatureInvalid, result.Status); + Assert.Same(error, result.Error); + Assert.False(result.IsValid); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimestampingTestData.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimestampingTestData.cs new file mode 100644 index 000000000..6437863ac --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TimestampingTestData.cs @@ -0,0 +1,33 @@ +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +internal static class TimestampingTestData +{ + internal static TstInfo CreateTstInfo( + ReadOnlyMemory? encoded = null, + DateTimeOffset? genTime = null, + TstAccuracy? accuracy = null) + { + var encodedValue = encoded ?? new byte[] { 0x01, 0x02, 0x03 }; + return new TstInfo + { + EncodedTstInfo = encodedValue, + PolicyOid = "1.2.3", + HashAlgorithm = HashAlgorithmName.SHA256, + MessageImprint = new byte[] { 0xAA }, + SerialNumber = new byte[] { 0xBB }, + GenTime = genTime ?? DateTimeOffset.UnixEpoch, + Accuracy = accuracy + }; + } + + internal static TimeStampToken CreateToken(TstInfo? info = null) + { + return new TimeStampToken + { + EncodedToken = new byte[] { 0x10 }, + TstInfo = info ?? 
CreateTstInfo() + }; + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TsaClientOptionsTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TsaClientOptionsTests.cs new file mode 100644 index 000000000..77978459c --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TsaClientOptionsTests.cs @@ -0,0 +1,36 @@ +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TsaClientOptionsTests +{ + [Fact] + public void Defaults_AreStable() + { + var options = new TsaClientOptions(); + + Assert.Equal(FailoverStrategy.Priority, options.FailoverStrategy); + Assert.True(options.EnableCaching); + Assert.Equal(TimeSpan.FromHours(24), options.CacheDuration); + Assert.Equal("SHA256", options.DefaultHashAlgorithm); + Assert.True(options.IncludeNonceByDefault); + Assert.True(options.RequestCertificatesByDefault); + Assert.Same(TimeStampVerificationOptions.Default, options.DefaultVerificationOptions); + Assert.Empty(options.Providers); + } + + [Fact] + public void ProviderDefaults_AreStable() + { + var provider = new TsaProviderOptions + { + Name = "tsa-A", + Url = new Uri("https://tsa.example") + }; + + Assert.Equal(100, provider.Priority); + Assert.Equal(TimeSpan.FromSeconds(30), provider.Timeout); + Assert.Equal(3, provider.RetryCount); + Assert.Equal(TimeSpan.FromSeconds(1), provider.RetryBaseDelay); + Assert.True(provider.Enabled); + Assert.Empty(provider.Headers); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstAccuracyTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstAccuracyTests.cs new file mode 100644 index 000000000..8a1761319 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstAccuracyTests.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TstAccuracyTests +{ + [Fact] + public void ToTimeSpan_ConvertsMicrosAndMillis() + { + var accuracy = new TstAccuracy { Seconds = 2, Millis = 3, Micros = 4 }; + + Assert.Equal(TimeSpan.FromMicroseconds(2_003_004), accuracy.ToTimeSpan()); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstInfoTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstInfoTests.cs new file mode 100644 index 000000000..9f6e9b569 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Abstractions.Tests/TstInfoTests.cs @@ -0,0 +1,29 @@ +namespace StellaOps.Authority.Timestamping.Abstractions.Tests; + +public sealed class TstInfoTests +{ + [Fact] + public void GetTimeRange_UsesAccuracyWhenPresent() + { + var genTime = new DateTimeOffset(2026, 2, 4, 0, 0, 0, TimeSpan.Zero); + var accuracy = new TstAccuracy { Seconds = 1, Millis = 500 }; + var info = TimestampingTestData.CreateTstInfo(genTime: genTime, accuracy: accuracy); + + var (earliest, latest) = info.GetTimeRange(); + + Assert.Equal(genTime - TimeSpan.FromSeconds(1.5), earliest); + Assert.Equal(genTime + TimeSpan.FromSeconds(1.5), latest); + } + + [Fact] + public void GetTimeRange_DefaultsToGenTimeWithoutAccuracy() + { + var genTime = new DateTimeOffset(2026, 2, 4, 1, 0, 0, TimeSpan.Zero); + var info = TimestampingTestData.CreateTstInfo(genTime: genTime, accuracy: null); + + var (earliest, latest) = info.GetTimeRange(); + + Assert.Equal(genTime, earliest); + Assert.Equal(genTime, latest); + } +} diff --git 
a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/AGENTS.md b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/AGENTS.md new file mode 100644 index 000000000..563b5acdb --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/AGENTS.md @@ -0,0 +1,21 @@ +# Authority Timestamping Tests AGENTS + +## Purpose & Scope +- Working directory: `src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/`. +- Roles: QA automation, backend engineer. +- Focus: timestamping client helpers, registry/cache behavior, and ASN.1 encoding/decoding. + +## Required Reading (treat as read before DOING) +- `docs/README.md` +- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` +- `docs/modules/platform/architecture-overview.md` +- `docs/modules/authority/architecture.md` +- Relevant sprint files. + +## Working Agreements +- Keep tests deterministic (fixed inputs, stable ordering). +- Avoid live network calls; use in-memory handlers only. +- Update `docs/implplan/SPRINT_*.md` and local `TASKS.md` when starting or completing work. + +## Testing +- Use xUnit assertions. diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/InMemoryTsaCacheStoreTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/InMemoryTsaCacheStoreTests.cs new file mode 100644 index 000000000..c6b1f9ef3 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/InMemoryTsaCacheStoreTests.cs @@ -0,0 +1,41 @@ +using StellaOps.Authority.Timestamping.Caching; + +namespace StellaOps.Authority.Timestamping.Tests; + +public sealed class InMemoryTsaCacheStoreTests +{ + [Fact] + public async Task GetAsync_ReturnsToken_WhenPresent() + { + using var store = new InMemoryTsaCacheStore(TimeSpan.FromHours(1)); + var token = TimestampingTestData.CreateToken(); + + await store.SetAsync(token.TstInfo.MessageImprint, token, TimeSpan.FromMinutes(5)); + var result = await store.GetAsync(token.TstInfo.MessageImprint); + + Assert.Same(token, result); + + var stats = store.GetStats(); + Assert.Equal(1, stats.ItemCount); + Assert.Equal(1, stats.HitCount); + Assert.Equal(0, stats.MissCount); + Assert.Equal(token.EncodedToken.Length, stats.ApproximateSizeBytes); + } + + [Fact] + public async Task GetAsync_ReturnsNull_WhenExpired() + { + using var store = new InMemoryTsaCacheStore(TimeSpan.FromHours(1)); + var token = TimestampingTestData.CreateToken(); + + await store.SetAsync(token.TstInfo.MessageImprint, token, TimeSpan.FromSeconds(-1)); + var result = await store.GetAsync(token.TstInfo.MessageImprint); + + Assert.Null(result); + + var stats = store.GetStats(); + Assert.Equal(0, stats.ItemCount); + Assert.Equal(0, stats.HitCount); + Assert.Equal(1, stats.MissCount); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/StellaOps.Authority.Timestamping.Tests.csproj b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/StellaOps.Authority.Timestamping.Tests.csproj new file mode 100644 index 000000000..123601e4d --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/StellaOps.Authority.Timestamping.Tests.csproj @@ -0,0 +1,16 @@ + + + + net10.0 + enable + enable + preview + false + true + StellaOps.Authority.Timestamping.Tests + + + + + + diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TASKS.md b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TASKS.md new file mode 100644 index 000000000..336f00379 --- /dev/null +++ 
b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TASKS.md @@ -0,0 +1,8 @@ +# Authority Timestamping Tests Task Board + +This board mirrors active sprint tasks for this module. +Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_solid_review.md`. + +| Task ID | Status | Notes | +| --- | --- | --- | +| REMED-05 | DONE | Added unit tests for Timestamping library remediation gaps. | diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampReqEncoderTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampReqEncoderTests.cs new file mode 100644 index 000000000..b24c31199 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampReqEncoderTests.cs @@ -0,0 +1,58 @@ +using StellaOps.Authority.Timestamping.Abstractions; +using StellaOps.Authority.Timestamping.Asn1; +using System.Formats.Asn1; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Tests; + +public sealed class TimeStampReqEncoderTests +{ + [Fact] + public void GetHashAlgorithmOid_RoundTrips() + { + var oid = TimeStampReqEncoder.GetHashAlgorithmOid(HashAlgorithmName.SHA256); + var roundTrip = TimeStampReqEncoder.GetHashAlgorithmFromOid(oid); + + Assert.Equal(HashAlgorithmName.SHA256, roundTrip); + } + + [Fact] + public void Encode_WritesExpectedFields() + { + var request = new TimeStampRequest + { + HashAlgorithm = HashAlgorithmName.SHA256, + MessageImprint = new byte[] { 0x10, 0x20 }, + PolicyOid = "1.2.3.4.5", + Nonce = new byte[] { 0x01, 0x02 }, + CertificateRequired = true, + Extensions = new[] + { + new TimeStampExtension("1.2.3.4.5.6", true, new byte[] { 0xAA }) + } + }; + + var encoded = TimeStampReqEncoder.Encode(request); + var reader = new AsnReader(encoded, AsnEncodingRules.DER); + var sequence = reader.ReadSequence(); + + Assert.Equal(1, (int)sequence.ReadInteger()); + + var messageImprint = sequence.ReadSequence(); + var algId = messageImprint.ReadSequence(); + Assert.Equal(TimeStampReqEncoder.GetHashAlgorithmOid(request.HashAlgorithm), algId.ReadObjectIdentifier()); + algId.ReadNull(); + Assert.Equal(request.MessageImprint.Span.ToArray(), messageImprint.ReadOctetString()); + + Assert.Equal(request.PolicyOid, sequence.ReadObjectIdentifier()); + Assert.Equal(request.Nonce!.Value.Span.ToArray(), sequence.ReadIntegerBytes().ToArray()); + Assert.True(sequence.ReadBoolean()); + + var extSeq = sequence.ReadSequence(new Asn1Tag(TagClass.ContextSpecific, 0)); + var extension = extSeq.ReadSequence(); + Assert.Equal("1.2.3.4.5.6", extension.ReadObjectIdentifier()); + Assert.True(extension.ReadBoolean()); + Assert.Equal(new byte[] { 0xAA }, extension.ReadOctetString()); + Assert.False(sequence.HasData); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampRespDecoderTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampRespDecoderTests.cs new file mode 100644 index 000000000..50809e449 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampRespDecoderTests.cs @@ -0,0 +1,42 @@ +using StellaOps.Authority.Timestamping.Abstractions; +using StellaOps.Authority.Timestamping.Asn1; +using System.Formats.Asn1; + +namespace StellaOps.Authority.Timestamping.Tests; + +public sealed class TimeStampRespDecoderTests +{ + [Fact] + public void Decode_ParsesStatusAndFailureInfo() + { + var encoded = BuildResponse(PkiStatus.Rejection, "bad request"); + + var response = TimeStampRespDecoder.Decode(encoded); 
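// --- Context sketch, not part of the patch -------------------------------------
// The DER layout that TimeStampReqEncoder is expected to emit, as asserted by
// TimeStampReqEncoderTests above, written directly with System.Formats.Asn1.
// Assumptions: the helper name is illustrative, the SHA-256 OID is the standard
// value 2.16.840.1.101.3.4.2.1, and the optional reqPolicy/extensions fields are
// omitted for brevity.
using System.Formats.Asn1;

static class TimeStampReqSketch
{
    public static byte[] EncodeMinimal(byte[] sha256Imprint, byte[] nonce)
    {
        var writer = new AsnWriter(AsnEncodingRules.DER);
        using (writer.PushSequence())                    // TimeStampReq ::= SEQUENCE
        {
            writer.WriteInteger(1);                      // version

            using (writer.PushSequence())                // messageImprint ::= SEQUENCE
            {
                using (writer.PushSequence())            // AlgorithmIdentifier
                {
                    writer.WriteObjectIdentifier("2.16.840.1.101.3.4.2.1"); // SHA-256
                    writer.WriteNull();                  // absent parameters
                }

                writer.WriteOctetString(sha256Imprint);  // hashedMessage
            }

            writer.WriteInteger(nonce);                  // nonce bytes as a signed big-endian INTEGER
            writer.WriteBoolean(true);                   // certReq
        }

        return writer.Encode();
    }
}
// --------------------------------------------------------------------------------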
+ + Assert.Equal(PkiStatus.Rejection, response.Status); + Assert.Equal("bad request", response.StatusString); + Assert.Equal(PkiFailureInfo.BadAlg, response.FailureInfo); + Assert.Null(response.Token); + } + + private static byte[] BuildResponse(PkiStatus status, string statusString) + { + var writer = new AsnWriter(AsnEncodingRules.DER); + using (writer.PushSequence()) + { + using (writer.PushSequence()) + { + writer.WriteInteger((int)status); + + using (writer.PushSequence()) + { + writer.WriteCharacterString(UniversalTagNumber.UTF8String, statusString); + } + + writer.WriteBitString(new byte[] { 0x80 }, 7); + } + } + + return writer.Encode(); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampTokenVerifierTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampTokenVerifierTests.cs new file mode 100644 index 000000000..99e881a40 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimeStampTokenVerifierTests.cs @@ -0,0 +1,37 @@ +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Authority.Timestamping.Abstractions; + +namespace StellaOps.Authority.Timestamping.Tests; + +public sealed class TimeStampTokenVerifierTests +{ + [Fact] + public async Task VerifyAsync_ReturnsImprintMismatch_WhenHashDiffers() + { + var verifier = new TimeStampTokenVerifier(NullLogger.Instance); + var info = TimestampingTestData.CreateTstInfo(messageImprint: new byte[] { 0x01 }); + var token = TimestampingTestData.CreateToken(info); + + var result = await verifier.VerifyAsync( + token, + new byte[] { 0x02 }, + new TimeStampVerificationOptions()); + + Assert.Equal(VerificationErrorCode.MessageImprintMismatch, result.Error?.Code); + } + + [Fact] + public async Task VerifyAsync_ReturnsNonceMismatch_WhenExpectedNonceMissing() + { + var verifier = new TimeStampTokenVerifier(NullLogger.Instance); + var info = TimestampingTestData.CreateTstInfo(messageImprint: new byte[] { 0x0A }); + var token = TimestampingTestData.CreateToken(info); + + var result = await verifier.VerifyAsync( + token, + info.MessageImprint, + new TimeStampVerificationOptions { ExpectedNonce = new byte[] { 0xFF } }); + + Assert.Equal(VerificationErrorCode.NonceMismatch, result.Error?.Code); + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimestampingTestData.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimestampingTestData.cs new file mode 100644 index 000000000..71dcab8e1 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TimestampingTestData.cs @@ -0,0 +1,38 @@ +using StellaOps.Authority.Timestamping.Abstractions; +using System.Security.Cryptography; + +namespace StellaOps.Authority.Timestamping.Tests; + +internal static class TimestampingTestData +{ + internal static TstInfo CreateTstInfo( + ReadOnlyMemory? messageImprint = null, + HashAlgorithmName? algorithm = null, + ReadOnlyMemory? nonce = null, + TstAccuracy? accuracy = null, + DateTimeOffset? genTime = null) + { + return new TstInfo + { + EncodedTstInfo = new byte[] { 0x01, 0x02 }, + PolicyOid = "1.2.3.4.5", + HashAlgorithm = algorithm ?? HashAlgorithmName.SHA256, + MessageImprint = messageImprint ?? new byte[] { 0x10, 0x20 }, + SerialNumber = new byte[] { 0x0A }, + GenTime = genTime ?? new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), + Nonce = nonce, + Accuracy = accuracy + }; + } + + internal static TimeStampToken CreateToken( + TstInfo? info = null, + ReadOnlyMemory? 
encodedToken = null) + { + return new TimeStampToken + { + EncodedToken = encodedToken ?? new byte[] { 0x30, 0x00 }, + TstInfo = info ?? CreateTstInfo() + }; + } +} diff --git a/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TsaProviderRegistryTests.cs b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TsaProviderRegistryTests.cs new file mode 100644 index 000000000..5148b3512 --- /dev/null +++ b/src/Authority/__Tests/StellaOps.Authority.Timestamping.Tests/TsaProviderRegistryTests.cs @@ -0,0 +1,89 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Authority.Timestamping.Abstractions; +using System.Net; +using System.Net.Http; + +namespace StellaOps.Authority.Timestamping.Tests; + +public sealed class TsaProviderRegistryTests +{ + [Fact] + public void GetOrderedProviders_RespectsPriority() + { + var registry = CreateRegistry( + new TestMessageHandler(HttpStatusCode.OK), + new TsaProviderOptions { Name = "slow", Url = new Uri("https://tsa.slow"), Priority = 200 }, + new TsaProviderOptions { Name = "fast", Url = new Uri("https://tsa.fast"), Priority = 100 }); + + var ordered = registry.GetOrderedProviders().Select(p => p.Name).ToArray(); + + Assert.Equal(new[] { "fast", "slow" }, ordered); + } + + [Fact] + public async Task CheckHealthAsync_ReturnsHealthy_OnAnyResponse() + { + var handler = new TestMessageHandler(HttpStatusCode.ServiceUnavailable); + var registry = CreateRegistry(handler, new TsaProviderOptions + { + Name = "primary", + Url = new Uri("https://tsa.primary") + }); + + var health = await registry.CheckHealthAsync("primary"); + + Assert.True(health.IsHealthy); + Assert.Equal(HttpMethod.Head, handler.LastRequest?.Method); + } + + [Fact] + public void ReportSuccess_UpdatesStatsAndHealth() + { + var registry = CreateRegistry( + new TestMessageHandler(HttpStatusCode.OK), + new TsaProviderOptions { Name = "primary", Url = new Uri("https://tsa.primary") }); + + registry.ReportSuccess("primary", TimeSpan.FromMilliseconds(12)); + + var provider = registry.GetProviders().Single(); + Assert.Equal(1, provider.Stats.TotalRequests); + Assert.Equal(1, provider.Stats.SuccessCount); + Assert.Equal(0, provider.Stats.FailureCount); + Assert.Equal(TsaHealthStatus.Healthy, provider.Health.Status); + } + + private static TsaProviderRegistry CreateRegistry(HttpMessageHandler handler, params TsaProviderOptions[] providers) + { + var options = new TsaClientOptions + { + FailoverStrategy = FailoverStrategy.Priority, + Providers = providers.ToList() + }; + + return new TsaProviderRegistry( + Options.Create(options), + new TestHttpClientFactory(handler), + NullLogger.Instance); + } + + private sealed class TestHttpClientFactory(HttpMessageHandler handler) : IHttpClientFactory + { + private readonly HttpClient _client = new(handler); + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class TestMessageHandler(HttpStatusCode statusCode) : HttpMessageHandler + { + public HttpRequestMessage? 
LastRequest { get; private set; } + + protected override Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken) + { + LastRequest = request; + return Task.FromResult(new HttpResponseMessage(statusCode)); + } + } +} diff --git a/src/BinaryIndex/StellaOps.BinaryIndex.WebService/StellaOps.BinaryIndex.WebService.csproj b/src/BinaryIndex/StellaOps.BinaryIndex.WebService/StellaOps.BinaryIndex.WebService.csproj index aef1e61d8..dd05f2b53 100644 --- a/src/BinaryIndex/StellaOps.BinaryIndex.WebService/StellaOps.BinaryIndex.WebService.csproj +++ b/src/BinaryIndex/StellaOps.BinaryIndex.WebService/StellaOps.BinaryIndex.WebService.csproj @@ -23,6 +23,7 @@ + diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Differences.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Differences.cs new file mode 100644 index 000000000..ddb8397fb --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Differences.cs @@ -0,0 +1,91 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class AstComparisonEngine +{ + private static void CompareNodes(AstNode a, AstNode b, List differences) + { + if (a.Type != b.Type) + { + differences.Add(new CodeDifference( + DifferenceType.Modified, + a, + b, + $"Node type changed: {a.Type} -> {b.Type}")); + return; + } + + switch (a) + { + case VariableNode varA when b is VariableNode varB: + if (varA.Name != varB.Name) + { + differences.Add(new CodeDifference( + DifferenceType.Modified, + a, + b, + $"Variable renamed: {varA.Name} -> {varB.Name}")); + } + break; + + case ConstantNode constA when b is ConstantNode constB: + if (constA.Value?.ToString() != constB.Value?.ToString()) + { + differences.Add(new CodeDifference( + DifferenceType.Modified, + a, + b, + $"Constant changed: {constA.Value} -> {constB.Value}")); + } + break; + + case BinaryOpNode binA when b is BinaryOpNode binB: + if (binA.Operator != binB.Operator) + { + differences.Add(new CodeDifference( + DifferenceType.Modified, + a, + b, + $"Operator changed: {binA.Operator} -> {binB.Operator}")); + } + break; + + case CallNode callA when b is CallNode callB: + if (callA.FunctionName != callB.FunctionName) + { + differences.Add(new CodeDifference( + DifferenceType.Modified, + a, + b, + $"Function call changed: {callA.FunctionName} -> {callB.FunctionName}")); + } + break; + } + + var minChildren = Math.Min(a.Children.Length, b.Children.Length); + + for (var i = 0; i < minChildren; i++) + { + CompareNodes(a.Children[i], b.Children[i], differences); + } + + for (var i = minChildren; i < a.Children.Length; i++) + { + differences.Add(new CodeDifference( + DifferenceType.Removed, + a.Children[i], + null, + $"Node removed: {a.Children[i].Type}")); + } + + for (var i = minChildren; i < b.Children.Length; i++) + { + differences.Add(new CodeDifference( + DifferenceType.Added, + null, + b.Children[i], + $"Node added: {b.Children[i].Type}")); + } + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.EditDistance.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.EditDistance.cs new file mode 100644 index 000000000..1306140ca --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.EditDistance.cs @@ -0,0 +1,37 @@ +// Copyright (c) 
StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class AstComparisonEngine +{ + private static EditOperations ComputeTreeEditOperations(AstNode a, AstNode b) + { + if (a.Type != b.Type) + { + return new EditOperations(0, 0, 1, 1); + } + + var childrenA = a.Children; + var childrenB = b.Children; + + var insertions = 0; + var deletions = 0; + var modifications = 0; + + var minLen = Math.Min(childrenA.Length, childrenB.Length); + insertions = childrenB.Length - minLen; + deletions = childrenA.Length - minLen; + + for (var i = 0; i < minLen; i++) + { + var childOps = ComputeTreeEditOperations(childrenA[i], childrenB[i]); + insertions += childOps.Insertions; + deletions += childOps.Deletions; + modifications += childOps.Modifications; + } + + return new EditOperations(insertions, deletions, modifications, insertions + deletions + modifications); + } + + private readonly record struct EditOperations(int Insertions, int Deletions, int Modifications, int TotalOperations); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.Helpers.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.Helpers.cs new file mode 100644 index 000000000..de837f26e --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.Helpers.cs @@ -0,0 +1,104 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class AstComparisonEngine +{ + private static bool AreNodesRenamed(AstNode a, AstNode b) + { + if (a.Type != b.Type || a.Children.Length != b.Children.Length) + { + return false; + } + + if (a is VariableNode && b is VariableNode) + { + return true; + } + + for (var i = 0; i < a.Children.Length; i++) + { + if (!AreNodesRenamed(a.Children[i], b.Children[i]) && + !AreNodesIdentical(a.Children[i], b.Children[i])) + { + return false; + } + } + + return true; + } + + private static bool AreOptimizationVariants(AstNode a, AstNode b) + { + if (a.Type == AstNodeType.For && b.Type == AstNodeType.Block) + { + return true; + } + + if (a is BinaryOpNode binA && b is BinaryOpNode binB) + { + if ((binA.Operator == "*" && binB.Operator == "<<") || + (binA.Operator == "/" && binB.Operator == ">>")) + { + return true; + } + } + + if (a.Type == AstNodeType.Call && b.Type == AstNodeType.Block) + { + return true; + } + + return false; + } + + private static IEnumerable CollectNodes(AstNode root) + { + yield return root; + foreach (var child in root.Children) + { + foreach (var node in CollectNodes(child)) + { + yield return node; + } + } + } + + private static IEnumerable FilterRedundantEquivalences(List equivalences) + { + var result = new List(); + + foreach (var eq in equivalences) + { + var isRedundant = equivalences.Any(other => + other != eq && + IsAncestor(other.NodeA, eq.NodeA) && + IsAncestor(other.NodeB, eq.NodeB)); + + if (!isRedundant) + { + result.Add(eq); + } + } + + return result; + } + + private static bool IsAncestor(AstNode potential, AstNode node) + { + if (potential == node) + { + return false; + } + + foreach (var child in potential.Children) + { + if (child == node || IsAncestor(child, node)) + { + return true; + } + } + + return false; + } +} diff --git 
a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.cs new file mode 100644 index 000000000..4e7c21aee --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.Equivalence.cs @@ -0,0 +1,76 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class AstComparisonEngine +{ + private static SemanticEquivalence? CheckEquivalence(AstNode a, AstNode b) + { + if (a.Type != b.Type) + { + return null; + } + + if (AreNodesIdentical(a, b)) + { + return new SemanticEquivalence(a, b, EquivalenceType.Identical, 1.0m, "Identical nodes"); + } + + if (AreNodesRenamed(a, b)) + { + return new SemanticEquivalence(a, b, EquivalenceType.Renamed, 0.95m, "Same structure with renamed identifiers"); + } + + if (AreOptimizationVariants(a, b)) + { + return new SemanticEquivalence(a, b, EquivalenceType.Optimized, 0.85m, "Optimization variant"); + } + + return null; + } + + private static bool AreNodesIdentical(AstNode a, AstNode b) + { + if (a.Type != b.Type || a.Children.Length != b.Children.Length) + { + return false; + } + + if (a is ConstantNode constA && b is ConstantNode constB) + { + return constA.Value?.ToString() == constB.Value?.ToString(); + } + + if (a is VariableNode varA && b is VariableNode varB) + { + return varA.Name == varB.Name; + } + + if (a is BinaryOpNode binA && b is BinaryOpNode binB) + { + if (binA.Operator != binB.Operator) + { + return false; + } + } + + if (a is CallNode callA && b is CallNode callB) + { + if (callA.FunctionName != callB.FunctionName) + { + return false; + } + } + + for (var i = 0; i < a.Children.Length; i++) + { + if (!AreNodesIdentical(a.Children[i], b.Children[i])) + { + return false; + } + } + + return true; + } + +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.cs index 15463c144..5ccdb3a80 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstComparisonEngine.cs @@ -1,6 +1,5 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - using System.Collections.Immutable; namespace StellaOps.BinaryIndex.Decompiler; @@ -8,7 +7,7 @@ namespace StellaOps.BinaryIndex.Decompiler; /// /// Engine for comparing AST structures using tree edit distance and semantic analysis. 
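// --- Worked example, not part of the patch --------------------------------------
// ComputeStructuralSimilarity returns 1 - (total edit operations / max node count).
// Example: 3 insertions + 1 deletion + 2 modifications (6 operations) between trees
// of 40 and 50 nodes gives a normalised distance of 6/50 = 0.12 and a similarity of
// 0.88. Sketch of the same arithmetic in isolation (helper name is illustrative):
using System;

static class AstSimilaritySketch
{
    public static decimal Similarity(int insertions, int deletions, int modifications, int nodeCountA, int nodeCountB)
    {
        var totalOperations = insertions + deletions + modifications;
        var totalNodes = Math.Max(nodeCountA, nodeCountB);
        var normalizedDistance = totalNodes > 0 ? (decimal)totalOperations / totalNodes : 0m;
        return 1.0m - normalizedDistance;
    }
}
// ---------------------------------------------------------------------------------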
/// -public sealed class AstComparisonEngine : IAstComparisonEngine +public sealed partial class AstComparisonEngine : IAstComparisonEngine { /// public decimal ComputeStructuralSimilarity(DecompiledAst a, DecompiledAst b) @@ -16,7 +15,6 @@ public sealed class AstComparisonEngine : IAstComparisonEngine ArgumentNullException.ThrowIfNull(a); ArgumentNullException.ThrowIfNull(b); - // Use normalized tree edit distance var editDistance = ComputeEditDistance(a, b); return 1.0m - editDistance.NormalizedDistance; } @@ -27,9 +25,7 @@ public sealed class AstComparisonEngine : IAstComparisonEngine ArgumentNullException.ThrowIfNull(a); ArgumentNullException.ThrowIfNull(b); - // Simplified Zhang-Shasha tree edit distance var operations = ComputeTreeEditOperations(a.Root, b.Root); - var totalNodes = Math.Max(a.NodeCount, b.NodeCount); var normalized = totalNodes > 0 ? (decimal)operations.TotalOperations / totalNodes @@ -50,8 +46,6 @@ public sealed class AstComparisonEngine : IAstComparisonEngine ArgumentNullException.ThrowIfNull(b); var equivalences = new List(); - - // Find equivalent subtrees var nodesA = CollectNodes(a.Root).ToList(); var nodesB = CollectNodes(b.Root).ToList(); @@ -67,7 +61,6 @@ public sealed class AstComparisonEngine : IAstComparisonEngine } } - // Remove redundant equivalences (child nodes when parent is equivalent) return [.. FilterRedundantEquivalences(equivalences)]; } @@ -78,315 +71,8 @@ public sealed class AstComparisonEngine : IAstComparisonEngine ArgumentNullException.ThrowIfNull(b); var differences = new List(); - - // Compare root structures CompareNodes(a.Root, b.Root, differences); return [.. differences]; } - - private static EditOperations ComputeTreeEditOperations(AstNode a, AstNode b) - { - // Simplified tree comparison - if (a.Type != b.Type) - { - return new EditOperations(0, 0, 1, 1); - } - - var childrenA = a.Children; - var childrenB = b.Children; - - var insertions = 0; - var deletions = 0; - var modifications = 0; - - // Compare children using LCS-like approach - var maxLen = Math.Max(childrenA.Length, childrenB.Length); - var minLen = Math.Min(childrenA.Length, childrenB.Length); - - insertions = childrenB.Length - minLen; - deletions = childrenA.Length - minLen; - - for (var i = 0; i < minLen; i++) - { - var childOps = ComputeTreeEditOperations(childrenA[i], childrenB[i]); - insertions += childOps.Insertions; - deletions += childOps.Deletions; - modifications += childOps.Modifications; - } - - return new EditOperations(insertions, deletions, modifications, insertions + deletions + modifications); - } - - private static SemanticEquivalence? 
CheckEquivalence(AstNode a, AstNode b) - { - // Same type - potential equivalence - if (a.Type != b.Type) - { - return null; - } - - // Check for identical - if (AreNodesIdentical(a, b)) - { - return new SemanticEquivalence(a, b, EquivalenceType.Identical, 1.0m, "Identical nodes"); - } - - // Check for renamed (same structure, different names) - if (AreNodesRenamed(a, b)) - { - return new SemanticEquivalence(a, b, EquivalenceType.Renamed, 0.95m, "Same structure with renamed identifiers"); - } - - // Check for optimization variants - if (AreOptimizationVariants(a, b)) - { - return new SemanticEquivalence(a, b, EquivalenceType.Optimized, 0.85m, "Optimization variant"); - } - - return null; - } - - private static bool AreNodesIdentical(AstNode a, AstNode b) - { - if (a.Type != b.Type || a.Children.Length != b.Children.Length) - { - return false; - } - - // Check node-specific equality - if (a is ConstantNode constA && b is ConstantNode constB) - { - return constA.Value?.ToString() == constB.Value?.ToString(); - } - - if (a is VariableNode varA && b is VariableNode varB) - { - return varA.Name == varB.Name; - } - - if (a is BinaryOpNode binA && b is BinaryOpNode binB) - { - if (binA.Operator != binB.Operator) - { - return false; - } - } - - if (a is CallNode callA && b is CallNode callB) - { - if (callA.FunctionName != callB.FunctionName) - { - return false; - } - } - - // Check children recursively - for (var i = 0; i < a.Children.Length; i++) - { - if (!AreNodesIdentical(a.Children[i], b.Children[i])) - { - return false; - } - } - - return true; - } - - private static bool AreNodesRenamed(AstNode a, AstNode b) - { - if (a.Type != b.Type || a.Children.Length != b.Children.Length) - { - return false; - } - - // Same structure but variable/parameter names differ - if (a is VariableNode && b is VariableNode) - { - return true; // Different name but same position = renamed - } - - // Check children have same structure - for (var i = 0; i < a.Children.Length; i++) - { - if (!AreNodesRenamed(a.Children[i], b.Children[i]) && - !AreNodesIdentical(a.Children[i], b.Children[i])) - { - return false; - } - } - - return true; - } - - private static bool AreOptimizationVariants(AstNode a, AstNode b) - { - // Detect common optimization patterns - - // Loop unrolling: for loop vs repeated statements - if (a.Type == AstNodeType.For && b.Type == AstNodeType.Block) - { - return true; // Might be unrolled - } - - // Strength reduction: multiplication vs addition - if (a is BinaryOpNode binA && b is BinaryOpNode binB) - { - if ((binA.Operator == "*" && binB.Operator == "<<") || - (binA.Operator == "/" && binB.Operator == ">>")) - { - return true; - } - } - - // Inline expansion - if (a.Type == AstNodeType.Call && b.Type == AstNodeType.Block) - { - return true; // Might be inlined - } - - return false; - } - - private static void CompareNodes(AstNode a, AstNode b, List differences) - { - if (a.Type != b.Type) - { - differences.Add(new CodeDifference( - DifferenceType.Modified, - a, - b, - $"Node type changed: {a.Type} -> {b.Type}")); - return; - } - - // Compare specific node types - switch (a) - { - case VariableNode varA when b is VariableNode varB: - if (varA.Name != varB.Name) - { - differences.Add(new CodeDifference( - DifferenceType.Modified, - a, - b, - $"Variable renamed: {varA.Name} -> {varB.Name}")); - } - break; - - case ConstantNode constA when b is ConstantNode constB: - if (constA.Value?.ToString() != constB.Value?.ToString()) - { - differences.Add(new CodeDifference( - DifferenceType.Modified, - a, 
- b, - $"Constant changed: {constA.Value} -> {constB.Value}")); - } - break; - - case BinaryOpNode binA when b is BinaryOpNode binB: - if (binA.Operator != binB.Operator) - { - differences.Add(new CodeDifference( - DifferenceType.Modified, - a, - b, - $"Operator changed: {binA.Operator} -> {binB.Operator}")); - } - break; - - case CallNode callA when b is CallNode callB: - if (callA.FunctionName != callB.FunctionName) - { - differences.Add(new CodeDifference( - DifferenceType.Modified, - a, - b, - $"Function call changed: {callA.FunctionName} -> {callB.FunctionName}")); - } - break; - } - - // Compare children - var minChildren = Math.Min(a.Children.Length, b.Children.Length); - - for (var i = 0; i < minChildren; i++) - { - CompareNodes(a.Children[i], b.Children[i], differences); - } - - // Handle added/removed children - for (var i = minChildren; i < a.Children.Length; i++) - { - differences.Add(new CodeDifference( - DifferenceType.Removed, - a.Children[i], - null, - $"Node removed: {a.Children[i].Type}")); - } - - for (var i = minChildren; i < b.Children.Length; i++) - { - differences.Add(new CodeDifference( - DifferenceType.Added, - null, - b.Children[i], - $"Node added: {b.Children[i].Type}")); - } - } - - private static IEnumerable CollectNodes(AstNode root) - { - yield return root; - foreach (var child in root.Children) - { - foreach (var node in CollectNodes(child)) - { - yield return node; - } - } - } - - private static IEnumerable FilterRedundantEquivalences( - List equivalences) - { - // Keep only top-level equivalences - var result = new List(); - - foreach (var eq in equivalences) - { - var isRedundant = equivalences.Any(other => - other != eq && - IsAncestor(other.NodeA, eq.NodeA) && - IsAncestor(other.NodeB, eq.NodeB)); - - if (!isRedundant) - { - result.Add(eq); - } - } - - return result; - } - - private static bool IsAncestor(AstNode potential, AstNode node) - { - if (potential == node) - { - return false; - } - - foreach (var child in potential.Children) - { - if (child == node || IsAncestor(child, node)) - { - return true; - } - } - - return false; - } - - private readonly record struct EditOperations(int Insertions, int Deletions, int Modifications, int TotalOperations); } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNode.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNode.cs new file mode 100644 index 000000000..9116bb172 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNode.cs @@ -0,0 +1,60 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Abstract syntax tree node. +/// +public abstract record AstNode( + AstNodeType Type, + ImmutableArray Children, + SourceLocation? Location); + +/// +/// Types of AST nodes. 
+/// +public enum AstNodeType +{ + // Structure + Function, + Block, + Parameter, + + // Control flow + If, + While, + For, + DoWhile, + Switch, + Case, + Default, + Return, + Break, + Continue, + Goto, + Label, + + // Expressions + Assignment, + BinaryOp, + UnaryOp, + TernaryOp, + Call, + Cast, + Sizeof, + + // Operands + Variable, + Constant, + StringLiteral, + ArrayAccess, + FieldAccess, + PointerDeref, + AddressOf, + + // Declarations + VariableDecl, + TypeDef +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.ControlFlow.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.ControlFlow.cs new file mode 100644 index 000000000..5de52ae89 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.ControlFlow.cs @@ -0,0 +1,29 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed record IfNode( + AstNode Condition, + AstNode ThenBranch, + AstNode? ElseBranch, + SourceLocation? Location = null) + : AstNode(AstNodeType.If, ElseBranch is null ? [Condition, ThenBranch] : [Condition, ThenBranch, ElseBranch], Location); + +public sealed record WhileNode( + AstNode Condition, + AstNode Body, + SourceLocation? Location = null) + : AstNode(AstNodeType.While, [Condition, Body], Location); + +public sealed record ForNode( + AstNode? Init, + AstNode? Condition, + AstNode? Update, + AstNode Body, + SourceLocation? Location = null) + : AstNode(AstNodeType.For, [Init ?? EmptyNode.Instance, Condition ?? EmptyNode.Instance, Update ?? EmptyNode.Instance, Body], Location); + +public sealed record ReturnNode( + AstNode? Value, + SourceLocation? Location = null) + : AstNode(AstNodeType.Return, Value is null ? [] : [Value], Location); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Expressions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Expressions.cs new file mode 100644 index 000000000..773830a1c --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Expressions.cs @@ -0,0 +1,42 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed record CallNode( + string FunctionName, + ImmutableArray Arguments, + SourceLocation? Location = null) + : AstNode(AstNodeType.Call, Arguments, Location); + +public sealed record VariableNode( + string Name, + string? DataType, + SourceLocation? Location = null) + : AstNode(AstNodeType.Variable, [], Location); + +public sealed record ConstantNode( + object Value, + string DataType, + SourceLocation? Location = null) + : AstNode(AstNodeType.Constant, [], Location); + +public sealed record ArrayAccessNode( + AstNode Array, + AstNode Index, + SourceLocation? Location = null) + : AstNode(AstNodeType.ArrayAccess, [Array, Index], Location); + +public sealed record FieldAccessNode( + AstNode Object, + string FieldName, + bool IsPointer, + SourceLocation? Location = null) + : AstNode(AstNodeType.FieldAccess, [Object], Location); + +public sealed record CastNode( + AstNode Expression, + string TargetType, + SourceLocation? 
Location = null) + : AstNode(AstNodeType.Cast, [Expression], Location); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Operations.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Operations.cs new file mode 100644 index 000000000..5a3b26716 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Operations.cs @@ -0,0 +1,24 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed record AssignmentNode( + AstNode Target, + AstNode Value, + string Operator, + SourceLocation? Location = null) + : AstNode(AstNodeType.Assignment, [Target, Value], Location); + +public sealed record BinaryOpNode( + AstNode Left, + AstNode Right, + string Operator, + SourceLocation? Location = null) + : AstNode(AstNodeType.BinaryOp, [Left, Right], Location); + +public sealed record UnaryOpNode( + AstNode Operand, + string Operator, + bool IsPrefix, + SourceLocation? Location = null) + : AstNode(AstNodeType.UnaryOp, [Operand], Location); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Structure.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Structure.cs new file mode 100644 index 000000000..5a4049901 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstNodes.Structure.cs @@ -0,0 +1,30 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed record FunctionNode( + string Name, + string ReturnType, + ImmutableArray Parameters, + BlockNode Body, + SourceLocation? Location = null) + : AstNode(AstNodeType.Function, [Body, .. Parameters], Location); + +public sealed record ParameterNode( + string Name, + string DataType, + int Index, + SourceLocation? Location = null) + : AstNode(AstNodeType.Parameter, [], Location); + +public sealed record BlockNode( + ImmutableArray Statements, + SourceLocation? Location = null) + : AstNode(AstNodeType.Block, Statements, Location); + +public sealed record EmptyNode() : AstNode(AstNodeType.Block, [], null) +{ + public static EmptyNode Instance { get; } = new(); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstPattern.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstPattern.cs new file mode 100644 index 000000000..70f4467c8 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/AstPattern.cs @@ -0,0 +1,55 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// A recognized code pattern. +/// +public sealed record AstPattern( + PatternType Type, + AstNode Node, + PatternMetadata? Metadata); + +/// +/// Types of code patterns. +/// +public enum PatternType +{ + // Loops + CountedLoop, + ConditionalLoop, + InfiniteLoop, + LoopUnrolled, + + // Branches + IfElseChain, + SwitchTable, + ShortCircuit, + + // Memory + MemoryAllocation, + MemoryDeallocation, + BufferOperation, + StackBuffer, + + // Error handling + ErrorCheck, + NullCheck, + BoundsCheck, + + // Idioms + StringOperation, + MathOperation, + BitwiseOperation, + TableLookup +} + +/// +/// Metadata about a recognized pattern. 
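// Illustrative sketch (not part of the patch): composing the node records above into
// a minimal AST for `int add(int a, int b) { return a + b; }`.
ImmutableArray<ParameterNode> parameters =
[
    new ParameterNode("a", "int", 0),   // name, declared type, parameter index
    new ParameterNode("b", "int", 1)
];

var body = new BlockNode(
[
    new ReturnNode(
        new BinaryOpNode(new VariableNode("a", "int"), new VariableNode("b", "int"), "+"))
]);

var function = new FunctionNode("add", "int", parameters, body);
// FunctionNode forwards [Body, ..Parameters] to AstNode.Children, so generic tree walks
// (node counting, depth computation, pattern extraction) need no per-type special cases.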
+/// +public sealed record PatternMetadata( + string Description, + decimal Confidence, + ImmutableDictionary? Properties); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeDifference.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeDifference.cs new file mode 100644 index 000000000..321bce417 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeDifference.cs @@ -0,0 +1,25 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// A difference between two pieces of code. +/// +public sealed record CodeDifference( + DifferenceType Type, + AstNode? NodeA, + AstNode? NodeB, + string Description); + +/// +/// Types of code differences. +/// +public enum DifferenceType +{ + Added, + Removed, + Modified, + Reordered, + TypeChanged, + OptimizationVariant +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.Children.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.Children.cs new file mode 100644 index 000000000..e5e442c44 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.Children.cs @@ -0,0 +1,28 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + private static AstNode NormalizeChildren( + AstNode node, + NormalizationOptions options, + Dictionary varMap, + ref int varIndex) + { + if (node.Children.IsDefaultOrEmpty) + { + return node; + } + + var builder = ImmutableArray.CreateBuilder(node.Children.Length); + foreach (var child in node.Children) + { + builder.Add(NormalizeNode(child, options, varMap, ref varIndex)); + } + + return node with { Children = builder.MoveToImmutable() }; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.cs new file mode 100644 index 000000000..f43033412 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Ast.cs @@ -0,0 +1,106 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + /// + public DecompiledAst NormalizeAst(DecompiledAst ast, NormalizationOptions? 
options = null) + { + ArgumentNullException.ThrowIfNull(ast); + + options ??= NormalizationOptions.Default; + + var varIndex = 0; + var varMap = new Dictionary(); + + var normalizedRoot = NormalizeNode(ast.Root, options, varMap, ref varIndex); + + return new DecompiledAst( + normalizedRoot, + ast.NodeCount, + ast.Depth, + ast.Patterns); + } + + private static AstNode NormalizeNode( + AstNode node, + NormalizationOptions options, + Dictionary varMap, + ref int varIndex) + { + return node switch + { + VariableNode varNode when options.NormalizeVariables => + NormalizeVariableNode(varNode, varMap, ref varIndex), + CallNode callNode when options.NormalizeFunctionCalls => + NormalizeCallNode(callNode, options, varMap, ref varIndex), + ConstantNode constNode when options.NormalizeConstants => + NormalizeConstantNode(constNode), + _ => NormalizeChildren(node, options, varMap, ref varIndex) + }; + } + + private static AstNode NormalizeVariableNode( + VariableNode node, + Dictionary varMap, + ref int varIndex) + { + if (IsKeywordOrType(node.Name)) + { + return node; + } + + if (!varMap.TryGetValue(node.Name, out var canonical)) + { + canonical = $"var_{varIndex++}"; + varMap[node.Name] = canonical; + } + + return node with { Name = canonical }; + } + + private static AstNode NormalizeCallNode( + CallNode node, + NormalizationOptions options, + Dictionary varMap, + ref int varIndex) + { + var funcName = node.FunctionName; + + if (options.KnownFunctions?.Contains(funcName) != true && + !IsStandardLibraryFunction(funcName)) + { + funcName = $"func_{funcName.GetHashCode():X8}"; + } + + var normalizedArgs = new List(node.Arguments.Length); + foreach (var arg in node.Arguments) + { + normalizedArgs.Add(NormalizeNode(arg, options, varMap, ref varIndex)); + } + + return new CallNode(funcName, [.. normalizedArgs], node.Location); + } + + private static AstNode NormalizeConstantNode(ConstantNode node) + { + if (node.Value is long or int or short or byte) + { + return node with { Value = "CONST_INT" }; + } + + if (node.Value is double or float or decimal) + { + return node with { Value = "CONST_FLOAT" }; + } + + if (node.Value is string) + { + return node with { Value = "CONST_STR" }; + } + + return node; + } + +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Keywords.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Keywords.cs new file mode 100644 index 000000000..3e55bce82 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Keywords.cs @@ -0,0 +1,43 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
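// Illustrative sketch (not part of the patch): what NormalizeAst above does to a small
// tree when variable and constant normalization are enabled.
var ast = new DecompiledAst(
    new AssignmentNode(
        new VariableNode("total", "int"),
        new BinaryOpNode(new VariableNode("count", "int"), new ConstantNode(4096, "int"), "*"),
        "="),
    NodeCount: 5,
    Depth: 3,
    Patterns: []);

var normalized = new CodeNormalizer().NormalizeAst(ast, new NormalizationOptions
{
    NormalizeVariables = true,   // total -> var_0, count -> var_1 (first-seen order)
    NormalizeConstants = true    // the integer 4096 -> "CONST_INT"
});
// Node count, depth and recognized patterns are carried over from the input unchanged.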
+using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + private static readonly ImmutableHashSet _cKeywords = ImmutableHashSet.Create( + "auto", "break", "case", "char", "const", "continue", "default", "do", + "double", "else", "enum", "extern", "float", "for", "goto", "if", + "int", "long", "register", "return", "short", "signed", "sizeof", "static", + "struct", "switch", "typedef", "union", "unsigned", "void", "volatile", "while", + // Common Ghidra types + "undefined", "undefined1", "undefined2", "undefined4", "undefined8", + "byte", "word", "dword", "qword", "bool", "uchar", "ushort", "uint", "ulong", + "int8_t", "int16_t", "int32_t", "int64_t", "uint8_t", "uint16_t", "uint32_t", "uint64_t", + "size_t", "ssize_t", "ptrdiff_t", "intptr_t", "uintptr_t", + // Common function names to preserve + "NULL", "true", "false" + ); + + private static bool IsKeywordOrType(string name) + { + return _cKeywords.Contains(name); + } + + private static bool IsStandardLibraryFunction(string name) + { + return name switch + { + "malloc" or "calloc" or "realloc" or "free" or "memcpy" or "memmove" or "memset" or "memcmp" => true, + "strlen" or "strcpy" or "strncpy" or "strcat" or "strncat" or "strcmp" or "strncmp" or "strchr" or "strrchr" or "strstr" => true, + "printf" or "fprintf" or "sprintf" or "snprintf" or "scanf" or "fscanf" or "sscanf" => true, + "fopen" or "fclose" or "fread" or "fwrite" or "fseek" or "ftell" or "fflush" => true, + "puts" or "fputs" or "gets" or "fgets" or "putchar" or "getchar" => true, + "abs" or "labs" or "llabs" or "fabs" or "sqrt" or "pow" or "sin" or "cos" or "tan" or "log" or "exp" => true, + "exit" or "abort" or "atexit" or "atoi" or "atol" or "atof" or "strtol" or "strtoul" or "strtod" => true, + "assert" or "errno" => true, + _ => false + }; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Regex.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Regex.cs new file mode 100644 index 000000000..ed4d533ac --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Regex.cs @@ -0,0 +1,44 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
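// Illustrative sketch (not part of the patch): the keyword and standard-library tables
// above decide which identifiers survive text normalization, so two decompilations that
// differ only in local names reduce to the same canonical text (the string the SHA-256
// semantic hash is computed over).
var normalizer = new CodeNormalizer();
var options = new NormalizationOptions
{
    NormalizeVariables = true,
    NormalizeFunctionCalls = true,
    NormalizeWhitespace = true
};

var n1 = normalizer.Normalize("memcpy(dst, src, len); return len;", options);
var n2 = normalizer.Normalize("memcpy(to, from, n); return n;", options);
// "memcpy" (standard library) and "return" (keyword) are preserved; dst/src/len and
// to/from/n both become var_0/var_1/var_2, so n1 == n2.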
+using System.Text.RegularExpressions; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + [GeneratedRegex(@"//[^\n]*")] + private static partial Regex SingleLineCommentRegex(); + + [GeneratedRegex(@"/\*[\s\S]*?\*/")] + private static partial Regex MultiLineCommentRegex(); + + [GeneratedRegex(@"\b([a-zA-Z_][a-zA-Z0-9_]*)\b")] + private static partial Regex IdentifierRegex(); + + [GeneratedRegex(@"\b([a-zA-Z_][a-zA-Z0-9_]*)\s*\(")] + private static partial Regex FunctionCallRegex(); + + [GeneratedRegex(@"0[xX][0-9a-fA-F]+")] + private static partial Regex HexConstantRegex(); + + [GeneratedRegex(@"\b[0-9]{4,}\b")] + private static partial Regex LargeDecimalRegex(); + + [GeneratedRegex(@"""(?:[^""\\]|\\.)*""")] + private static partial Regex StringLiteralRegex(); + + [GeneratedRegex(@"[ \t]+")] + private static partial Regex MultipleWhitespaceRegex(); + + [GeneratedRegex(@"\s*([+\-*/%=<>!&|^~?:;,{}()\[\]])\s*")] + private static partial Regex WhitespaceAroundOperatorsRegex(); + + [GeneratedRegex(@"[ \t]+\n")] + private static partial Regex TrailingWhitespaceRegex(); + + [GeneratedRegex(@"^([a-zA-Z_][a-zA-Z0-9_]*)\s*=")] + private static partial Regex AssignmentTargetRegex(); + + [GeneratedRegex(@"^([a-zA-Z_][a-zA-Z0-9_]*)\s*\(")] + private static partial Regex FunctionNameRegex(); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Sorting.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Sorting.cs new file mode 100644 index 000000000..35a6ba1c2 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Sorting.cs @@ -0,0 +1,81 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
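// Illustrative sketch (not part of the patch): what the constant patterns above rewrite
// once NormalizeConstants is enabled.
var onlyConstants = new NormalizationOptions
{
    NormalizeVariables = false,
    NormalizeFunctionCalls = false,
    NormalizeConstants = true
};
var result = new CodeNormalizer().Normalize(
    "buf[0] = 0x7FFF; limit = 10000; msg = \"overflow\";",
    onlyConstants);
// 0x7FFF -> CONST_HEX, 10000 -> CONST_INT, "overflow" -> CONST_STR; short literals such
// as the index 0 (fewer than four digits) are left as-is.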
+using System.Text; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + private static string SortIndependentStatements(string code) + { + var lines = code.Split('\n', StringSplitOptions.RemoveEmptyEntries); + var result = new StringBuilder(); + + var blockDepth = 0; + var currentBlock = new List(); + + foreach (var line in lines) + { + var trimmed = line.Trim(); + + blockDepth += trimmed.Count(c => c == '{'); + blockDepth -= trimmed.Count(c => c == '}'); + + if (blockDepth == 1 && !trimmed.Contains('{') && !trimmed.Contains('}')) + { + currentBlock.Add(trimmed); + } + else + { + if (currentBlock.Count > 0) + { + var sorted = SortStatements(currentBlock); + foreach (var stmt in sorted) + { + result.AppendLine(stmt); + } + currentBlock.Clear(); + } + + result.AppendLine(line); + } + } + + if (currentBlock.Count > 0) + { + var sorted = SortStatements(currentBlock); + foreach (var stmt in sorted) + { + result.AppendLine(stmt); + } + } + + return result.ToString().Trim(); + } + + private static List SortStatements(List statements) + { + return statements + .OrderBy(s => GetStatementSortKey(s), StringComparer.Ordinal) + .ToList(); + } + + private static string GetStatementSortKey(string statement) + { + var trimmed = statement.Trim(); + + var assignMatch = AssignmentTargetRegex().Match(trimmed); + if (assignMatch.Success) + { + return $"A_{assignMatch.Groups[1].Value}"; + } + + var callMatch = FunctionNameRegex().Match(trimmed); + if (callMatch.Success) + { + return $"C_{callMatch.Groups[1].Value}"; + } + + return $"Z_{trimmed}"; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Text.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Text.cs new file mode 100644 index 000000000..b054678ed --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.Text.cs @@ -0,0 +1,90 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; +using System.Text.RegularExpressions; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class CodeNormalizer +{ + private static string RemoveComments(string code) + { + code = SingleLineCommentRegex().Replace(code, string.Empty); + code = MultiLineCommentRegex().Replace(code, string.Empty); + + return code; + } + + private static string NormalizeVariableNames(string code, ImmutableHashSet? knownFunctions) + { + var varIndex = 0; + var varMap = new Dictionary(); + + return IdentifierRegex().Replace(code, match => + { + var name = match.Value; + + if (IsKeywordOrType(name)) + { + return name; + } + + if (knownFunctions?.Contains(name) == true) + { + return name; + } + + if (IsStandardLibraryFunction(name)) + { + return name; + } + + if (!varMap.TryGetValue(name, out var canonical)) + { + canonical = $"var_{varIndex++}"; + varMap[name] = canonical; + } + + return canonical; + }); + } + + private static string NormalizeFunctionCalls(string code, ImmutableHashSet? 
knownFunctions) + { + return FunctionCallRegex().Replace(code, match => + { + var funcName = match.Groups[1].Value; + + if (knownFunctions?.Contains(funcName) == true) + { + return match.Value; + } + + if (IsStandardLibraryFunction(funcName)) + { + return match.Value; + } + + return $"func_{funcName.GetHashCode():X8}("; + }); + } + + private static string NormalizeConstants(string code) + { + code = HexConstantRegex().Replace(code, "CONST_HEX"); + code = LargeDecimalRegex().Replace(code, "CONST_INT"); + code = StringLiteralRegex().Replace(code, "CONST_STR"); + + return code; + } + + private static string NormalizeWhitespace(string code) + { + code = MultipleWhitespaceRegex().Replace(code, " "); + code = WhitespaceAroundOperatorsRegex().Replace(code, "$1"); + code = code.Replace("\r\n", "\n").Replace("\r", "\n"); + code = TrailingWhitespaceRegex().Replace(code, "\n"); + + return code.Trim(); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.cs index edcf7d4b7..a789020be 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/CodeNormalizer.cs @@ -1,10 +1,7 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - -using System.Collections.Immutable; using System.Security.Cryptography; using System.Text; -using System.Text.RegularExpressions; namespace StellaOps.BinaryIndex.Decompiler; @@ -13,20 +10,6 @@ namespace StellaOps.BinaryIndex.Decompiler; /// public sealed partial class CodeNormalizer : ICodeNormalizer { - private static readonly ImmutableHashSet CKeywords = ImmutableHashSet.Create( - "auto", "break", "case", "char", "const", "continue", "default", "do", - "double", "else", "enum", "extern", "float", "for", "goto", "if", - "int", "long", "register", "return", "short", "signed", "sizeof", "static", - "struct", "switch", "typedef", "union", "unsigned", "void", "volatile", "while", - // Common Ghidra types - "undefined", "undefined1", "undefined2", "undefined4", "undefined8", - "byte", "word", "dword", "qword", "bool", "uchar", "ushort", "uint", "ulong", - "int8_t", "int16_t", "int32_t", "int64_t", "uint8_t", "uint16_t", "uint32_t", "uint64_t", - "size_t", "ssize_t", "ptrdiff_t", "intptr_t", "uintptr_t", - // Common function names to preserve - "NULL", "true", "false" - ); - /// public string Normalize(string code, NormalizationOptions? options = null) { @@ -36,34 +19,28 @@ public sealed partial class CodeNormalizer : ICodeNormalizer var normalized = code; - // 1. Remove comments normalized = RemoveComments(normalized); - // 2. Normalize variable names if (options.NormalizeVariables) { normalized = NormalizeVariableNames(normalized, options.KnownFunctions); } - // 3. Normalize function calls if (options.NormalizeFunctionCalls) { normalized = NormalizeFunctionCalls(normalized, options.KnownFunctions); } - // 4. Normalize constants if (options.NormalizeConstants) { normalized = NormalizeConstants(normalized); } - // 5. Normalize whitespace if (options.NormalizeWhitespace) { normalized = NormalizeWhitespace(normalized); } - // 6. 
Sort independent statements (within blocks) if (options.SortIndependentStatements) { normalized = SortIndependentStatements(normalized); @@ -77,458 +54,15 @@ public sealed partial class CodeNormalizer : ICodeNormalizer { ArgumentException.ThrowIfNullOrEmpty(code); - // Normalize with full normalization for hashing var normalized = Normalize(code, new NormalizationOptions { NormalizeVariables = true, NormalizeFunctionCalls = true, - NormalizeConstants = false, // Keep constants for semantic identity + NormalizeConstants = false, NormalizeWhitespace = true, SortIndependentStatements = true }); return SHA256.HashData(Encoding.UTF8.GetBytes(normalized)); } - - /// - public DecompiledAst NormalizeAst(DecompiledAst ast, NormalizationOptions? options = null) - { - ArgumentNullException.ThrowIfNull(ast); - - options ??= NormalizationOptions.Default; - - var varIndex = 0; - var varMap = new Dictionary(); - - var normalizedRoot = NormalizeNode(ast.Root, options, varMap, ref varIndex); - - return new DecompiledAst( - normalizedRoot, - ast.NodeCount, - ast.Depth, - ast.Patterns); - } - - private static AstNode NormalizeNode( - AstNode node, - NormalizationOptions options, - Dictionary varMap, - ref int varIndex) - { - return node switch - { - VariableNode varNode when options.NormalizeVariables => - NormalizeVariableNode(varNode, varMap, ref varIndex), - - CallNode callNode when options.NormalizeFunctionCalls => - NormalizeCallNode(callNode, options, varMap, ref varIndex), - - ConstantNode constNode when options.NormalizeConstants => - NormalizeConstantNode(constNode), - - _ => NormalizeChildren(node, options, varMap, ref varIndex) - }; - } - - private static AstNode NormalizeVariableNode( - VariableNode node, - Dictionary varMap, - ref int varIndex) - { - if (IsKeywordOrType(node.Name)) - { - return node; - } - - if (!varMap.TryGetValue(node.Name, out var canonical)) - { - canonical = $"var_{varIndex++}"; - varMap[node.Name] = canonical; - } - - return node with { Name = canonical }; - } - - private static AstNode NormalizeCallNode( - CallNode node, - NormalizationOptions options, - Dictionary varMap, - ref int varIndex) - { - var funcName = node.FunctionName; - - // Preserve known functions - if (options.KnownFunctions?.Contains(funcName) != true && - !IsStandardLibraryFunction(funcName)) - { - funcName = $"func_{funcName.GetHashCode():X8}"; - } - - var normalizedArgs = new List(node.Arguments.Length); - foreach (var arg in node.Arguments) - { - normalizedArgs.Add(NormalizeNode(arg, options, varMap, ref varIndex)); - } - - return new CallNode(funcName, [.. 
normalizedArgs], node.Location); - } - - private static AstNode NormalizeConstantNode(ConstantNode node) - { - // Normalize numeric constants to canonical form - if (node.Value is long or int or short or byte) - { - return node with { Value = "CONST_INT" }; - } - - if (node.Value is double or float or decimal) - { - return node with { Value = "CONST_FLOAT" }; - } - - if (node.Value is string) - { - return node with { Value = "CONST_STR" }; - } - - return node; - } - - private static AstNode NormalizeChildren( - AstNode node, - NormalizationOptions options, - Dictionary varMap, - ref int varIndex) - { - if (node.Children.Length == 0) - { - return node; - } - - var normalizedChildren = new List(node.Children.Length); - foreach (var child in node.Children) - { - normalizedChildren.Add(NormalizeNode(child, options, varMap, ref varIndex)); - } - - var normalizedArray = normalizedChildren.ToImmutableArray(); - - // Use reflection-free approach for common node types - return node switch - { - BlockNode block => block with { Statements = normalizedArray }, - IfNode ifNode => CreateNormalizedIf(ifNode, normalizedArray), - WhileNode whileNode => CreateNormalizedWhile(whileNode, normalizedArray), - ForNode forNode => CreateNormalizedFor(forNode, normalizedArray), - ReturnNode returnNode when normalizedArray.Length > 0 => - returnNode with { Value = normalizedArray[0] }, - AssignmentNode assignment => CreateNormalizedAssignment(assignment, normalizedArray), - BinaryOpNode binOp => CreateNormalizedBinaryOp(binOp, normalizedArray), - UnaryOpNode unaryOp when normalizedArray.Length > 0 => - unaryOp with { Operand = normalizedArray[0] }, - _ => node // Return as-is for other node types - }; - } - - private static IfNode CreateNormalizedIf(IfNode node, ImmutableArray children) - { - return new IfNode( - children.Length > 0 ? children[0] : node.Condition, - children.Length > 1 ? children[1] : node.ThenBranch, - children.Length > 2 ? children[2] : node.ElseBranch, - node.Location); - } - - private static WhileNode CreateNormalizedWhile(WhileNode node, ImmutableArray children) - { - return new WhileNode( - children.Length > 0 ? children[0] : node.Condition, - children.Length > 1 ? children[1] : node.Body, - node.Location); - } - - private static ForNode CreateNormalizedFor(ForNode node, ImmutableArray children) - { - return new ForNode( - children.Length > 0 ? children[0] : node.Init, - children.Length > 1 ? children[1] : node.Condition, - children.Length > 2 ? children[2] : node.Update, - children.Length > 3 ? children[3] : node.Body, - node.Location); - } - - private static AssignmentNode CreateNormalizedAssignment( - AssignmentNode node, - ImmutableArray children) - { - return new AssignmentNode( - children.Length > 0 ? children[0] : node.Target, - children.Length > 1 ? children[1] : node.Value, - node.Operator, - node.Location); - } - - private static BinaryOpNode CreateNormalizedBinaryOp( - BinaryOpNode node, - ImmutableArray children) - { - return new BinaryOpNode( - children.Length > 0 ? children[0] : node.Left, - children.Length > 1 ? children[1] : node.Right, - node.Operator, - node.Location); - } - - private static string RemoveComments(string code) - { - // Remove single-line comments - code = SingleLineCommentRegex().Replace(code, ""); - - // Remove multi-line comments - code = MultiLineCommentRegex().Replace(code, ""); - - return code; - } - - private static string NormalizeVariableNames(string code, ImmutableHashSet? 
knownFunctions) - { - var varIndex = 0; - var varMap = new Dictionary(); - - return IdentifierRegex().Replace(code, match => - { - var name = match.Value; - - // Skip keywords and types - if (IsKeywordOrType(name)) - { - return name; - } - - // Skip known functions - if (knownFunctions?.Contains(name) == true) - { - return name; - } - - // Skip standard library functions - if (IsStandardLibraryFunction(name)) - { - return name; - } - - if (!varMap.TryGetValue(name, out var canonical)) - { - canonical = $"var_{varIndex++}"; - varMap[name] = canonical; - } - - return canonical; - }); - } - - private static string NormalizeFunctionCalls(string code, ImmutableHashSet? knownFunctions) - { - // Match function calls: identifier followed by ( - return FunctionCallRegex().Replace(code, match => - { - var funcName = match.Groups[1].Value; - - // Skip known functions - if (knownFunctions?.Contains(funcName) == true) - { - return match.Value; - } - - // Skip standard library functions - if (IsStandardLibraryFunction(funcName)) - { - return match.Value; - } - - return $"func_{funcName.GetHashCode():X8}("; - }); - } - - private static string NormalizeConstants(string code) - { - // Normalize hex constants - code = HexConstantRegex().Replace(code, "CONST_HEX"); - - // Normalize decimal constants (but preserve small common ones like 0, 1, 2) - code = LargeDecimalRegex().Replace(code, "CONST_INT"); - - // Normalize string literals - code = StringLiteralRegex().Replace(code, "CONST_STR"); - - return code; - } - - private static string NormalizeWhitespace(string code) - { - // Collapse multiple whitespace to single space - code = MultipleWhitespaceRegex().Replace(code, " "); - - // Remove whitespace around operators - code = WhitespaceAroundOperatorsRegex().Replace(code, "$1"); - - // Normalize line endings - code = code.Replace("\r\n", "\n").Replace("\r", "\n"); - - // Remove trailing whitespace on lines - code = TrailingWhitespaceRegex().Replace(code, "\n"); - - return code.Trim(); - } - - private static string SortIndependentStatements(string code) - { - // Parse into blocks and sort independent statements within each block - // This is a simplified implementation that sorts top-level statements - // A full implementation would need to analyze data dependencies - - var lines = code.Split('\n', StringSplitOptions.RemoveEmptyEntries); - var result = new StringBuilder(); - - var blockDepth = 0; - var currentBlock = new List(); - - foreach (var line in lines) - { - var trimmed = line.Trim(); - - // Track block depth - blockDepth += trimmed.Count(c => c == '{'); - blockDepth -= trimmed.Count(c => c == '}'); - - if (blockDepth == 1 && !trimmed.Contains('{') && !trimmed.Contains('}')) - { - // Simple statement at block level 1 - currentBlock.Add(trimmed); - } - else - { - // Flush sorted block - if (currentBlock.Count > 0) - { - var sorted = SortStatements(currentBlock); - foreach (var stmt in sorted) - { - result.AppendLine(stmt); - } - currentBlock.Clear(); - } - - result.AppendLine(line); - } - } - - // Flush remaining - if (currentBlock.Count > 0) - { - var sorted = SortStatements(currentBlock); - foreach (var stmt in sorted) - { - result.AppendLine(stmt); - } - } - - return result.ToString().Trim(); - } - - private static List SortStatements(List statements) - { - // Group statements that can be reordered - // For now, just sort by canonical form (conservative) - return statements - .OrderBy(s => GetStatementSortKey(s), StringComparer.Ordinal) - .ToList(); - } - - private static string 
GetStatementSortKey(string statement) - { - // Extract the "essence" of the statement for sorting - // e.g., assignment target, function call name - var trimmed = statement.Trim(); - - // Assignment: sort by target - var assignMatch = AssignmentTargetRegex().Match(trimmed); - if (assignMatch.Success) - { - return $"A_{assignMatch.Groups[1].Value}"; - } - - // Function call: sort by function name - var callMatch = FunctionNameRegex().Match(trimmed); - if (callMatch.Success) - { - return $"C_{callMatch.Groups[1].Value}"; - } - - return $"Z_{trimmed}"; - } - - private static bool IsKeywordOrType(string name) - { - return CKeywords.Contains(name); - } - - private static bool IsStandardLibraryFunction(string name) - { - // Common C standard library functions to preserve - return name switch - { - // Memory - "malloc" or "calloc" or "realloc" or "free" or "memcpy" or "memmove" or "memset" or "memcmp" => true, - // String - "strlen" or "strcpy" or "strncpy" or "strcat" or "strncat" or "strcmp" or "strncmp" or "strchr" or "strrchr" or "strstr" => true, - // I/O - "printf" or "fprintf" or "sprintf" or "snprintf" or "scanf" or "fscanf" or "sscanf" => true, - "fopen" or "fclose" or "fread" or "fwrite" or "fseek" or "ftell" or "fflush" => true, - "puts" or "fputs" or "gets" or "fgets" or "putchar" or "getchar" => true, - // Math - "abs" or "labs" or "llabs" or "fabs" or "sqrt" or "pow" or "sin" or "cos" or "tan" or "log" or "exp" => true, - // Other - "exit" or "abort" or "atexit" or "atoi" or "atol" or "atof" or "strtol" or "strtoul" or "strtod" => true, - "assert" or "errno" => true, - _ => false - }; - } - - // Regex patterns using source generators - [GeneratedRegex(@"//[^\n]*")] - private static partial Regex SingleLineCommentRegex(); - - [GeneratedRegex(@"/\*[\s\S]*?\*/")] - private static partial Regex MultiLineCommentRegex(); - - [GeneratedRegex(@"\b([a-zA-Z_][a-zA-Z0-9_]*)\b")] - private static partial Regex IdentifierRegex(); - - [GeneratedRegex(@"\b([a-zA-Z_][a-zA-Z0-9_]*)\s*\(")] - private static partial Regex FunctionCallRegex(); - - [GeneratedRegex(@"0[xX][0-9a-fA-F]+")] - private static partial Regex HexConstantRegex(); - - [GeneratedRegex(@"\b[0-9]{4,}\b")] - private static partial Regex LargeDecimalRegex(); - - [GeneratedRegex(@"""(?:[^""\\]|\\.)*""")] - private static partial Regex StringLiteralRegex(); - - [GeneratedRegex(@"[ \t]+")] - private static partial Regex MultipleWhitespaceRegex(); - - [GeneratedRegex(@"\s*([+\-*/%=<>!&|^~?:;,{}()\[\]])\s*")] - private static partial Regex WhitespaceAroundOperatorsRegex(); - - [GeneratedRegex(@"[ \t]+\n")] - private static partial Regex TrailingWhitespaceRegex(); - - [GeneratedRegex(@"^([a-zA-Z_][a-zA-Z0-9_]*)\s*=")] - private static partial Regex AssignmentTargetRegex(); - - [GeneratedRegex(@"^([a-zA-Z_][a-zA-Z0-9_]*)\s*\(")] - private static partial Regex FunctionNameRegex(); } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonConfidence.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonConfidence.cs new file mode 100644 index 000000000..e321acefe --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonConfidence.cs @@ -0,0 +1,14 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Confidence level for comparison results. 
+/// </summary>
+public enum ComparisonConfidence
+{
+    Low,
+    Medium,
+    High,
+    VeryHigh
+}
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonOptions.cs
new file mode 100644
index 000000000..45d008271
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ComparisonOptions.cs
@@ -0,0 +1,14 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
+namespace StellaOps.BinaryIndex.Decompiler;
+
+/// <summary>
+/// Options for AST comparison.
+/// </summary>
+public sealed record ComparisonOptions
+{
+    public bool IgnoreVariableNames { get; init; } = true;
+    public bool IgnoreConstants { get; init; } = false;
+    public bool DetectOptimizations { get; init; } = true;
+    public decimal MinSimilarityThreshold { get; init; } = 0.5m;
+}
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompileOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompileOptions.cs
new file mode 100644
index 000000000..78dc3f7d5
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompileOptions.cs
@@ -0,0 +1,15 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
+namespace StellaOps.BinaryIndex.Decompiler;
+
+/// <summary>
+/// Options for decompilation.
+/// </summary>
+public sealed record DecompileOptions
+{
+    public bool SimplifyCode { get; init; } = true;
+    public bool RecoverTypes { get; init; } = true;
+    public bool RecoverStructs { get; init; } = true;
+    public int MaxCodeLength { get; init; } = 100_000;
+    public TimeSpan Timeout { get; init; } = TimeSpan.FromSeconds(30);
+}
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledAst.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledAst.cs
new file mode 100644
index 000000000..63553f131
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledAst.cs
@@ -0,0 +1,14 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
+using System.Collections.Immutable;
+
+namespace StellaOps.BinaryIndex.Decompiler;
+
+/// <summary>
+/// AST representation of decompiled code.
+/// </summary>
+public sealed record DecompiledAst(
+    AstNode Root,
+    int NodeCount,
+    int Depth,
+    ImmutableArray<AstPattern> Patterns);
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Analysis.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Analysis.cs
new file mode 100644
index 000000000..07d9bbb4a
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Analysis.cs
@@ -0,0 +1,50 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
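// Illustrative sketch (not part of the patch): the options records above are immutable
// and carry their defaults (DecompileOptions: 30 s timeout, 100_000 max code length;
// ComparisonOptions: 0.5m minimum similarity), so callers start from new() and override
// via object initializers or `with` expressions.
var decompile = new DecompileOptions { RecoverStructs = false };
var strict = decompile with { Timeout = TimeSpan.FromSeconds(5) };

var comparison = new ComparisonOptions { MinSimilarityThreshold = 0.8m };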
+using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static int CountNodes(AstNode node) + { + var count = 1; + foreach (var child in node.Children) + { + count += CountNodes(child); + } + return count; + } + + private static int ComputeDepth(AstNode node) + { + if (node.Children.Length == 0) + { + return 1; + } + + var maxDepth = 0; + foreach (var child in node.Children) + { + var depth = ComputeDepth(child); + if (depth > maxDepth) + { + maxDepth = depth; + } + } + + return 1 + maxDepth; + } + + private static ImmutableArray ExtractPatterns(AstNode node) + { + var patterns = new List(); + + foreach (var child in node.Children) + { + patterns.AddRange(ExtractPatterns(child)); + } + + return [.. patterns]; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Regex.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Regex.cs new file mode 100644 index 000000000..9792eebfa --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Regex.cs @@ -0,0 +1,14 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Text.RegularExpressions; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + [GeneratedRegex(@"(?\w+)\s+(?\w+)\s*(?:=|;)", RegexOptions.Compiled)] + private static partial Regex VariableDeclarationRegex(); + + [GeneratedRegex(@"(?\w+)\s*\(", RegexOptions.Compiled)] + private static partial Regex FunctionCallRegex(); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Identifiers.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Identifiers.cs new file mode 100644 index 000000000..c53fefdbb --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Identifiers.cs @@ -0,0 +1,77 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static bool TryConsumeIdentifier( + string code, + ref int index, + ref int column, + int line, + int startColumn, + List tokens) + { + var c = code[index]; + if (!char.IsLetter(c) && c != '_') + { + return false; + } + + var start = index; + while (index < code.Length && (char.IsLetterOrDigit(code[index]) || code[index] == '_')) + { + index++; + column++; + } + + var value = code[start..index]; + var type = _keywords.Contains(value) ? 
TokenType.Keyword : TokenType.Identifier; + tokens.Add(new Token(type, value, line, startColumn)); + return true; + } + + private static bool TryConsumeNumber( + string code, + ref int index, + ref int column, + int line, + int startColumn, + List tokens) + { + var c = code[index]; + if (!char.IsDigit(c) && !(c == '0' && index + 1 < code.Length && code[index + 1] == 'x')) + { + return false; + } + + var start = index; + if (c == '0' && index + 1 < code.Length && code[index + 1] == 'x') + { + index += 2; + column += 2; + while (index < code.Length && char.IsAsciiHexDigit(code[index])) + { + index++; + column++; + } + } + else + { + while (index < code.Length && (char.IsDigit(code[index]) || code[index] == '.')) + { + index++; + column++; + } + } + + while (index < code.Length && (code[index] == 'U' || code[index] == 'L' || code[index] == 'u' || code[index] == 'l')) + { + index++; + column++; + } + + tokens.Add(new Token(TokenType.Number, code[start..index], line, startColumn)); + return true; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Literals.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Literals.cs new file mode 100644 index 000000000..a99b3dbf7 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Literals.cs @@ -0,0 +1,78 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static bool TryConsumeStringLiteral( + string code, + ref int index, + ref int column, + int line, + int startColumn, + List tokens) + { + if (code[index] != '"') + { + return false; + } + + var start = index; + index++; + column++; + while (index < code.Length && code[index] != '"') + { + if (code[index] == '\\' && index + 1 < code.Length) + { + index += 2; + column += 2; + } + else + { + index++; + column++; + } + } + index++; + column++; + + tokens.Add(new Token(TokenType.String, code[start..index], line, startColumn)); + return true; + } + + private static bool TryConsumeCharLiteral( + string code, + ref int index, + ref int column, + int line, + int startColumn, + List tokens) + { + if (code[index] != '\'') + { + return false; + } + + var start = index; + index++; + column++; + while (index < code.Length && code[index] != '\'') + { + if (code[index] == '\\' && index + 1 < code.Length) + { + index += 2; + column += 2; + } + else + { + index++; + column++; + } + } + index++; + column++; + + tokens.Add(new Token(TokenType.Char, code[start..index], line, startColumn)); + return true; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Operators.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Operators.cs new file mode 100644 index 000000000..dcf64d839 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Operators.cs @@ -0,0 +1,41 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static void ConsumeOperator( + string code, + ref int index, + ref int column, + int line, + int startColumn, + List tokens) + { + var twoChar = index + 1 < code.Length ? code[index..(index + 2)] : string.Empty; + var op = twoChar is "==" or "!=" or "<=" or ">=" or "&&" or "||" or "++" or "--" + or "->" or "+=" or "-=" or "*=" or "/=" or "&=" or "|=" or "^=" or "<<" + or ">>" + ? twoChar + : code[index].ToString(); + + if (op.Length == 2) + { + index += 2; + column += 2; + } + else + { + index++; + column++; + } + + var tokenType = op is "(" or ")" or "{" or "}" or "[" or "]" + ? TokenType.Bracket + : op is ";" or "," or "." + ? TokenType.Punctuation + : TokenType.Operator; + + tokens.Add(new Token(tokenType, op, line, startColumn)); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Whitespace.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Whitespace.cs new file mode 100644 index 000000000..19da5378b --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.Whitespace.cs @@ -0,0 +1,64 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static bool TryConsumeWhitespace(string code, ref int index, ref int line, ref int column) + { + if (!char.IsWhiteSpace(code[index])) + { + return false; + } + + if (code[index] == '\n') + { + line++; + column = 1; + } + else + { + column++; + } + + index++; + return true; + } + + private static bool TryConsumeLineComment(string code, ref int index) + { + if (index + 1 >= code.Length || code[index] != '/' || code[index + 1] != '/') + { + return false; + } + + while (index < code.Length && code[index] != '\n') + { + index++; + } + + return true; + } + + private static bool TryConsumeBlockComment(string code, ref int index, ref int line, ref int column) + { + if (index + 1 >= code.Length || code[index] != '/' || code[index + 1] != '*') + { + return false; + } + + index += 2; + while (index + 1 < code.Length && !(code[index] == '*' && code[index + 1] == '/')) + { + if (code[index] == '\n') + { + line++; + column = 1; + } + index++; + } + index += 2; + + return true; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.cs new file mode 100644 index 000000000..535d188b3 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.Tokenizer.cs @@ -0,0 +1,58 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
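// Illustrative sketch (not part of the patch): the consumers above are driven by the
// shared Tokenize loop, which turns decompiled text into a flat token stream. Token and
// TokenType are the existing tokenizer types used throughout this parser (their
// accessibility from calling code is assumed here). For the single line
//     len = strlen(buf) + 0x10;
// the loop yields:
var expected = new List<Token>
{
    new(TokenType.Identifier, "len", 1, 1),
    new(TokenType.Operator, "=", 1, 5),
    new(TokenType.Identifier, "strlen", 1, 7),
    new(TokenType.Bracket, "(", 1, 13),
    new(TokenType.Identifier, "buf", 1, 14),
    new(TokenType.Bracket, ")", 1, 17),
    new(TokenType.Operator, "+", 1, 19),
    new(TokenType.Number, "0x10", 1, 21),
    new(TokenType.Punctuation, ";", 1, 25)
};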
+namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class DecompiledCodeParser +{ + private static List Tokenize(string code) + { + var tokens = new List(); + var index = 0; + var line = 1; + var column = 1; + + while (index < code.Length) + { + if (TryConsumeWhitespace(code, ref index, ref line, ref column)) + { + continue; + } + + if (TryConsumeLineComment(code, ref index)) + { + continue; + } + + if (TryConsumeBlockComment(code, ref index, ref line, ref column)) + { + continue; + } + + var startColumn = column; + + if (TryConsumeIdentifier(code, ref index, ref column, line, startColumn, tokens)) + { + continue; + } + + if (TryConsumeNumber(code, ref index, ref column, line, startColumn, tokens)) + { + continue; + } + + if (TryConsumeStringLiteral(code, ref index, ref column, line, startColumn, tokens)) + { + continue; + } + + if (TryConsumeCharLiteral(code, ref index, ref column, line, startColumn, tokens)) + { + continue; + } + + ConsumeOperator(code, ref index, ref column, line, startColumn, tokens); + } + + return tokens; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.cs index 3831812fb..9eab40ae9 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledCodeParser.cs @@ -1,7 +1,7 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - using System.Collections.Immutable; +using System.Globalization; using System.Text.RegularExpressions; namespace StellaOps.BinaryIndex.Decompiler; @@ -11,7 +11,7 @@ namespace StellaOps.BinaryIndex.Decompiler; /// public sealed partial class DecompiledCodeParser : IDecompiledCodeParser { - private static readonly HashSet s_keywords = + private static readonly HashSet _keywords = [ "if", "else", "while", "for", "do", "switch", "case", "default", "return", "break", "continue", "goto", "sizeof", "typedef", @@ -19,7 +19,7 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser "float", "double", "unsigned", "signed", "const", "static", "extern" ]; - private static readonly HashSet s_types = + private static readonly HashSet _types = [ "void", "int", "uint", "char", "uchar", "byte", "ubyte", "short", "ushort", "long", "ulong", "longlong", "ulonglong", @@ -47,10 +47,7 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser public ImmutableArray ExtractVariables(string code) { var variables = new List(); - var varIndex = 0; - // Match variable declarations: type name [= value]; - // Ghidra style: int local_10; or undefined8 param_1; var declPattern = VariableDeclarationRegex(); foreach (Match match in declPattern.Matches(code)) @@ -58,6 +55,11 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser var type = match.Groups["type"].Value; var name = match.Groups["name"].Value; + if (_keywords.Contains(type) && !_types.Contains(type)) + { + continue; + } + var isParam = name.StartsWith("param_", StringComparison.Ordinal); int? 
paramIndex = null; int stackOffset = 0; @@ -68,13 +70,12 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser } if (name.StartsWith("local_", StringComparison.Ordinal) && - int.TryParse(name.AsSpan(6), System.Globalization.NumberStyles.HexNumber, null, out var offset)) + int.TryParse(name.AsSpan(6), NumberStyles.HexNumber, null, out var offset)) { - stackOffset = -offset; // Negative for locals + stackOffset = -offset; } variables.Add(new LocalVariable(name, type, stackOffset, isParam, paramIndex)); - varIndex++; } return [.. variables]; @@ -84,16 +85,13 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser public ImmutableArray ExtractCalledFunctions(string code) { var functions = new HashSet(); - - // Match function calls: name(...) var callPattern = FunctionCallRegex(); foreach (Match match in callPattern.Matches(code)) { var name = match.Groups["name"].Value; - // Skip keywords and types - if (!s_keywords.Contains(name) && !s_types.Contains(name)) + if (!_keywords.Contains(name) && !_types.Contains(name)) { functions.Add(name); } @@ -102,849 +100,4 @@ public sealed partial class DecompiledCodeParser : IDecompiledCodeParser return [.. functions.Order()]; } - private static List Tokenize(string code) - { - var tokens = new List(); - var i = 0; - var line = 1; - var column = 1; - - while (i < code.Length) - { - var c = code[i]; - - // Skip whitespace - if (char.IsWhiteSpace(c)) - { - if (c == '\n') - { - line++; - column = 1; - } - else - { - column++; - } - i++; - continue; - } - - // Skip comments - if (i + 1 < code.Length && code[i] == '/' && code[i + 1] == '/') - { - while (i < code.Length && code[i] != '\n') - { - i++; - } - continue; - } - - if (i + 1 < code.Length && code[i] == '/' && code[i + 1] == '*') - { - i += 2; - while (i + 1 < code.Length && !(code[i] == '*' && code[i + 1] == '/')) - { - if (code[i] == '\n') - { - line++; - column = 1; - } - i++; - } - i += 2; - continue; - } - - var startColumn = column; - - // Identifiers and keywords - if (char.IsLetter(c) || c == '_') - { - var start = i; - while (i < code.Length && (char.IsLetterOrDigit(code[i]) || code[i] == '_')) - { - i++; - column++; - } - var value = code[start..i]; - var type = s_keywords.Contains(value) ? TokenType.Keyword : TokenType.Identifier; - tokens.Add(new Token(type, value, line, startColumn)); - continue; - } - - // Numbers - if (char.IsDigit(c) || (c == '0' && i + 1 < code.Length && code[i + 1] == 'x')) - { - var start = i; - if (c == '0' && i + 1 < code.Length && code[i + 1] == 'x') - { - i += 2; - column += 2; - while (i < code.Length && char.IsAsciiHexDigit(code[i])) - { - i++; - column++; - } - } - else - { - while (i < code.Length && (char.IsDigit(code[i]) || code[i] == '.')) - { - i++; - column++; - } - } - // Handle suffixes (U, L, UL, etc.) 
- while (i < code.Length && (code[i] == 'U' || code[i] == 'L' || code[i] == 'u' || code[i] == 'l')) - { - i++; - column++; - } - tokens.Add(new Token(TokenType.Number, code[start..i], line, startColumn)); - continue; - } - - // String literals - if (c == '"') - { - var start = i; - i++; - column++; - while (i < code.Length && code[i] != '"') - { - if (code[i] == '\\' && i + 1 < code.Length) - { - i += 2; - column += 2; - } - else - { - i++; - column++; - } - } - i++; // closing quote - column++; - tokens.Add(new Token(TokenType.String, code[start..i], line, startColumn)); - continue; - } - - // Character literals - if (c == '\'') - { - var start = i; - i++; - column++; - while (i < code.Length && code[i] != '\'') - { - if (code[i] == '\\' && i + 1 < code.Length) - { - i += 2; - column += 2; - } - else - { - i++; - column++; - } - } - i++; // closing quote - column++; - tokens.Add(new Token(TokenType.Char, code[start..i], line, startColumn)); - continue; - } - - // Multi-character operators - if (i + 1 < code.Length) - { - var twoChar = code.Substring(i, 2); - if (twoChar is "==" or "!=" or "<=" or ">=" or "&&" or "||" or - "++" or "--" or "+=" or "-=" or "*=" or "/=" or - "<<" or ">>" or "->" or "::") - { - tokens.Add(new Token(TokenType.Operator, twoChar, line, startColumn)); - i += 2; - column += 2; - continue; - } - } - - // Single character operators and punctuation - var tokenType = c switch - { - '(' or ')' or '{' or '}' or '[' or ']' => TokenType.Bracket, - ';' or ',' or ':' or '?' => TokenType.Punctuation, - _ => TokenType.Operator - }; - tokens.Add(new Token(tokenType, c.ToString(), line, startColumn)); - i++; - column++; - } - - return tokens; - } - - private static int CountNodes(AstNode node) - { - var count = 1; - foreach (var child in node.Children) - { - count += CountNodes(child); - } - return count; - } - - private static int ComputeDepth(AstNode node) - { - if (node.Children.Length == 0) - { - return 1; - } - return 1 + node.Children.Max(c => ComputeDepth(c)); - } - - private static ImmutableArray ExtractPatterns(AstNode root) - { - var patterns = new List(); - - foreach (var node in TraverseNodes(root)) - { - // Detect loop patterns - if (node.Type == AstNodeType.For) - { - patterns.Add(new AstPattern( - PatternType.CountedLoop, - node, - new PatternMetadata("For loop", 0.9m, null))); - } - else if (node.Type == AstNodeType.While) - { - patterns.Add(new AstPattern( - PatternType.ConditionalLoop, - node, - new PatternMetadata("While loop", 0.9m, null))); - } - else if (node.Type == AstNodeType.DoWhile) - { - patterns.Add(new AstPattern( - PatternType.ConditionalLoop, - node, - new PatternMetadata("Do-while loop", 0.9m, null))); - } - - // Detect error handling - if (node is IfNode ifNode && IsErrorCheck(ifNode)) - { - patterns.Add(new AstPattern( - PatternType.ErrorCheck, - node, - new PatternMetadata("Error check", 0.8m, null))); - } - - // Detect null checks - if (node is IfNode ifNull && IsNullCheck(ifNull)) - { - patterns.Add(new AstPattern( - PatternType.NullCheck, - node, - new PatternMetadata("Null check", 0.9m, null))); - } - } - - return [.. 
patterns]; - } - - private static IEnumerable TraverseNodes(AstNode root) - { - yield return root; - foreach (var child in root.Children) - { - foreach (var node in TraverseNodes(child)) - { - yield return node; - } - } - } - - private static bool IsErrorCheck(IfNode node) - { - // Check if condition compares against -1, 0, or NULL - if (node.Condition is BinaryOpNode binaryOp) - { - if (binaryOp.Right is ConstantNode constant) - { - var value = constant.Value?.ToString(); - return value is "0" or "-1" or "0xffffffff" or "NULL"; - } - } - return false; - } - - private static bool IsNullCheck(IfNode node) - { - if (node.Condition is BinaryOpNode binaryOp) - { - if (binaryOp.Operator is "==" or "!=") - { - if (binaryOp.Right is ConstantNode constant) - { - var value = constant.Value?.ToString(); - return value is "0" or "NULL" or "nullptr"; - } - } - } - return false; - } - - [GeneratedRegex(@"(?\w+)\s+(?\w+)\s*(?:=|;)", RegexOptions.Compiled)] - private static partial Regex VariableDeclarationRegex(); - - [GeneratedRegex(@"(?\w+)\s*\(", RegexOptions.Compiled)] - private static partial Regex FunctionCallRegex(); -} - -internal enum TokenType -{ - Identifier, - Keyword, - Number, - String, - Char, - Operator, - Bracket, - Punctuation -} - -internal readonly record struct Token(TokenType Type, string Value, int Line, int Column); - -internal sealed class RecursiveParser -{ - private readonly List _tokens; - private int _pos; - - public RecursiveParser(List tokens) - { - _tokens = tokens; - _pos = 0; - } - - public AstNode ParseFunction() - { - // Parse return type - var returnType = ParseType(); - - // Parse function name - var name = Expect(TokenType.Identifier).Value; - - // Parse parameters - Expect(TokenType.Bracket, "("); - var parameters = ParseParameterList(); - Expect(TokenType.Bracket, ")"); - - // Parse body - var body = ParseBlock(); - - return new FunctionNode(name, returnType, parameters, body); - } - - private string ParseType() - { - var type = new System.Text.StringBuilder(); - - // Handle modifiers - while (Peek().Value is "const" or "unsigned" or "signed" or "static" or "extern") - { - type.Append(Advance().Value); - type.Append(' '); - } - - // Main type - type.Append(Advance().Value); - - // Handle pointers - while (Peek().Value == "*") - { - type.Append(Advance().Value); - } - - return type.ToString().Trim(); - } - - private ImmutableArray ParseParameterList() - { - var parameters = new List(); - var index = 0; - - if (Peek().Value == ")") - { - return []; - } - - if (Peek().Value == "void" && PeekAhead(1).Value == ")") - { - Advance(); // consume void - return []; - } - - do - { - if (Peek().Value == ",") - { - Advance(); - } - - var type = ParseType(); - var name = Peek().Type == TokenType.Identifier ? Advance().Value : $"param_{index}"; - - parameters.Add(new ParameterNode(name, type, index)); - index++; - } - while (Peek().Value == ","); - - return [.. parameters]; - } - - private BlockNode ParseBlock() - { - Expect(TokenType.Bracket, "{"); - - var statements = new List(); - - while (Peek().Value != "}") - { - var stmt = ParseStatement(); - if (stmt is not null) - { - statements.Add(stmt); - } - } - - Expect(TokenType.Bracket, "}"); - - return new BlockNode([.. statements]); - } - - private AstNode? 
ParseStatement() - { - var token = Peek(); - - return token.Value switch - { - "if" => ParseIf(), - "while" => ParseWhile(), - "for" => ParseFor(), - "do" => ParseDoWhile(), - "return" => ParseReturn(), - "break" => ParseBreak(), - "continue" => ParseContinue(), - "{" => ParseBlock(), - ";" => SkipSemicolon(), - _ => ParseExpressionStatement() - }; - } - - private IfNode ParseIf() - { - Advance(); // consume 'if' - Expect(TokenType.Bracket, "("); - var condition = ParseExpression(); - Expect(TokenType.Bracket, ")"); - - var thenBranch = ParseStatement() ?? new BlockNode([]); - - AstNode? elseBranch = null; - if (Peek().Value == "else") - { - Advance(); - elseBranch = ParseStatement(); - } - - return new IfNode(condition, thenBranch, elseBranch); - } - - private WhileNode ParseWhile() - { - Advance(); // consume 'while' - Expect(TokenType.Bracket, "("); - var condition = ParseExpression(); - Expect(TokenType.Bracket, ")"); - - var body = ParseStatement() ?? new BlockNode([]); - - return new WhileNode(condition, body); - } - - private ForNode ParseFor() - { - Advance(); // consume 'for' - Expect(TokenType.Bracket, "("); - - AstNode? init = null; - if (Peek().Value != ";") - { - init = ParseExpression(); - } - Expect(TokenType.Punctuation, ";"); - - AstNode? condition = null; - if (Peek().Value != ";") - { - condition = ParseExpression(); - } - Expect(TokenType.Punctuation, ";"); - - AstNode? update = null; - if (Peek().Value != ")") - { - update = ParseExpression(); - } - Expect(TokenType.Bracket, ")"); - - var body = ParseStatement() ?? new BlockNode([]); - - return new ForNode(init, condition, update, body); - } - - private AstNode ParseDoWhile() - { - Advance(); // consume 'do' - var body = ParseStatement() ?? new BlockNode([]); - - Expect(TokenType.Keyword, "while"); - Expect(TokenType.Bracket, "("); - var condition = ParseExpression(); - Expect(TokenType.Bracket, ")"); - Expect(TokenType.Punctuation, ";"); - - return new WhileNode(condition, body); // Simplify do-while to while for now - } - - private ReturnNode ParseReturn() - { - Advance(); // consume 'return' - - AstNode? value = null; - if (Peek().Value != ";") - { - value = ParseExpression(); - } - Expect(TokenType.Punctuation, ";"); - - return new ReturnNode(value); - } - - private AstNode ParseBreak() - { - Advance(); - Expect(TokenType.Punctuation, ";"); - return new BlockNode([]); // Simplified - } - - private AstNode ParseContinue() - { - Advance(); - Expect(TokenType.Punctuation, ";"); - return new BlockNode([]); // Simplified - } - - private AstNode? SkipSemicolon() - { - Advance(); - return null; - } - - private AstNode? 
ParseExpressionStatement() - { - var expr = ParseExpression(); - if (Peek().Value == ";") - { - Advance(); - } - return expr; - } - - private AstNode ParseExpression() - { - return ParseAssignment(); - } - - private AstNode ParseAssignment() - { - var left = ParseLogicalOr(); - - if (Peek().Value is "=" or "+=" or "-=" or "*=" or "/=" or "&=" or "|=" or "^=" or "<<=" or ">>=") - { - var op = Advance().Value; - var right = ParseAssignment(); - return new AssignmentNode(left, right, op); - } - - return left; - } - - private AstNode ParseLogicalOr() - { - var left = ParseLogicalAnd(); - - while (Peek().Value == "||") - { - var op = Advance().Value; - var right = ParseLogicalAnd(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseLogicalAnd() - { - var left = ParseBitwiseOr(); - - while (Peek().Value == "&&") - { - var op = Advance().Value; - var right = ParseBitwiseOr(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseBitwiseOr() - { - var left = ParseComparison(); - - while (Peek().Value is "|" or "^" or "&") - { - var op = Advance().Value; - var right = ParseComparison(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseComparison() - { - var left = ParseShift(); - - while (Peek().Value is "==" or "!=" or "<" or ">" or "<=" or ">=") - { - var op = Advance().Value; - var right = ParseShift(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseShift() - { - var left = ParseAdditive(); - - while (Peek().Value is "<<" or ">>") - { - var op = Advance().Value; - var right = ParseAdditive(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseAdditive() - { - var left = ParseMultiplicative(); - - while (Peek().Value is "+" or "-") - { - var op = Advance().Value; - var right = ParseMultiplicative(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseMultiplicative() - { - var left = ParseUnary(); - - while (Peek().Value is "*" or "/" or "%") - { - var op = Advance().Value; - var right = ParseUnary(); - left = new BinaryOpNode(left, right, op); - } - - return left; - } - - private AstNode ParseUnary() - { - if (Peek().Value is "!" or "~" or "-" or "+" or "*" or "&" or "++" or "--") - { - var op = Advance().Value; - var operand = ParseUnary(); - return new UnaryOpNode(operand, op, true); - } - - return ParsePostfix(); - } - - private AstNode ParsePostfix() - { - var expr = ParsePrimary(); - - while (true) - { - if (Peek().Value == "(") - { - // Function call - Advance(); - var args = ParseArgumentList(); - Expect(TokenType.Bracket, ")"); - - if (expr is VariableNode varNode) - { - expr = new CallNode(varNode.Name, args); - } - } - else if (Peek().Value == "[") - { - // Array access - Advance(); - var index = ParseExpression(); - Expect(TokenType.Bracket, "]"); - expr = new ArrayAccessNode(expr, index); - } - else if (Peek().Value is "." 
or "->") - { - var isPointer = Advance().Value == "->"; - var field = Expect(TokenType.Identifier).Value; - expr = new FieldAccessNode(expr, field, isPointer); - } - else if (Peek().Value is "++" or "--") - { - var op = Advance().Value; - expr = new UnaryOpNode(expr, op, false); - } - else - { - break; - } - } - - return expr; - } - - private ImmutableArray ParseArgumentList() - { - var args = new List(); - - if (Peek().Value == ")") - { - return []; - } - - do - { - if (Peek().Value == ",") - { - Advance(); - } - args.Add(ParseExpression()); - } - while (Peek().Value == ","); - - return [.. args]; - } - - private AstNode ParsePrimary() - { - var token = Peek(); - - if (token.Type == TokenType.Number) - { - Advance(); - return new ConstantNode(token.Value, "int"); - } - - if (token.Type == TokenType.String) - { - Advance(); - return new ConstantNode(token.Value, "char*"); - } - - if (token.Type == TokenType.Char) - { - Advance(); - return new ConstantNode(token.Value, "char"); - } - - if (token.Type == TokenType.Identifier) - { - Advance(); - return new VariableNode(token.Value, null); - } - - if (token.Value == "(") - { - Advance(); - - // Check for cast - if (IsType(Peek().Value)) - { - var targetType = ParseType(); - Expect(TokenType.Bracket, ")"); - var expr = ParseUnary(); - return new CastNode(expr, targetType); - } - - var inner = ParseExpression(); - Expect(TokenType.Bracket, ")"); - return inner; - } - - // Handle sizeof - if (token.Value == "sizeof") - { - Advance(); - Expect(TokenType.Bracket, "("); - var type = ParseType(); - Expect(TokenType.Bracket, ")"); - return new ConstantNode($"sizeof({type})", "size_t"); - } - - // Unknown token - return empty node - Advance(); - return new ConstantNode(token.Value, "unknown"); - } - - private static bool IsType(string value) - { - return value is "int" or "char" or "void" or "long" or "short" or "float" or "double" - or "unsigned" or "signed" or "const" or "struct" or "union" or "enum" - or "undefined" or "undefined1" or "undefined2" or "undefined4" or "undefined8" - or "byte" or "word" or "dword" or "qword" or "pointer" or "code" or "uint" or "ulong"; - } - - private Token Peek() => _pos < _tokens.Count ? _tokens[_pos] : new Token(TokenType.Punctuation, "", 0, 0); - - private Token PeekAhead(int offset) => _pos + offset < _tokens.Count - ? _tokens[_pos + offset] - : new Token(TokenType.Punctuation, "", 0, 0); - - private Token Advance() => _pos < _tokens.Count ? _tokens[_pos++] : new Token(TokenType.Punctuation, "", 0, 0); - - private Token Expect(TokenType type, string? value = null) - { - var token = Peek(); - if (token.Type != type || (value is not null && token.Value != value)) - { - // Skip unexpected tokens - return Advance(); - } - return Advance(); - } } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledComparisonResult.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledComparisonResult.cs new file mode 100644 index 000000000..107325958 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledComparisonResult.cs @@ -0,0 +1,27 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Result of comparing two decompiled functions. 
+/// +public sealed record DecompiledComparisonResult( + decimal Similarity, + decimal StructuralSimilarity, + decimal SemanticSimilarity, + AstEditDistance EditDistance, + ImmutableArray Equivalences, + ImmutableArray Differences, + ComparisonConfidence Confidence); + +/// +/// Edit distance between ASTs. +/// +public sealed record AstEditDistance( + int Insertions, + int Deletions, + int Modifications, + int TotalOperations, + decimal NormalizedDistance); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledFunction.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledFunction.cs new file mode 100644 index 000000000..dddca9adc --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompiledFunction.cs @@ -0,0 +1,18 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// A function decompiled to C-like pseudo-code. +/// +public sealed record DecompiledFunction( + string FunctionName, + string Signature, + string Code, + DecompiledAst? Ast, + ImmutableArray Locals, + ImmutableArray CalledFunctions, + ulong Address, + int SizeBytes); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompilerOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompilerOptions.cs new file mode 100644 index 000000000..813b18ef4 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/DecompilerOptions.cs @@ -0,0 +1,13 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Options for the decompiler adapter. +/// +public sealed class DecompilerOptions +{ + public string GhidraScriptsPath { get; set; } = "/scripts"; + public TimeSpan DefaultTimeout { get; set; } = TimeSpan.FromSeconds(30); + public int MaxCodeLength { get; set; } = 100_000; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Compare.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Compare.cs new file mode 100644 index 000000000..54f1d483b --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Compare.cs @@ -0,0 +1,85 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class GhidraDecompilerAdapter +{ + /// + public Task ParseToAstAsync( + string decompiledCode, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrEmpty(decompiledCode); + + ct.ThrowIfCancellationRequested(); + + var ast = _parser.Parse(decompiledCode); + return Task.FromResult(ast); + } + + /// + public Task CompareAsync( + DecompiledFunction a, + DecompiledFunction b, + ComparisonOptions? 
options = null, + CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(a); + ArgumentNullException.ThrowIfNull(b); + + options ??= new ComparisonOptions(); + ct.ThrowIfCancellationRequested(); + + _logger.LogDebug( + "Comparing functions {A} and {B}", + a.FunctionName, + b.FunctionName); + + if (a.Ast is null || b.Ast is null) + { + _logger.LogWarning("Cannot compare functions without ASTs"); + + return Task.FromResult(new DecompiledComparisonResult( + Similarity: 0, + StructuralSimilarity: 0, + SemanticSimilarity: 0, + EditDistance: new AstEditDistance(0, 0, 0, 0, 1.0m), + Equivalences: [], + Differences: [], + Confidence: ComparisonConfidence.Low)); + } + + var structuralSimilarity = _comparisonEngine.ComputeStructuralSimilarity(a.Ast, b.Ast); + var editDistance = _comparisonEngine.ComputeEditDistance(a.Ast, b.Ast); + var equivalences = _comparisonEngine.FindEquivalences(a.Ast, b.Ast); + var differences = _comparisonEngine.FindDifferences(a.Ast, b.Ast); + + var totalNodes = Math.Max(a.Ast.NodeCount, b.Ast.NodeCount); + var equivalentNodes = equivalences.Length; + var semanticSimilarity = totalNodes > 0 + ? (decimal)equivalentNodes / totalNodes + : 0m; + + var overallSimilarity = ComputeOverallSimilarity( + structuralSimilarity, + semanticSimilarity, + editDistance.NormalizedDistance); + + var confidence = DetermineConfidence( + overallSimilarity, + a.Ast.NodeCount, + b.Ast.NodeCount, + equivalences.Length); + + return Task.FromResult(new DecompiledComparisonResult( + Similarity: overallSimilarity, + StructuralSimilarity: structuralSimilarity, + SemanticSimilarity: semanticSimilarity, + EditDistance: editDistance, + Equivalences: equivalences, + Differences: differences, + Confidence: confidence)); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Decompile.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Decompile.cs new file mode 100644 index 000000000..40ac6dcff --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Decompile.cs @@ -0,0 +1,108 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; +using StellaOps.BinaryIndex.Ghidra; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class GhidraDecompilerAdapter +{ + /// + public async Task DecompileAsync( + GhidraFunction function, + DecompileOptions? options = null, + CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(function); + + options ??= new DecompileOptions(); + + _logger.LogDebug( + "Decompiling function {Name} at 0x{Address:X}", + function.Name, + function.Address); + + var code = function.DecompiledCode; + + if (string.IsNullOrEmpty(code)) + { + _logger.LogWarning( + "Function {Name} has no decompiled code, returning stub", + function.Name); + + return new DecompiledFunction( + function.Name, + BuildSignature(function), + "/* Decompilation unavailable */", + null, + [], + [], + function.Address, + function.Size); + } + + if (code.Length > options.MaxCodeLength) + { + code = code[..options.MaxCodeLength] + "\n/* ... truncated ... */"; + } + + DecompiledAst? 
ast = null; + try + { + ast = _parser.Parse(code); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse decompiled code for {Name}", function.Name); + } + + var locals = _parser.ExtractVariables(code); + var calledFunctions = _parser.ExtractCalledFunctions(code); + + return new DecompiledFunction( + function.Name, + BuildSignature(function), + code, + ast, + locals, + calledFunctions, + function.Address, + function.Size); + } + + /// + public async Task DecompileAtAddressAsync( + string binaryPath, + ulong address, + DecompileOptions? options = null, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrEmpty(binaryPath); + + options ??= new DecompileOptions(); + + _logger.LogDebug( + "Decompiling function at 0x{Address:X} in {Binary}", + address, + Path.GetFileName(binaryPath)); + + using var stream = File.OpenRead(binaryPath); + var analysis = await _ghidraService.AnalyzeAsync( + stream, + new GhidraAnalysisOptions + { + IncludeDecompilation = true, + ExtractDecompilation = true + }, + ct).ConfigureAwait(false); + + var function = analysis.Functions.FirstOrDefault(f => f.Address == address); + + if (function is null) + { + throw new InvalidOperationException($"No function found at address 0x{address:X}"); + } + + return await DecompileAsync(function, options, ct).ConfigureAwait(false); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Helpers.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Helpers.cs new file mode 100644 index 000000000..2d64d6126 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.Helpers.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Ghidra; + +namespace StellaOps.BinaryIndex.Decompiler; + +public sealed partial class GhidraDecompilerAdapter +{ + private static string BuildSignature(GhidraFunction function) + { + if (!string.IsNullOrEmpty(function.Signature)) + { + return function.Signature; + } + + return $"void {function.Name}(void)"; + } + + private static decimal ComputeOverallSimilarity( + decimal structural, + decimal semantic, + decimal normalizedEditDistance) + { + var editSimilarity = 1.0m - normalizedEditDistance; + return structural * 0.4m + semantic * 0.4m + editSimilarity * 0.2m; + } + + private static ComparisonConfidence DetermineConfidence( + decimal similarity, + int nodeCountA, + int nodeCountB, + int equivalenceCount) + { + var minNodes = Math.Min(nodeCountA, nodeCountB); + if (minNodes < 5) + { + return ComparisonConfidence.Low; + } + + if (similarity > 0.9m && equivalenceCount > minNodes * 0.7) + { + return ComparisonConfidence.VeryHigh; + } + + if (similarity > 0.7m && equivalenceCount > minNodes * 0.5) + { + return ComparisonConfidence.High; + } + + if (similarity > 0.5m) + { + return ComparisonConfidence.Medium; + } + + return ComparisonConfidence.Low; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.cs index e6cabb898..13ba8238a 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/GhidraDecompilerAdapter.cs @@ -1,18 +1,15 @@ // Copyright (c) StellaOps. All rights reserved. 
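To make the weighting in ComputeOverallSimilarity and the thresholds in DetermineConfidence (GhidraDecompilerAdapter.Helpers.cs above) concrete, here is a short worked example with invented inputs; it is an editorial aside, not part of this change.

// Hypothetical inputs: structural = 0.85, semantic = 0.60, normalized edit distance = 0.25.
var similarity = ComputeOverallSimilarity(0.85m, 0.60m, 0.25m);
// editSimilarity = 1.0 - 0.25 = 0.75
// overall        = 0.85 * 0.4 + 0.60 * 0.4 + 0.75 * 0.2
//                = 0.34 + 0.24 + 0.15 = 0.73

// With ASTs of 40 and 50 nodes and 28 matched equivalences:
var confidence = DetermineConfidence(similarity, 40, 50, 28);
// minNodes = 40 (>= 5, so the small-function guard does not apply);
// 0.73 is not > 0.9, but it is > 0.7 and 28 > 40 * 0.5,
// so the result is ComparisonConfidence.High.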
// Licensed under BUSL-1.1. See LICENSE in the project root. - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.BinaryIndex.Ghidra; -using System.Text.Json; namespace StellaOps.BinaryIndex.Decompiler; /// /// Adapter for Ghidra's decompiler via headless analysis. /// -public sealed class GhidraDecompilerAdapter : IDecompilerService +public sealed partial class GhidraDecompilerAdapter : IDecompilerService { private readonly IGhidraService _ghidraService; private readonly IDecompiledCodeParser _parser; @@ -33,260 +30,4 @@ public sealed class GhidraDecompilerAdapter : IDecompilerService _options = options.Value; _logger = logger; } - - /// - public async Task DecompileAsync( - GhidraFunction function, - DecompileOptions? options = null, - CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(function); - - options ??= new DecompileOptions(); - - _logger.LogDebug( - "Decompiling function {Name} at 0x{Address:X}", - function.Name, - function.Address); - - // The GhidraFunction should already have decompiled code from analysis - var code = function.DecompiledCode; - - if (string.IsNullOrEmpty(code)) - { - _logger.LogWarning( - "Function {Name} has no decompiled code, returning stub", - function.Name); - - return new DecompiledFunction( - function.Name, - BuildSignature(function), - "/* Decompilation unavailable */", - null, - [], - [], - function.Address, - function.Size); - } - - // Truncate if too long - if (code.Length > options.MaxCodeLength) - { - code = code[..options.MaxCodeLength] + "\n/* ... truncated ... */"; - } - - // Parse to AST - DecompiledAst? ast = null; - try - { - ast = _parser.Parse(code); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse decompiled code for {Name}", function.Name); - } - - // Extract metadata - var locals = _parser.ExtractVariables(code); - var calledFunctions = _parser.ExtractCalledFunctions(code); - - return new DecompiledFunction( - function.Name, - BuildSignature(function), - code, - ast, - locals, - calledFunctions, - function.Address, - function.Size); - } - - /// - public async Task DecompileAtAddressAsync( - string binaryPath, - ulong address, - DecompileOptions? options = null, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrEmpty(binaryPath); - - options ??= new DecompileOptions(); - - _logger.LogDebug( - "Decompiling function at 0x{Address:X} in {Binary}", - address, - Path.GetFileName(binaryPath)); - - // Use Ghidra to analyze and get the function - using var stream = File.OpenRead(binaryPath); - var analysis = await _ghidraService.AnalyzeAsync( - stream, - new GhidraAnalysisOptions - { - IncludeDecompilation = true, - ExtractDecompilation = true - }, - ct); - - var function = analysis.Functions.FirstOrDefault(f => f.Address == address); - - if (function is null) - { - throw new InvalidOperationException($"No function found at address 0x{address:X}"); - } - - return await DecompileAsync(function, options, ct); - } - - /// - public Task ParseToAstAsync( - string decompiledCode, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrEmpty(decompiledCode); - - ct.ThrowIfCancellationRequested(); - - var ast = _parser.Parse(decompiledCode); - return Task.FromResult(ast); - } - - /// - public Task CompareAsync( - DecompiledFunction a, - DecompiledFunction b, - ComparisonOptions? 
options = null, - CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(a); - ArgumentNullException.ThrowIfNull(b); - - options ??= new ComparisonOptions(); - ct.ThrowIfCancellationRequested(); - - _logger.LogDebug( - "Comparing functions {A} and {B}", - a.FunctionName, - b.FunctionName); - - // Need ASTs for comparison - if (a.Ast is null || b.Ast is null) - { - _logger.LogWarning("Cannot compare functions without ASTs"); - - return Task.FromResult(new DecompiledComparisonResult( - Similarity: 0, - StructuralSimilarity: 0, - SemanticSimilarity: 0, - EditDistance: new AstEditDistance(0, 0, 0, 0, 1.0m), - Equivalences: [], - Differences: [], - Confidence: ComparisonConfidence.Low)); - } - - // Compute structural similarity - var structuralSimilarity = _comparisonEngine.ComputeStructuralSimilarity(a.Ast, b.Ast); - - // Compute edit distance - var editDistance = _comparisonEngine.ComputeEditDistance(a.Ast, b.Ast); - - // Find semantic equivalences - var equivalences = _comparisonEngine.FindEquivalences(a.Ast, b.Ast); - - // Find differences - var differences = _comparisonEngine.FindDifferences(a.Ast, b.Ast); - - // Compute semantic similarity from equivalences - var totalNodes = Math.Max(a.Ast.NodeCount, b.Ast.NodeCount); - var equivalentNodes = equivalences.Length; - var semanticSimilarity = totalNodes > 0 - ? (decimal)equivalentNodes / totalNodes - : 0m; - - // Combine into overall similarity - var overallSimilarity = ComputeOverallSimilarity( - structuralSimilarity, - semanticSimilarity, - editDistance.NormalizedDistance); - - // Determine confidence - var confidence = DetermineConfidence( - overallSimilarity, - a.Ast.NodeCount, - b.Ast.NodeCount, - equivalences.Length); - - return Task.FromResult(new DecompiledComparisonResult( - Similarity: overallSimilarity, - StructuralSimilarity: structuralSimilarity, - SemanticSimilarity: semanticSimilarity, - EditDistance: editDistance, - Equivalences: equivalences, - Differences: differences, - Confidence: confidence)); - } - - private static string BuildSignature(GhidraFunction function) - { - // Use the signature from Ghidra if available, otherwise construct a simple one - if (!string.IsNullOrEmpty(function.Signature)) - { - return function.Signature; - } - - // Default signature if none available - return $"void {function.Name}(void)"; - } - - private static decimal ComputeOverallSimilarity( - decimal structural, - decimal semantic, - decimal normalizedEditDistance) - { - // Weight: 40% structural, 40% semantic, 20% edit distance (inverted) - var editSimilarity = 1.0m - normalizedEditDistance; - return structural * 0.4m + semantic * 0.4m + editSimilarity * 0.2m; - } - - private static ComparisonConfidence DetermineConfidence( - decimal similarity, - int nodeCountA, - int nodeCountB, - int equivalenceCount) - { - // Very small functions are harder to compare confidently - var minNodes = Math.Min(nodeCountA, nodeCountB); - if (minNodes < 5) - { - return ComparisonConfidence.Low; - } - - // High similarity with many equivalences = high confidence - if (similarity > 0.9m && equivalenceCount > minNodes * 0.7) - { - return ComparisonConfidence.VeryHigh; - } - - if (similarity > 0.7m && equivalenceCount > minNodes * 0.5) - { - return ComparisonConfidence.High; - } - - if (similarity > 0.5m) - { - return ComparisonConfidence.Medium; - } - - return ComparisonConfidence.Low; - } -} - -/// -/// Options for the decompiler adapter. 
-/// -public sealed class DecompilerOptions -{ - public string GhidraScriptsPath { get; set; } = "/scripts"; - public TimeSpan DefaultTimeout { get; set; } = TimeSpan.FromSeconds(30); - public int MaxCodeLength { get; set; } = 100_000; } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IAstComparisonEngine.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IAstComparisonEngine.cs new file mode 100644 index 000000000..2f8e7f67c --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IAstComparisonEngine.cs @@ -0,0 +1,43 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Engine for comparing AST structures. +/// +public interface IAstComparisonEngine +{ + /// + /// Compute structural similarity between ASTs. + /// + /// First AST. + /// Second AST. + /// Similarity score (0.0 to 1.0). + decimal ComputeStructuralSimilarity(DecompiledAst a, DecompiledAst b); + + /// + /// Compute edit distance between ASTs. + /// + /// First AST. + /// Second AST. + /// Edit distance metrics. + AstEditDistance ComputeEditDistance(DecompiledAst a, DecompiledAst b); + + /// + /// Find semantic equivalences between ASTs. + /// + /// First AST. + /// Second AST. + /// List of equivalent node pairs. + ImmutableArray FindEquivalences(DecompiledAst a, DecompiledAst b); + + /// + /// Find differences between ASTs. + /// + /// First AST. + /// Second AST. + /// List of differences. + ImmutableArray FindDifferences(DecompiledAst a, DecompiledAst b); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ICodeNormalizer.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ICodeNormalizer.cs new file mode 100644 index 000000000..3c67a81c9 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/ICodeNormalizer.cs @@ -0,0 +1,32 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Normalizes decompiled code for comparison. +/// +public interface ICodeNormalizer +{ + /// + /// Normalize decompiled code for comparison. + /// + /// Raw decompiled code. + /// Normalization options. + /// Normalized code. + string Normalize(string code, NormalizationOptions? options = null); + + /// + /// Compute canonical hash of normalized code. + /// + /// Decompiled code. + /// 32-byte hash. + byte[] ComputeCanonicalHash(string code); + + /// + /// Normalize an AST for comparison. + /// + /// AST to normalize. + /// Normalization options. + /// Normalized AST. + DecompiledAst NormalizeAst(DecompiledAst ast, NormalizationOptions? options = null); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompiledCodeParser.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompiledCodeParser.cs new file mode 100644 index 000000000..eea0aa903 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompiledCodeParser.cs @@ -0,0 +1,32 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Parses decompiled C-like code into AST. +/// +public interface IDecompiledCodeParser +{ + /// + /// Parse decompiled code into AST. 
+ /// + /// C-like pseudo-code. + /// Parsed AST. + DecompiledAst Parse(string code); + + /// + /// Extract local variables from decompiled code. + /// + /// C-like pseudo-code. + /// List of local variables. + ImmutableArray ExtractVariables(string code); + + /// + /// Extract called functions from decompiled code. + /// + /// C-like pseudo-code. + /// List of function names called. + ImmutableArray ExtractCalledFunctions(string code); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompilerService.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompilerService.cs index add26dde6..401517261 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompilerService.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/IDecompilerService.cs @@ -3,7 +3,6 @@ using StellaOps.BinaryIndex.Ghidra; -using System.Collections.Immutable; namespace StellaOps.BinaryIndex.Decompiler; @@ -62,97 +61,3 @@ public interface IDecompilerService ComparisonOptions? options = null, CancellationToken ct = default); } - -/// -/// Engine for comparing AST structures. -/// -public interface IAstComparisonEngine -{ - /// - /// Compute structural similarity between ASTs. - /// - /// First AST. - /// Second AST. - /// Similarity score (0.0 to 1.0). - decimal ComputeStructuralSimilarity(DecompiledAst a, DecompiledAst b); - - /// - /// Compute edit distance between ASTs. - /// - /// First AST. - /// Second AST. - /// Edit distance metrics. - AstEditDistance ComputeEditDistance(DecompiledAst a, DecompiledAst b); - - /// - /// Find semantic equivalences between ASTs. - /// - /// First AST. - /// Second AST. - /// List of equivalent node pairs. - ImmutableArray FindEquivalences(DecompiledAst a, DecompiledAst b); - - /// - /// Find differences between ASTs. - /// - /// First AST. - /// Second AST. - /// List of differences. - ImmutableArray FindDifferences(DecompiledAst a, DecompiledAst b); -} - -/// -/// Normalizes decompiled code for comparison. -/// -public interface ICodeNormalizer -{ - /// - /// Normalize decompiled code for comparison. - /// - /// Raw decompiled code. - /// Normalization options. - /// Normalized code. - string Normalize(string code, NormalizationOptions? options = null); - - /// - /// Compute canonical hash of normalized code. - /// - /// Decompiled code. - /// 32-byte hash. - byte[] ComputeCanonicalHash(string code); - - /// - /// Normalize an AST for comparison. - /// - /// AST to normalize. - /// Normalization options. - /// Normalized AST. - DecompiledAst NormalizeAst(DecompiledAst ast, NormalizationOptions? options = null); -} - -/// -/// Parses decompiled C-like code into AST. -/// -public interface IDecompiledCodeParser -{ - /// - /// Parse decompiled code into AST. - /// - /// C-like pseudo-code. - /// Parsed AST. - DecompiledAst Parse(string code); - - /// - /// Extract local variables from decompiled code. - /// - /// C-like pseudo-code. - /// List of local variables. - ImmutableArray ExtractVariables(string code); - - /// - /// Extract called functions from decompiled code. - /// - /// C-like pseudo-code. - /// List of function names called. 
- ImmutableArray ExtractCalledFunctions(string code); -} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/LocalVariable.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/LocalVariable.cs new file mode 100644 index 000000000..817bb4e6b --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/LocalVariable.cs @@ -0,0 +1,13 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// A local variable in decompiled code. +/// +public sealed record LocalVariable( + string Name, + string Type, + int StackOffset, + bool IsParameter, + int? ParameterIndex); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Models.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Models.cs deleted file mode 100644 index 61a68626e..000000000 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Models.cs +++ /dev/null @@ -1,377 +0,0 @@ -// Copyright (c) StellaOps. All rights reserved. -// Licensed under BUSL-1.1. See LICENSE in the project root. - -using System.Collections.Immutable; - -namespace StellaOps.BinaryIndex.Decompiler; - -/// -/// A function decompiled to C-like pseudo-code. -/// -public sealed record DecompiledFunction( - string FunctionName, - string Signature, - string Code, - DecompiledAst? Ast, - ImmutableArray Locals, - ImmutableArray CalledFunctions, - ulong Address, - int SizeBytes); - -/// -/// AST representation of decompiled code. -/// -public sealed record DecompiledAst( - AstNode Root, - int NodeCount, - int Depth, - ImmutableArray Patterns); - -/// -/// Abstract syntax tree node. -/// -public abstract record AstNode( - AstNodeType Type, - ImmutableArray Children, - SourceLocation? Location); - -/// -/// Types of AST nodes. -/// -public enum AstNodeType -{ - // Structure - Function, - Block, - Parameter, - - // Control flow - If, - While, - For, - DoWhile, - Switch, - Case, - Default, - Return, - Break, - Continue, - Goto, - Label, - - // Expressions - Assignment, - BinaryOp, - UnaryOp, - TernaryOp, - Call, - Cast, - Sizeof, - - // Operands - Variable, - Constant, - StringLiteral, - ArrayAccess, - FieldAccess, - PointerDeref, - AddressOf, - - // Declarations - VariableDecl, - TypeDef -} - -/// -/// Source location in decompiled code. -/// -public sealed record SourceLocation(int Line, int Column, int Length); - -/// -/// A local variable in decompiled code. -/// -public sealed record LocalVariable( - string Name, - string Type, - int StackOffset, - bool IsParameter, - int? ParameterIndex); - -/// -/// A recognized code pattern. -/// -public sealed record AstPattern( - PatternType Type, - AstNode Node, - PatternMetadata? Metadata); - -/// -/// Types of code patterns. -/// -public enum PatternType -{ - // Loops - CountedLoop, - ConditionalLoop, - InfiniteLoop, - LoopUnrolled, - - // Branches - IfElseChain, - SwitchTable, - ShortCircuit, - - // Memory - MemoryAllocation, - MemoryDeallocation, - BufferOperation, - StackBuffer, - - // Error handling - ErrorCheck, - NullCheck, - BoundsCheck, - - // Idioms - StringOperation, - MathOperation, - BitwiseOperation, - TableLookup -} - -/// -/// Metadata about a recognized pattern. -/// -public sealed record PatternMetadata( - string Description, - decimal Confidence, - ImmutableDictionary? Properties); - -/// -/// Result of comparing two decompiled functions. 
-/// -public sealed record DecompiledComparisonResult( - decimal Similarity, - decimal StructuralSimilarity, - decimal SemanticSimilarity, - AstEditDistance EditDistance, - ImmutableArray Equivalences, - ImmutableArray Differences, - ComparisonConfidence Confidence); - -/// -/// Edit distance between ASTs. -/// -public sealed record AstEditDistance( - int Insertions, - int Deletions, - int Modifications, - int TotalOperations, - decimal NormalizedDistance); - -/// -/// A semantic equivalence between AST nodes. -/// -public sealed record SemanticEquivalence( - AstNode NodeA, - AstNode NodeB, - EquivalenceType Type, - decimal Confidence, - string? Explanation); - -/// -/// Types of semantic equivalence. -/// -public enum EquivalenceType -{ - Identical, - Renamed, - Reordered, - Optimized, - Inlined, - Semantically -} - -/// -/// A difference between two pieces of code. -/// -public sealed record CodeDifference( - DifferenceType Type, - AstNode? NodeA, - AstNode? NodeB, - string Description); - -/// -/// Types of code differences. -/// -public enum DifferenceType -{ - Added, - Removed, - Modified, - Reordered, - TypeChanged, - OptimizationVariant -} - -/// -/// Confidence level for comparison results. -/// -public enum ComparisonConfidence -{ - Low, - Medium, - High, - VeryHigh -} - -/// -/// Options for decompilation. -/// -public sealed record DecompileOptions -{ - public bool SimplifyCode { get; init; } = true; - public bool RecoverTypes { get; init; } = true; - public bool RecoverStructs { get; init; } = true; - public int MaxCodeLength { get; init; } = 100_000; - public TimeSpan Timeout { get; init; } = TimeSpan.FromSeconds(30); -} - -/// -/// Options for AST comparison. -/// -public sealed record ComparisonOptions -{ - public bool IgnoreVariableNames { get; init; } = true; - public bool IgnoreConstants { get; init; } = false; - public bool DetectOptimizations { get; init; } = true; - public decimal MinSimilarityThreshold { get; init; } = 0.5m; -} - -/// -/// Options for code normalization. -/// -public sealed record NormalizationOptions -{ - public bool NormalizeVariables { get; init; } = true; - public bool NormalizeFunctionCalls { get; init; } = true; - public bool NormalizeConstants { get; init; } = false; - public bool NormalizeWhitespace { get; init; } = true; - public bool SortIndependentStatements { get; init; } = false; - public ImmutableHashSet? KnownFunctions { get; init; } - - public static NormalizationOptions Default { get; } = new(); -} - -#region Concrete AST Node Types - -public sealed record FunctionNode( - string Name, - string ReturnType, - ImmutableArray Parameters, - BlockNode Body, - SourceLocation? Location = null) - : AstNode(AstNodeType.Function, [Body, .. Parameters], Location); - -public sealed record ParameterNode( - string Name, - string DataType, - int Index, - SourceLocation? Location = null) - : AstNode(AstNodeType.Parameter, [], Location); - -public sealed record BlockNode( - ImmutableArray Statements, - SourceLocation? Location = null) - : AstNode(AstNodeType.Block, Statements, Location); - -public sealed record IfNode( - AstNode Condition, - AstNode ThenBranch, - AstNode? ElseBranch, - SourceLocation? Location = null) - : AstNode(AstNodeType.If, ElseBranch is null ? [Condition, ThenBranch] : [Condition, ThenBranch, ElseBranch], Location); - -public sealed record WhileNode( - AstNode Condition, - AstNode Body, - SourceLocation? Location = null) - : AstNode(AstNodeType.While, [Condition, Body], Location); - -public sealed record ForNode( - AstNode? 
Init, - AstNode? Condition, - AstNode? Update, - AstNode Body, - SourceLocation? Location = null) - : AstNode(AstNodeType.For, [Init ?? EmptyNode.Instance, Condition ?? EmptyNode.Instance, Update ?? EmptyNode.Instance, Body], Location); - -public sealed record ReturnNode( - AstNode? Value, - SourceLocation? Location = null) - : AstNode(AstNodeType.Return, Value is null ? [] : [Value], Location); - -public sealed record AssignmentNode( - AstNode Target, - AstNode Value, - string Operator, - SourceLocation? Location = null) - : AstNode(AstNodeType.Assignment, [Target, Value], Location); - -public sealed record BinaryOpNode( - AstNode Left, - AstNode Right, - string Operator, - SourceLocation? Location = null) - : AstNode(AstNodeType.BinaryOp, [Left, Right], Location); - -public sealed record UnaryOpNode( - AstNode Operand, - string Operator, - bool IsPrefix, - SourceLocation? Location = null) - : AstNode(AstNodeType.UnaryOp, [Operand], Location); - -public sealed record CallNode( - string FunctionName, - ImmutableArray Arguments, - SourceLocation? Location = null) - : AstNode(AstNodeType.Call, Arguments, Location); - -public sealed record VariableNode( - string Name, - string? DataType, - SourceLocation? Location = null) - : AstNode(AstNodeType.Variable, [], Location); - -public sealed record ConstantNode( - object Value, - string DataType, - SourceLocation? Location = null) - : AstNode(AstNodeType.Constant, [], Location); - -public sealed record ArrayAccessNode( - AstNode Array, - AstNode Index, - SourceLocation? Location = null) - : AstNode(AstNodeType.ArrayAccess, [Array, Index], Location); - -public sealed record FieldAccessNode( - AstNode Object, - string FieldName, - bool IsPointer, - SourceLocation? Location = null) - : AstNode(AstNodeType.FieldAccess, [Object], Location); - -public sealed record CastNode( - AstNode Expression, - string TargetType, - SourceLocation? Location = null) - : AstNode(AstNodeType.Cast, [Expression], Location); - -public sealed record EmptyNode() : AstNode(AstNodeType.Block, [], null) -{ - public static EmptyNode Instance { get; } = new(); -} - -#endregion diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/NormalizationOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/NormalizationOptions.cs new file mode 100644 index 000000000..170cbc831 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/NormalizationOptions.cs @@ -0,0 +1,20 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Options for code normalization. +/// +public sealed record NormalizationOptions +{ + public bool NormalizeVariables { get; init; } = true; + public bool NormalizeFunctionCalls { get; init; } = true; + public bool NormalizeConstants { get; init; } = false; + public bool NormalizeWhitespace { get; init; } = true; + public bool SortIndependentStatements { get; init; } = false; + public ImmutableHashSet? 
KnownFunctions { get; init; } + + public static NormalizationOptions Default { get; } = new(); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Branching.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Branching.cs new file mode 100644 index 000000000..6afcdcfd5 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Branching.cs @@ -0,0 +1,24 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private IfNode ParseIf() + { + Advance(); + Expect(TokenType.Bracket, "("); + var condition = ParseExpression(); + Expect(TokenType.Bracket, ")"); + + var thenBranch = ParseStatement() ?? new BlockNode([]); + AstNode? elseBranch = null; + if (Peek().Value == "else") + { + Advance(); + elseBranch = ParseStatement(); + } + + return new IfNode(condition, thenBranch, elseBranch); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Loops.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Loops.cs new file mode 100644 index 000000000..c784752e0 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Loops.cs @@ -0,0 +1,63 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private WhileNode ParseWhile() + { + Advance(); + Expect(TokenType.Bracket, "("); + var condition = ParseExpression(); + Expect(TokenType.Bracket, ")"); + + var body = ParseStatement() ?? new BlockNode([]); + + return new WhileNode(condition, body); + } + + private ForNode ParseFor() + { + Advance(); + Expect(TokenType.Bracket, "("); + + AstNode? init = null; + if (Peek().Value != ";") + { + init = ParseExpression(); + } + Expect(TokenType.Punctuation, ";"); + + AstNode? condition = null; + if (Peek().Value != ";") + { + condition = ParseExpression(); + } + Expect(TokenType.Punctuation, ";"); + + AstNode? update = null; + if (Peek().Value != ")") + { + update = ParseExpression(); + } + Expect(TokenType.Bracket, ")"); + + var body = ParseStatement() ?? new BlockNode([]); + + return new ForNode(init, condition, update, body); + } + + private AstNode ParseDoWhile() + { + Advance(); + var body = ParseStatement() ?? new BlockNode([]); + + Expect(TokenType.Keyword, "while"); + Expect(TokenType.Bracket, "("); + var condition = ParseExpression(); + Expect(TokenType.Bracket, ")"); + Expect(TokenType.Punctuation, ";"); + + return new WhileNode(condition, body); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Termination.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Termination.cs new file mode 100644 index 000000000..a545e166a --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.ControlFlow.Termination.cs @@ -0,0 +1,50 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
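One behavioural note on the loop parsers above: ParseDoWhile lowers a do-while into a WhileNode (the monolithic parser removed earlier in this diff carried an explicit "Simplify do-while to while for now" comment, which the split files drop), so downstream AST comparison cannot distinguish the two loop forms. An illustrative sketch of the shapes produced, assuming the node records defined elsewhere in this patch:

// Sketch only; inputs and resulting shapes are inferred from the parsers above.
// "for (i = 0; i < n; i++) { total += i; }" parses to:
//   ForNode(
//       Init:      AssignmentNode(VariableNode("i"), ConstantNode("0", "int"), "="),
//       Condition: BinaryOpNode(VariableNode("i"), VariableNode("n"), "<"),
//       Update:    UnaryOpNode(VariableNode("i"), "++", IsPrefix: false),
//       Body:      BlockNode([AssignmentNode(VariableNode("total"), VariableNode("i"), "+=")]))
//
// "do { total += i; } while (i < n);" parses to:
//   WhileNode(BinaryOpNode(VariableNode("i"), VariableNode("n"), "<"), BlockNode([...]))
//   -- the at-least-once semantics of do-while are not preserved in the AST.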
+namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private ReturnNode ParseReturn() + { + Advance(); + + AstNode? value = null; + if (Peek().Value != ";") + { + value = ParseExpression(); + } + Expect(TokenType.Punctuation, ";"); + + return new ReturnNode(value); + } + + private AstNode ParseBreak() + { + Advance(); + Expect(TokenType.Punctuation, ";"); + return new BlockNode([]); + } + + private AstNode ParseContinue() + { + Advance(); + Expect(TokenType.Punctuation, ";"); + return new BlockNode([]); + } + + private AstNode? SkipSemicolon() + { + Advance(); + return null; + } + + private AstNode? ParseExpressionStatement() + { + var expr = ParseExpression(); + if (Peek().Value == ";") + { + Advance(); + } + return expr; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Arithmetic.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Arithmetic.cs new file mode 100644 index 000000000..fdaf58f6e --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Arithmetic.cs @@ -0,0 +1,48 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private AstNode ParseShift() + { + var left = ParseAdditive(); + + while (Peek().Value is "<<" or ">>") + { + var op = Advance().Value; + var right = ParseAdditive(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } + + private AstNode ParseAdditive() + { + var left = ParseMultiplicative(); + + while (Peek().Value is "+" or "-") + { + var op = Advance().Value; + var right = ParseMultiplicative(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } + + private AstNode ParseMultiplicative() + { + var left = ParseUnary(); + + while (Peek().Value is "*" or "/" or "%") + { + var op = Advance().Value; + var right = ParseUnary(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Logical.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Logical.cs new file mode 100644 index 000000000..02c115f84 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Logical.cs @@ -0,0 +1,62 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
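The Arithmetic partial above, together with the logical and comparison levels in the next file, implements precedence by layering: each method parses the next-tighter level first and then folds its own operators left-associatively. A small editorial sketch of the resulting shape (not part of the change):

// Sketch only: how "a + b * c << 2" nests, given ParseShift -> ParseAdditive -> ParseMultiplicative.
//   ParseMultiplicative binds "b * c" first:   BinaryOpNode(b, c, "*")
//   ParseAdditive then folds the "+":          BinaryOpNode(a, (b * c), "+")
//   ParseShift finally applies "<<":           BinaryOpNode((a + b * c), 2, "<<")
// Multiplication binds tighter than addition, which binds tighter than shifts,
// and repeated operators at one level associate to the left.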
+namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private AstNode ParseLogicalOr() + { + var left = ParseLogicalAnd(); + + while (Peek().Value == "||") + { + var op = Advance().Value; + var right = ParseLogicalAnd(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } + + private AstNode ParseLogicalAnd() + { + var left = ParseBitwiseOr(); + + while (Peek().Value == "&&") + { + var op = Advance().Value; + var right = ParseBitwiseOr(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } + + private AstNode ParseBitwiseOr() + { + var left = ParseComparison(); + + while (Peek().Value is "|" or "^" or "&") + { + var op = Advance().Value; + var right = ParseComparison(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } + + private AstNode ParseComparison() + { + var left = ParseShift(); + + while (Peek().Value is "==" or "!=" or "<" or ">" or "<=" or ">=") + { + var op = Advance().Value; + var right = ParseShift(); + left = new BinaryOpNode(left, right, op); + } + + return left; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Primary.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Primary.cs new file mode 100644 index 000000000..1f2d23a56 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Primary.cs @@ -0,0 +1,88 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private ImmutableArray ParseArgumentList() + { + var args = new List(); + + if (Peek().Value == ")") + { + return []; + } + + do + { + if (Peek().Value == ",") + { + Advance(); + } + args.Add(ParseExpression()); + } + while (Peek().Value == ","); + + return [.. args]; + } + + private AstNode ParsePrimary() + { + var token = Peek(); + + if (token.Type == TokenType.Number) + { + Advance(); + return new ConstantNode(token.Value, "int"); + } + + if (token.Type == TokenType.String) + { + Advance(); + return new ConstantNode(token.Value, "char*"); + } + + if (token.Type == TokenType.Char) + { + Advance(); + return new ConstantNode(token.Value, "char"); + } + + if (token.Type == TokenType.Identifier) + { + Advance(); + return new VariableNode(token.Value, null); + } + + if (token.Value == "(") + { + Advance(); + + if (IsType(Peek().Value)) + { + var targetType = ParseType(); + Expect(TokenType.Bracket, ")"); + var expr = ParseUnary(); + return new CastNode(expr, targetType); + } + + var inner = ParseExpression(); + Expect(TokenType.Bracket, ")"); + return inner; + } + + if (token.Value == "sizeof") + { + Advance(); + Expect(TokenType.Bracket, "("); + var type = ParseType(); + Expect(TokenType.Bracket, ")"); + return new ConstantNode($"sizeof({type})", "size_t"); + } + + Advance(); + return new ConstantNode(token.Value, "unknown"); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Unary.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Unary.cs new file mode 100644 index 000000000..0bcd02b3e --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.Unary.cs @@ -0,0 +1,62 @@ +// Copyright (c) StellaOps. All rights reserved. 
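ParsePrimary above disambiguates casts from parenthesized expressions by peeking for a type name after "(" via IsType. Two hand-traced outcomes (illustrative; node constructors inferred from the call sites, and note that grouping parentheses are simply dropped):

    // "(int *)p"  =>  new CastNode(new VariableNode("p", null), "int*")
    // "(x + 1)"   =>  new BinaryOpNode(new VariableNode("x", null), new ConstantNode("1", "int"), "+")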
+// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private AstNode ParseUnary() + { + if (Peek().Value is "!" or "~" or "-" or "+" or "*" or "&" or "++" or "--") + { + var op = Advance().Value; + var operand = ParseUnary(); + return new UnaryOpNode(operand, op, true); + } + + return ParsePostfix(); + } + + private AstNode ParsePostfix() + { + var expr = ParsePrimary(); + + while (true) + { + if (Peek().Value == "(") + { + Advance(); + var args = ParseArgumentList(); + Expect(TokenType.Bracket, ")"); + + if (expr is VariableNode varNode) + { + expr = new CallNode(varNode.Name, args); + } + } + else if (Peek().Value == "[") + { + Advance(); + var index = ParseExpression(); + Expect(TokenType.Bracket, "]"); + expr = new ArrayAccessNode(expr, index); + } + else if (Peek().Value is "." or "->") + { + var isPointer = Advance().Value == "->"; + var field = Expect(TokenType.Identifier).Value; + expr = new FieldAccessNode(expr, field, isPointer); + } + else if (Peek().Value is "++" or "--") + { + var op = Advance().Value; + expr = new UnaryOpNode(expr, op, false); + } + else + { + break; + } + } + + return expr; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.cs new file mode 100644 index 000000000..e602778e4 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Expression.cs @@ -0,0 +1,25 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private AstNode ParseExpression() + { + return ParseAssignment(); + } + + private AstNode ParseAssignment() + { + var left = ParseLogicalOr(); + + if (Peek().Value is "=" or "+=" or "-=" or "*=" or "/=" or "&=" or "|=" or "^=" or "<<=" or ">>=") + { + var op = Advance().Value; + var right = ParseAssignment(); + return new AssignmentNode(left, right, op); + } + + return left; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Helpers.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Helpers.cs new file mode 100644 index 000000000..3d67625ce --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.Helpers.cs @@ -0,0 +1,32 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private static bool IsType(string value) + { + return value is "int" or "char" or "void" or "long" or "short" or "float" or "double" + or "unsigned" or "signed" or "const" or "struct" or "union" or "enum" + or "undefined" or "undefined1" or "undefined2" or "undefined4" or "undefined8" + or "byte" or "word" or "dword" or "qword" or "pointer" or "code" or "uint" or "ulong"; + } + + private Token Peek() => _pos < _tokens.Count ? _tokens[_pos] : new Token(TokenType.Punctuation, "", 0, 0); + + private Token PeekAhead(int offset) => _pos + offset < _tokens.Count + ? _tokens[_pos + offset] + : new Token(TokenType.Punctuation, "", 0, 0); + + private Token Advance() => _pos < _tokens.Count ? 
_tokens[_pos++] : new Token(TokenType.Punctuation, "", 0, 0); + + private Token Expect(TokenType type, string? value = null) + { + var token = Peek(); + if (token.Type != type || (value is not null && token.Value != value)) + { + return Advance(); + } + return Advance(); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.cs new file mode 100644 index 000000000..a9c41ad68 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/RecursiveParser.cs @@ -0,0 +1,124 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Decompiler; + +internal sealed partial class RecursiveParser +{ + private readonly List _tokens; + private int _pos; + + public RecursiveParser(List tokens) + { + _tokens = tokens; + _pos = 0; + } + + public AstNode ParseFunction() + { + var returnType = ParseType(); + var name = Expect(TokenType.Identifier).Value; + + Expect(TokenType.Bracket, "("); + var parameters = ParseParameterList(); + Expect(TokenType.Bracket, ")"); + + var body = ParseBlock(); + + return new FunctionNode(name, returnType, parameters, body); + } + + private string ParseType() + { + var type = new System.Text.StringBuilder(); + + while (Peek().Value is "const" or "unsigned" or "signed" or "static" or "extern") + { + type.Append(Advance().Value); + type.Append(' '); + } + + type.Append(Advance().Value); + + while (Peek().Value == "*") + { + type.Append(Advance().Value); + } + + return type.ToString().Trim(); + } + + private ImmutableArray ParseParameterList() + { + var parameters = new List(); + var index = 0; + + if (Peek().Value == ")") + { + return []; + } + + if (Peek().Value == "void" && PeekAhead(1).Value == ")") + { + Advance(); + return []; + } + + do + { + if (Peek().Value == ",") + { + Advance(); + } + + var type = ParseType(); + var name = Peek().Type == TokenType.Identifier ? Advance().Value : $"param_{index}"; + + parameters.Add(new ParameterNode(name, type, index)); + index++; + } + while (Peek().Value == ","); + + return [.. parameters]; + } + + private BlockNode ParseBlock() + { + Expect(TokenType.Bracket, "{"); + + var statements = new List(); + + while (Peek().Value != "}") + { + var stmt = ParseStatement(); + if (stmt is not null) + { + statements.Add(stmt); + } + } + + Expect(TokenType.Bracket, "}"); + + return new BlockNode([.. statements]); + } + + private AstNode? ParseStatement() + { + var token = Peek(); + + return token.Value switch + { + "if" => ParseIf(), + "while" => ParseWhile(), + "for" => ParseFor(), + "do" => ParseDoWhile(), + "return" => ParseReturn(), + "break" => ParseBreak(), + "continue" => ParseContinue(), + "{" => ParseBlock(), + ";" => SkipSemicolon(), + _ => ParseExpressionStatement() + }; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SemanticEquivalence.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SemanticEquivalence.cs new file mode 100644 index 000000000..e8d246e13 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SemanticEquivalence.cs @@ -0,0 +1,26 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// A semantic equivalence between AST nodes. 
+/// +public sealed record SemanticEquivalence( + AstNode NodeA, + AstNode NodeB, + EquivalenceType Type, + decimal Confidence, + string? Explanation); + +/// +/// Types of semantic equivalence. +/// +public enum EquivalenceType +{ + Identical, + Renamed, + Reordered, + Optimized, + Inlined, + Semantically +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SourceLocation.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SourceLocation.cs new file mode 100644 index 000000000..28ffac731 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/SourceLocation.cs @@ -0,0 +1,8 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +/// +/// Source location in decompiled code. +/// +public sealed record SourceLocation(int Line, int Column, int Length); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TASKS.md b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TASKS.md index 0600d78d5..ff93cfb51 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TASKS.md +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/StellaOps.BinaryIndex.Decompiler.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/StellaOps.BinaryIndex.Decompiler.md. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Token.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Token.cs new file mode 100644 index 000000000..c60a376cd --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/Token.cs @@ -0,0 +1,5 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Decompiler; + +internal readonly record struct Token(TokenType Type, string Value, int Line, int Column); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TokenType.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TokenType.cs new file mode 100644 index 000000000..59b4bb22f --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Decompiler/TokenType.cs @@ -0,0 +1,15 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
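For orientation, a minimal sketch of driving the parser with a hand-built token list. The tokenizer is not part of this diff and RecursiveParser is internal, so this is test-style usage rather than a public API, and it assumes the constructor takes a List<Token>:

    var tokens = new List<Token>
    {
        new(TokenType.Keyword, "int", 1, 1),
        new(TokenType.Identifier, "main", 1, 5),
        new(TokenType.Bracket, "(", 1, 9),
        new(TokenType.Keyword, "void", 1, 10),
        new(TokenType.Bracket, ")", 1, 14),
        new(TokenType.Bracket, "{", 1, 16),
        new(TokenType.Keyword, "return", 1, 18),
        new(TokenType.Number, "0", 1, 25),
        new(TokenType.Punctuation, ";", 1, 26),
        new(TokenType.Bracket, "}", 1, 28),
    };
    var ast = new RecursiveParser(tokens).ParseFunction();
    // ast: FunctionNode("main", "int", [], BlockNode([ReturnNode(ConstantNode("0", "int"))]))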
+namespace StellaOps.BinaryIndex.Decompiler; + +internal enum TokenType +{ + Identifier, + Keyword, + Number, + String, + Char, + Operator, + Bracket, + Punctuation +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPlugin.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPlugin.cs index 6a3bbe5d3..2b0781934 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPlugin.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPlugin.cs @@ -71,70 +71,3 @@ public interface IDisassemblyPlugin /// Enumerable of disassembled instructions. IEnumerable DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol); } - -/// -/// Registry for disassembly plugins. Manages plugin discovery and selection. -/// -public interface IDisassemblyPluginRegistry -{ - /// - /// Gets all registered plugins. - /// - IReadOnlyList Plugins { get; } - - /// - /// Finds the best plugin for the given architecture and format. - /// - /// Target CPU architecture. - /// Target binary format. - /// The best matching plugin, or null if none found. - IDisassemblyPlugin? FindPlugin(CpuArchitecture architecture, BinaryFormat format); - - /// - /// Finds all plugins that support the given architecture. - /// - /// Target CPU architecture. - /// All matching plugins ordered by priority. - IEnumerable FindPluginsForArchitecture(CpuArchitecture architecture); - - /// - /// Finds all plugins that support the given format. - /// - /// Target binary format. - /// All matching plugins ordered by priority. - IEnumerable FindPluginsForFormat(BinaryFormat format); - - /// - /// Gets a plugin by its unique identifier. - /// - /// The plugin identifier. - /// The plugin if found, null otherwise. - IDisassemblyPlugin? GetPlugin(string pluginId); -} - -/// -/// Facade service for disassembly operations. Automatically selects the best plugin. -/// -public interface IDisassemblyService -{ - /// - /// Loads a binary and automatically selects the best plugin. - /// - /// The binary stream to load. - /// Optional preferred plugin ID. - /// Binary information and the plugin used. - (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(Stream stream, string? preferredPluginId = null); - - /// - /// Loads a binary from bytes and automatically selects the best plugin. - /// - /// The binary data. - /// Optional preferred plugin ID. - /// Binary information and the plugin used. - (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(ReadOnlySpan bytes, string? preferredPluginId = null); - - /// - /// Gets the plugin registry. - /// - IDisassemblyPluginRegistry Registry { get; } -} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPluginRegistry.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPluginRegistry.cs new file mode 100644 index 000000000..1b44eaf9a --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyPluginRegistry.cs @@ -0,0 +1,43 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Registry for disassembly plugins. Manages plugin discovery and selection. +/// +public interface IDisassemblyPluginRegistry +{ + /// + /// Gets all registered plugins. 
+ /// + IReadOnlyList Plugins { get; } + + /// + /// Finds the best plugin for the given architecture and format. + /// + /// Target CPU architecture. + /// Target binary format. + /// The best matching plugin, or null if none found. + IDisassemblyPlugin? FindPlugin(CpuArchitecture architecture, BinaryFormat format); + + /// + /// Finds all plugins that support the given architecture. + /// + /// Target CPU architecture. + /// All matching plugins ordered by priority. + IEnumerable FindPluginsForArchitecture(CpuArchitecture architecture); + + /// + /// Finds all plugins that support the given format. + /// + /// Target binary format. + /// All matching plugins ordered by priority. + IEnumerable FindPluginsForFormat(BinaryFormat format); + + /// + /// Gets a plugin by its unique identifier. + /// + /// The plugin identifier. + /// The plugin if found, null otherwise. + IDisassemblyPlugin? GetPlugin(string pluginId); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyService.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyService.cs new file mode 100644 index 000000000..a0bfb3058 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/IDisassemblyService.cs @@ -0,0 +1,30 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Facade service for disassembly operations. Automatically selects the best plugin. +/// +public interface IDisassemblyService +{ + /// + /// Loads a binary and automatically selects the best plugin. + /// + /// The binary stream to load. + /// Optional preferred plugin ID. + /// Binary information and the plugin used. + (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(Stream stream, string? preferredPluginId = null); + + /// + /// Loads a binary from bytes and automatically selects the best plugin. + /// + /// The binary data. + /// Optional preferred plugin ID. + /// Binary information and the plugin used. + (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(ReadOnlySpan bytes, string? preferredPluginId = null); + + /// + /// Gets the plugin registry. + /// + IDisassemblyPluginRegistry Registry { get; } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models.cs deleted file mode 100644 index f607d954c..000000000 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models.cs +++ /dev/null @@ -1,348 +0,0 @@ -// Copyright (c) StellaOps. All rights reserved. -// Licensed under BUSL-1.1. See LICENSE in the project root. - -using System.Collections.Immutable; - -namespace StellaOps.BinaryIndex.Disassembly; - -/// -/// CPU architecture identifier. -/// -public enum CpuArchitecture -{ - /// Unknown architecture. - Unknown = 0, - - /// Intel/AMD 32-bit x86. - X86 = 1, - - /// Intel/AMD 64-bit x86-64 (amd64). - X86_64 = 2, - - /// ARM 32-bit (ARMv7). - ARM32 = 3, - - /// ARM 64-bit (AArch64/ARMv8). - ARM64 = 4, - - /// MIPS 32-bit. - MIPS32 = 5, - - /// MIPS 64-bit. - MIPS64 = 6, - - /// RISC-V 64-bit. - RISCV64 = 7, - - /// PowerPC 32-bit. - PPC32 = 8, - - /// PowerPC 64-bit. - PPC64 = 9, - - /// SPARC. - SPARC = 10, - - /// SuperH SH4. - SH4 = 11, - - /// AVR microcontroller. - AVR = 12, - - /// Ethereum Virtual Machine. - EVM = 13, - - /// WebAssembly. 
- WASM = 14 -} - -/// -/// Binary executable format. -/// -public enum BinaryFormat -{ - /// Unknown format. - Unknown = 0, - - /// Raw binary data (no format metadata). - Raw = 1, - - /// Executable and Linkable Format (Linux, BSD, etc.). - ELF = 2, - - /// Portable Executable (Windows). - PE = 3, - - /// Mach-O (macOS, iOS). - MachO = 4, - - /// WebAssembly module. - WASM = 5 -} - -/// -/// Describes the capabilities of a disassembly plugin. -/// -public sealed record DisassemblyCapabilities -{ - /// - /// The unique identifier of the plugin. - /// - public required string PluginId { get; init; } - - /// - /// Display name of the disassembly engine. - /// - public required string Name { get; init; } - - /// - /// Version of the underlying disassembly library. - /// - public required string Version { get; init; } - - /// - /// Supported CPU architectures. - /// - public required ImmutableHashSet SupportedArchitectures { get; init; } - - /// - /// Supported binary formats. - /// - public required ImmutableHashSet SupportedFormats { get; init; } - - /// - /// Whether the plugin supports lifting to intermediate representation. - /// - public bool SupportsLifting { get; init; } - - /// - /// Whether the plugin supports control flow graph recovery. - /// - public bool SupportsCfgRecovery { get; init; } - - /// - /// Priority for plugin selection when multiple plugins support the same arch/format. - /// Higher values indicate higher priority. - /// - public int Priority { get; init; } = 0; - - /// - /// Checks if this plugin supports the given architecture. - /// - public bool SupportsArchitecture(CpuArchitecture arch) => - SupportedArchitectures.Contains(arch); - - /// - /// Checks if this plugin supports the given format. - /// - public bool SupportsFormat(BinaryFormat format) => - SupportedFormats.Contains(format); - - /// - /// Checks if this plugin can handle the given architecture and format combination. - /// - public bool CanHandle(CpuArchitecture arch, BinaryFormat format) => - SupportsArchitecture(arch) && SupportsFormat(format); -} - -/// -/// Information about a loaded binary. -/// -/// Binary format: ELF, PE, MachO, etc. -/// CPU architecture. -/// 32 or 64 bit. -/// Byte order. -/// Application binary interface hint (gnu, musl, msvc, darwin). -/// Entry point address if available. -/// Build identifier if present (e.g., GNU build-id). -/// Additional metadata from the binary. -/// Internal handle for the disassembly engine (engine-specific). -public sealed record BinaryInfo( - BinaryFormat Format, - CpuArchitecture Architecture, - int Bitness, - Endianness Endianness, - string? Abi, - ulong? EntryPoint, - string? BuildId, - IReadOnlyDictionary Metadata, - object Handle); - -/// -/// Byte order. -/// -public enum Endianness -{ - /// Little-endian (LSB first). - Little, - /// Big-endian (MSB first). - Big -} - -/// -/// Represents a code region (section) in a binary. -/// -/// Section name: .text, .rodata, etc. -/// Virtual address in memory. -/// Offset in the binary file. -/// Size in bytes. -/// Whether the region contains executable code. -/// Whether the region is readable. -/// Whether the region is writable. -public sealed record CodeRegion( - string Name, - ulong VirtualAddress, - ulong FileOffset, - ulong Size, - bool IsExecutable, - bool IsReadable, - bool IsWritable); - -/// -/// Information about a symbol in the binary. -/// -/// Symbol name. -/// Virtual address of the symbol. -/// Size in bytes (0 if unknown). -/// Symbol type. -/// Symbol binding. 
-/// Section containing the symbol. -public sealed record SymbolInfo( - string Name, - ulong Address, - ulong Size, - SymbolType Type, - SymbolBinding Binding, - string? Section); - -/// -/// Type of symbol. -/// -public enum SymbolType -{ - /// Unknown or unspecified type. - Unknown, - /// Function/procedure. - Function, - /// Data object. - Object, - /// Section symbol. - Section, - /// Source file name. - File, - /// Common block symbol. - Common, - /// Thread-local storage. - Tls -} - -/// -/// Symbol binding/visibility. -/// -public enum SymbolBinding -{ - /// Unknown binding. - Unknown, - /// Local symbol (not visible outside the object). - Local, - /// Global symbol (visible to other objects). - Global, - /// Weak symbol (can be overridden). - Weak -} - -/// -/// A disassembled instruction. -/// -/// Virtual address of the instruction. -/// Raw bytes of the instruction. -/// Instruction mnemonic (e.g., MOV, ADD, JMP). -/// Text representation of operands. -/// Classification of the instruction. -/// Parsed operands. -public sealed record DisassembledInstruction( - ulong Address, - ImmutableArray RawBytes, - string Mnemonic, - string OperandsText, - InstructionKind Kind, - ImmutableArray Operands); - -/// -/// Classification of instruction types. -/// -public enum InstructionKind -{ - /// Unknown or unclassified instruction. - Unknown, - /// Arithmetic operation (ADD, SUB, MUL, DIV). - Arithmetic, - /// Logical operation (AND, OR, XOR, NOT). - Logic, - /// Data movement (MOV, PUSH, POP). - Move, - /// Memory load operation. - Load, - /// Memory store operation. - Store, - /// Unconditional branch (JMP). - Branch, - /// Conditional branch (JE, JNE, JL, etc.). - ConditionalBranch, - /// Function call. - Call, - /// Function return. - Return, - /// No operation. - Nop, - /// System call. - Syscall, - /// Software interrupt. - Interrupt, - /// Compare operation. - Compare, - /// Shift operation. - Shift, - /// Vector/SIMD operation. - Vector, - /// Floating point operation. - FloatingPoint -} - -/// -/// An instruction operand. -/// -/// Operand type. -/// Text representation. -/// Immediate value if applicable. -/// Register name if applicable. -/// Base register for memory operand. -/// Index register for memory operand. -/// Scale factor for indexed memory operand. -/// Displacement for memory operand. -public sealed record Operand( - OperandType Type, - string Text, - long? Value = null, - string? Register = null, - string? MemoryBase = null, - string? MemoryIndex = null, - int? MemoryScale = null, - long? MemoryDisplacement = null); - -/// -/// Type of operand. -/// -public enum OperandType -{ - /// Unknown operand type. - Unknown, - /// CPU register. - Register, - /// Immediate value. - Immediate, - /// Memory reference. - Memory, - /// Address/label. - Address -} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryFormat.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryFormat.cs new file mode 100644 index 000000000..81041fd78 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryFormat.cs @@ -0,0 +1,22 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Binary executable format. +/// +public enum BinaryFormat +{ + /// Unknown format. + Unknown = 0, + /// Raw binary data (no format metadata). 
+ Raw = 1, + /// Executable and Linkable Format (Linux, BSD, etc.). + ELF = 2, + /// Portable Executable (Windows). + PE = 3, + /// Mach-O (macOS, iOS). + MachO = 4, + /// WebAssembly module. + WASM = 5 +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryInfo.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryInfo.cs new file mode 100644 index 000000000..22ff22085 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/BinaryInfo.cs @@ -0,0 +1,26 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Information about a loaded binary. +/// +/// Binary format: ELF, PE, MachO, etc. +/// CPU architecture. +/// 32 or 64 bit. +/// Byte order. +/// Application binary interface hint (gnu, musl, msvc, darwin). +/// Entry point address if available. +/// Build identifier if present (e.g., GNU build-id). +/// Additional metadata from the binary. +/// Internal handle for the disassembly engine (engine-specific). +public sealed record BinaryInfo( + BinaryFormat Format, + CpuArchitecture Architecture, + int Bitness, + Endianness Endianness, + string? Abi, + ulong? EntryPoint, + string? BuildId, + IReadOnlyDictionary Metadata, + object Handle); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CodeRegion.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CodeRegion.cs new file mode 100644 index 000000000..ac209a621 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CodeRegion.cs @@ -0,0 +1,22 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Represents a code region (section) in a binary. +/// +/// Section name: .text, .rodata, etc. +/// Virtual address in memory. +/// Offset in the binary file. +/// Size in bytes. +/// Whether the region contains executable code. +/// Whether the region is readable. +/// Whether the region is writable. +public sealed record CodeRegion( + string Name, + ulong VirtualAddress, + ulong FileOffset, + ulong Size, + bool IsExecutable, + bool IsReadable, + bool IsWritable); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CpuArchitecture.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CpuArchitecture.cs new file mode 100644 index 000000000..1c999d330 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/CpuArchitecture.cs @@ -0,0 +1,40 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// CPU architecture identifier. +/// +public enum CpuArchitecture +{ + /// Unknown architecture. + Unknown = 0, + /// Intel/AMD 32-bit x86. + X86 = 1, + /// Intel/AMD 64-bit x86-64 (amd64). + X86_64 = 2, + /// ARM 32-bit (ARMv7). + ARM32 = 3, + /// ARM 64-bit (AArch64/ARMv8). + ARM64 = 4, + /// MIPS 32-bit. + MIPS32 = 5, + /// MIPS 64-bit. + MIPS64 = 6, + /// RISC-V 64-bit. + RISCV64 = 7, + /// PowerPC 32-bit. + PPC32 = 8, + /// PowerPC 64-bit. + PPC64 = 9, + /// SPARC. + SPARC = 10, + /// SuperH SH4. + SH4 = 11, + /// AVR microcontroller. 
+ AVR = 12, + /// Ethereum Virtual Machine. + EVM = 13, + /// WebAssembly. + WASM = 14 +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassembledInstruction.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassembledInstruction.cs new file mode 100644 index 000000000..471c6d084 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassembledInstruction.cs @@ -0,0 +1,22 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// A disassembled instruction. +/// +/// Virtual address of the instruction. +/// Raw bytes of the instruction. +/// Instruction mnemonic (e.g., MOV, ADD, JMP). +/// Text representation of operands. +/// Classification of the instruction. +/// Parsed operands. +public sealed record DisassembledInstruction( + ulong Address, + ImmutableArray RawBytes, + string Mnemonic, + string OperandsText, + InstructionKind Kind, + ImmutableArray Operands); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassemblyCapabilities.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassemblyCapabilities.cs new file mode 100644 index 000000000..6bb72f7b7 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/DisassemblyCapabilities.cs @@ -0,0 +1,70 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Describes the capabilities of a disassembly plugin. +/// +public sealed record DisassemblyCapabilities +{ + /// + /// The unique identifier of the plugin. + /// + public required string PluginId { get; init; } + + /// + /// Display name of the disassembly engine. + /// + public required string Name { get; init; } + + /// + /// Version of the underlying disassembly library. + /// + public required string Version { get; init; } + + /// + /// Supported CPU architectures. + /// + public required ImmutableHashSet SupportedArchitectures { get; init; } + + /// + /// Supported binary formats. + /// + public required ImmutableHashSet SupportedFormats { get; init; } + + /// + /// Whether the plugin supports lifting to intermediate representation. + /// + public bool SupportsLifting { get; init; } + + /// + /// Whether the plugin supports control flow graph recovery. + /// + public bool SupportsCfgRecovery { get; init; } + + /// + /// Priority for plugin selection when multiple plugins support the same arch/format. + /// Higher values indicate higher priority. + /// + public int Priority { get; init; } = 0; + + /// + /// Checks if this plugin supports the given architecture. + /// + public bool SupportsArchitecture(CpuArchitecture arch) => + SupportedArchitectures.Contains(arch); + + /// + /// Checks if this plugin supports the given format. + /// + public bool SupportsFormat(BinaryFormat format) => + SupportedFormats.Contains(format); + + /// + /// Checks if this plugin can handle the given architecture and format combination. 
+ /// + public bool CanHandle(CpuArchitecture arch, BinaryFormat format) => + SupportsArchitecture(arch) && SupportsFormat(format); +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Endianness.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Endianness.cs new file mode 100644 index 000000000..561f6db96 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Endianness.cs @@ -0,0 +1,14 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Byte order. +/// +public enum Endianness +{ + /// Little-endian (LSB first). + Little, + /// Big-endian (MSB first). + Big +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/InstructionKind.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/InstructionKind.cs new file mode 100644 index 000000000..f633f47ed --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/InstructionKind.cs @@ -0,0 +1,44 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Classification of instruction types. +/// +public enum InstructionKind +{ + /// Unknown or unclassified instruction. + Unknown, + /// Arithmetic operation (ADD, SUB, MUL, DIV). + Arithmetic, + /// Logical operation (AND, OR, XOR, NOT). + Logic, + /// Data movement (MOV, PUSH, POP). + Move, + /// Memory load operation. + Load, + /// Memory store operation. + Store, + /// Unconditional branch (JMP). + Branch, + /// Conditional branch (JE, JNE, JL, etc.). + ConditionalBranch, + /// Function call. + Call, + /// Function return. + Return, + /// No operation. + Nop, + /// System call. + Syscall, + /// Software interrupt. + Interrupt, + /// Compare operation. + Compare, + /// Shift operation. + Shift, + /// Vector/SIMD operation. + Vector, + /// Floating point operation. + FloatingPoint +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Operand.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Operand.cs new file mode 100644 index 000000000..c3f90b90f --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/Operand.cs @@ -0,0 +1,24 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// An instruction operand. +/// +/// Operand type. +/// Text representation. +/// Immediate value if applicable. +/// Register name if applicable. +/// Base register for memory operand. +/// Index register for memory operand. +/// Scale factor for indexed memory operand. +/// Displacement for memory operand. +public sealed record Operand( + OperandType Type, + string Text, + long? Value = null, + string? Register = null, + string? MemoryBase = null, + string? MemoryIndex = null, + int? MemoryScale = null, + long? 
MemoryDisplacement = null); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/OperandType.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/OperandType.cs new file mode 100644 index 000000000..596675265 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/OperandType.cs @@ -0,0 +1,20 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Type of operand. +/// +public enum OperandType +{ + /// Unknown operand type. + Unknown, + /// CPU register. + Register, + /// Immediate value. + Immediate, + /// Memory reference. + Memory, + /// Address/label. + Address +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/SymbolInfo.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/SymbolInfo.cs new file mode 100644 index 000000000..83dccc823 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/Models/SymbolInfo.cs @@ -0,0 +1,56 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Information about a symbol in the binary. +/// +/// Symbol name. +/// Virtual address of the symbol. +/// Size in bytes (0 if unknown). +/// Symbol type. +/// Symbol binding. +/// Section containing the symbol. +public sealed record SymbolInfo( + string Name, + ulong Address, + ulong Size, + SymbolType Type, + SymbolBinding Binding, + string? Section); + +/// +/// Type of symbol. +/// +public enum SymbolType +{ + /// Unknown or unspecified type. + Unknown, + /// Function/procedure. + Function, + /// Data object. + Object, + /// Section symbol. + Section, + /// Source file name. + File, + /// Common block symbol. + Common, + /// Thread-local storage. + Tls +} + +/// +/// Symbol binding/visibility. +/// +public enum SymbolBinding +{ + /// Unknown binding. + Unknown, + /// Local symbol (not visible outside the object). + Local, + /// Global symbol (visible to other objects). + Global, + /// Weak symbol (can be overridden). + Weak +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/TASKS.md b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/TASKS.md index 1921f38cd..ee658c6e0 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/TASKS.md +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/StellaOps.BinaryIndex.Disassembly.Abstractions.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.Abstractions/StellaOps.BinaryIndex.Disassembly.Abstractions.md (2026-02-04). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. 
| diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2BinaryHandle.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2BinaryHandle.cs new file mode 100644 index 000000000..bce11ab89 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2BinaryHandle.cs @@ -0,0 +1,11 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2.FrontEnd; +using B2R2.FrontEnd.BinFile; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +/// +/// Internal handle for B2R2 binary data. +/// +internal sealed record B2R2BinaryHandle(BinHandle BinHandle, byte[] Bytes); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.ArchitectureMapping.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.ArchitectureMapping.cs new file mode 100644 index 000000000..fcad96dfd --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.ArchitectureMapping.cs @@ -0,0 +1,89 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2; +using B2R2.FrontEnd.BinFile; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + private static ISA MapToB2R2Isa(CpuArchitecture arch) + { + return arch switch + { + CpuArchitecture.X86 => new ISA(Architecture.Intel, WordSize.Bit32), + CpuArchitecture.X86_64 => new ISA(Architecture.Intel, WordSize.Bit64), + CpuArchitecture.ARM32 => new ISA(Architecture.ARMv7, WordSize.Bit32), + CpuArchitecture.ARM64 => new ISA(Architecture.ARMv8, WordSize.Bit64), + CpuArchitecture.MIPS32 => new ISA(Architecture.MIPS, WordSize.Bit32), + CpuArchitecture.MIPS64 => new ISA(Architecture.MIPS, WordSize.Bit64), + CpuArchitecture.RISCV64 => new ISA(Architecture.RISCV, WordSize.Bit64), + CpuArchitecture.PPC32 => new ISA(Architecture.PPC, Endian.Big, WordSize.Bit32), + CpuArchitecture.SPARC => new ISA(Architecture.SPARC, Endian.Big), + CpuArchitecture.SH4 => new ISA(Architecture.SH4), + CpuArchitecture.AVR => new ISA(Architecture.AVR), + CpuArchitecture.EVM => new ISA(Architecture.EVM, Endian.Big), + _ => new ISA(Architecture.Intel, WordSize.Bit64) + }; + } + + private static CpuArchitecture MapFromB2R2Architecture(ISA isa) + { + return isa.Arch switch + { + Architecture.Intel when isa.WordSize == WordSize.Bit32 => CpuArchitecture.X86, + Architecture.Intel when isa.WordSize == WordSize.Bit64 => CpuArchitecture.X86_64, + Architecture.Intel => isa.IsX86 ? 
CpuArchitecture.X86 : CpuArchitecture.X86_64, + Architecture.ARMv7 => CpuArchitecture.ARM32, + Architecture.ARMv8 when isa.WordSize == WordSize.Bit64 => CpuArchitecture.ARM64, + Architecture.ARMv8 => CpuArchitecture.ARM32, + Architecture.MIPS when isa.WordSize == WordSize.Bit64 => CpuArchitecture.MIPS64, + Architecture.MIPS => CpuArchitecture.MIPS32, + Architecture.RISCV => CpuArchitecture.RISCV64, + Architecture.PPC => CpuArchitecture.PPC32, + Architecture.SPARC => CpuArchitecture.SPARC, + Architecture.SH4 => CpuArchitecture.SH4, + Architecture.AVR => CpuArchitecture.AVR, + Architecture.EVM => CpuArchitecture.EVM, + _ => CpuArchitecture.Unknown + }; + } + + private static BinaryFormat MapFromB2R2Format(FileFormat format) + { + return format switch + { + FileFormat.ELFBinary => BinaryFormat.ELF, + FileFormat.PEBinary => BinaryFormat.PE, + FileFormat.MachBinary => BinaryFormat.MachO, + FileFormat.WasmBinary => BinaryFormat.WASM, + FileFormat.RawBinary => BinaryFormat.Raw, + _ => BinaryFormat.Unknown + }; + } + + private static int GetBitness(WordSize wordSize) + { + return wordSize switch + { + WordSize.Bit8 => 8, + WordSize.Bit16 => 16, + WordSize.Bit32 => 32, + WordSize.Bit64 => 64, + WordSize.Bit128 => 128, + WordSize.Bit256 => 256, + _ => 64 + }; + } + + private static string? DetectAbi(BinaryFormat format) + { + return format switch + { + BinaryFormat.ELF => "gnu", + BinaryFormat.PE => "msvc", + BinaryFormat.MachO => "darwin", + _ => null + }; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.Disassemble.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.Disassemble.cs new file mode 100644 index 000000000..94db79da5 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.Disassemble.cs @@ -0,0 +1,84 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2.FrontEnd.BinLifter; +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + /// + public IEnumerable Disassemble(BinaryInfo binary, CodeRegion region) + { + ArgumentNullException.ThrowIfNull(binary); + ArgumentNullException.ThrowIfNull(region); + + var handle = GetHandle(binary); + var lifter = handle.BinHandle.NewLiftingUnit(); + + var addr = region.VirtualAddress; + var endAddr = region.VirtualAddress + region.Size; + + _logger.LogDebug( + "Disassembling region {Name} from 0x{Start:X} to 0x{End:X}", + region.Name, addr, endAddr); + + while (addr < endAddr) + { + IInstruction? instr; + try + { + instr = lifter.ParseInstruction(addr); + } + catch + { + addr++; + continue; + } + + if (instr is null || instr.Length == 0) + { + addr++; + continue; + } + + yield return MapInstruction(instr, handle, addr); + addr += instr.Length; + } + } + + /// + public IEnumerable Disassemble(BinaryInfo binary, ulong startAddress, ulong length) + { + var region = new CodeRegion( + Name: $"0x{startAddress:X}", + VirtualAddress: startAddress, + FileOffset: startAddress, + Size: length, + IsExecutable: true, + IsReadable: true, + IsWritable: false); + + return Disassemble(binary, region); + } + + /// + public IEnumerable DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) + { + ArgumentNullException.ThrowIfNull(binary); + ArgumentNullException.ThrowIfNull(symbol); + + var size = symbol.Size > 0 ? 
symbol.Size : 4096UL; + + var region = new CodeRegion( + Name: symbol.Name, + VirtualAddress: symbol.Address, + FileOffset: symbol.Address, + Size: size, + IsExecutable: true, + IsReadable: true, + IsWritable: false); + + return Disassemble(binary, region); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.InstructionMapping.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.InstructionMapping.cs new file mode 100644 index 000000000..0c4ab87c0 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.InstructionMapping.cs @@ -0,0 +1,76 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2.FrontEnd.BinLifter; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + private static B2R2BinaryHandle GetHandle(BinaryInfo binary) + { + if (binary.Handle is not B2R2BinaryHandle handle) + throw new ArgumentException("Invalid binary handle - not a B2R2 handle", nameof(binary)); + return handle; + } + + private static DisassembledInstruction MapInstruction(IInstruction instr, B2R2BinaryHandle handle, ulong address) + { + var disasm = instr.Disasm(); + + var parts = disasm.Split(' ', 2, StringSplitOptions.RemoveEmptyEntries); + var mnemonic = parts.Length > 0 ? parts[0] : "???"; + var operandsText = parts.Length > 1 ? parts[1] : ""; + + var offset = (int)(address - handle.BinHandle.File.BaseAddress); + var length = (int)instr.Length; + var rawBytes = offset >= 0 && offset + length <= handle.Bytes.Length + ? handle.Bytes.AsSpan(offset, length).ToArray().ToImmutableArray() + : ImmutableArray.Empty; + + var kind = ClassifyInstruction(instr, mnemonic); + var operands = ParseOperands(operandsText, mnemonic); + + return new DisassembledInstruction( + Address: address, + RawBytes: rawBytes, + Mnemonic: mnemonic, + OperandsText: operandsText, + Kind: kind, + Operands: operands); + } + + private static InstructionKind ClassifyInstruction(IInstruction instr, string mnemonic) + { + if (instr.IsRET) return InstructionKind.Return; + if (instr.IsCall) return InstructionKind.Call; + if (instr.IsCondBranch) return InstructionKind.ConditionalBranch; + if (instr.IsBranch) return InstructionKind.Branch; + if (instr.IsNop) return InstructionKind.Nop; + if (instr.IsInterrupt) return InstructionKind.Syscall; + + var upper = mnemonic.ToUpperInvariant(); + + if (upper is "ADD" or "SUB" or "MUL" or "DIV" or "IMUL" or "IDIV" or "INC" or "DEC" or "NEG" or "ADC" or "SBB") + return InstructionKind.Arithmetic; + + if (upper is "AND" or "OR" or "XOR" or "NOT" or "TEST" or "ORR" or "EOR") + return InstructionKind.Logic; + + if (upper is "SHL" or "SHR" or "SAL" or "SAR" or "ROL" or "ROR" or "LSL" or "LSR" or "ASR") + return InstructionKind.Shift; + + if (upper.StartsWith("MOV", StringComparison.Ordinal) || upper is "LEA" or "PUSH" or "POP" or "XCHG") + return InstructionKind.Move; + + if (upper.StartsWith("LDR", StringComparison.Ordinal) || upper.StartsWith("LD", StringComparison.Ordinal)) + return InstructionKind.Load; + + if (upper.StartsWith("STR", StringComparison.Ordinal) || upper.StartsWith("ST", StringComparison.Ordinal)) + return InstructionKind.Store; + + if (upper is "CMP" or "CMPS" or "SCAS") return InstructionKind.Compare; + + return InstructionKind.Unknown; + } +} diff --git 
a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.LoadBinary.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.LoadBinary.cs new file mode 100644 index 000000000..528ecc25a --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.LoadBinary.cs @@ -0,0 +1,72 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2; +using B2R2.FrontEnd; +using B2R2.FrontEnd.BinFile; +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + /// + public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) + { + ArgumentNullException.ThrowIfNull(stream); + + using var memStream = new MemoryStream(); + stream.CopyTo(memStream); + return LoadBinary(memStream.ToArray(), archHint, formatHint); + } + + /// + public BinaryInfo LoadBinary(ReadOnlySpan bytes, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) + { + var byteArray = bytes.ToArray(); + + _logger.LogDebug("Loading binary with B2R2 plugin (size: {Size} bytes)", byteArray.Length); + + var isa = archHint.HasValue + ? MapToB2R2Isa(archHint.Value) + : new ISA(Architecture.Intel, WordSize.Bit64); // Default to x64 + + var binHandle = new BinHandle(byteArray, isa, null, true); + var binFile = binHandle.File; + + var format = MapFromB2R2Format(binFile.Format); + var architecture = MapFromB2R2Architecture(binFile.ISA); + var bitness = GetBitness(binFile.ISA.WordSize); + var endianness = binFile.ISA.Endian == Endian.Little ? Endianness.Little : Endianness.Big; + var abi = DetectAbi(format); + + var entryPointOpt = binFile.EntryPoint; + var entryPoint = Microsoft.FSharp.Core.FSharpOption.get_IsSome(entryPointOpt) + ? entryPointOpt.Value + : (ulong?)null; + + _logger.LogInformation( + "Loaded binary with B2R2: Format={Format}, Architecture={Architecture}, Endian={Endian}", + format, architecture, endianness); + + var metadata = new Dictionary + { + ["size"] = byteArray.Length, + ["b2r2_isa"] = binFile.ISA.Arch.ToString() + }; + if (entryPoint.HasValue) + { + metadata["entry_point"] = entryPoint.Value; + } + + return new BinaryInfo( + Format: format, + Architecture: architecture, + Bitness: bitness, + Endianness: endianness, + Abi: abi, + EntryPoint: entryPoint, + BuildId: null, + Metadata: metadata, + Handle: new B2R2BinaryHandle(binHandle, byteArray)); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Memory.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Memory.cs new file mode 100644 index 000000000..01ae277a8 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Memory.cs @@ -0,0 +1,109 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
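A rough end-to-end sketch of the plugin surface shown above. The plugin constructor is not visible in this hunk, so an ILogger-accepting constructor is assumed from the _logger field, and "sample.elf" is a placeholder path:

    using Microsoft.Extensions.Logging.Abstractions;

    var plugin = new B2R2DisassemblyPlugin(NullLogger<B2R2DisassemblyPlugin>.Instance); // ctor signature assumed
    using var stream = File.OpenRead("sample.elf");                                     // placeholder input
    var binary = plugin.LoadBinary(stream, CpuArchitecture.X86_64, BinaryFormat.ELF);

    foreach (var region in plugin.GetCodeRegions(binary))
    {
        foreach (var ins in plugin.Disassemble(binary, region))
        {
            Console.WriteLine($"{ins.Address:X8}  {ins.Mnemonic} {ins.OperandsText}");
        }
    }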
+using System.Globalization; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + private static Operand ParseMemoryOperand(string text) + { + var start = text.IndexOf('['); + var end = text.LastIndexOf(']'); + + if (start < 0 || end <= start) + { + return new Operand( + Type: OperandType.Memory, + Text: text, + Value: null, + Register: null, + MemoryBase: null, + MemoryIndex: null, + MemoryScale: null, + MemoryDisplacement: null); + } + + var inner = text.Substring(start + 1, end - start - 1); + + string? memBase = null; + string? memIndex = null; + int? memScale = null; + long? memDisp = null; + + var components = inner.Split(['+', ','], StringSplitOptions.RemoveEmptyEntries); + + foreach (var comp in components) + { + var trimmed = comp.Trim(); + + if (trimmed.Contains('*')) + { + var scaleParts = trimmed.Split('*'); + if (scaleParts.Length == 2) + { + memIndex = scaleParts[0].Trim().ToUpperInvariant(); + if (int.TryParse(scaleParts[1].Trim(), out var scale)) + { + memScale = scale; + } + } + continue; + } + + if (trimmed.StartsWith('#')) + { + var immText = trimmed.TrimStart('#'); + if (immText.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) + { + if (long.TryParse(immText.AsSpan(2), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var hexDisp)) + { + memDisp = hexDisp; + } + } + else if (long.TryParse(immText, out var decDisp)) + { + memDisp = decDisp; + } + continue; + } + + if (trimmed.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) + { + if (long.TryParse(trimmed.AsSpan(2), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var hexDisp)) + { + memDisp = hexDisp; + } + continue; + } + + if (trimmed.StartsWith('-')) + { + if (long.TryParse(trimmed, out var negDisp)) + { + memDisp = negDisp; + } + continue; + } + + if (memBase == null) + { + memBase = trimmed.ToUpperInvariant(); + } + else if (memIndex == null) + { + memIndex = trimmed.ToUpperInvariant(); + } + } + + return new Operand( + Type: OperandType.Memory, + Text: text, + Value: null, + Register: null, + MemoryBase: memBase, + MemoryIndex: memIndex, + MemoryScale: memScale, + MemoryDisplacement: memDisp); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Split.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Split.cs new file mode 100644 index 000000000..26d55fb44 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.Split.cs @@ -0,0 +1,48 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
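For reference, the memory-operand parser above yields the following for a typical scaled-index operand (hand-traced from the code; illustrative only):

    // ParseMemoryOperand("[RBP + RAX*4 + 0x10]") returns:
    // new Operand(
    //     Type: OperandType.Memory,
    //     Text: "[RBP + RAX*4 + 0x10]",
    //     MemoryBase: "RBP",
    //     MemoryIndex: "RAX",
    //     MemoryScale: 4,
    //     MemoryDisplacement: 0x10);   // Value and Register stay null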
+using System.Text; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + private static IReadOnlyList SplitOperands(string operandsText) + { + var result = new List(); + var current = new StringBuilder(); + var bracketDepth = 0; + + foreach (var c in operandsText) + { + if (c == '[' || c == '(' || c == '{') + { + bracketDepth++; + current.Append(c); + } + else if (c == ']' || c == ')' || c == '}') + { + bracketDepth--; + current.Append(c); + } + else if (c == ',' && bracketDepth == 0) + { + if (current.Length > 0) + { + result.Add(current.ToString()); + current.Clear(); + } + } + else + { + current.Append(c); + } + } + + if (current.Length > 0) + { + result.Add(current.ToString()); + } + + return result; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.cs new file mode 100644 index 000000000..5ca697a43 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.OperandParsing.cs @@ -0,0 +1,105 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; +using System.Globalization; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + private static ImmutableArray ParseOperands(string operandsText, string mnemonic) + { + if (string.IsNullOrWhiteSpace(operandsText)) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + var operandStrings = SplitOperands(operandsText); + + foreach (var opStr in operandStrings) + { + var trimmed = opStr.Trim(); + if (string.IsNullOrEmpty(trimmed)) continue; + + var operand = ParseSingleOperand(trimmed); + builder.Add(operand); + } + + return builder.ToImmutable(); + } + + private static Operand ParseSingleOperand(string text) + { + var trimmed = text.Trim(); + + if (trimmed.StartsWith('[') && trimmed.EndsWith(']')) + { + return ParseMemoryOperand(trimmed); + } + + if (trimmed.StartsWith('[') && (trimmed.EndsWith("]!") || trimmed.Contains("],"))) + { + return ParseMemoryOperand(trimmed); + } + + if (trimmed.StartsWith('#') || trimmed.StartsWith("0x", StringComparison.OrdinalIgnoreCase) || + trimmed.StartsWith("0X", StringComparison.OrdinalIgnoreCase) || + (trimmed.Length > 0 && (char.IsDigit(trimmed[0]) || trimmed[0] == '-'))) + { + return ParseImmediateOperand(trimmed); + } + + return ParseRegisterOperand(trimmed); + } + + private static Operand ParseRegisterOperand(string text) + { + var regName = text.ToUpperInvariant(); + + return new Operand( + Type: OperandType.Register, + Text: text, + Value: null, + Register: regName, + MemoryBase: null, + MemoryIndex: null, + MemoryScale: null, + MemoryDisplacement: null); + } + + private static Operand ParseImmediateOperand(string text) + { + var cleanText = text.TrimStart('#'); + long? 
value = null; + + if (cleanText.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) + { + if (long.TryParse(cleanText.AsSpan(2), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var hexVal)) + { + value = hexVal; + } + } + else if (cleanText.StartsWith("-0x", StringComparison.OrdinalIgnoreCase)) + { + if (long.TryParse(cleanText.AsSpan(3), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var hexVal)) + { + value = -hexVal; + } + } + else if (long.TryParse(cleanText, CultureInfo.InvariantCulture, out var decVal)) + { + value = decVal; + } + + return new Operand( + Type: OperandType.Immediate, + Text: text, + Value: value, + Register: null, + MemoryBase: null, + MemoryIndex: null, + MemoryScale: null, + MemoryDisplacement: null); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.RegionsAndSymbols.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.RegionsAndSymbols.cs new file mode 100644 index 000000000..814647781 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.RegionsAndSymbols.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2DisassemblyPlugin +{ + /// + public IEnumerable GetCodeRegions(BinaryInfo binary) + { + ArgumentNullException.ThrowIfNull(binary); + var handle = GetHandle(binary); + + var textPtr = handle.BinHandle.File.GetTextSectionPointer(); + if (textPtr.IsValid) + { + yield return new CodeRegion( + Name: ".text", + VirtualAddress: textPtr.Addr, + FileOffset: (ulong)textPtr.Offset, + Size: (ulong)(textPtr.MaxAddr - textPtr.Addr + 1), + IsExecutable: true, + IsReadable: true, + IsWritable: false); + } + else + { + yield return new CodeRegion( + Name: ".code", + VirtualAddress: handle.BinHandle.File.BaseAddress, + FileOffset: 0, + Size: (ulong)handle.Bytes.Length, + IsExecutable: true, + IsReadable: true, + IsWritable: false); + } + } + + /// + public IEnumerable GetSymbols(BinaryInfo binary) + { + ArgumentNullException.ThrowIfNull(binary); + var handle = GetHandle(binary); + + var funcAddrs = handle.BinHandle.File.GetFunctionAddresses(); + + foreach (var addr in funcAddrs) + { + yield return new SymbolInfo( + Name: $"func_{addr:X}", + Address: addr, + Size: 0, + Type: SymbolType.Function, + Binding: SymbolBinding.Global, + Section: ".text"); + } + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.cs index 9518a00e3..05087a2cc 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2DisassemblyPlugin.cs @@ -1,13 +1,6 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - - -using B2R2; -using B2R2.FrontEnd; -using B2R2.FrontEnd.BinFile; -using B2R2.FrontEnd.BinLifter; using Microsoft.Extensions.Logging; -using System.Collections.Immutable; namespace StellaOps.BinaryIndex.Disassembly.B2R2; @@ -16,7 +9,7 @@ namespace StellaOps.BinaryIndex.Disassembly.B2R2; /// B2R2 is a pure .NET binary analysis framework supporting ELF, PE, and Mach-O /// on x86, x86-64, ARM32, ARM64, MIPS, RISC-V, and more. 
/// -public sealed class B2R2DisassemblyPlugin : IDisassemblyPlugin +public sealed partial class B2R2DisassemblyPlugin : IDisassemblyPlugin { /// /// Plugin identifier. @@ -25,7 +18,7 @@ public sealed class B2R2DisassemblyPlugin : IDisassemblyPlugin private readonly ILogger _logger; - private static readonly DisassemblyCapabilities s_capabilities = new() + private static readonly DisassemblyCapabilities _capabilities = new() { PluginId = PluginId, Name = "B2R2 Disassembler", @@ -61,627 +54,5 @@ public sealed class B2R2DisassemblyPlugin : IDisassemblyPlugin } /// - public DisassemblyCapabilities Capabilities => s_capabilities; - - /// - public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) - { - ArgumentNullException.ThrowIfNull(stream); - - using var memStream = new MemoryStream(); - stream.CopyTo(memStream); - return LoadBinary(memStream.ToArray(), archHint, formatHint); - } - - /// - public BinaryInfo LoadBinary(ReadOnlySpan bytes, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) - { - var byteArray = bytes.ToArray(); - - _logger.LogDebug("Loading binary with B2R2 plugin (size: {Size} bytes)", byteArray.Length); - - // Create B2R2 ISA hint if provided - var isa = archHint.HasValue - ? MapToB2R2Isa(archHint.Value) - : new ISA(Architecture.Intel, WordSize.Bit64); // Default to x64 - - // Create BinHandle - B2R2's main interface - // Enable format detection when loading from bytes - var binHandle = new BinHandle(byteArray, isa, null, true); - var binFile = binHandle.File; - - // Extract binary information - var format = MapFromB2R2Format(binFile.Format); - var architecture = MapFromB2R2Architecture(binFile.ISA); - var bitness = GetBitness(binFile.ISA.WordSize); - var endianness = binFile.ISA.Endian == Endian.Little ? Endianness.Little : Endianness.Big; - var abi = DetectAbi(format); - - // Extract entry point - B2R2 returns FSharpOption - var entryPointOpt = binFile.EntryPoint; - var entryPoint = Microsoft.FSharp.Core.FSharpOption.get_IsSome(entryPointOpt) - ? 
entryPointOpt.Value - : (ulong?)null; - - _logger.LogInformation( - "Loaded binary with B2R2: Format={Format}, Architecture={Architecture}, Endian={Endian}", - format, architecture, endianness); - - var metadata = new Dictionary - { - ["size"] = byteArray.Length, - ["b2r2_isa"] = binFile.ISA.Arch.ToString() - }; - if (entryPoint.HasValue) - { - metadata["entry_point"] = entryPoint.Value; - } - - return new BinaryInfo( - Format: format, - Architecture: architecture, - Bitness: bitness, - Endianness: endianness, - Abi: abi, - EntryPoint: entryPoint, - BuildId: null, - Metadata: metadata, - Handle: new B2R2BinaryHandle(binHandle, byteArray)); - } - - /// - public IEnumerable GetCodeRegions(BinaryInfo binary) - { - ArgumentNullException.ThrowIfNull(binary); - var handle = GetHandle(binary); - - // Use the text section pointer if available - var textPtr = handle.BinHandle.File.GetTextSectionPointer(); - if (textPtr.IsValid) - { - yield return new CodeRegion( - Name: ".text", - VirtualAddress: textPtr.Addr, - FileOffset: (ulong)textPtr.Offset, - Size: (ulong)(textPtr.MaxAddr - textPtr.Addr + 1), - IsExecutable: true, - IsReadable: true, - IsWritable: false); - } - else - { - // Fallback: treat entire binary as code - yield return new CodeRegion( - Name: ".code", - VirtualAddress: handle.BinHandle.File.BaseAddress, - FileOffset: 0, - Size: (ulong)handle.Bytes.Length, - IsExecutable: true, - IsReadable: true, - IsWritable: false); - } - } - - /// - public IEnumerable GetSymbols(BinaryInfo binary) - { - ArgumentNullException.ThrowIfNull(binary); - var handle = GetHandle(binary); - - // Get function addresses from B2R2 - var funcAddrs = handle.BinHandle.File.GetFunctionAddresses(); - - foreach (var addr in funcAddrs) - { - yield return new SymbolInfo( - Name: $"func_{addr:X}", - Address: addr, - Size: 0, // Unknown size - Type: SymbolType.Function, - Binding: SymbolBinding.Global, - Section: ".text"); - } - } - - /// - public IEnumerable Disassemble(BinaryInfo binary, CodeRegion region) - { - ArgumentNullException.ThrowIfNull(binary); - ArgumentNullException.ThrowIfNull(region); - - var handle = GetHandle(binary); - var lifter = handle.BinHandle.NewLiftingUnit(); - - var addr = region.VirtualAddress; - var endAddr = region.VirtualAddress + region.Size; - - _logger.LogDebug( - "Disassembling region {Name} from 0x{Start:X} to 0x{End:X}", - region.Name, addr, endAddr); - - while (addr < endAddr) - { - IInstruction? instr; - try - { - instr = lifter.ParseInstruction(addr); - } - catch - { - // Skip invalid instruction - addr++; - continue; - } - - if (instr is null || instr.Length == 0) - { - addr++; - continue; - } - - yield return MapInstruction(instr, handle, addr); - addr += instr.Length; - } - } - - /// - public IEnumerable Disassemble(BinaryInfo binary, ulong startAddress, ulong length) - { - var region = new CodeRegion( - Name: $"0x{startAddress:X}", - VirtualAddress: startAddress, - FileOffset: startAddress, - Size: length, - IsExecutable: true, - IsReadable: true, - IsWritable: false); - - return Disassemble(binary, region); - } - - /// - public IEnumerable DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) - { - ArgumentNullException.ThrowIfNull(binary); - ArgumentNullException.ThrowIfNull(symbol); - - var size = symbol.Size > 0 ? 
symbol.Size : 4096UL; - - var region = new CodeRegion( - Name: symbol.Name, - VirtualAddress: symbol.Address, - FileOffset: symbol.Address, - Size: size, - IsExecutable: true, - IsReadable: true, - IsWritable: false); - - return Disassemble(binary, region); - } - - #region Architecture Mapping - - private static ISA MapToB2R2Isa(CpuArchitecture arch) - { - return arch switch - { - CpuArchitecture.X86 => new ISA(Architecture.Intel, WordSize.Bit32), - CpuArchitecture.X86_64 => new ISA(Architecture.Intel, WordSize.Bit64), - CpuArchitecture.ARM32 => new ISA(Architecture.ARMv7, WordSize.Bit32), - CpuArchitecture.ARM64 => new ISA(Architecture.ARMv8, WordSize.Bit64), - CpuArchitecture.MIPS32 => new ISA(Architecture.MIPS, WordSize.Bit32), - CpuArchitecture.MIPS64 => new ISA(Architecture.MIPS, WordSize.Bit64), - CpuArchitecture.RISCV64 => new ISA(Architecture.RISCV, WordSize.Bit64), - CpuArchitecture.PPC32 => new ISA(Architecture.PPC, Endian.Big, WordSize.Bit32), - CpuArchitecture.SPARC => new ISA(Architecture.SPARC, Endian.Big), - CpuArchitecture.SH4 => new ISA(Architecture.SH4), - CpuArchitecture.AVR => new ISA(Architecture.AVR), - CpuArchitecture.EVM => new ISA(Architecture.EVM, Endian.Big), - _ => new ISA(Architecture.Intel, WordSize.Bit64) // Default to x64 - }; - } - - private static CpuArchitecture MapFromB2R2Architecture(ISA isa) - { - return isa.Arch switch - { - Architecture.Intel when isa.WordSize == WordSize.Bit32 => CpuArchitecture.X86, - Architecture.Intel when isa.WordSize == WordSize.Bit64 => CpuArchitecture.X86_64, - Architecture.Intel => isa.IsX86 ? CpuArchitecture.X86 : CpuArchitecture.X86_64, - Architecture.ARMv7 => CpuArchitecture.ARM32, - Architecture.ARMv8 when isa.WordSize == WordSize.Bit64 => CpuArchitecture.ARM64, - Architecture.ARMv8 => CpuArchitecture.ARM32, - Architecture.MIPS when isa.WordSize == WordSize.Bit64 => CpuArchitecture.MIPS64, - Architecture.MIPS => CpuArchitecture.MIPS32, - Architecture.RISCV => CpuArchitecture.RISCV64, - Architecture.PPC => CpuArchitecture.PPC32, - Architecture.SPARC => CpuArchitecture.SPARC, - Architecture.SH4 => CpuArchitecture.SH4, - Architecture.AVR => CpuArchitecture.AVR, - Architecture.EVM => CpuArchitecture.EVM, - _ => CpuArchitecture.Unknown - }; - } - - private static BinaryFormat MapFromB2R2Format(FileFormat format) - { - return format switch - { - FileFormat.ELFBinary => BinaryFormat.ELF, - FileFormat.PEBinary => BinaryFormat.PE, - FileFormat.MachBinary => BinaryFormat.MachO, - FileFormat.WasmBinary => BinaryFormat.WASM, - FileFormat.RawBinary => BinaryFormat.Raw, - _ => BinaryFormat.Unknown - }; - } - - private static int GetBitness(WordSize wordSize) - { - return wordSize switch - { - WordSize.Bit8 => 8, - WordSize.Bit16 => 16, - WordSize.Bit32 => 32, - WordSize.Bit64 => 64, - WordSize.Bit128 => 128, - WordSize.Bit256 => 256, - _ => 64 - }; - } - - private static string? 
DetectAbi(BinaryFormat format) - { - return format switch - { - BinaryFormat.ELF => "gnu", - BinaryFormat.PE => "msvc", - BinaryFormat.MachO => "darwin", - _ => null - }; - } - - #endregion - - #region Instruction Mapping - - private static B2R2BinaryHandle GetHandle(BinaryInfo binary) - { - if (binary.Handle is not B2R2BinaryHandle handle) - throw new ArgumentException("Invalid binary handle - not a B2R2 handle", nameof(binary)); - return handle; - } - - private static DisassembledInstruction MapInstruction(IInstruction instr, B2R2BinaryHandle handle, ulong address) - { - // Get disassembly string - var disasm = instr.Disasm(); - - // Parse mnemonic and operands from disassembly string - var parts = disasm.Split(' ', 2, StringSplitOptions.RemoveEmptyEntries); - var mnemonic = parts.Length > 0 ? parts[0] : "???"; - var operandsText = parts.Length > 1 ? parts[1] : ""; - - // Get raw bytes from the binary data - var offset = (int)(address - handle.BinHandle.File.BaseAddress); - var length = (int)instr.Length; - var rawBytes = offset >= 0 && offset + length <= handle.Bytes.Length - ? handle.Bytes.AsSpan(offset, length).ToArray().ToImmutableArray() - : ImmutableArray.Empty; - - var kind = ClassifyInstruction(instr, mnemonic); - var operands = ParseOperands(operandsText, mnemonic); - - return new DisassembledInstruction( - Address: address, - RawBytes: rawBytes, - Mnemonic: mnemonic, - OperandsText: operandsText, - Kind: kind, - Operands: operands); - } - - private static ImmutableArray ParseOperands(string operandsText, string mnemonic) - { - if (string.IsNullOrWhiteSpace(operandsText)) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(); - - // Split operands by comma, respecting brackets - var operandStrings = SplitOperands(operandsText); - - foreach (var opStr in operandStrings) - { - var trimmed = opStr.Trim(); - if (string.IsNullOrEmpty(trimmed)) continue; - - var operand = ParseSingleOperand(trimmed); - builder.Add(operand); - } - - return builder.ToImmutable(); - } - - private static IReadOnlyList SplitOperands(string operandsText) - { - var result = new List(); - var current = new System.Text.StringBuilder(); - var bracketDepth = 0; - - foreach (var c in operandsText) - { - if (c == '[' || c == '(' || c == '{') - { - bracketDepth++; - current.Append(c); - } - else if (c == ']' || c == ')' || c == '}') - { - bracketDepth--; - current.Append(c); - } - else if (c == ',' && bracketDepth == 0) - { - if (current.Length > 0) - { - result.Add(current.ToString()); - current.Clear(); - } - } - else - { - current.Append(c); - } - } - - if (current.Length > 0) - { - result.Add(current.ToString()); - } - - return result; - } - - private static Operand ParseSingleOperand(string text) - { - var trimmed = text.Trim(); - - // Check for memory operand [...] - if (trimmed.StartsWith('[') && trimmed.EndsWith(']')) - { - return ParseMemoryOperand(trimmed); - } - - // Check for ARM64 memory operand [...]! 
- if (trimmed.StartsWith('[') && (trimmed.EndsWith("]!") || trimmed.Contains("],"))) - { - return ParseMemoryOperand(trimmed); - } - - // Check for immediate value - if (trimmed.StartsWith('#') || trimmed.StartsWith("0x", StringComparison.OrdinalIgnoreCase) || - trimmed.StartsWith("0X", StringComparison.OrdinalIgnoreCase) || - (trimmed.Length > 0 && (char.IsDigit(trimmed[0]) || trimmed[0] == '-'))) - { - return ParseImmediateOperand(trimmed); - } - - // Assume it's a register - return ParseRegisterOperand(trimmed); - } - - private static Operand ParseRegisterOperand(string text) - { - var regName = text.ToUpperInvariant(); - - return new Operand( - Type: OperandType.Register, - Text: text, - Value: null, - Register: regName, - MemoryBase: null, - MemoryIndex: null, - MemoryScale: null, - MemoryDisplacement: null); - } - - private static Operand ParseImmediateOperand(string text) - { - var cleanText = text.TrimStart('#'); - long? value = null; - - if (cleanText.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) - { - if (long.TryParse(cleanText.AsSpan(2), System.Globalization.NumberStyles.HexNumber, - System.Globalization.CultureInfo.InvariantCulture, out var hexVal)) - { - value = hexVal; - } - } - else if (cleanText.StartsWith("-0x", StringComparison.OrdinalIgnoreCase)) - { - if (long.TryParse(cleanText.AsSpan(3), System.Globalization.NumberStyles.HexNumber, - System.Globalization.CultureInfo.InvariantCulture, out var hexVal)) - { - value = -hexVal; - } - } - else if (long.TryParse(cleanText, System.Globalization.CultureInfo.InvariantCulture, out var decVal)) - { - value = decVal; - } - - return new Operand( - Type: OperandType.Immediate, - Text: text, - Value: value, - Register: null, - MemoryBase: null, - MemoryIndex: null, - MemoryScale: null, - MemoryDisplacement: null); - } - - private static Operand ParseMemoryOperand(string text) - { - // Extract content between brackets - var start = text.IndexOf('['); - var end = text.LastIndexOf(']'); - - if (start < 0 || end <= start) - { - return new Operand( - Type: OperandType.Memory, - Text: text, - Value: null, - Register: null, - MemoryBase: null, - MemoryIndex: null, - MemoryScale: null, - MemoryDisplacement: null); - } - - var inner = text.Substring(start + 1, end - start - 1); - - // Parse components: base, index, scale, displacement - // Common patterns: - // x86: [rax], [rax+rbx], [rax+rbx*4], [rax+0x10], [rax+rbx*4+0x10] - // ARM: [x0], [x0, #8], [x0, x1], [x0, x1, lsl #2] - - string? memBase = null; - string? memIndex = null; - int? memScale = null; - long? 
memDisp = null; - - // Split by + or , depending on architecture style - var components = inner.Split(['+', ','], StringSplitOptions.RemoveEmptyEntries); - - foreach (var comp in components) - { - var trimmed = comp.Trim(); - - // Check for scale pattern: reg*N - if (trimmed.Contains('*')) - { - var scaleParts = trimmed.Split('*'); - if (scaleParts.Length == 2) - { - memIndex = scaleParts[0].Trim().ToUpperInvariant(); - if (int.TryParse(scaleParts[1].Trim(), out var scale)) - { - memScale = scale; - } - } - continue; - } - - // Check for ARM immediate: #N - if (trimmed.StartsWith('#')) - { - var immText = trimmed.TrimStart('#'); - if (immText.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) - { - if (long.TryParse(immText.AsSpan(2), System.Globalization.NumberStyles.HexNumber, - System.Globalization.CultureInfo.InvariantCulture, out var hexDisp)) - { - memDisp = hexDisp; - } - } - else if (long.TryParse(immText, out var decDisp)) - { - memDisp = decDisp; - } - continue; - } - - // Check for hex displacement: 0xNN - if (trimmed.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) - { - if (long.TryParse(trimmed.AsSpan(2), System.Globalization.NumberStyles.HexNumber, - System.Globalization.CultureInfo.InvariantCulture, out var hexDisp)) - { - memDisp = hexDisp; - } - continue; - } - - // Check for negative displacement - if (trimmed.StartsWith('-')) - { - if (long.TryParse(trimmed, out var negDisp)) - { - memDisp = negDisp; - } - continue; - } - - // Must be a register - if (memBase == null) - { - memBase = trimmed.ToUpperInvariant(); - } - else if (memIndex == null) - { - memIndex = trimmed.ToUpperInvariant(); - } - } - - return new Operand( - Type: OperandType.Memory, - Text: text, - Value: null, - Register: null, - MemoryBase: memBase, - MemoryIndex: memIndex, - MemoryScale: memScale, - MemoryDisplacement: memDisp); - } - - private static InstructionKind ClassifyInstruction(IInstruction instr, string mnemonic) - { - // Use B2R2's built-in classification where possible - if (instr.IsRET) return InstructionKind.Return; - if (instr.IsCall) return InstructionKind.Call; - if (instr.IsCondBranch) return InstructionKind.ConditionalBranch; - if (instr.IsBranch) return InstructionKind.Branch; - if (instr.IsNop) return InstructionKind.Nop; - if (instr.IsInterrupt) return InstructionKind.Syscall; - - // Fall back to mnemonic-based classification - var upper = mnemonic.ToUpperInvariant(); - - if (upper is "ADD" or "SUB" or "MUL" or "DIV" or "IMUL" or "IDIV" or - "INC" or "DEC" or "NEG" or "ADC" or "SBB") - return InstructionKind.Arithmetic; - - if (upper is "AND" or "OR" or "XOR" or "NOT" or "TEST" or "ORR" or "EOR") - return InstructionKind.Logic; - - if (upper is "SHL" or "SHR" or "SAL" or "SAR" or "ROL" or "ROR" or - "LSL" or "LSR" or "ASR") - return InstructionKind.Shift; - - if (upper.StartsWith("MOV", StringComparison.Ordinal) || upper is "LEA" or "PUSH" or "POP" or "XCHG") - return InstructionKind.Move; - - if (upper.StartsWith("LDR", StringComparison.Ordinal) || upper.StartsWith("LD", StringComparison.Ordinal)) - return InstructionKind.Load; - - if (upper.StartsWith("STR", StringComparison.Ordinal) || upper.StartsWith("ST", StringComparison.Ordinal)) - return InstructionKind.Store; - - if (upper is "CMP" or "CMPS" or "SCAS") return InstructionKind.Compare; - - return InstructionKind.Unknown; - } - - #endregion + public DisassemblyCapabilities Capabilities => _capabilities; } - -/// -/// Internal handle for B2R2 binary data. 
-/// -internal sealed record B2R2BinaryHandle(BinHandle BinHandle, byte[] Bytes); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2IsaPoolStats.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2IsaPoolStats.cs new file mode 100644 index 000000000..276f90825 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2IsaPoolStats.cs @@ -0,0 +1,14 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +/// +/// Per-ISA pool statistics. +/// +/// Number of lifters in pool for this ISA. +/// Number of lifters currently in use for this ISA. +/// Maximum pool size for this ISA. +public sealed record B2R2IsaPoolStats( + int PooledCount, + int ActiveCount, + int MaxPoolSize); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.Helpers.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.Helpers.cs new file mode 100644 index 000000000..b22b4591e --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.Helpers.cs @@ -0,0 +1,106 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2; +using B2R2.FrontEnd; +using B2R2.FrontEnd.BinFile; +using B2R2.FrontEnd.BinLifter; +using System.Collections.Concurrent; +using System.Globalization; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LifterPool +{ + private static string GetIsaKey(ISA isa) => + string.Format( + CultureInfo.InvariantCulture, + "{0}-{1}", + isa.Arch.ToString().ToLowerInvariant(), + isa.WordSize == WordSize.Bit64 ? "64" : "32"); + + private static ISA? ParseIsaKey(string key) + { + var parts = key.Split('-'); + if (parts.Length != 2) return null; + + var archStr = parts[0].ToLowerInvariant(); + var bits = parts[1]; + + var wordSize = bits == "64" ? 
WordSize.Bit64 : WordSize.Bit32; + + return archStr switch + { + "intel" => new ISA(Architecture.Intel, wordSize), + "armv7" => new ISA(Architecture.ARMv7, wordSize), + "armv8" => new ISA(Architecture.ARMv8, wordSize), + "mips" => new ISA(Architecture.MIPS, wordSize), + "riscv" => new ISA(Architecture.RISCV, wordSize), + "ppc" => new ISA(Architecture.PPC, Endian.Big, wordSize), + "sparc" => new ISA(Architecture.SPARC, Endian.Big), + _ => (ISA?)null + }; + } + + private ConcurrentBag GetOrCreatePool(string isaKey) => + _pools.GetOrAdd(isaKey, _ => new ConcurrentBag()); + + private static PooledLifterEntry CreateLifterEntry(ISA isa) + { + var nopBytes = CreateNopSled(isa, 64); + var binHandle = new BinHandle(nopBytes, isa, null, true); + var liftingUnit = binHandle.NewLiftingUnit(); + return new PooledLifterEntry(binHandle, liftingUnit, DateTimeOffset.UtcNow); + } + + private static byte[] CreateNopSled(ISA isa, int size) + { + var bytes = new byte[size]; + + switch (isa.Arch) + { + case Architecture.Intel: + Array.Fill(bytes, (byte)0x90); + break; + + case Architecture.ARMv7: + case Architecture.ARMv8: + if (isa.WordSize == WordSize.Bit64) + { + for (var i = 0; i + 3 < size; i += 4) + { + bytes[i] = 0x1F; + bytes[i + 1] = 0x20; + bytes[i + 2] = 0x03; + bytes[i + 3] = 0xD5; + } + } + else + { + for (var i = 0; i + 3 < size; i += 4) + { + bytes[i] = 0x00; + bytes[i + 1] = 0xF0; + bytes[i + 2] = 0x20; + bytes[i + 3] = 0xE3; + } + } + break; + + default: + Array.Fill(bytes, (byte)0x00); + break; + } + + return bytes; + } + + private void IncrementActive(string isaKey) + { + _activeCount.AddOrUpdate(isaKey, 1, (_, v) => v + 1); + } + + private void DecrementActive(string isaKey) + { + _activeCount.AddOrUpdate(isaKey, 0, (_, v) => Math.Max(0, v - 1)); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.StatsAndWarm.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.StatsAndWarm.cs new file mode 100644 index 000000000..7a7e4d5a8 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.StatsAndWarm.cs @@ -0,0 +1,79 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LifterPool +{ + /// + /// Gets the current pool statistics. + /// + public B2R2LifterPoolStats GetStats() + { + var isaStats = new Dictionary(); + + foreach (var kvp in _pools) + { + var isaKey = kvp.Key; + var poolSize = kvp.Value.Count; + var activeCount = _activeCount.GetValueOrDefault(isaKey, 0); + + isaStats[isaKey] = new B2R2IsaPoolStats( + PooledCount: poolSize, + ActiveCount: activeCount, + MaxPoolSize: _options.MaxPoolSizePerIsa); + } + + return new B2R2LifterPoolStats( + TotalPooledLifters: _pools.Values.Sum(b => b.Count), + TotalActiveLifters: _activeCount.Values.Sum(), + IsWarm: _warmed, + IsaStats: isaStats.ToImmutableDictionary()); + } + + /// + /// Warms the pool by preloading lifters for common ISAs. 
+ /// + public void WarmPool() + { + if (!_options.EnableWarmPreload) return; + if (_warmed) return; + + lock (_warmLock) + { + if (_warmed) return; + + _logger.LogInformation( + "Warming B2R2 lifter pool for {IsaCount} ISAs", + _options.WarmPreloadIsas.Length); + + foreach (var isaKey in _options.WarmPreloadIsas) + { + try + { + var isa = ParseIsaKey(isaKey); + if (isa is null) + { + _logger.LogWarning("Unknown ISA key for warm preload: {IsaKey}", isaKey); + continue; + } + + var entry = CreateLifterEntry(isa); + var pool = GetOrCreatePool(GetIsaKey(isa)); + pool.Add(entry); + + _logger.LogDebug("Warmed lifter for ISA: {IsaKey}", isaKey); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to warm lifter for ISA: {IsaKey}", isaKey); + } + } + + _warmed = true; + _logger.LogInformation("B2R2 lifter pool warm complete"); + } + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.cs index 0ec8d18f4..136fa1159 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPool.cs @@ -2,103 +2,21 @@ // Licensed under BUSL-1.1. See LICENSE in the project root. // Sprint: SPRINT_20260112_004_BINIDX_b2r2_lowuir_perf_cache (BINIDX-LIFTER-02) // Task: Bounded lifter pool with warm preload per ISA - - using B2R2; using B2R2.FrontEnd; +using B2R2.FrontEnd.BinFile; using B2R2.FrontEnd.BinLifter; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Globalization; namespace StellaOps.BinaryIndex.Disassembly.B2R2; -/// -/// Configuration options for the B2R2 lifter pool. -/// -public sealed class B2R2LifterPoolOptions -{ - /// - /// Configuration section name. - /// - public const string SectionName = "StellaOps:BinaryIndex:B2R2LifterPool"; - - /// - /// Maximum number of pooled lifters per ISA. - /// - public int MaxPoolSizePerIsa { get; set; } = 4; - - /// - /// Whether to warm preload lifters for common ISAs at startup. - /// - public bool EnableWarmPreload { get; set; } = true; - - /// - /// ISAs to warm preload at startup. - /// - public ImmutableArray WarmPreloadIsas { get; set; } = - [ - "intel-64", - "intel-32", - "armv8-64", - "armv7-32" - ]; - - /// - /// Timeout for acquiring a lifter from the pool. - /// - public TimeSpan AcquireTimeout { get; set; } = TimeSpan.FromSeconds(5); -} - -/// -/// Pooled B2R2 BinHandle and LiftingUnit for reuse across calls. -/// -public sealed class PooledLifter : IDisposable -{ - private readonly B2R2LifterPool _pool; - private readonly ISA _isa; - private bool _disposed; - - internal PooledLifter( - B2R2LifterPool pool, - ISA isa, - BinHandle binHandle, - LiftingUnit liftingUnit) - { - _pool = pool ?? throw new ArgumentNullException(nameof(pool)); - _isa = isa; - BinHandle = binHandle ?? throw new ArgumentNullException(nameof(binHandle)); - LiftingUnit = liftingUnit ?? throw new ArgumentNullException(nameof(liftingUnit)); - } - - /// - /// The B2R2 BinHandle for this lifter. - /// - public BinHandle BinHandle { get; } - - /// - /// The B2R2 LiftingUnit for this lifter. - /// - public LiftingUnit LiftingUnit { get; } - - /// - /// Returns the lifter to the pool. 
- /// - public void Dispose() - { - if (_disposed) return; - _disposed = true; - _pool.Return(this, _isa); - } -} - /// /// Bounded pool of B2R2 lifters with warm preload per ISA. /// Thread-safe and designed for reuse in high-throughput scenarios. /// -public sealed class B2R2LifterPool : IDisposable +public sealed partial class B2R2LifterPool : IDisposable { private readonly ILogger _logger; private readonly B2R2LifterPoolOptions _options; @@ -121,77 +39,6 @@ public sealed class B2R2LifterPool : IDisposable _options = options?.Value ?? new B2R2LifterPoolOptions(); } - /// - /// Gets the current pool statistics. - /// - public B2R2LifterPoolStats GetStats() - { - var isaStats = new Dictionary(); - - foreach (var kvp in _pools) - { - var isaKey = kvp.Key; - var poolSize = kvp.Value.Count; - var activeCount = _activeCount.GetValueOrDefault(isaKey, 0); - - isaStats[isaKey] = new B2R2IsaPoolStats( - PooledCount: poolSize, - ActiveCount: activeCount, - MaxPoolSize: _options.MaxPoolSizePerIsa); - } - - return new B2R2LifterPoolStats( - TotalPooledLifters: _pools.Values.Sum(b => b.Count), - TotalActiveLifters: _activeCount.Values.Sum(), - IsWarm: _warmed, - IsaStats: isaStats.ToImmutableDictionary()); - } - - /// - /// Warms the pool by preloading lifters for common ISAs. - /// - public void WarmPool() - { - if (!_options.EnableWarmPreload) return; - if (_warmed) return; - - lock (_warmLock) - { - if (_warmed) return; - - _logger.LogInformation( - "Warming B2R2 lifter pool for {IsaCount} ISAs", - _options.WarmPreloadIsas.Length); - - foreach (var isaKey in _options.WarmPreloadIsas) - { - try - { - var isa = ParseIsaKey(isaKey); - if (isa is null) - { - _logger.LogWarning("Unknown ISA key for warm preload: {IsaKey}", isaKey); - continue; - } - - // Create and pool a lifter for this ISA - var entry = CreateLifterEntry(isa); - var pool = GetOrCreatePool(GetIsaKey(isa)); - pool.Add(entry); - - _logger.LogDebug("Warmed lifter for ISA: {IsaKey}", isaKey); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to warm lifter for ISA: {IsaKey}", isaKey); - } - } - - _warmed = true; - _logger.LogInformation("B2R2 lifter pool warm complete"); - } - } - /// /// Acquires a lifter for the specified ISA. /// @@ -202,7 +49,6 @@ public sealed class B2R2LifterPool : IDisposable var isaKey = GetIsaKey(isa); var pool = GetOrCreatePool(isaKey); - // Try to get an existing lifter from the pool if (pool.TryTake(out var entry)) { IncrementActive(isaKey); @@ -210,7 +56,6 @@ public sealed class B2R2LifterPool : IDisposable return new PooledLifter(this, isa, entry.BinHandle, entry.LiftingUnit); } - // Create a new lifter var newEntry = CreateLifterEntry(isa); IncrementActive(isaKey); _logger.LogTrace("Created new lifter for {Isa}", isaKey); @@ -227,7 +72,6 @@ public sealed class B2R2LifterPool : IDisposable var pool = GetOrCreatePool(isaKey); - // Only return to pool if under limit if (pool.Count < _options.MaxPoolSizePerIsa) { var entry = new PooledLifterEntry( @@ -254,132 +98,4 @@ public sealed class B2R2LifterPool : IDisposable _logger.LogInformation("B2R2 lifter pool disposed"); } - - #region Private Helpers - - private static string GetIsaKey(ISA isa) => - string.Format( - CultureInfo.InvariantCulture, - "{0}-{1}", - isa.Arch.ToString().ToLowerInvariant(), - isa.WordSize == WordSize.Bit64 ? "64" : "32"); - - private static ISA? 
ParseIsaKey(string key) - { - var parts = key.Split('-'); - if (parts.Length != 2) return null; - - var archStr = parts[0].ToLowerInvariant(); - var bits = parts[1]; - - var wordSize = bits == "64" ? WordSize.Bit64 : WordSize.Bit32; - - return archStr switch - { - "intel" => new ISA(Architecture.Intel, wordSize), - "armv7" => new ISA(Architecture.ARMv7, wordSize), - "armv8" => new ISA(Architecture.ARMv8, wordSize), - "mips" => new ISA(Architecture.MIPS, wordSize), - "riscv" => new ISA(Architecture.RISCV, wordSize), - "ppc" => new ISA(Architecture.PPC, Endian.Big, wordSize), - "sparc" => new ISA(Architecture.SPARC, Endian.Big), - _ => (ISA?)null - }; - } - - private ConcurrentBag GetOrCreatePool(string isaKey) => - _pools.GetOrAdd(isaKey, _ => new ConcurrentBag()); - - private static PooledLifterEntry CreateLifterEntry(ISA isa) - { - // Create a minimal BinHandle for the ISA - // Use a small NOP sled as placeholder code - var nopBytes = CreateNopSled(isa, 64); - var binHandle = new BinHandle(nopBytes, isa, null, true); - var liftingUnit = binHandle.NewLiftingUnit(); - return new PooledLifterEntry(binHandle, liftingUnit, DateTimeOffset.UtcNow); - } - - private static byte[] CreateNopSled(ISA isa, int size) - { - var bytes = new byte[size]; - - // Fill with architecture-appropriate NOP bytes - switch (isa.Arch) - { - case Architecture.Intel: - // x86/x64 NOP = 0x90 - Array.Fill(bytes, (byte)0x90); - break; - - case Architecture.ARMv7: - case Architecture.ARMv8: - // ARM NOP = 0x00000000 or 0x1F 20 03 D5 (ARM64) - if (isa.WordSize == WordSize.Bit64) - { - for (var i = 0; i + 3 < size; i += 4) - { - bytes[i] = 0x1F; - bytes[i + 1] = 0x20; - bytes[i + 2] = 0x03; - bytes[i + 3] = 0xD5; - } - } - else - { - // ARM32 NOP = 0xE320F000 (big endian) or 0x00 F0 20 E3 (little) - for (var i = 0; i + 3 < size; i += 4) - { - bytes[i] = 0x00; - bytes[i + 1] = 0xF0; - bytes[i + 2] = 0x20; - bytes[i + 3] = 0xE3; - } - } - break; - - default: - // Generic zeroes for other architectures - Array.Fill(bytes, (byte)0x00); - break; - } - - return bytes; - } - - private void IncrementActive(string isaKey) - { - _activeCount.AddOrUpdate(isaKey, 1, (_, v) => v + 1); - } - - private void DecrementActive(string isaKey) - { - _activeCount.AddOrUpdate(isaKey, 0, (_, v) => Math.Max(0, v - 1)); - } - - #endregion } - -/// -/// Statistics for the B2R2 lifter pool. -/// -/// Total lifters currently in pool. -/// Total lifters currently in use. -/// Whether the pool has been warmed. -/// Per-ISA pool statistics. -public sealed record B2R2LifterPoolStats( - int TotalPooledLifters, - int TotalActiveLifters, - bool IsWarm, - ImmutableDictionary IsaStats); - -/// -/// Per-ISA pool statistics. -/// -/// Number of lifters in pool for this ISA. -/// Number of lifters currently in use for this ISA. -/// Maximum pool size for this ISA. -public sealed record B2R2IsaPoolStats( - int PooledCount, - int ActiveCount, - int MaxPoolSize); diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolOptions.cs new file mode 100644 index 000000000..a31dee3dc --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolOptions.cs @@ -0,0 +1,42 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
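// Illustrative usage sketch only, not part of the patch, for the pool modified above. It
// assumes the pool constructor takes (ILogger, IOptions<B2R2LifterPoolOptions>) as the field
// assignments suggest, and that PooledLifter.Dispose() hands the lifter back to the pool.
//
//   var pool = new B2R2LifterPool(logger, Options.Create(new B2R2LifterPoolOptions()));
//   pool.WarmPool();                                  // preloads intel-64, intel-32, armv8-64, armv7-32
//   using (var lifter = pool.Acquire(new ISA(Architecture.Intel, WordSize.Bit64)))
//   {
//       var ins = lifter.LiftingUnit.ParseInstruction(0x401000UL);
//   }                                                 // disposed -> returned if under MaxPoolSizePerIsa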
+using System.Collections.Immutable;
+
+namespace StellaOps.BinaryIndex.Disassembly.B2R2;
+
+/// <summary>
+/// Configuration options for the B2R2 lifter pool.
+/// </summary>
+public sealed class B2R2LifterPoolOptions
+{
+    /// <summary>
+    /// Configuration section name.
+    /// </summary>
+    public const string SectionName = "StellaOps:BinaryIndex:B2R2LifterPool";
+
+    /// <summary>
+    /// Maximum number of pooled lifters per ISA.
+    /// </summary>
+    public int MaxPoolSizePerIsa { get; set; } = 4;
+
+    /// <summary>
+    /// Whether to warm preload lifters for common ISAs at startup.
+    /// </summary>
+    public bool EnableWarmPreload { get; set; } = true;
+
+    /// <summary>
+    /// ISAs to warm preload at startup.
+    /// </summary>
+    public ImmutableArray<string> WarmPreloadIsas { get; set; } =
+    [
+        "intel-64",
+        "intel-32",
+        "armv8-64",
+        "armv7-32"
+    ];
+
+    /// <summary>
+    /// Timeout for acquiring a lifter from the pool.
+    /// </summary>
+    public TimeSpan AcquireTimeout { get; set; } = TimeSpan.FromSeconds(5);
+}
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolStats.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolStats.cs
new file mode 100644
index 000000000..6e416ac6e
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LifterPoolStats.cs
@@ -0,0 +1,18 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
+using System.Collections.Immutable;
+
+namespace StellaOps.BinaryIndex.Disassembly.B2R2;
+
+/// <summary>
+/// Statistics for the B2R2 lifter pool.
+/// </summary>
+/// <param name="TotalPooledLifters">Total lifters currently in pool.</param>
+/// <param name="TotalActiveLifters">Total lifters currently in use.</param>
+/// <param name="IsWarm">Whether the pool has been warmed.</param>
+/// <param name="IsaStats">Per-ISA pool statistics.</param>
+public sealed record B2R2LifterPoolStats(
+    int TotalPooledLifters,
+    int TotalActiveLifters,
+    bool IsWarm,
+    ImmutableDictionary<string, B2R2IsaPoolStats> IsaStats);
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.BlockBuilder.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.BlockBuilder.cs
new file mode 100644
index 000000000..a8ac1af54
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.BlockBuilder.cs
@@ -0,0 +1,73 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
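// Illustrative sketch only, not part of the patch: binding the options above from
// configuration via the SectionName they declare. The IServiceCollection/IConfiguration
// wiring shown here is assumed host code, not something introduced by this diff.
//
//   services.Configure<B2R2LifterPoolOptions>(
//       configuration.GetSection(B2R2LifterPoolOptions.SectionName));
//
// which corresponds to a configuration fragment such as
//   StellaOps:BinaryIndex:B2R2LifterPool:MaxPoolSizePerIsa = 4
//   StellaOps:BinaryIndex:B2R2LifterPool:EnableWarmPreload = true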
+using B2R2; +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; +using System.Globalization; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private (List Statements, List BasicBlocks) BuildStatementsAndBlocks( + IReadOnlyList instructions, + ISA isa, + ulong startAddress, + int maxInstructions, + CancellationToken ct) + { + var statements = new List(); + var basicBlocks = new List(); + var currentBlockStatements = new List(); + var blockStartAddress = startAddress; + var statementId = 0; + var blockId = 0; + + foreach (var instr in instructions.Take(maxInstructions)) + { + ct.ThrowIfCancellationRequested(); + + var liftedStatements = LiftInstructionToLowUir(isa, instr, ref statementId); + statements.AddRange(liftedStatements); + + foreach (var stmt in liftedStatements) + { + currentBlockStatements.Add(stmt.Id); + } + + if (IsBlockTerminator(instr)) + { + var endAddress = instr.Address + (ulong)instr.RawBytes.Length; + basicBlocks.Add(CreateBasicBlock(blockId, blockStartAddress, endAddress, currentBlockStatements)); + blockId++; + currentBlockStatements.Clear(); + blockStartAddress = endAddress; + } + } + + if (currentBlockStatements.Count > 0 && instructions.Count > 0) + { + var lastInstr = instructions[^1]; + var endAddress = lastInstr.Address + (ulong)lastInstr.RawBytes.Length; + basicBlocks.Add(CreateBasicBlock(blockId, blockStartAddress, endAddress, currentBlockStatements)); + } + + return (statements, basicBlocks); + } + + private static IrBasicBlock CreateBasicBlock( + int blockId, + ulong startAddress, + ulong endAddress, + IReadOnlyList statementIds) + { + return new IrBasicBlock( + Id: blockId, + Label: string.Format(CultureInfo.InvariantCulture, "bb_{0}", blockId), + StartAddress: startAddress, + EndAddress: endAddress, + StatementIds: [.. statementIds], + Predecessors: ImmutableArray.Empty, + Successors: ImmutableArray.Empty); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.CfgEdges.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.CfgEdges.cs new file mode 100644 index 000000000..73c8732a9 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.CfgEdges.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static (ImmutableArray Blocks, ImmutableArray Edges) BuildCfgEdges( + ImmutableArray blocks) + { + if (blocks.Length == 0) + return (blocks, ImmutableArray.Empty); + + var result = new IrBasicBlock[blocks.Length]; + var edges = new List(); + + for (var i = 0; i < blocks.Length; i++) + { + var block = blocks[i]; + var predecessors = new List(); + var successors = new List(); + + if (i < blocks.Length - 1) + { + successors.Add(i + 1); + edges.Add(new CfgEdge( + SourceBlockId: i, + TargetBlockId: i + 1, + Kind: CfgEdgeKind.FallThrough, + Condition: null)); + } + + if (i > 0) + { + predecessors.Add(i - 1); + } + + result[i] = block with + { + Predecessors = [.. predecessors.Distinct().OrderBy(x => x)], + Successors = [.. successors.Distinct().OrderBy(x => x)] + }; + } + + return ([.. result], [.. 
edges]); + } + + private static ImmutableArray FindExitBlocks(ImmutableArray blocks) + { + return blocks + .Where(b => b.Successors.Length == 0) + .Select(b => b.Id) + .ToImmutableArray(); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.Fallback.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.Fallback.cs new file mode 100644 index 000000000..258cdba2e --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.Fallback.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static IrStatement CreateFallbackStatement(DisassembledInstruction instr, int id) + { + var sources = instr.Operands.Skip(1) + .Select(op => new IrOperand( + Kind: MapOperandType(op.Type), + Name: op.Text, + Value: op.Value, + BitSize: 64, + IsMemory: op.Type == OperandType.Memory)) + .ToImmutableArray(); + + var dest = instr.Operands.Length > 0 + ? new IrOperand( + Kind: MapOperandType(instr.Operands[0].Type), + Name: instr.Operands[0].Text, + Value: instr.Operands[0].Value, + BitSize: 64, + IsMemory: instr.Operands[0].Type == OperandType.Memory) + : null; + + return new IrStatement( + Id: id, + Address: instr.Address, + Kind: MapMnemonicToKind(instr.Mnemonic), + Operation: instr.Mnemonic, + Destination: dest, + Sources: sources, + Metadata: ImmutableDictionary.Empty.Add("fallback", true)); + } + + private static SsaVariableKind MapOperandKindToSsaKind(IrOperandKind kind) => kind switch + { + IrOperandKind.Register => SsaVariableKind.Register, + IrOperandKind.Temporary => SsaVariableKind.Temporary, + IrOperandKind.Memory => SsaVariableKind.Memory, + IrOperandKind.Immediate => SsaVariableKind.Constant, + _ => SsaVariableKind.Temporary + }; + + private static IrOperandKind MapOperandType(OperandType type) => type switch + { + OperandType.Register => IrOperandKind.Register, + OperandType.Immediate => IrOperandKind.Immediate, + OperandType.Memory => IrOperandKind.Memory, + OperandType.Address => IrOperandKind.Label, + _ => IrOperandKind.Unknown + }; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.InstructionKinds.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.InstructionKinds.cs new file mode 100644 index 000000000..617c7fea5 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.InstructionKinds.cs @@ -0,0 +1,69 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
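// Illustrative note only, not part of the patch: BuildCfgEdges above links blocks purely by
// index order, so a three-block function yields a straight fall-through chain.
//
//   blocks: bb_0, bb_1, bb_2
//   edges : 0 -> 1 (CfgEdgeKind.FallThrough), 1 -> 2 (CfgEdgeKind.FallThrough)
//   FindExitBlocks(blocks) -> [2]        // only the last block has no successors
//
// Branch and call targets are not resolved at this stage, so conditional edges are absent.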
+using StellaOps.BinaryIndex.Semantic;
+
+namespace StellaOps.BinaryIndex.Disassembly.B2R2;
+
+public sealed partial class B2R2LowUirLiftingService
+{
+    private static bool IsBlockTerminator(DisassembledInstruction instr)
+    {
+        var mnemonic = instr.Mnemonic.ToUpperInvariant();
+        return mnemonic.StartsWith("J", StringComparison.Ordinal) ||
+               mnemonic.StartsWith("B", StringComparison.Ordinal) ||
+               mnemonic == "RET" ||
+               mnemonic == "RETN" ||
+               mnemonic == "RETF" ||
+               mnemonic == "IRET" ||
+               mnemonic == "SYSRET" ||
+               mnemonic == "BLR" ||
+               mnemonic == "BX" ||
+               mnemonic == "JR";
+    }
+
+    private static IrStatementKind MapMnemonicToKind(string mnemonic)
+    {
+        var upper = mnemonic.ToUpperInvariant();
+
+        if (upper.StartsWith("MOV", StringComparison.Ordinal) ||
+            upper.StartsWith("LEA", StringComparison.Ordinal) ||
+            upper.StartsWith("LDR", StringComparison.Ordinal))
+            return IrStatementKind.Assign;
+
+        if (upper.StartsWith("ADD", StringComparison.Ordinal) ||
+            upper.StartsWith("SUB", StringComparison.Ordinal) ||
+            upper.StartsWith("MUL", StringComparison.Ordinal) ||
+            upper.StartsWith("DIV", StringComparison.Ordinal))
+            return IrStatementKind.BinaryOp;
+
+        if (upper.StartsWith("AND", StringComparison.Ordinal) ||
+            upper.StartsWith("OR", StringComparison.Ordinal) ||
+            upper.StartsWith("XOR", StringComparison.Ordinal) ||
+            upper.StartsWith("SH", StringComparison.Ordinal))
+            return IrStatementKind.BinaryOp;
+
+        if (upper.StartsWith("CMP", StringComparison.Ordinal) ||
+            upper.StartsWith("TEST", StringComparison.Ordinal))
+            return IrStatementKind.Compare;
+
+        // Exact call/return mnemonics are checked before the generic "J"/"B" prefix tests
+        // below; otherwise "BL"/"BLX"/"BLR" would be swallowed by the branch case.
+        if (upper == "CALL" || upper == "BL" || upper == "BLX")
+            return IrStatementKind.Call;
+
+        if (upper == "RET" || upper == "RETN" || upper == "BLR")
+            return IrStatementKind.Return;
+
+        if (upper.StartsWith("J", StringComparison.Ordinal) ||
+            upper.StartsWith("B", StringComparison.Ordinal))
+            return IrStatementKind.ConditionalJump;
+
+        if (upper.StartsWith("PUSH", StringComparison.Ordinal) ||
+            upper.StartsWith("POP", StringComparison.Ordinal) ||
+            upper.StartsWith("STR", StringComparison.Ordinal))
+            return IrStatementKind.Store;
+
+        if (upper == "NOP")
+            return IrStatementKind.Nop;
+
+        return IrStatementKind.Unknown;
+    }
+}
diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.IsaMapping.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.IsaMapping.cs
new file mode 100644
index 000000000..8f56fbc33
--- /dev/null
+++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.IsaMapping.cs
@@ -0,0 +1,22 @@
+// Copyright (c) StellaOps. All rights reserved.
+// Licensed under BUSL-1.1. See LICENSE in the project root.
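// Illustrative mapping sketch only, not part of the patch, for the classifiers above:
//
//   MapMnemonicToKind("mov")  -> IrStatementKind.Assign
//   MapMnemonicToKind("add")  -> IrStatementKind.BinaryOp
//   MapMnemonicToKind("cmp")  -> IrStatementKind.Compare
//   MapMnemonicToKind("call") -> IrStatementKind.Call
//   MapMnemonicToKind("jnz")  -> IrStatementKind.ConditionalJump
//   MapMnemonicToKind("ret")  -> IrStatementKind.Return
//   MapMnemonicToKind("nop")  -> IrStatementKind.Nop
//
// IsBlockTerminator is intentionally broad: any J*/B* mnemonic ends the current block.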
+using B2R2; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static ISA MapToB2R2Isa(CpuArchitecture arch) => arch switch + { + CpuArchitecture.X86 => new ISA(Architecture.Intel, WordSize.Bit32), + CpuArchitecture.X86_64 => new ISA(Architecture.Intel, WordSize.Bit64), + CpuArchitecture.ARM32 => new ISA(Architecture.ARMv7, WordSize.Bit32), + CpuArchitecture.ARM64 => new ISA(Architecture.ARMv8, WordSize.Bit64), + CpuArchitecture.MIPS32 => new ISA(Architecture.MIPS, WordSize.Bit32), + CpuArchitecture.MIPS64 => new ISA(Architecture.MIPS, WordSize.Bit64), + CpuArchitecture.RISCV64 => new ISA(Architecture.RISCV, WordSize.Bit64), + CpuArchitecture.PPC32 => new ISA(Architecture.PPC, Endian.Big, WordSize.Bit32), + CpuArchitecture.SPARC => new ISA(Architecture.SPARC, Endian.Big), + _ => throw new NotSupportedException($"Unsupported architecture: {arch}") + }; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LiftToIr.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LiftToIr.cs new file mode 100644 index 000000000..3a1108fb1 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LiftToIr.cs @@ -0,0 +1,77 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; +using StellaOps.BinaryIndex.Semantic; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + /// + public Task LiftToIrAsync( + IReadOnlyList instructions, + string functionName, + ulong startAddress, + CpuArchitecture architecture, + LiftOptions? options = null, + CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(instructions); + ct.ThrowIfCancellationRequested(); + + options ??= LiftOptions.Default; + + if (!SupportsArchitecture(architecture)) + { + throw new NotSupportedException( + $"Architecture {architecture} is not supported for B2R2 LowUIR lifting."); + } + + _logger.LogDebug( + "B2R2 LowUIR lifting {InstructionCount} instructions for function {FunctionName} ({Architecture})", + instructions.Count, + functionName, + architecture); + + var lifted = BuildLiftedFunction(instructions, functionName, startAddress, architecture, options, ct); + + _logger.LogDebug( + "B2R2 LowUIR lifted {StatementCount} statements in {BlockCount} blocks for {FunctionName}", + lifted.Statements.Length, + lifted.BasicBlocks.Length, + functionName); + + return Task.FromResult(lifted); + } + + private LiftedFunction BuildLiftedFunction( + IReadOnlyList instructions, + string functionName, + ulong startAddress, + CpuArchitecture architecture, + LiftOptions options, + CancellationToken ct) + { + var isa = MapToB2R2Isa(architecture); + var maxInstructions = GetEffectiveMaxInstructions(options); + var (statements, basicBlocks) = BuildStatementsAndBlocks(instructions, isa, startAddress, maxInstructions, ct); + + var (blocksWithEdges, edges) = BuildCfgEdges([.. basicBlocks]); + var cfg = new ControlFlowGraph( + EntryBlockId: blocksWithEdges.Length > 0 ? 0 : -1, + ExitBlockIds: FindExitBlocks(blocksWithEdges), + Edges: edges); + + return new LiftedFunction( + Name: functionName, + Address: startAddress, + Statements: [.. 
statements], + BasicBlocks: blocksWithEdges, + Cfg: cfg); + } + + private static int GetEffectiveMaxInstructions(LiftOptions options) + { + return options.MaxInstructions > 0 ? options.MaxInstructions : int.MaxValue; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirLift.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirLift.cs new file mode 100644 index 000000000..2fb1cd406 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirLift.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using B2R2; +using B2R2.FrontEnd; +using Microsoft.Extensions.Logging; +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private List LiftInstructionToLowUir( + ISA isa, + DisassembledInstruction instr, + ref int statementId) + { + var statements = new List(); + + try + { + var bytes = instr.RawBytes.ToArray(); + var binHandle = new BinHandle(bytes, isa, null, true); + var lifter = binHandle.NewLiftingUnit(); + + var liftResult = lifter.LiftInstruction(instr.Address); + + if (liftResult == null || liftResult.Length == 0) + { + statements.Add(CreateFallbackStatement(instr, statementId++)); + return statements; + } + + foreach (var b2r2Stmt in liftResult) + { + var stmtType = b2r2Stmt.GetType().Name; + var (dest, sources) = ExtractOperandsFromB2R2Stmt(b2r2Stmt); + + statements.Add(new IrStatement( + Id: statementId++, + Address: instr.Address, + Kind: MapB2R2StmtTypeToKind(stmtType), + Operation: stmtType, + Destination: dest, + Sources: sources, + Metadata: ImmutableDictionary.Empty)); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to lift instruction at {Address:X} with B2R2 LowUIR", instr.Address); + statements.Add(CreateFallbackStatement(instr, statementId++)); + } + + return statements; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirMapping.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirMapping.cs new file mode 100644 index 000000000..30ed813eb --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirMapping.cs @@ -0,0 +1,79 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static IrStatementKind MapB2R2StmtTypeToKind(string stmtType) => stmtType switch + { + "Put" or "Store" => IrStatementKind.Assign, + "Jmp" => IrStatementKind.Jump, + "CJmp" => IrStatementKind.ConditionalJump, + "Call" => IrStatementKind.Call, + "Ret" => IrStatementKind.Return, + "BinOp" => IrStatementKind.BinaryOp, + "UnOp" => IrStatementKind.UnaryOp, + "Load" => IrStatementKind.Load, + _ => IrStatementKind.Unknown + }; + + private static (IrOperand? Dest, ImmutableArray Sources) ExtractOperandsFromB2R2Stmt(object b2r2Stmt) + { + try + { + var stmtType = b2r2Stmt.GetType().Name; + + var destProp = b2r2Stmt.GetType().GetProperty("Dst") ?? 
+ b2r2Stmt.GetType().GetProperty("Destination"); + var srcProp = b2r2Stmt.GetType().GetProperty("Src") ?? + b2r2Stmt.GetType().GetProperty("Source"); + var srcsProp = b2r2Stmt.GetType().GetProperty("Srcs") ?? + b2r2Stmt.GetType().GetProperty("Sources"); + + IrOperand? dest = null; + var sources = new List(); + + if (destProp != null) + { + var destVal = destProp.GetValue(b2r2Stmt); + if (destVal != null) + { + dest = CreateOperandFromB2R2Expr(destVal); + } + } + + if (srcProp != null) + { + var srcVal = srcProp.GetValue(b2r2Stmt); + if (srcVal != null) + { + sources.Add(CreateOperandFromB2R2Expr(srcVal)); + } + } + + if (srcsProp != null) + { + var srcVals = srcsProp.GetValue(b2r2Stmt) as System.Collections.IEnumerable; + if (srcVals != null) + { + foreach (var src in srcVals) + { + if (src != null) + { + sources.Add(CreateOperandFromB2R2Expr(src)); + } + } + } + } + + return (dest, [.. sources]); + } + catch + { + return (null, ImmutableArray.Empty); + } + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirOperands.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirOperands.cs new file mode 100644 index 000000000..2b3c20fa9 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.LowUirOperands.cs @@ -0,0 +1,106 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; +using System.Globalization; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static IrOperand CreateOperandFromB2R2Expr(object expr) + { + var exprType = expr.GetType().Name; + + switch (exprType) + { + case "Var": + return new IrOperand( + Kind: IrOperandKind.Register, + Name: GetVarName(expr), + Value: null, + BitSize: GetVarBitWidth(expr), + IsMemory: false); + + case "TempVar": + return new IrOperand( + Kind: IrOperandKind.Temporary, + Name: GetTempVarName(expr), + Value: null, + BitSize: GetVarBitWidth(expr), + IsMemory: false); + + case "Num": + return new IrOperand( + Kind: IrOperandKind.Immediate, + Name: null, + Value: GetNumValueLong(expr), + BitSize: GetNumBitWidth(expr), + IsMemory: false); + + case "Load": + return new IrOperand( + Kind: IrOperandKind.Memory, + Name: "mem", + Value: null, + BitSize: GetLoadBitWidth(expr), + IsMemory: true); + + default: + return new IrOperand( + Kind: IrOperandKind.Unknown, + Name: exprType, + Value: null, + BitSize: 64, + IsMemory: false); + } + } + + private static string GetVarName(object varExpr) + { + var nameProp = varExpr.GetType().GetProperty("Name"); + return nameProp?.GetValue(varExpr)?.ToString() ?? "?"; + } + + private static string GetTempVarName(object tempVarExpr) + { + var nameProp = tempVarExpr.GetType().GetProperty("Name"); + return "t" + (nameProp?.GetValue(tempVarExpr)?.ToString() ?? "?"); + } + + private static int GetVarBitWidth(object varExpr) + { + var typeProp = varExpr.GetType().GetProperty("Type"); + if (typeProp == null) return 64; + + var varType = typeProp.GetValue(varExpr); + var bitSizeProp = varType?.GetType().GetProperty("BitSize"); + return (int?)bitSizeProp?.GetValue(varType) ?? 
64; + } + + private static long GetNumValueLong(object numExpr) + { + var valueProp = numExpr.GetType().GetProperty("Value"); + var value = valueProp?.GetValue(numExpr); + return Convert.ToInt64(value, CultureInfo.InvariantCulture); + } + + private static int GetNumBitWidth(object numExpr) + { + var typeProp = numExpr.GetType().GetProperty("Type"); + if (typeProp == null) return 64; + + var numType = typeProp.GetValue(numExpr); + var bitSizeProp = numType?.GetType().GetProperty("BitSize"); + return (int?)bitSizeProp?.GetValue(numType) ?? 64; + } + + private static int GetLoadBitWidth(object loadExpr) + { + var typeProp = loadExpr.GetType().GetProperty("Type"); + if (typeProp == null) return 64; + + var loadType = typeProp.GetValue(loadExpr); + var bitSizeProp = loadType?.GetType().GetProperty("BitSize"); + return (int?)bitSizeProp?.GetValue(loadType) ?? 64; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaBlocks.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaBlocks.cs new file mode 100644 index 000000000..82e0982ef --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaBlocks.cs @@ -0,0 +1,35 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private static ImmutableArray BuildSsaBlocks( + LiftedFunction lifted, + IReadOnlyList ssaStatements) + { + var ssaBlocks = new List(); + + foreach (var block in lifted.BasicBlocks) + { + var blockStatements = ssaStatements + .Where(s => block.StatementIds.Contains(s.Id)) + .ToImmutableArray(); + + var ssaBlock = new SsaBasicBlock( + Id: block.Id, + Label: block.Label, + PhiNodes: ImmutableArray.Empty, + Statements: blockStatements, + Predecessors: block.Predecessors, + Successors: block.Successors); + + ssaBlocks.Add(ssaBlock); + } + + return [.. ssaBlocks]; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaStatements.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaStatements.cs new file mode 100644 index 000000000..24a2678cc --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.SsaStatements.cs @@ -0,0 +1,74 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Semantic; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + private (List Statements, Dictionary Definitions, Dictionary> Uses) + BuildSsaStatements(LiftedFunction lifted, CancellationToken ct) + { + var ssaStatements = new List(); + var definitions = new Dictionary(); + var uses = new Dictionary>(); + var versionCounters = new Dictionary(); + + foreach (var stmt in lifted.Statements) + { + ct.ThrowIfCancellationRequested(); + + SsaVariable? destVar = null; + var sourceVars = new List(); + + if (stmt.Destination != null) + { + var varName = stmt.Destination.Name ?? 
"?"; + if (!versionCounters.TryGetValue(varName, out var version)) + { + version = 0; + } + versionCounters[varName] = version + 1; + + destVar = new SsaVariable( + BaseName: varName, + Version: version + 1, + BitSize: stmt.Destination.BitSize, + Kind: MapOperandKindToSsaKind(stmt.Destination.Kind)); + + definitions[destVar] = stmt.Id; + } + + foreach (var src in stmt.Sources) + { + var varName = src.Name ?? "?"; + var currentVersion = versionCounters.GetValueOrDefault(varName, 0); + var ssaVar = new SsaVariable( + BaseName: varName, + Version: currentVersion, + BitSize: src.BitSize, + Kind: MapOperandKindToSsaKind(src.Kind)); + sourceVars.Add(ssaVar); + + if (!uses.ContainsKey(ssaVar)) + { + uses[ssaVar] = []; + } + uses[ssaVar].Add(stmt.Id); + } + + var ssaStmt = new SsaStatement( + Id: stmt.Id, + Address: stmt.Address, + Kind: stmt.Kind, + Operation: stmt.Operation, + Destination: destVar, + Sources: [.. sourceVars], + PhiSources: null); + + ssaStatements.Add(ssaStmt); + } + + return (ssaStatements, definitions, uses); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.TransformToSsa.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.TransformToSsa.cs new file mode 100644 index 000000000..ad0a78eda --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.TransformToSsa.cs @@ -0,0 +1,53 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; +using StellaOps.BinaryIndex.Semantic; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +public sealed partial class B2R2LowUirLiftingService +{ + /// + public Task TransformToSsaAsync( + LiftedFunction lifted, + CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(lifted); + ct.ThrowIfCancellationRequested(); + + _logger.LogDebug( + "Transforming {FunctionName} to SSA form ({StatementCount} statements)", + lifted.Name, + lifted.Statements.Length); + + var (ssaStatements, definitions, uses) = BuildSsaStatements(lifted, ct); + var ssaBlocks = BuildSsaBlocks(lifted, ssaStatements); + var defUse = BuildDefUseChains(definitions, uses); + + var ssaFunction = new SsaFunction( + Name: lifted.Name, + Address: lifted.Address, + Statements: [.. ssaStatements], + BasicBlocks: ssaBlocks, + DefUse: defUse); + + _logger.LogDebug( + "SSA transformation complete: {StatementCount} SSA statements, {DefCount} definitions", + ssaStatements.Count, + definitions.Count); + + return Task.FromResult(ssaFunction); + } + + private static DefUseChains BuildDefUseChains( + Dictionary definitions, + Dictionary> uses) + { + return new DefUseChains( + Definitions: definitions.ToImmutableDictionary(), + Uses: uses.ToImmutableDictionary( + k => k.Key, + v => v.Value.ToImmutableHashSet())); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.cs index ed9d47fad..7d47764f8 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/B2R2LowUirLiftingService.cs @@ -2,15 +2,9 @@ // Licensed under BUSL-1.1. See LICENSE in the project root. 
// Sprint: SPRINT_20260112_004_BINIDX_b2r2_lowuir_perf_cache (BINIDX-LIR-01) // Task: Implement B2R2 LowUIR adapter for IIrLiftingService - - -using B2R2; -using B2R2.FrontEnd; using Microsoft.Extensions.Logging; -using StellaOps.BinaryIndex.Disassembly; using StellaOps.BinaryIndex.Semantic; using System.Collections.Immutable; -using System.Globalization; namespace StellaOps.BinaryIndex.Disassembly.B2R2; @@ -19,7 +13,7 @@ namespace StellaOps.BinaryIndex.Disassembly.B2R2; /// Maps B2R2 BinIR/LowUIR statements to the StellaOps IR model /// with deterministic ordering and invariant formatting. /// -public sealed class B2R2LowUirLiftingService : IIrLiftingService +public sealed partial class B2R2LowUirLiftingService : IIrLiftingService { private readonly ILogger _logger; @@ -28,7 +22,7 @@ public sealed class B2R2LowUirLiftingService : IIrLiftingService /// public const string AdapterVersion = "1.0.0"; - private static readonly ImmutableHashSet SupportedArchitectures = + private static readonly ImmutableHashSet _supportedArchitectures = [ CpuArchitecture.X86, CpuArchitecture.X86_64, @@ -48,651 +42,5 @@ public sealed class B2R2LowUirLiftingService : IIrLiftingService /// public bool SupportsArchitecture(CpuArchitecture architecture) => - SupportedArchitectures.Contains(architecture); - - /// - public Task LiftToIrAsync( - IReadOnlyList instructions, - string functionName, - ulong startAddress, - CpuArchitecture architecture, - LiftOptions? options = null, - CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(instructions); - ct.ThrowIfCancellationRequested(); - - options ??= LiftOptions.Default; - - if (!SupportsArchitecture(architecture)) - { - throw new NotSupportedException( - $"Architecture {architecture} is not supported for B2R2 LowUIR lifting."); - } - - _logger.LogDebug( - "B2R2 LowUIR lifting {InstructionCount} instructions for function {FunctionName} ({Architecture})", - instructions.Count, - functionName, - architecture); - - var isa = MapToB2R2Isa(architecture); - - var statements = new List(); - var basicBlocks = new List(); - var currentBlockStatements = new List(); - var blockStartAddress = startAddress; - var statementId = 0; - var blockId = 0; - - var effectiveMaxInstructions = options.MaxInstructions > 0 - ? options.MaxInstructions - : int.MaxValue; - - foreach (var instr in instructions.Take(effectiveMaxInstructions)) - { - ct.ThrowIfCancellationRequested(); - - // Lift instruction to B2R2 LowUIR - var liftedStatements = LiftInstructionToLowUir(isa, instr, ref statementId); - statements.AddRange(liftedStatements); - - foreach (var stmt in liftedStatements) - { - currentBlockStatements.Add(stmt.Id); - } - - // Check for block-ending instructions - if (IsBlockTerminator(instr)) - { - var endAddress = instr.Address + (ulong)instr.RawBytes.Length; - var block = new IrBasicBlock( - Id: blockId, - Label: string.Format(CultureInfo.InvariantCulture, "bb_{0}", blockId), - StartAddress: blockStartAddress, - EndAddress: endAddress, - StatementIds: [.. 
currentBlockStatements], - Predecessors: ImmutableArray.Empty, - Successors: ImmutableArray.Empty); - - basicBlocks.Add(block); - blockId++; - currentBlockStatements.Clear(); - blockStartAddress = endAddress; - } - } - - // Handle trailing statements not yet in a block - if (currentBlockStatements.Count > 0 && instructions.Count > 0) - { - var lastInstr = instructions[^1]; - var endAddress = lastInstr.Address + (ulong)lastInstr.RawBytes.Length; - var block = new IrBasicBlock( - Id: blockId, - Label: string.Format(CultureInfo.InvariantCulture, "bb_{0}", blockId), - StartAddress: blockStartAddress, - EndAddress: endAddress, - StatementIds: [.. currentBlockStatements], - Predecessors: ImmutableArray.Empty, - Successors: ImmutableArray.Empty); - basicBlocks.Add(block); - } - - // Build CFG edges deterministically (sorted by address) - var (blocksWithEdges, edges) = BuildCfgEdges([.. basicBlocks]); - - var cfg = new ControlFlowGraph( - EntryBlockId: blocksWithEdges.Length > 0 ? 0 : -1, - ExitBlockIds: FindExitBlocks(blocksWithEdges), - Edges: edges); - - var lifted = new LiftedFunction( - Name: functionName, - Address: startAddress, - Statements: [.. statements], - BasicBlocks: blocksWithEdges, - Cfg: cfg); - - _logger.LogDebug( - "B2R2 LowUIR lifted {StatementCount} statements in {BlockCount} blocks for {FunctionName}", - statements.Count, - blocksWithEdges.Length, - functionName); - - return Task.FromResult(lifted); - } - - /// - public Task TransformToSsaAsync( - LiftedFunction lifted, - CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(lifted); - ct.ThrowIfCancellationRequested(); - - _logger.LogDebug( - "Transforming {FunctionName} to SSA form ({StatementCount} statements)", - lifted.Name, - lifted.Statements.Length); - - // Build SSA form from lifted function - var ssaStatements = new List(); - var ssaBlocks = new List(); - var definitions = new Dictionary(); - var uses = new Dictionary>(); - - var versionCounters = new Dictionary(); - - foreach (var stmt in lifted.Statements) - { - ct.ThrowIfCancellationRequested(); - - SsaVariable? destVar = null; - var sourceVars = new List(); - - // Process destination - if (stmt.Destination != null) - { - var varName = stmt.Destination.Name ?? "?"; - if (!versionCounters.TryGetValue(varName, out var version)) - { - version = 0; - } - versionCounters[varName] = version + 1; - - destVar = new SsaVariable( - BaseName: varName, - Version: version + 1, - BitSize: stmt.Destination.BitSize, - Kind: MapOperandKindToSsaKind(stmt.Destination.Kind)); - - definitions[destVar] = stmt.Id; - } - - // Process sources - foreach (var src in stmt.Sources) - { - var varName = src.Name ?? "?"; - var currentVersion = versionCounters.GetValueOrDefault(varName, 0); - var ssaVar = new SsaVariable( - BaseName: varName, - Version: currentVersion, - BitSize: src.BitSize, - Kind: MapOperandKindToSsaKind(src.Kind)); - sourceVars.Add(ssaVar); - - if (!uses.ContainsKey(ssaVar)) - { - uses[ssaVar] = []; - } - uses[ssaVar].Add(stmt.Id); - } - - var ssaStmt = new SsaStatement( - Id: stmt.Id, - Address: stmt.Address, - Kind: stmt.Kind, - Operation: stmt.Operation, - Destination: destVar, - Sources: [.. 
sourceVars], - PhiSources: null); - - ssaStatements.Add(ssaStmt); - } - - // Build SSA basic blocks from lifted blocks - foreach (var block in lifted.BasicBlocks) - { - var blockStatements = ssaStatements - .Where(s => block.StatementIds.Contains(s.Id)) - .ToImmutableArray(); - - var ssaBlock = new SsaBasicBlock( - Id: block.Id, - Label: block.Label, - PhiNodes: ImmutableArray.Empty, - Statements: blockStatements, - Predecessors: block.Predecessors, - Successors: block.Successors); - - ssaBlocks.Add(ssaBlock); - } - - var defUse = new DefUseChains( - Definitions: definitions.ToImmutableDictionary(), - Uses: uses.ToImmutableDictionary( - k => k.Key, - v => v.Value.ToImmutableHashSet())); - - var ssaFunction = new SsaFunction( - Name: lifted.Name, - Address: lifted.Address, - Statements: [.. ssaStatements], - BasicBlocks: [.. ssaBlocks], - DefUse: defUse); - - _logger.LogDebug( - "SSA transformation complete: {StatementCount} SSA statements, {DefCount} definitions", - ssaStatements.Count, - definitions.Count); - - return Task.FromResult(ssaFunction); - } - - #region B2R2 LowUIR Mapping - - private List LiftInstructionToLowUir( - ISA isa, - DisassembledInstruction instr, - ref int statementId) - { - var statements = new List(); - - try - { - // Create B2R2 BinHandle and lifting unit for the ISA - var bytes = instr.RawBytes.ToArray(); - var binHandle = new BinHandle(bytes, isa, null, true); - var lifter = binHandle.NewLiftingUnit(); - - // Lift to LowUIR using B2R2 - returns Stmt[] directly - var liftResult = lifter.LiftInstruction(instr.Address); - - if (liftResult == null || liftResult.Length == 0) - { - // Fallback to simple mapping if B2R2 lift fails - statements.Add(CreateFallbackStatement(instr, statementId++)); - return statements; - } - - // Map each B2R2 LowUIR statement to our IR model - foreach (var b2r2Stmt in liftResult) - { - var irStmt = MapB2R2Statement(b2r2Stmt, instr.Address, ref statementId); - if (irStmt != null) - { - statements.Add(irStmt); - } - } - - // Ensure at least one statement per instruction for determinism - if (statements.Count == 0) - { - statements.Add(CreateFallbackStatement(instr, statementId++)); - } - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "B2R2 lift failed for instruction at {Address}: {Mnemonic}", - instr.Address, - instr.Mnemonic); - - statements.Add(CreateFallbackStatement(instr, statementId++)); - } - - return statements; - } - - private IrStatement? MapB2R2Statement(object b2r2Stmt, ulong baseAddress, ref int statementId) - { - // B2R2 LowUIR statement types: - // - Put: register assignment - // - Store: memory write - // - Jmp: unconditional jump - // - CJmp: conditional jump - // - InterJmp: indirect jump - // - InterCJmp: indirect conditional jump - // - LMark: label marker - // - SideEffect: side effects (syscall, fence, etc.) 
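For orientation, a rough sketch of how two common x86-64 instructions come out of the lifter and how the statement-type mapping classifies them (illustrative shapes, not exact B2R2 output):

// mov rax, rbx   ->  Put(rax, rbx)              ->  IrStatementKind.Assign
// je  0x401000   ->  CJmp(cond, 0x401000, next) ->  IrStatementKind.ConditionalJump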
- - var stmtType = b2r2Stmt.GetType().Name; - var kind = MapB2R2StmtTypeToKind(stmtType); - - if (kind == IrStatementKind.Unknown) - { - return null; - } - - var (dest, sources) = ExtractOperandsFromB2R2Stmt(b2r2Stmt); - var operation = stmtType; - - return new IrStatement( - Id: statementId++, - Address: baseAddress, - Kind: kind, - Operation: operation, - Destination: dest, - Sources: sources, - Metadata: null); - } - - private static IrStatementKind MapB2R2StmtTypeToKind(string stmtType) => stmtType switch - { - "Put" => IrStatementKind.Assign, - "Store" => IrStatementKind.Store, - "Jmp" => IrStatementKind.Jump, - "CJmp" => IrStatementKind.ConditionalJump, - "InterJmp" => IrStatementKind.Jump, - "InterCJmp" => IrStatementKind.ConditionalJump, - "LMark" => IrStatementKind.Nop, - "SideEffect" => IrStatementKind.Syscall, - _ => IrStatementKind.Unknown - }; - - private static (IrOperand? Dest, ImmutableArray Sources) ExtractOperandsFromB2R2Stmt(object b2r2Stmt) - { - IrOperand? dest = null; - var sources = new List(); - - var type = b2r2Stmt.GetType(); - - // Try to extract destination - var destProp = type.GetProperty("Dest"); - if (destProp != null) - { - var destVal = destProp.GetValue(b2r2Stmt); - if (destVal != null) - { - dest = CreateOperandFromB2R2Expr(destVal); - } - } - - // Try to extract source/value - var srcProp = type.GetProperty("Value") ?? type.GetProperty("Src"); - if (srcProp != null) - { - var srcVal = srcProp.GetValue(b2r2Stmt); - if (srcVal != null) - { - sources.Add(CreateOperandFromB2R2Expr(srcVal)); - } - } - - // Try to extract condition for conditional jumps - var condProp = type.GetProperty("Cond"); - if (condProp != null) - { - var condVal = condProp.GetValue(b2r2Stmt); - if (condVal != null) - { - sources.Add(CreateOperandFromB2R2Expr(condVal)); - } - } - - return (dest, [.. sources]); - } - - private static IrOperand CreateOperandFromB2R2Expr(object expr) - { - var exprType = expr.GetType().Name; - - return exprType switch - { - "Var" => new IrOperand( - Kind: IrOperandKind.Register, - Name: GetVarName(expr), - Value: null, - BitSize: GetVarBitWidth(expr), - IsMemory: false), - - "TempVar" => new IrOperand( - Kind: IrOperandKind.Temporary, - Name: GetTempVarName(expr), - Value: null, - BitSize: GetVarBitWidth(expr), - IsMemory: false), - - "Num" => new IrOperand( - Kind: IrOperandKind.Immediate, - Name: null, - Value: GetNumValueLong(expr), - BitSize: GetNumBitWidth(expr), - IsMemory: false), - - "Load" => new IrOperand( - Kind: IrOperandKind.Memory, - Name: "[mem]", - Value: null, - BitSize: GetLoadBitWidth(expr), - IsMemory: true), - - _ => new IrOperand( - Kind: IrOperandKind.Unknown, - Name: exprType, - Value: null, - BitSize: 64, - IsMemory: false) - }; - } - - private static string GetVarName(object varExpr) - { - var nameProp = varExpr.GetType().GetProperty("Name"); - return nameProp?.GetValue(varExpr)?.ToString() ?? "?"; - } - - private static string GetTempVarName(object tempVarExpr) - { - var numProp = tempVarExpr.GetType().GetProperty("N"); - var num = numProp?.GetValue(tempVarExpr) ?? 0; - return string.Format(CultureInfo.InvariantCulture, "T{0}", num); - } - - private static int GetVarBitWidth(object varExpr) - { - var typeProp = varExpr.GetType().GetProperty("Type"); - if (typeProp == null) return 64; - - var regType = typeProp.GetValue(varExpr); - var bitSizeProp = regType?.GetType().GetProperty("BitSize"); - return (int?)bitSizeProp?.GetValue(regType) ?? 
64; - } - - private static long GetNumValueLong(object numExpr) - { - var valueProp = numExpr.GetType().GetProperty("Value"); - var value = valueProp?.GetValue(numExpr); - return Convert.ToInt64(value, CultureInfo.InvariantCulture); - } - - private static int GetNumBitWidth(object numExpr) - { - var typeProp = numExpr.GetType().GetProperty("Type"); - if (typeProp == null) return 64; - - var numType = typeProp.GetValue(numExpr); - var bitSizeProp = numType?.GetType().GetProperty("BitSize"); - return (int?)bitSizeProp?.GetValue(numType) ?? 64; - } - - private static int GetLoadBitWidth(object loadExpr) - { - var typeProp = loadExpr.GetType().GetProperty("Type"); - if (typeProp == null) return 64; - - var loadType = typeProp.GetValue(loadExpr); - var bitSizeProp = loadType?.GetType().GetProperty("BitSize"); - return (int?)bitSizeProp?.GetValue(loadType) ?? 64; - } - - private static IrStatement CreateFallbackStatement(DisassembledInstruction instr, int id) - { - var sources = instr.Operands.Skip(1) - .Select(op => new IrOperand( - Kind: MapOperandType(op.Type), - Name: op.Text, - Value: op.Value, - BitSize: 64, - IsMemory: op.Type == OperandType.Memory)) - .ToImmutableArray(); - - var dest = instr.Operands.Length > 0 - ? new IrOperand( - Kind: MapOperandType(instr.Operands[0].Type), - Name: instr.Operands[0].Text, - Value: instr.Operands[0].Value, - BitSize: 64, - IsMemory: instr.Operands[0].Type == OperandType.Memory) - : null; - - return new IrStatement( - Id: id, - Address: instr.Address, - Kind: MapMnemonicToKind(instr.Mnemonic), - Operation: instr.Mnemonic, - Destination: dest, - Sources: sources, - Metadata: ImmutableDictionary.Empty.Add("fallback", true)); - } - - private static SsaVariableKind MapOperandKindToSsaKind(IrOperandKind kind) => kind switch - { - IrOperandKind.Register => SsaVariableKind.Register, - IrOperandKind.Temporary => SsaVariableKind.Temporary, - IrOperandKind.Memory => SsaVariableKind.Memory, - IrOperandKind.Immediate => SsaVariableKind.Constant, - _ => SsaVariableKind.Temporary - }; - - private static IrOperandKind MapOperandType(OperandType type) => type switch - { - OperandType.Register => IrOperandKind.Register, - OperandType.Immediate => IrOperandKind.Immediate, - OperandType.Memory => IrOperandKind.Memory, - OperandType.Address => IrOperandKind.Label, - _ => IrOperandKind.Unknown - }; - - #endregion - - #region Helper Methods - - private static ISA MapToB2R2Isa(CpuArchitecture arch) => arch switch - { - CpuArchitecture.X86 => new ISA(Architecture.Intel, WordSize.Bit32), - CpuArchitecture.X86_64 => new ISA(Architecture.Intel, WordSize.Bit64), - CpuArchitecture.ARM32 => new ISA(Architecture.ARMv7, WordSize.Bit32), - CpuArchitecture.ARM64 => new ISA(Architecture.ARMv8, WordSize.Bit64), - CpuArchitecture.MIPS32 => new ISA(Architecture.MIPS, WordSize.Bit32), - CpuArchitecture.MIPS64 => new ISA(Architecture.MIPS, WordSize.Bit64), - CpuArchitecture.RISCV64 => new ISA(Architecture.RISCV, WordSize.Bit64), - CpuArchitecture.PPC32 => new ISA(Architecture.PPC, Endian.Big, WordSize.Bit32), - CpuArchitecture.SPARC => new ISA(Architecture.SPARC, Endian.Big), - _ => throw new NotSupportedException($"Unsupported architecture: {arch}") - }; - - private static bool IsBlockTerminator(DisassembledInstruction instr) - { - var mnemonic = instr.Mnemonic.ToUpperInvariant(); - return mnemonic.StartsWith("J", StringComparison.Ordinal) || - mnemonic.StartsWith("B", StringComparison.Ordinal) || - mnemonic == "RET" || - mnemonic == "RETN" || - mnemonic == "RETF" || - mnemonic == "IRET" || 
- mnemonic == "SYSRET" || - mnemonic == "BLR" || - mnemonic == "BX" || - mnemonic == "JR"; - } - - private static IrStatementKind MapMnemonicToKind(string mnemonic) - { - var upper = mnemonic.ToUpperInvariant(); - - if (upper.StartsWith("MOV", StringComparison.Ordinal) || - upper.StartsWith("LEA", StringComparison.Ordinal) || - upper.StartsWith("LDR", StringComparison.Ordinal)) - return IrStatementKind.Assign; - - if (upper.StartsWith("ADD", StringComparison.Ordinal) || - upper.StartsWith("SUB", StringComparison.Ordinal) || - upper.StartsWith("MUL", StringComparison.Ordinal) || - upper.StartsWith("DIV", StringComparison.Ordinal)) - return IrStatementKind.BinaryOp; - - if (upper.StartsWith("AND", StringComparison.Ordinal) || - upper.StartsWith("OR", StringComparison.Ordinal) || - upper.StartsWith("XOR", StringComparison.Ordinal) || - upper.StartsWith("SH", StringComparison.Ordinal)) - return IrStatementKind.BinaryOp; - - if (upper.StartsWith("CMP", StringComparison.Ordinal) || - upper.StartsWith("TEST", StringComparison.Ordinal)) - return IrStatementKind.Compare; - - if (upper.StartsWith("J", StringComparison.Ordinal) || - upper.StartsWith("B", StringComparison.Ordinal)) - return IrStatementKind.ConditionalJump; - - if (upper == "CALL" || upper == "BL" || upper == "BLX") - return IrStatementKind.Call; - - if (upper == "RET" || upper == "RETN" || upper == "BLR") - return IrStatementKind.Return; - - if (upper.StartsWith("PUSH", StringComparison.Ordinal) || - upper.StartsWith("POP", StringComparison.Ordinal) || - upper.StartsWith("STR", StringComparison.Ordinal)) - return IrStatementKind.Store; - - if (upper == "NOP") - return IrStatementKind.Nop; - - return IrStatementKind.Unknown; - } - - private static (ImmutableArray Blocks, ImmutableArray Edges) BuildCfgEdges( - ImmutableArray blocks) - { - if (blocks.Length == 0) - return (blocks, ImmutableArray.Empty); - - var result = new IrBasicBlock[blocks.Length]; - var edges = new List(); - - for (var i = 0; i < blocks.Length; i++) - { - var block = blocks[i]; - var predecessors = new List(); - var successors = new List(); - - // Fall-through successor (next block in sequence) - if (i < blocks.Length - 1) - { - successors.Add(i + 1); - edges.Add(new CfgEdge( - SourceBlockId: i, - TargetBlockId: i + 1, - Kind: CfgEdgeKind.FallThrough, - Condition: null)); - } - - // Predecessor from fall-through - if (i > 0) - { - predecessors.Add(i - 1); - } - - result[i] = block with - { - Predecessors = [.. predecessors.Distinct().OrderBy(x => x)], - Successors = [.. successors.Distinct().OrderBy(x => x)] - }; - } - - return ([.. result], [.. edges]); - } - - private static ImmutableArray FindExitBlocks(ImmutableArray blocks) - { - return blocks - .Where(b => b.Successors.Length == 0) - .Select(b => b.Id) - .ToImmutableArray(); - } - - #endregion + _supportedArchitectures.Contains(architecture); } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/PooledLifter.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/PooledLifter.cs new file mode 100644 index 000000000..7912a5911 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/PooledLifter.cs @@ -0,0 +1,50 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using B2R2; +using B2R2.FrontEnd; +using B2R2.FrontEnd.BinFile; +using B2R2.FrontEnd.BinLifter; + +namespace StellaOps.BinaryIndex.Disassembly.B2R2; + +/// +/// Pooled B2R2 BinHandle and LiftingUnit for reuse across calls. +/// +public sealed class PooledLifter : IDisposable +{ + private readonly B2R2LifterPool _pool; + private readonly ISA _isa; + private bool _disposed; + + internal PooledLifter( + B2R2LifterPool pool, + ISA isa, + BinHandle binHandle, + LiftingUnit liftingUnit) + { + _pool = pool ?? throw new ArgumentNullException(nameof(pool)); + _isa = isa; + BinHandle = binHandle ?? throw new ArgumentNullException(nameof(binHandle)); + LiftingUnit = liftingUnit ?? throw new ArgumentNullException(nameof(liftingUnit)); + } + + /// + /// The B2R2 BinHandle for this lifter. + /// + public BinHandle BinHandle { get; } + + /// + /// The B2R2 LiftingUnit for this lifter. + /// + public LiftingUnit LiftingUnit { get; } + + /// + /// Returns the lifter to the pool. + /// + public void Dispose() + { + if (_disposed) return; + _disposed = true; + _pool.Return(this, _isa); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/TASKS.md b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/TASKS.md index 245acaf66..18e8d6c31 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/TASKS.md +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/StellaOps.BinaryIndex.Disassembly.B2R2.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly.B2R2/StellaOps.BinaryIndex.Disassembly.B2R2.md (2026-02-04). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/BinaryFormatDetector.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/BinaryFormatDetector.cs new file mode 100644 index 000000000..64ccabe96 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/BinaryFormatDetector.cs @@ -0,0 +1,103 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
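PooledLifter above only pairs a BinHandle with its LiftingUnit and hands itself back to the owning pool on Dispose. A usage sketch, assuming B2R2LifterPool exposes a Rent(ISA) method (the pool type is referenced here but its API is not part of this patch):

// Assumption: pool.Rent(isa) creates or reuses a PooledLifter for the given ISA.
var isa = new ISA(Architecture.Intel, WordSize.Bit64);
using (var lifter = pool.Rent(isa))
{
    // Reuses the cached LiftingUnit instead of building a fresh BinHandle per instruction.
    var stmts = lifter.LiftingUnit.LiftInstruction(address);
}
// Dispose() has now returned the BinHandle/LiftingUnit pair to the pool for the next caller.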
+using System; + +namespace StellaOps.BinaryIndex.Disassembly; + +internal static class BinaryFormatDetector +{ + public static BinaryFormat DetectFormat(ReadOnlySpan bytes) + { + if (bytes.Length < 4) + { + return BinaryFormat.Raw; + } + + if (bytes[0] == 0x7F && bytes[1] == 'E' && bytes[2] == 'L' && bytes[3] == 'F') + { + return BinaryFormat.ELF; + } + + if (bytes[0] == 'M' && bytes[1] == 'Z') + { + return BinaryFormat.PE; + } + + if ((bytes[0] == 0xFE && bytes[1] == 0xED && bytes[2] == 0xFA && (bytes[3] == 0xCE || bytes[3] == 0xCF)) || + (bytes[3] == 0xFE && bytes[2] == 0xED && bytes[1] == 0xFA && (bytes[0] == 0xCE || bytes[0] == 0xCF))) + { + return BinaryFormat.MachO; + } + + if (bytes[0] == 0x00 && bytes[1] == 'a' && bytes[2] == 's' && bytes[3] == 'm') + { + return BinaryFormat.WASM; + } + + return BinaryFormat.Raw; + } + + public static CpuArchitecture DetectArchitecture(ReadOnlySpan bytes, BinaryFormat format) + { + return format switch + { + BinaryFormat.ELF when bytes.Length > 18 => DetectElfArchitecture(bytes), + BinaryFormat.PE when bytes.Length > 0x40 => DetectPeArchitecture(bytes), + BinaryFormat.MachO when bytes.Length > 8 => DetectMachOArchitecture(bytes), + _ => CpuArchitecture.X86_64 + }; + } + + private static CpuArchitecture DetectElfArchitecture(ReadOnlySpan bytes) + { + var machine = (ushort)(bytes[18] | (bytes[19] << 8)); + return machine switch + { + 0x03 => CpuArchitecture.X86, + 0x3E => CpuArchitecture.X86_64, + 0x28 => CpuArchitecture.ARM32, + 0xB7 => CpuArchitecture.ARM64, + 0x08 => CpuArchitecture.MIPS32, + 0xF3 => CpuArchitecture.RISCV64, + 0x14 => CpuArchitecture.PPC32, + 0x02 => CpuArchitecture.SPARC, + _ => bytes[4] == 2 ? CpuArchitecture.X86_64 : CpuArchitecture.X86 + }; + } + + private static CpuArchitecture DetectPeArchitecture(ReadOnlySpan bytes) + { + var peOffset = bytes[0x3C] | (bytes[0x3D] << 8) | (bytes[0x3E] << 16) | (bytes[0x3F] << 24); + if (peOffset < 0 || peOffset + 6 > bytes.Length) + { + return CpuArchitecture.X86; + } + + var machine = (ushort)(bytes[peOffset + 4] | (bytes[peOffset + 5] << 8)); + return machine switch + { + 0x014c => CpuArchitecture.X86, + 0x8664 => CpuArchitecture.X86_64, + 0xaa64 => CpuArchitecture.ARM64, + 0x01c4 => CpuArchitecture.ARM32, + _ => CpuArchitecture.X86 + }; + } + + private static CpuArchitecture DetectMachOArchitecture(ReadOnlySpan bytes) + { + bool isBigEndian = bytes[0] == 0xFE; + uint cpuType = isBigEndian + ? (uint)((bytes[4] << 24) | (bytes[5] << 16) | (bytes[6] << 8) | bytes[7]) + : (uint)(bytes[4] | (bytes[5] << 8) | (bytes[6] << 16) | (bytes[7] << 24)); + + return cpuType switch + { + 0x00000007 => CpuArchitecture.X86, + 0x01000007 => CpuArchitecture.X86_64, + 0x0000000C => CpuArchitecture.ARM32, + 0x0100000C => CpuArchitecture.ARM64, + _ => CpuArchitecture.X86_64 + }; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyOptions.cs new file mode 100644 index 000000000..f40ad5554 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyOptions.cs @@ -0,0 +1,34 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System; + +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Configuration options for the disassembly service. +/// +public sealed class DisassemblyOptions +{ + /// + /// Configuration section name. 
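The detector above keys purely off magic bytes and the machine field, so a short synthetic header is enough to see it work. A sketch for a 64-bit x86 ELF prefix (only the fields the detector reads are set; BinaryFormatDetector is internal, so this would sit beside it, e.g. in an in-assembly test):

// Minimal ELF64/x86-64 prefix: magic, EI_CLASS, and e_machine only.
var header = new byte[20];
header[0] = 0x7F; header[1] = (byte)'E'; header[2] = (byte)'L'; header[3] = (byte)'F';
header[4] = 2;       // EI_CLASS = ELFCLASS64
header[18] = 0x3E;   // e_machine = EM_X86_64 (little-endian low byte)

var format = BinaryFormatDetector.DetectFormat(header);               // BinaryFormat.ELF
var arch = BinaryFormatDetector.DetectArchitecture(header, format);   // CpuArchitecture.X86_64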
+ /// + public const string SectionName = "Disassembly"; + + /// + /// The preferred plugin ID to use for disassembly when multiple plugins are available. + /// If not set, the plugin with the highest priority for the given architecture/format is used. + /// + public string? PreferredPluginId { get; set; } + + /// + /// Plugin-specific preferences by architecture. + /// Key: architecture name (e.g., "x86_64", "arm64"), Value: preferred plugin ID. + /// + public Dictionary ArchitecturePreferences { get; set; } = new(StringComparer.OrdinalIgnoreCase); + + /// + /// Maximum instruction count to disassemble per region (prevents runaway disassembly). + /// Default: 1,000,000 instructions. + /// + public int MaxInstructionsPerRegion { get; set; } = 1_000_000; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyQualityResult.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyQualityResult.cs new file mode 100644 index 000000000..8c43cca84 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyQualityResult.cs @@ -0,0 +1,67 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Result of a disassembly operation with quality metrics. +/// +public sealed record DisassemblyQualityResult +{ + /// + /// The loaded binary information. + /// + public required BinaryInfo Binary { get; init; } + + /// + /// The plugin that produced this result. + /// + public required IDisassemblyPlugin Plugin { get; init; } + + /// + /// Discovered code regions. + /// + public required ImmutableArray CodeRegions { get; init; } + + /// + /// Discovered symbols/functions. + /// + public required ImmutableArray Symbols { get; init; } + + /// + /// Total instructions disassembled across all regions. + /// + public int TotalInstructions { get; init; } + + /// + /// Successfully decoded instructions count. + /// + public int DecodedInstructions { get; init; } + + /// + /// Failed/invalid instruction count. + /// + public int FailedInstructions { get; init; } + + /// + /// Confidence score (0.0-1.0) based on quality metrics. + /// + public double Confidence { get; init; } + + /// + /// Whether this result came from a fallback plugin. + /// + public bool UsedFallback { get; init; } + + /// + /// Reason for fallback if applicable. + /// + public string? FallbackReason { get; init; } + + /// + /// Decode success rate (DecodedInstructions / TotalInstructions). + /// + public double DecodeSuccessRate => + TotalInstructions > 0 ? (double)DecodedInstructions / TotalInstructions : 0.0; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.LoadBinary.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.LoadBinary.cs new file mode 100644 index 000000000..865feb4ca --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.LoadBinary.cs @@ -0,0 +1,60 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Disassembly; + +public sealed partial class DisassemblyService +{ + /// + public (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(Stream stream, string? 
preferredPluginId = null) + { + ArgumentNullException.ThrowIfNull(stream); + + using var memStream = new MemoryStream(); + stream.CopyTo(memStream); + return LoadBinary(memStream.ToArray(), preferredPluginId); + } + + /// + public (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(ReadOnlySpan bytes, string? preferredPluginId = null) + { + var format = BinaryFormatDetector.DetectFormat(bytes); + var architecture = BinaryFormatDetector.DetectArchitecture(bytes, format); + + _logger.LogDebug( + "Detected format {Format} and architecture {Arch} for binary", + format, architecture); + + var pluginId = preferredPluginId ?? GetPreferredPluginId(architecture); + IDisassemblyPlugin? plugin = null; + + if (!string.IsNullOrEmpty(pluginId)) + { + plugin = _registry.GetPlugin(pluginId); + if (plugin != null && !plugin.Capabilities.CanHandle(architecture, format)) + { + _logger.LogWarning( + "Preferred plugin {Plugin} does not support {Arch}/{Format}, falling back to auto-selection", + pluginId, architecture, format); + plugin = null; + } + } + + plugin ??= _registry.FindPlugin(architecture, format); + + if (plugin == null) + { + throw new NotSupportedException( + $"No disassembly plugin available for architecture {architecture} and format {format}"); + } + + var binary = plugin.LoadBinary(bytes, architecture, format); + + _logger.LogInformation( + "Loaded binary using plugin {Plugin}: Format={Format}, Arch={Arch}, Bitness={Bitness}", + plugin.Capabilities.PluginId, binary.Format, binary.Architecture, binary.Bitness); + + return (binary, plugin); + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.cs index a752d7a18..79a543b3e 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyService.cs @@ -1,45 +1,14 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; namespace StellaOps.BinaryIndex.Disassembly; -/// -/// Configuration options for the disassembly service. -/// -public sealed class DisassemblyOptions -{ - /// - /// Configuration section name. - /// - public const string SectionName = "Disassembly"; - - /// - /// The preferred plugin ID to use for disassembly when multiple plugins are available. - /// If not set, the plugin with the highest priority for the given architecture/format is used. - /// - public string? PreferredPluginId { get; set; } - - /// - /// Plugin-specific preferences by architecture. - /// Key: architecture name (e.g., "x86_64", "arm64"), Value: preferred plugin ID. - /// - public Dictionary ArchitecturePreferences { get; set; } = new(StringComparer.OrdinalIgnoreCase); - - /// - /// Maximum instruction count to disassemble per region (prevents runaway disassembly). - /// Default: 1,000,000 instructions. - /// - public int MaxInstructionsPerRegion { get; set; } = 1_000_000; -} - /// /// Default implementation of the disassembly service facade. 
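LoadBinary is a thin facade over detection plus plugin selection: detect format and architecture, honor a preferred plugin if it can handle the pair, otherwise fall back to the registry's best match. A consumer-side sketch, with the plugin id and path as placeholders and the service assumed to come from DI:

var service = provider.GetRequiredService<IDisassemblyService>();
var bytes = File.ReadAllBytes("/tmp/sample.bin");    // placeholder path

// Auto-select based on detected format/architecture...
var (binary, plugin) = service.LoadBinary(bytes);

// ...or ask for a specific plugin; an incompatible preference falls back to auto-selection.
var (binary2, preferred) = service.LoadBinary(bytes, preferredPluginId: "b2r2");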
/// -public sealed class DisassemblyService : IDisassemblyService +public sealed partial class DisassemblyService : IDisassemblyService { private readonly IDisassemblyPluginRegistry _registry; private readonly DisassemblyOptions _options; @@ -64,62 +33,6 @@ public sealed class DisassemblyService : IDisassemblyService /// public IDisassemblyPluginRegistry Registry => _registry; - /// - public (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(Stream stream, string? preferredPluginId = null) - { - ArgumentNullException.ThrowIfNull(stream); - - // Read stream to byte array for format detection - using var memStream = new MemoryStream(); - stream.CopyTo(memStream); - return LoadBinary(memStream.ToArray(), preferredPluginId); - } - - /// - public (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(ReadOnlySpan bytes, string? preferredPluginId = null) - { - // First, detect format and architecture to find appropriate plugin - var format = DetectFormat(bytes); - var architecture = DetectArchitecture(bytes, format); - - _logger.LogDebug( - "Detected format {Format} and architecture {Arch} for binary", - format, architecture); - - // Find the best plugin - var pluginId = preferredPluginId ?? GetPreferredPluginId(architecture); - IDisassemblyPlugin? plugin = null; - - if (!string.IsNullOrEmpty(pluginId)) - { - plugin = _registry.GetPlugin(pluginId); - if (plugin != null && !plugin.Capabilities.CanHandle(architecture, format)) - { - _logger.LogWarning( - "Preferred plugin {Plugin} does not support {Arch}/{Format}, falling back to auto-selection", - pluginId, architecture, format); - plugin = null; - } - } - - plugin ??= _registry.FindPlugin(architecture, format); - - if (plugin == null) - { - throw new NotSupportedException( - $"No disassembly plugin available for architecture {architecture} and format {format}"); - } - - // Load the binary with the selected plugin - var binary = plugin.LoadBinary(bytes, architecture, format); - - _logger.LogInformation( - "Loaded binary using plugin {Plugin}: Format={Format}, Arch={Arch}, Bitness={Bitness}", - plugin.Capabilities.PluginId, binary.Format, binary.Architecture, binary.Bitness); - - return (binary, plugin); - } - private string? 
GetPreferredPluginId(CpuArchitecture architecture) { var archName = architecture.ToString(); @@ -127,95 +40,7 @@ public sealed class DisassemblyService : IDisassemblyService { return pluginId; } + return _options.PreferredPluginId; } - - #region Format/Architecture Detection - - private static BinaryFormat DetectFormat(ReadOnlySpan bytes) - { - if (bytes.Length < 4) return BinaryFormat.Raw; - - // ELF magic: 0x7F 'E' 'L' 'F' - if (bytes[0] == 0x7F && bytes[1] == 'E' && bytes[2] == 'L' && bytes[3] == 'F') - return BinaryFormat.ELF; - - // PE magic: 'M' 'Z' - if (bytes[0] == 'M' && bytes[1] == 'Z') - return BinaryFormat.PE; - - // Mach-O magic - if ((bytes[0] == 0xFE && bytes[1] == 0xED && bytes[2] == 0xFA && (bytes[3] == 0xCE || bytes[3] == 0xCF)) || - (bytes[3] == 0xFE && bytes[2] == 0xED && bytes[1] == 0xFA && (bytes[0] == 0xCE || bytes[0] == 0xCF))) - return BinaryFormat.MachO; - - // WASM magic: 0x00 'a' 's' 'm' - if (bytes[0] == 0x00 && bytes[1] == 'a' && bytes[2] == 's' && bytes[3] == 'm') - return BinaryFormat.WASM; - - return BinaryFormat.Raw; - } - - private static CpuArchitecture DetectArchitecture(ReadOnlySpan bytes, BinaryFormat format) - { - return format switch - { - BinaryFormat.ELF when bytes.Length > 18 => DetectElfArchitecture(bytes), - BinaryFormat.PE when bytes.Length > 0x40 => DetectPeArchitecture(bytes), - BinaryFormat.MachO when bytes.Length > 8 => DetectMachOArchitecture(bytes), - _ => CpuArchitecture.X86_64 // Default - }; - } - - private static CpuArchitecture DetectElfArchitecture(ReadOnlySpan bytes) - { - var machine = (ushort)(bytes[18] | (bytes[19] << 8)); - return machine switch - { - 0x03 => CpuArchitecture.X86, - 0x3E => CpuArchitecture.X86_64, - 0x28 => CpuArchitecture.ARM32, - 0xB7 => CpuArchitecture.ARM64, - 0x08 => CpuArchitecture.MIPS32, - 0xF3 => CpuArchitecture.RISCV64, - 0x14 => CpuArchitecture.PPC32, - 0x02 => CpuArchitecture.SPARC, - _ => bytes[4] == 2 ? CpuArchitecture.X86_64 : CpuArchitecture.X86 - }; - } - - private static CpuArchitecture DetectPeArchitecture(ReadOnlySpan bytes) - { - var peOffset = bytes[0x3C] | (bytes[0x3D] << 8) | (bytes[0x3E] << 16) | (bytes[0x3F] << 24); - if (peOffset < 0 || peOffset + 6 > bytes.Length) return CpuArchitecture.X86; - - var machine = (ushort)(bytes[peOffset + 4] | (bytes[peOffset + 5] << 8)); - return machine switch - { - 0x014c => CpuArchitecture.X86, - 0x8664 => CpuArchitecture.X86_64, - 0xaa64 => CpuArchitecture.ARM64, - 0x01c4 => CpuArchitecture.ARM32, - _ => CpuArchitecture.X86 - }; - } - - private static CpuArchitecture DetectMachOArchitecture(ReadOnlySpan bytes) - { - bool isBigEndian = bytes[0] == 0xFE; - uint cpuType = isBigEndian - ? (uint)((bytes[4] << 24) | (bytes[5] << 16) | (bytes[6] << 8) | bytes[7]) - : (uint)(bytes[4] | (bytes[5] << 8) | (bytes[6] << 16) | (bytes[7] << 24)); - - return cpuType switch - { - 0x00000007 => CpuArchitecture.X86, - 0x01000007 => CpuArchitecture.X86_64, - 0x0000000C => CpuArchitecture.ARM32, - 0x0100000C => CpuArchitecture.ARM64, - _ => CpuArchitecture.X86_64 - }; - } - - #endregion } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.Hybrid.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.Hybrid.cs new file mode 100644 index 000000000..acacb22f1 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.Hybrid.cs @@ -0,0 +1,78 @@ +// Copyright (c) StellaOps. All rights reserved. 
+// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Extension methods for configuring hybrid disassembly services. +/// +public static partial class DisassemblyServiceCollectionExtensions +{ + /// + /// Adds the hybrid disassembly service with fallback logic between plugins. + /// This replaces the standard disassembly service with a hybrid version that + /// automatically falls back to secondary plugins when primary quality is low. + /// + /// The service collection. + /// Configuration for binding options. + /// The service collection for chaining. + public static IServiceCollection AddHybridDisassemblyServices( + this IServiceCollection services, + IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddOptions() + .Bind(configuration.GetSection(DisassemblyOptions.SectionName)) + .ValidateOnStart(); + + services.AddOptions() + .Bind(configuration.GetSection(HybridDisassemblyOptions.SectionName)) + .ValidateOnStart(); + + services.TryAddSingleton(); + services.AddSingleton(); + + return services; + } + + /// + /// Adds the hybrid disassembly service with configuration actions. + /// + /// The service collection. + /// Action to configure hybrid options. + /// Optional action to configure standard options. + /// The service collection for chaining. + public static IServiceCollection AddHybridDisassemblyServices( + this IServiceCollection services, + Action configureHybrid, + Action? configureDisassembly = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configureHybrid); + + if (configureDisassembly != null) + { + services.AddOptions() + .Configure(configureDisassembly) + .ValidateOnStart(); + } + else + { + services.AddOptions(); + } + + services.AddOptions() + .Configure(configureHybrid) + .ValidateOnStart(); + + services.TryAddSingleton(); + services.AddSingleton(); + + return services; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.cs index 34ac4621a..b47633677 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/DisassemblyServiceCollectionExtensions.cs @@ -10,7 +10,7 @@ namespace StellaOps.BinaryIndex.Disassembly; /// /// Extension methods for configuring disassembly services. /// -public static class DisassemblyServiceCollectionExtensions +public static partial class DisassemblyServiceCollectionExtensions { /// /// Adds the disassembly service infrastructure (registry and service facade). @@ -66,81 +66,4 @@ public static class DisassemblyServiceCollectionExtensions return services; } - - /// - /// Adds the hybrid disassembly service with fallback logic between plugins. - /// This replaces the standard disassembly service with a hybrid version that - /// automatically falls back to secondary plugins when primary quality is low. - /// - /// The service collection. - /// Configuration for binding options. - /// The service collection for chaining. 
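The action-based overload above is the quickest way to opt into fallback with explicit thresholds. A wiring sketch, assuming the configure delegates take HybridDisassemblyOptions and DisassemblyOptions and using placeholder plugin ids:

services.AddHybridDisassemblyServices(
    hybrid =>
    {
        hybrid.PrimaryPluginId = "b2r2";          // placeholder ids
        hybrid.FallbackPluginId = "iced";
        hybrid.MinConfidenceThreshold = 0.75;
        hybrid.MinDecodeSuccessRate = 0.85;
    },
    disassembly => disassembly.MaxInstructionsPerRegion = 500_000);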
- public static IServiceCollection AddHybridDisassemblyServices( - this IServiceCollection services, - IConfiguration configuration) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - // Register standard options - services.AddOptions() - .Bind(configuration.GetSection(DisassemblyOptions.SectionName)) - .ValidateOnStart(); - - // Register hybrid options - services.AddOptions() - .Bind(configuration.GetSection(HybridDisassemblyOptions.SectionName)) - .ValidateOnStart(); - - // Register the plugin registry - services.TryAddSingleton(); - - // Register hybrid service as IDisassemblyService - services.AddSingleton(); - services.AddSingleton(sp => sp.GetRequiredService()); - - return services; - } - - /// - /// Adds the hybrid disassembly service with configuration actions. - /// - /// The service collection. - /// Action to configure hybrid options. - /// Optional action to configure standard options. - /// The service collection for chaining. - public static IServiceCollection AddHybridDisassemblyServices( - this IServiceCollection services, - Action configureHybrid, - Action? configureDisassembly = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configureHybrid); - - // Register standard options - if (configureDisassembly != null) - { - services.AddOptions() - .Configure(configureDisassembly) - .ValidateOnStart(); - } - else - { - services.AddOptions(); - } - - // Register hybrid options - services.AddOptions() - .Configure(configureHybrid) - .ValidateOnStart(); - - // Register the plugin registry - services.TryAddSingleton(); - - // Register hybrid service as IDisassemblyService - services.AddSingleton(); - services.AddSingleton(sp => sp.GetRequiredService()); - - return services; - } } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyOptions.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyOptions.cs new file mode 100644 index 000000000..8e4613ca8 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyOptions.cs @@ -0,0 +1,55 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +namespace StellaOps.BinaryIndex.Disassembly; + +/// +/// Configuration options for hybrid disassembly with fallback logic. +/// +public sealed class HybridDisassemblyOptions +{ + /// + /// Configuration section name. + /// + public const string SectionName = "HybridDisassembly"; + + /// + /// Primary plugin ID to try first. If null, auto-selects highest priority plugin. + /// + public string? PrimaryPluginId { get; set; } + + /// + /// Fallback plugin ID to use when primary fails quality threshold. + /// + public string? FallbackPluginId { get; set; } + + /// + /// Minimum confidence score (0.0-1.0) required to accept primary plugin results. + /// If primary result confidence is below this, fallback is attempted. + /// + public double MinConfidenceThreshold { get; set; } = 0.7; + + /// + /// Minimum function discovery count. If primary finds fewer functions, fallback is attempted. + /// + public int MinFunctionCount { get; set; } = 1; + + /// + /// Minimum instruction decode success rate (0.0-1.0). + /// + public double MinDecodeSuccessRate { get; set; } = 0.8; + + /// + /// Whether to automatically fallback when primary plugin doesn't support the architecture. 
+ /// + public bool AutoFallbackOnUnsupported { get; set; } = true; + + /// + /// Whether to enable hybrid fallback logic at all. If false, behaves like standard service. + /// + public bool EnableFallback { get; set; } = true; + + /// + /// Timeout in seconds for each plugin attempt. + /// + public int PluginTimeoutSeconds { get; set; } = 120; +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.LoadBinaryWithQuality.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.LoadBinaryWithQuality.cs new file mode 100644 index 000000000..54ed92768 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.LoadBinaryWithQuality.cs @@ -0,0 +1,74 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Disassembly; + +public sealed partial class HybridDisassemblyService +{ + /// + /// Loads binary with quality assessment and returns detailed quality result. + /// + /// The binary data. + /// Optional preferred plugin ID. + /// A quality result with metrics and fallback info. + public DisassemblyQualityResult LoadBinaryWithQuality(ReadOnlySpan bytes, string? preferredPluginId = null) + { + var format = BinaryFormatDetector.DetectFormat(bytes); + var architecture = BinaryFormatDetector.DetectArchitecture(bytes, format); + + var primaryPlugin = GetPrimaryPlugin(architecture, format, preferredPluginId); + if (primaryPlugin is null) + { + throw new NotSupportedException( + $"No disassembly plugin available for architecture {architecture} and format {format}"); + } + + var primaryResult = AssessQuality(primaryPlugin, bytes, architecture, format); + + if (MeetsQualityThreshold(primaryResult)) + { + _logger.LogInformation( + "Primary plugin {Plugin} met quality threshold (confidence: {Confidence:P1})", + primaryPlugin.Capabilities.PluginId, primaryResult.Confidence); + return primaryResult; + } + + if (!_options.EnableFallback) + { + _logger.LogWarning( + "Primary plugin {Plugin} below threshold (confidence: {Confidence:P1}), fallback disabled", + primaryPlugin.Capabilities.PluginId, primaryResult.Confidence); + return primaryResult; + } + + var fallbackPlugin = GetFallbackPlugin(primaryPlugin, architecture, format); + if (fallbackPlugin is null) + { + _logger.LogWarning( + "No fallback plugin available for {Arch}/{Format}", + architecture, format); + return primaryResult; + } + + var fallbackResult = AssessQuality(fallbackPlugin, bytes, architecture, format); + + if (fallbackResult.Confidence > primaryResult.Confidence) + { + _logger.LogInformation( + "Using fallback plugin {Plugin} (confidence: {Confidence:P1} > primary: {PrimaryConf:P1})", + fallbackPlugin.Capabilities.PluginId, fallbackResult.Confidence, primaryResult.Confidence); + + return fallbackResult with + { + UsedFallback = true, + FallbackReason = CreateFallbackReason(primaryResult) + }; + } + + _logger.LogDebug( + "Keeping primary plugin result (confidence: {Confidence:P1})", + primaryResult.Confidence); + return primaryResult; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.PluginSelection.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.PluginSelection.cs new file mode 100644 index 000000000..05eeb065b --- /dev/null +++ 
b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.PluginSelection.cs @@ -0,0 +1,111 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; + +namespace StellaOps.BinaryIndex.Disassembly; + +public sealed partial class HybridDisassemblyService +{ + private (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadWithBestPlugin( + ReadOnlySpan bytes, + CpuArchitecture architecture, + BinaryFormat format, + string? preferredPluginId) + { + var plugin = GetPluginById(preferredPluginId) ?? _registry.FindPlugin(architecture, format); + + if (plugin == null) + { + throw new NotSupportedException( + $"No disassembly plugin available for architecture {architecture} and format {format}"); + } + + var binary = plugin.LoadBinary(bytes, architecture, format); + return (binary, plugin); + } + + private (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadWithFallback( + ReadOnlySpan bytes, + CpuArchitecture architecture, + BinaryFormat format, + string? preferredPluginId) + { + var primaryPlugin = GetPrimaryPlugin(architecture, format, preferredPluginId); + + if (primaryPlugin is null) + { + var fallback = GetFallbackPlugin(null, architecture, format); + if (fallback is null) + { + throw new NotSupportedException( + $"No disassembly plugin available for architecture {architecture} and format {format}"); + } + + return (fallback.LoadBinary(bytes, architecture, format), fallback); + } + + if (_options.AutoFallbackOnUnsupported && !primaryPlugin.Capabilities.CanHandle(architecture, format)) + { + _logger.LogDebug( + "Primary plugin {Plugin} doesn't support {Arch}/{Format}, using fallback", + primaryPlugin.Capabilities.PluginId, architecture, format); + + var fallback = GetFallbackPlugin(primaryPlugin, architecture, format); + if (fallback is not null) + { + return (fallback.LoadBinary(bytes, architecture, format), fallback); + } + } + + return (primaryPlugin.LoadBinary(bytes, architecture, format), primaryPlugin); + } + + private IDisassemblyPlugin? GetPrimaryPlugin( + CpuArchitecture architecture, + BinaryFormat format, + string? preferredPluginId) + { + if (!string.IsNullOrEmpty(preferredPluginId)) + { + return GetPluginById(preferredPluginId); + } + + if (!string.IsNullOrEmpty(_options.PrimaryPluginId)) + { + return GetPluginById(_options.PrimaryPluginId); + } + + return _registry.FindPlugin(architecture, format); + } + + private IDisassemblyPlugin? GetFallbackPlugin( + IDisassemblyPlugin? excludePlugin, + CpuArchitecture architecture, + BinaryFormat format) + { + if (!string.IsNullOrEmpty(_options.FallbackPluginId)) + { + var fallback = GetPluginById(_options.FallbackPluginId); + if (fallback?.Capabilities.CanHandle(architecture, format) == true) + { + return fallback; + } + } + + return _registry.Plugins + .Where(p => p != excludePlugin) + .Where(p => p.Capabilities.CanHandle(architecture, format)) + .OrderByDescending(p => p.Capabilities.Priority) + .FirstOrDefault(); + } + + private IDisassemblyPlugin? GetPluginById(string? pluginId) + { + return string.IsNullOrEmpty(pluginId) ? 
null : _registry.GetPlugin(pluginId); + } + + private static string CreateFallbackReason(DisassemblyQualityResult primaryResult) + { + return $"Primary confidence ({primaryResult.Confidence:P1}) below threshold"; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.Quality.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.Quality.cs new file mode 100644 index 000000000..a708ad41c --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.Quality.cs @@ -0,0 +1,85 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging; +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly; + +public sealed partial class HybridDisassemblyService +{ + private DisassemblyQualityResult AssessQuality( + IDisassemblyPlugin plugin, + ReadOnlySpan bytes, + CpuArchitecture architecture, + BinaryFormat format) + { + try + { + var binary = plugin.LoadBinary(bytes, architecture, format); + var codeRegions = plugin.GetCodeRegions(binary).ToImmutableArray(); + var symbols = plugin.GetSymbols(binary).ToImmutableArray(); + + int totalInstructions = 0; + int decodedInstructions = 0; + int failedInstructions = 0; + + foreach (var region in codeRegions.Take(3)) + { + var instructions = plugin.Disassemble(binary, region).Take(1000).ToList(); + totalInstructions += instructions.Count; + + foreach (var instr in instructions) + { + if (instr.Mnemonic.Equals("??", StringComparison.Ordinal) || + instr.Mnemonic.Equals("invalid", StringComparison.OrdinalIgnoreCase) || + instr.Mnemonic.Equals("db", StringComparison.OrdinalIgnoreCase)) + { + failedInstructions++; + } + else + { + decodedInstructions++; + } + } + } + + var confidence = CalculateConfidence( + symbols.Length, + decodedInstructions, + failedInstructions, + codeRegions.Length); + + return new DisassemblyQualityResult + { + Binary = binary, + Plugin = plugin, + CodeRegions = codeRegions, + Symbols = symbols, + TotalInstructions = totalInstructions, + DecodedInstructions = decodedInstructions, + FailedInstructions = failedInstructions, + Confidence = confidence, + UsedFallback = false + }; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Plugin {Plugin} failed during quality assessment", plugin.Capabilities.PluginId); + + return new DisassemblyQualityResult + { + Binary = null!, + Plugin = plugin, + CodeRegions = [], + Symbols = [], + TotalInstructions = 0, + DecodedInstructions = 0, + FailedInstructions = 0, + Confidence = 0.0, + UsedFallback = false, + FallbackReason = $"Plugin failed: {ex.Message}" + }; + } + } + +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.QualityThresholds.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.QualityThresholds.cs new file mode 100644 index 000000000..53447b8d2 --- /dev/null +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.QualityThresholds.cs @@ -0,0 +1,45 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
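For illustration, a minimal caller-side sketch of the LoadBinaryWithQuality entry point and the AssessQuality path shown above. It assumes a HybridDisassemblyService instance is already resolved from DI (registry, options, and logger wiring are not part of this diff) and that "sample.bin" is a hypothetical input; only members that appear in this diff (Confidence, DecodeSuccessRate, UsedFallback, FallbackReason, Plugin.Capabilities.PluginId) are used.

// Sketch only: hybridService construction/DI wiring is assumed, not shown in this diff.
byte[] fileBytes = File.ReadAllBytes("sample.bin");   // hypothetical input path

DisassemblyQualityResult result = hybridService.LoadBinaryWithQuality(fileBytes);

Console.WriteLine($"Plugin: {result.Plugin.Capabilities.PluginId}");
Console.WriteLine($"Confidence: {result.Confidence:P1}, decode rate: {result.DecodeSuccessRate:P1}");

if (result.UsedFallback)
{
    // FallbackReason explains why the primary plugin's output was rejected.
    Console.WriteLine($"Fallback used: {result.FallbackReason}");
}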
+namespace StellaOps.BinaryIndex.Disassembly; + +public sealed partial class HybridDisassemblyService +{ + private static double CalculateConfidence( + int symbolCount, + int decodedInstructions, + int failedInstructions, + int regionCount) + { + var totalInstructions = decodedInstructions + failedInstructions; + if (totalInstructions == 0) + { + return 0.0; + } + + var decodeRate = (double)decodedInstructions / totalInstructions; + var symbolScore = Math.Min(1.0, symbolCount / 10.0); + var regionScore = Math.Min(1.0, regionCount / 5.0); + + return (decodeRate * 0.5) + (symbolScore * 0.3) + (regionScore * 0.2); + } + + private bool MeetsQualityThreshold(DisassemblyQualityResult result) + { + if (result.Confidence < _options.MinConfidenceThreshold) + { + return false; + } + + if (result.Symbols.Length < _options.MinFunctionCount) + { + return false; + } + + if (result.DecodeSuccessRate < _options.MinDecodeSuccessRate) + { + return false; + } + + return true; + } +} diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.cs b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.cs index 246d7aaf4..f0b8cc043 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.cs +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/HybridDisassemblyService.cs @@ -1,132 +1,15 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using System.Collections.Immutable; namespace StellaOps.BinaryIndex.Disassembly; -/// -/// Configuration options for hybrid disassembly with fallback logic. -/// -public sealed class HybridDisassemblyOptions -{ - /// - /// Configuration section name. - /// - public const string SectionName = "HybridDisassembly"; - - /// - /// Primary plugin ID to try first. If null, auto-selects highest priority plugin. - /// - public string? PrimaryPluginId { get; set; } - - /// - /// Fallback plugin ID to use when primary fails quality threshold. - /// - public string? FallbackPluginId { get; set; } - - /// - /// Minimum confidence score (0.0-1.0) required to accept primary plugin results. - /// If primary result confidence is below this, fallback is attempted. - /// - public double MinConfidenceThreshold { get; set; } = 0.7; - - /// - /// Minimum function discovery count. If primary finds fewer functions, fallback is attempted. - /// - public int MinFunctionCount { get; set; } = 1; - - /// - /// Minimum instruction decode success rate (0.0-1.0). - /// - public double MinDecodeSuccessRate { get; set; } = 0.8; - - /// - /// Whether to automatically fallback when primary plugin doesn't support the architecture. - /// - public bool AutoFallbackOnUnsupported { get; set; } = true; - - /// - /// Whether to enable hybrid fallback logic at all. If false, behaves like standard service. - /// - public bool EnableFallback { get; set; } = true; - - /// - /// Timeout in seconds for each plugin attempt. - /// - public int PluginTimeoutSeconds { get; set; } = 120; -} - -/// -/// Result of a disassembly operation with quality metrics. -/// -public sealed record DisassemblyQualityResult -{ - /// - /// The loaded binary information. - /// - public required BinaryInfo Binary { get; init; } - - /// - /// The plugin that produced this result. - /// - public required IDisassemblyPlugin Plugin { get; init; } - - /// - /// Discovered code regions. 
- /// - public required ImmutableArray CodeRegions { get; init; } - - /// - /// Discovered symbols/functions. - /// - public required ImmutableArray Symbols { get; init; } - - /// - /// Total instructions disassembled across all regions. - /// - public int TotalInstructions { get; init; } - - /// - /// Successfully decoded instructions count. - /// - public int DecodedInstructions { get; init; } - - /// - /// Failed/invalid instruction count. - /// - public int FailedInstructions { get; init; } - - /// - /// Confidence score (0.0-1.0) based on quality metrics. - /// - public double Confidence { get; init; } - - /// - /// Whether this result came from a fallback plugin. - /// - public bool UsedFallback { get; init; } - - /// - /// Reason for fallback if applicable. - /// - public string? FallbackReason { get; init; } - - /// - /// Decode success rate (DecodedInstructions / TotalInstructions). - /// - public double DecodeSuccessRate => - TotalInstructions > 0 ? (double)DecodedInstructions / TotalInstructions : 0.0; -} - /// /// Hybrid disassembly service that implements smart routing between plugins /// with quality-based fallback logic (e.g., B2R2 primary -> Ghidra fallback). /// -public sealed class HybridDisassemblyService : IDisassemblyService +public sealed partial class HybridDisassemblyService : IDisassemblyService { private readonly IDisassemblyPluginRegistry _registry; private readonly HybridDisassemblyOptions _options; @@ -164,9 +47,8 @@ public sealed class HybridDisassemblyService : IDisassemblyService /// public (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadBinary(ReadOnlySpan bytes, string? preferredPluginId = null) { - // Detect format/architecture - var format = DetectFormat(bytes); - var architecture = DetectArchitecture(bytes, format); + var format = BinaryFormatDetector.DetectFormat(bytes); + var architecture = BinaryFormatDetector.DetectArchitecture(bytes, format); _logger.LogDebug( "Hybrid service: Detected format {Format} and architecture {Arch}", @@ -174,400 +56,10 @@ public sealed class HybridDisassemblyService : IDisassemblyService if (!_options.EnableFallback) { - // Simple mode - just use the best plugin return LoadWithBestPlugin(bytes, architecture, format, preferredPluginId); } - // Hybrid mode with fallback logic return LoadWithFallback(bytes, architecture, format, preferredPluginId); } - /// - /// Loads binary with quality assessment and returns detailed quality result. - /// - /// The binary data. - /// Optional preferred plugin ID. - /// A quality result with metrics and fallback info. - public DisassemblyQualityResult LoadBinaryWithQuality(ReadOnlySpan bytes, string? 
preferredPluginId = null) - { - var format = DetectFormat(bytes); - var architecture = DetectArchitecture(bytes, format); - - // Try primary plugin - var primaryPlugin = GetPrimaryPlugin(architecture, format, preferredPluginId); - if (primaryPlugin is null) - { - throw new NotSupportedException( - $"No disassembly plugin available for architecture {architecture} and format {format}"); - } - - var primaryResult = AssessQuality(primaryPlugin, bytes, architecture, format); - - // Check if primary meets quality threshold - if (MeetsQualityThreshold(primaryResult)) - { - _logger.LogInformation( - "Primary plugin {Plugin} met quality threshold (confidence: {Confidence:P1})", - primaryPlugin.Capabilities.PluginId, primaryResult.Confidence); - return primaryResult; - } - - // Try fallback - if (!_options.EnableFallback) - { - _logger.LogWarning( - "Primary plugin {Plugin} below threshold (confidence: {Confidence:P1}), fallback disabled", - primaryPlugin.Capabilities.PluginId, primaryResult.Confidence); - return primaryResult; - } - - var fallbackPlugin = GetFallbackPlugin(primaryPlugin, architecture, format); - if (fallbackPlugin is null) - { - _logger.LogWarning( - "No fallback plugin available for {Arch}/{Format}", - architecture, format); - return primaryResult; - } - - var fallbackResult = AssessQuality(fallbackPlugin, bytes, architecture, format); - - // Use fallback if it's better - if (fallbackResult.Confidence > primaryResult.Confidence) - { - _logger.LogInformation( - "Using fallback plugin {Plugin} (confidence: {Confidence:P1} > primary: {PrimaryConf:P1})", - fallbackPlugin.Capabilities.PluginId, fallbackResult.Confidence, primaryResult.Confidence); - - return fallbackResult with - { - UsedFallback = true, - FallbackReason = $"Primary confidence ({primaryResult.Confidence:P1}) below threshold" - }; - } - - _logger.LogDebug( - "Keeping primary plugin result (confidence: {Confidence:P1})", - primaryResult.Confidence); - return primaryResult; - } - - #region Private Methods - - private (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadWithBestPlugin( - ReadOnlySpan bytes, - CpuArchitecture architecture, - BinaryFormat format, - string? preferredPluginId) - { - var plugin = GetPluginById(preferredPluginId) ?? _registry.FindPlugin(architecture, format); - - if (plugin == null) - { - throw new NotSupportedException( - $"No disassembly plugin available for architecture {architecture} and format {format}"); - } - - var binary = plugin.LoadBinary(bytes, architecture, format); - return (binary, plugin); - } - - private (BinaryInfo Binary, IDisassemblyPlugin Plugin) LoadWithFallback( - ReadOnlySpan bytes, - CpuArchitecture architecture, - BinaryFormat format, - string? 
preferredPluginId) - { - var primaryPlugin = GetPrimaryPlugin(architecture, format, preferredPluginId); - - if (primaryPlugin is null) - { - // No primary, try fallback directly - var fallback = GetFallbackPlugin(null, architecture, format); - if (fallback is null) - { - throw new NotSupportedException( - $"No disassembly plugin available for architecture {architecture} and format {format}"); - } - return (fallback.LoadBinary(bytes, architecture, format), fallback); - } - - // Check if primary supports this arch/format - if (_options.AutoFallbackOnUnsupported && !primaryPlugin.Capabilities.CanHandle(architecture, format)) - { - _logger.LogDebug( - "Primary plugin {Plugin} doesn't support {Arch}/{Format}, using fallback", - primaryPlugin.Capabilities.PluginId, architecture, format); - - var fallback = GetFallbackPlugin(primaryPlugin, architecture, format); - if (fallback is not null) - { - return (fallback.LoadBinary(bytes, architecture, format), fallback); - } - } - - // Use primary - return (primaryPlugin.LoadBinary(bytes, architecture, format), primaryPlugin); - } - - private IDisassemblyPlugin? GetPrimaryPlugin( - CpuArchitecture architecture, - BinaryFormat format, - string? preferredPluginId) - { - // Explicit preferred plugin - if (!string.IsNullOrEmpty(preferredPluginId)) - { - return GetPluginById(preferredPluginId); - } - - // Configured primary plugin - if (!string.IsNullOrEmpty(_options.PrimaryPluginId)) - { - return GetPluginById(_options.PrimaryPluginId); - } - - // Auto-select highest priority - return _registry.FindPlugin(architecture, format); - } - - private IDisassemblyPlugin? GetFallbackPlugin( - IDisassemblyPlugin? excludePlugin, - CpuArchitecture architecture, - BinaryFormat format) - { - // Explicit fallback plugin - if (!string.IsNullOrEmpty(_options.FallbackPluginId)) - { - var fallback = GetPluginById(_options.FallbackPluginId); - if (fallback?.Capabilities.CanHandle(architecture, format) == true) - { - return fallback; - } - } - - // Find any other plugin that supports this arch/format - return _registry.Plugins - .Where(p => p != excludePlugin) - .Where(p => p.Capabilities.CanHandle(architecture, format)) - .OrderByDescending(p => p.Capabilities.Priority) - .FirstOrDefault(); - } - - private IDisassemblyPlugin? GetPluginById(string? pluginId) - { - return string.IsNullOrEmpty(pluginId) ? 
null : _registry.GetPlugin(pluginId); - } - - private DisassemblyQualityResult AssessQuality( - IDisassemblyPlugin plugin, - ReadOnlySpan bytes, - CpuArchitecture architecture, - BinaryFormat format) - { - try - { - var binary = plugin.LoadBinary(bytes, architecture, format); - var codeRegions = plugin.GetCodeRegions(binary).ToImmutableArray(); - var symbols = plugin.GetSymbols(binary).ToImmutableArray(); - - // Assess quality by sampling disassembly - int totalInstructions = 0; - int decodedInstructions = 0; - int failedInstructions = 0; - - foreach (var region in codeRegions.Take(3)) // Sample up to 3 regions - { - var instructions = plugin.Disassemble(binary, region).Take(1000).ToList(); - totalInstructions += instructions.Count; - - foreach (var instr in instructions) - { - if (instr.Mnemonic.Equals("??", StringComparison.Ordinal) || - instr.Mnemonic.Equals("invalid", StringComparison.OrdinalIgnoreCase) || - instr.Mnemonic.Equals("db", StringComparison.OrdinalIgnoreCase)) - { - failedInstructions++; - } - else - { - decodedInstructions++; - } - } - } - - // Calculate confidence - var confidence = CalculateConfidence( - symbols.Length, - decodedInstructions, - failedInstructions, - codeRegions.Length); - - return new DisassemblyQualityResult - { - Binary = binary, - Plugin = plugin, - CodeRegions = codeRegions, - Symbols = symbols, - TotalInstructions = totalInstructions, - DecodedInstructions = decodedInstructions, - FailedInstructions = failedInstructions, - Confidence = confidence, - UsedFallback = false - }; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Plugin {Plugin} failed during quality assessment", plugin.Capabilities.PluginId); - - return new DisassemblyQualityResult - { - Binary = null!, - Plugin = plugin, - CodeRegions = [], - Symbols = [], - TotalInstructions = 0, - DecodedInstructions = 0, - FailedInstructions = 0, - Confidence = 0.0, - UsedFallback = false, - FallbackReason = $"Plugin failed: {ex.Message}" - }; - } - } - - private static double CalculateConfidence( - int symbolCount, - int decodedInstructions, - int failedInstructions, - int regionCount) - { - var totalInstructions = decodedInstructions + failedInstructions; - if (totalInstructions == 0) - { - return 0.0; - } - - // Decode success rate (weight: 0.5) - var decodeRate = (double)decodedInstructions / totalInstructions; - - // Symbol discovery (weight: 0.3) - var symbolScore = Math.Min(1.0, symbolCount / 10.0); - - // Region coverage (weight: 0.2) - var regionScore = Math.Min(1.0, regionCount / 5.0); - - return (decodeRate * 0.5) + (symbolScore * 0.3) + (regionScore * 0.2); - } - - private bool MeetsQualityThreshold(DisassemblyQualityResult result) - { - if (result.Confidence < _options.MinConfidenceThreshold) - { - return false; - } - - if (result.Symbols.Length < _options.MinFunctionCount) - { - return false; - } - - if (result.DecodeSuccessRate < _options.MinDecodeSuccessRate) - { - return false; - } - - return true; - } - - #region Format/Architecture Detection (copied from DisassemblyService) - - private static BinaryFormat DetectFormat(ReadOnlySpan bytes) - { - if (bytes.Length < 4) return BinaryFormat.Raw; - - // ELF magic - if (bytes[0] == 0x7F && bytes[1] == 'E' && bytes[2] == 'L' && bytes[3] == 'F') - return BinaryFormat.ELF; - - // PE magic - if (bytes[0] == 'M' && bytes[1] == 'Z') - return BinaryFormat.PE; - - // Mach-O magic - if ((bytes[0] == 0xFE && bytes[1] == 0xED && bytes[2] == 0xFA && (bytes[3] == 0xCE || bytes[3] == 0xCF)) || - (bytes[3] == 0xFE && bytes[2] == 0xED && bytes[1] 
== 0xFA && (bytes[0] == 0xCE || bytes[0] == 0xCF))) - return BinaryFormat.MachO; - - // WASM magic - if (bytes[0] == 0x00 && bytes[1] == 'a' && bytes[2] == 's' && bytes[3] == 'm') - return BinaryFormat.WASM; - - return BinaryFormat.Raw; - } - - private static CpuArchitecture DetectArchitecture(ReadOnlySpan bytes, BinaryFormat format) - { - return format switch - { - BinaryFormat.ELF when bytes.Length > 18 => DetectElfArchitecture(bytes), - BinaryFormat.PE when bytes.Length > 0x40 => DetectPeArchitecture(bytes), - BinaryFormat.MachO when bytes.Length > 8 => DetectMachOArchitecture(bytes), - _ => CpuArchitecture.X86_64 - }; - } - - private static CpuArchitecture DetectElfArchitecture(ReadOnlySpan bytes) - { - var machine = (ushort)(bytes[18] | (bytes[19] << 8)); - return machine switch - { - 0x03 => CpuArchitecture.X86, - 0x3E => CpuArchitecture.X86_64, - 0x28 => CpuArchitecture.ARM32, - 0xB7 => CpuArchitecture.ARM64, - 0x08 => CpuArchitecture.MIPS32, - 0xF3 => CpuArchitecture.RISCV64, - 0x14 => CpuArchitecture.PPC32, - 0x02 => CpuArchitecture.SPARC, - _ => bytes[4] == 2 ? CpuArchitecture.X86_64 : CpuArchitecture.X86 - }; - } - - private static CpuArchitecture DetectPeArchitecture(ReadOnlySpan bytes) - { - var peOffset = bytes[0x3C] | (bytes[0x3D] << 8) | (bytes[0x3E] << 16) | (bytes[0x3F] << 24); - if (peOffset < 0 || peOffset + 6 > bytes.Length) return CpuArchitecture.X86; - - var machine = (ushort)(bytes[peOffset + 4] | (bytes[peOffset + 5] << 8)); - return machine switch - { - 0x014c => CpuArchitecture.X86, - 0x8664 => CpuArchitecture.X86_64, - 0xaa64 => CpuArchitecture.ARM64, - 0x01c4 => CpuArchitecture.ARM32, - _ => CpuArchitecture.X86 - }; - } - - private static CpuArchitecture DetectMachOArchitecture(ReadOnlySpan bytes) - { - bool isBigEndian = bytes[0] == 0xFE; - uint cpuType = isBigEndian - ? (uint)((bytes[4] << 24) | (bytes[5] << 16) | (bytes[6] << 8) | bytes[7]) - : (uint)(bytes[4] | (bytes[5] << 8) | (bytes[6] << 16) | (bytes[7] << 24)); - - return cpuType switch - { - 0x00000007 => CpuArchitecture.X86, - 0x01000007 => CpuArchitecture.X86_64, - 0x0000000C => CpuArchitecture.ARM32, - 0x0100000C => CpuArchitecture.ARM64, - _ => CpuArchitecture.X86_64 - }; - } - - #endregion - - #endregion } diff --git a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/TASKS.md b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/TASKS.md index 09d613e3e..4a8f2d10a 100644 --- a/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/TASKS.md +++ b/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Disassembly/StellaOps.BinaryIndex.Disassembly.md. | +| REMED-05 | DONE | Split service/DI files, extracted helpers, removed service locator; checklist updated. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. 
| diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Differences.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Differences.cs new file mode 100644 index 000000000..75b178df5 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Differences.cs @@ -0,0 +1,55 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class AstComparisonEngineTests +{ + [Fact] + public void FindDifferences_DifferentOperators_FindsModification() + { + // Arrange + var code1 = @" +int calc(int a, int b) { + return a + b; +}"; + var code2 = @" +int calc(int a, int b) { + return a - b; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var differences = _engine.FindDifferences(ast1, ast2); + + // Assert + Assert.NotEmpty(differences); + Assert.Contains(differences, d => d.Type == DifferenceType.Modified); + } + + [Fact] + public void FindDifferences_AddedStatement_FindsAddition() + { + // Arrange + var code1 = @" +void foo() { + return; +}"; + var code2 = @" +void foo() { + int x = 1; + return; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var differences = _engine.FindDifferences(ast1, ast2); + + // Assert + Assert.NotEmpty(differences); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.EditDistance.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.EditDistance.cs new file mode 100644 index 000000000..bcdffbb2e --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.EditDistance.cs @@ -0,0 +1,67 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
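As a quick sanity check on the CalculateConfidence weighting introduced earlier in this diff (decode rate weighted 0.5, symbol score 0.3, region score 0.2, with saturation at 10 symbols and 5 regions), a small worked example with illustrative inputs:

// Illustrative numbers only: 950 of 1000 sampled instructions decoded,
// 4 symbols discovered, 2 code regions.
double decodeRate  = 950 / 1000.0;                  // 0.95
double symbolScore = Math.Min(1.0, 4 / 10.0);       // 0.40 (saturates at 10 symbols)
double regionScore = Math.Min(1.0, 2 / 5.0);        // 0.40 (saturates at 5 regions)

double confidence = (decodeRate * 0.5) + (symbolScore * 0.3) + (regionScore * 0.2);
// 0.475 + 0.12 + 0.08 = 0.675 -> just below the default MinConfidenceThreshold of 0.7,
// so the hybrid service would go on to try the fallback plugin.
Console.WriteLine($"confidence = {confidence:F3}");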
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class AstComparisonEngineTests +{ + [Fact] + public void ComputeEditDistance_IdenticalCode_ReturnsZeroOperations() + { + // Arrange + var code = @" +int foo() { + return 1; +}"; + var ast1 = _parser.Parse(code); + var ast2 = _parser.Parse(code); + + // Act + var distance = _engine.ComputeEditDistance(ast1, ast2); + + // Assert + Assert.Equal(0, distance.TotalOperations); + Assert.Equal(0m, distance.NormalizedDistance); + } + + [Fact] + public void ComputeEditDistance_DifferentCode_ReturnsNonZeroOperations() + { + // Arrange + var code1 = @" +int foo() { + return 1; +}"; + var code2 = @" +int foo() { + int x = 1; + return x + 1; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var distance = _engine.ComputeEditDistance(ast1, ast2); + + // Assert + Assert.True(distance.TotalOperations > 0); + } + + [Fact] + public void ComputeEditDistance_NormalizedDistance_IsBetween0And1() + { + // Arrange + var code1 = @"void a() { }"; + var code2 = @"void b() { int x = 1; int y = 2; return; }"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var distance = _engine.ComputeEditDistance(ast1, ast2); + + // Assert + Assert.InRange(distance.NormalizedDistance, 0m, 1m); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Equivalences.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Equivalences.cs new file mode 100644 index 000000000..18eb25a03 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.Equivalences.cs @@ -0,0 +1,54 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class AstComparisonEngineTests +{ + [Fact] + public void FindEquivalences_IdenticalSubtrees_FindsEquivalences() + { + // Arrange + var code1 = @" +int foo(int a) { + return a + 1; +}"; + var code2 = @" +int foo(int a) { + return a + 1; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var equivalences = _engine.FindEquivalences(ast1, ast2); + + // Assert + Assert.NotEmpty(equivalences); + Assert.Contains(equivalences, e => e.Type == EquivalenceType.Identical); + } + + [Fact] + public void FindEquivalences_RenamedVariables_DetectsRenaming() + { + // Arrange + var code1 = @" +int foo(int x) { + return x + 1; +}"; + var code2 = @" +int foo(int y) { + return y + 1; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var equivalences = _engine.FindEquivalences(ast1, ast2); + + // Assert + Assert.NotEmpty(equivalences); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.StructuralSimilarity.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.StructuralSimilarity.cs new file mode 100644 index 000000000..faf3734ce --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.StructuralSimilarity.cs @@ -0,0 +1,75 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
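Since the edit-distance tests above pin NormalizedDistance to the [0, 1] range, one natural consumer-side pattern is turning it into a similarity score and thresholding it. The sketch below does that; the 0.8 cut-off is illustrative and not a value taken from this codebase, and only members exercised by the tests (ComputeEditDistance, TotalOperations, NormalizedDistance) are used.

var parser = new DecompiledCodeParser();
var engine = new AstComparisonEngine();
var ast1 = parser.Parse("void a() { }");
var ast2 = parser.Parse("void b() { int x = 1; int y = 2; return; }");

var distance = engine.ComputeEditDistance(ast1, ast2);

// NormalizedDistance lies in [0, 1] per the tests, so 1 - distance works as a
// similarity score; the 0.8m threshold is illustrative only.
decimal similarity = 1m - distance.NormalizedDistance;
bool likelyRelated = similarity >= 0.8m;
Console.WriteLine($"ops={distance.TotalOperations}, similarity={similarity:F2}, related={likelyRelated}");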
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class AstComparisonEngineTests +{ + [Fact] + public void ComputeStructuralSimilarity_IdenticalCode_Returns1() + { + // Arrange + var code = @" +int add(int a, int b) { + return a + b; +}"; + var ast1 = _parser.Parse(code); + var ast2 = _parser.Parse(code); + + // Act + var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); + + // Assert + Assert.Equal(1.0m, similarity); + } + + [Fact] + public void ComputeStructuralSimilarity_DifferentCode_ReturnsLessThan1() + { + // Arrange - use structurally different code + var code1 = @" +int simple() { + return 1; +}"; + var code2 = @" +int complex(int a, int b, int c) { + if (a > 0) { + return b + c; + } + return a * b; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); + + // Assert + Assert.True(similarity < 1.0m); + } + + [Fact] + public void ComputeStructuralSimilarity_OptimizedVariant_DetectsSimilarity() + { + // Arrange - multiplication vs left shift (strength reduction) + var code1 = @" +int foo(int x) { + return x * 2; +}"; + var code2 = @" +int foo(int x) { + return x << 1; +}"; + var ast1 = _parser.Parse(code1); + var ast2 = _parser.Parse(code2); + + // Act + var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); + + // Assert + // Should have some similarity due to same overall structure + Assert.True(similarity > 0.3m); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.cs index 81ce3b24a..5e5cf5e77 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/AstComparisonEngineTests.cs @@ -1,229 +1,13 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. 
- using StellaOps.BinaryIndex.Decompiler; using Xunit; namespace StellaOps.BinaryIndex.Decompiler.Tests; [Trait("Category", "Unit")] -public sealed class AstComparisonEngineTests +public sealed partial class AstComparisonEngineTests { private readonly DecompiledCodeParser _parser = new(); private readonly AstComparisonEngine _engine = new(); - - [Fact] - public void ComputeStructuralSimilarity_IdenticalCode_Returns1() - { - // Arrange - var code = @" -int add(int a, int b) { - return a + b; -}"; - var ast1 = _parser.Parse(code); - var ast2 = _parser.Parse(code); - - // Act - var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); - - // Assert - Assert.Equal(1.0m, similarity); - } - - [Fact] - public void ComputeStructuralSimilarity_DifferentCode_ReturnsLessThan1() - { - // Arrange - use structurally different code - var code1 = @" -int simple() { - return 1; -}"; - var code2 = @" -int complex(int a, int b, int c) { - if (a > 0) { - return b + c; - } - return a * b; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); - - // Assert - Assert.True(similarity < 1.0m); - } - - [Fact] - public void ComputeEditDistance_IdenticalCode_ReturnsZeroOperations() - { - // Arrange - var code = @" -int foo() { - return 1; -}"; - var ast1 = _parser.Parse(code); - var ast2 = _parser.Parse(code); - - // Act - var distance = _engine.ComputeEditDistance(ast1, ast2); - - // Assert - Assert.Equal(0, distance.TotalOperations); - Assert.Equal(0m, distance.NormalizedDistance); - } - - [Fact] - public void ComputeEditDistance_DifferentCode_ReturnsNonZeroOperations() - { - // Arrange - var code1 = @" -int foo() { - return 1; -}"; - var code2 = @" -int foo() { - int x = 1; - return x + 1; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var distance = _engine.ComputeEditDistance(ast1, ast2); - - // Assert - Assert.True(distance.TotalOperations > 0); - } - - [Fact] - public void FindEquivalences_IdenticalSubtrees_FindsEquivalences() - { - // Arrange - var code1 = @" -int foo(int a) { - return a + 1; -}"; - var code2 = @" -int foo(int a) { - return a + 1; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var equivalences = _engine.FindEquivalences(ast1, ast2); - - // Assert - Assert.NotEmpty(equivalences); - Assert.Contains(equivalences, e => e.Type == EquivalenceType.Identical); - } - - [Fact] - public void FindEquivalences_RenamedVariables_DetectsRenaming() - { - // Arrange - var code1 = @" -int foo(int x) { - return x + 1; -}"; - var code2 = @" -int foo(int y) { - return y + 1; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var equivalences = _engine.FindEquivalences(ast1, ast2); - - // Assert - Assert.NotEmpty(equivalences); - } - - [Fact] - public void FindDifferences_DifferentOperators_FindsModification() - { - // Arrange - var code1 = @" -int calc(int a, int b) { - return a + b; -}"; - var code2 = @" -int calc(int a, int b) { - return a - b; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var differences = _engine.FindDifferences(ast1, ast2); - - // Assert - Assert.NotEmpty(differences); - Assert.Contains(differences, d => d.Type == DifferenceType.Modified); - } - - [Fact] - public void FindDifferences_AddedStatement_FindsAddition() - { - // Arrange - var code1 = @" -void foo() { - return; -}"; - var code2 = @" -void foo() { - int x = 1; - return; 
-}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var differences = _engine.FindDifferences(ast1, ast2); - - // Assert - Assert.NotEmpty(differences); - } - - [Fact] - public void ComputeStructuralSimilarity_OptimizedVariant_DetectsSimilarity() - { - // Arrange - multiplication vs left shift (strength reduction) - var code1 = @" -int foo(int x) { - return x * 2; -}"; - var code2 = @" -int foo(int x) { - return x << 1; -}"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var similarity = _engine.ComputeStructuralSimilarity(ast1, ast2); - - // Assert - // Should have some similarity due to same overall structure - Assert.True(similarity > 0.3m); - } - - [Fact] - public void ComputeEditDistance_NormalizedDistance_IsBetween0And1() - { - // Arrange - var code1 = @"void a() { }"; - var code2 = @"void b() { int x = 1; int y = 2; return; }"; - var ast1 = _parser.Parse(code1); - var ast2 = _parser.Parse(code2); - - // Act - var distance = _engine.ComputeEditDistance(ast1, ast2); - - // Assert - Assert.InRange(distance.NormalizedDistance, 0m, 1m); - } } diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Ast.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Ast.cs new file mode 100644 index 000000000..6b61aa809 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Ast.cs @@ -0,0 +1,29 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class CodeNormalizerTests +{ + [Fact] + public void NormalizeAst_WithParser_NormalizesAstNodes() + { + // Arrange + var parser = new DecompiledCodeParser(); + var code = @" +int foo(int myVar) { + return myVar + 1; +}"; + var ast = parser.Parse(code); + var options = new NormalizationOptions { NormalizeVariables = true }; + + // Act + var normalizedAst = _normalizer.NormalizeAst(ast, options); + + // Assert + Assert.NotNull(normalizedAst); + Assert.Equal(ast.NodeCount, normalizedAst.NodeCount); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Hash.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Hash.cs new file mode 100644 index 000000000..89e41b66c --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Hash.cs @@ -0,0 +1,82 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class CodeNormalizerTests +{ + [Fact] + public void ComputeCanonicalHash_SameCode_ReturnsSameHash() + { + // Arrange + var code1 = "int foo() { return 1; }"; + var code2 = "int foo() { return 1; }"; + + // Act + var hash1 = _normalizer.ComputeCanonicalHash(code1); + var hash2 = _normalizer.ComputeCanonicalHash(code2); + + // Assert + Assert.Equal(hash1, hash2); + } + + [Fact] + public void ComputeCanonicalHash_DifferentWhitespace_ReturnsSameHash() + { + // Arrange + var code1 = "int foo(){return 1;}"; + var code2 = "int foo() { return 1; }"; + + // Act + var hash1 = _normalizer.ComputeCanonicalHash(code1); + var hash2 = _normalizer.ComputeCanonicalHash(code2); + + // Assert + Assert.Equal(hash1, hash2); + } + + [Fact] + public void ComputeCanonicalHash_DifferentVariableNames_ReturnsSameHash() + { + // Arrange + var code1 = "int foo(int x) { return x + 1; }"; + var code2 = "int foo(int y) { return y + 1; }"; + + // Act + var hash1 = _normalizer.ComputeCanonicalHash(code1); + var hash2 = _normalizer.ComputeCanonicalHash(code2); + + // Assert + Assert.Equal(hash1, hash2); + } + + [Fact] + public void ComputeCanonicalHash_DifferentLogic_ReturnsDifferentHash() + { + // Arrange + var code1 = "int foo(int x) { return x + 1; }"; + var code2 = "int foo(int x) { return x - 1; }"; + + // Act + var hash1 = _normalizer.ComputeCanonicalHash(code1); + var hash2 = _normalizer.ComputeCanonicalHash(code2); + + // Assert + Assert.NotEqual(hash1, hash2); + } + + [Fact] + public void ComputeCanonicalHash_Returns32Bytes() + { + // Arrange + var code = "int foo() { return 1; }"; + + // Act + var hash = _normalizer.ComputeCanonicalHash(code); + + // Assert (SHA256 = 32 bytes) + Assert.Equal(32, hash.Length); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Functions.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Functions.cs new file mode 100644 index 000000000..4f692ce09 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Functions.cs @@ -0,0 +1,44 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
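Because ComputeCanonicalHash is whitespace- and rename-invariant per the tests above, one natural use is deduplicating decompiled functions by their canonical hash. The dictionary-based grouping below is a sketch of that idea, not an API from this repository; it relies only on the parameterless CodeNormalizer constructor and the 32-byte SHA-256 hash the tests assert.

var normalizer = new CodeNormalizer();
var seen = new Dictionary<string, string>();   // canonical hash (hex) -> first function seen

foreach (var (name, code) in new[]
{
    ("foo_v1", "int foo(int x) { return x + 1; }"),
    ("foo_v2", "int foo(int y) {\n    return y + 1;\n}"),   // rename + whitespace changes only
})
{
    byte[] hash = normalizer.ComputeCanonicalHash(code);    // 32-byte SHA-256 per the tests
    string key = Convert.ToHexString(hash);

    if (seen.TryGetValue(key, out var original))
        Console.WriteLine($"{name} is canonically identical to {original}");
    else
        seen[key] = name;
}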
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class CodeNormalizerTests +{ + [Fact] + public void Normalize_PreservesStandardLibraryFunctions() + { + // Arrange + var code = "printf(\"hello\"); malloc(100); free(ptr);"; + var options = new NormalizationOptions { NormalizeFunctionCalls = true }; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + Assert.Contains("printf", normalized); + Assert.Contains("malloc", normalized); + Assert.Contains("free", normalized); + } + + [Fact] + public void Normalize_RemovesComments() + { + // Arrange + var code = @" +int foo() { + // This is a comment + return 1; /* inline comment */ +}"; + var options = NormalizationOptions.Default; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + Assert.DoesNotContain("//", normalized); + Assert.DoesNotContain("/*", normalized); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Text.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Text.cs new file mode 100644 index 000000000..a3f695373 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.Normalize.Text.cs @@ -0,0 +1,69 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class CodeNormalizerTests +{ + [Fact] + public void Normalize_WithWhitespace_NormalizesWhitespace() + { + // Arrange + var code = "int x = 1;"; + var options = new NormalizationOptions { NormalizeWhitespace = true }; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + Assert.DoesNotContain(" ", normalized); + } + + [Fact] + public void Normalize_WithVariables_NormalizesVariableNames() + { + // Arrange + var code = "int myVar = 1; int otherVar = myVar;"; + var options = new NormalizationOptions { NormalizeVariables = true }; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + // Original variable names should be replaced with canonical names + Assert.DoesNotContain("myVar", normalized); + Assert.DoesNotContain("otherVar", normalized); + Assert.Contains("var_", normalized); + } + + [Fact] + public void Normalize_WithConstants_NormalizesLargeNumbers() + { + // Arrange + var code = "int x = 1234567890;"; + var options = new NormalizationOptions { NormalizeConstants = true }; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + Assert.DoesNotContain("1234567890", normalized); + } + + [Fact] + public void Normalize_PreservesKeywords_DoesNotRenameKeywords() + { + // Arrange + var code = "int foo() { return 1; }"; + var options = new NormalizationOptions { NormalizeVariables = true }; + + // Act + var normalized = _normalizer.Normalize(code, options); + + // Assert + Assert.Contains("return", normalized); + Assert.Contains("int", normalized); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.cs index 87f824d5a..dc252bc77 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.cs +++ 
b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/CodeNormalizerTests.cs @@ -1,201 +1,12 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - using StellaOps.BinaryIndex.Decompiler; using Xunit; namespace StellaOps.BinaryIndex.Decompiler.Tests; [Trait("Category", "Unit")] -public sealed class CodeNormalizerTests +public sealed partial class CodeNormalizerTests { private readonly CodeNormalizer _normalizer = new(); - - [Fact] - public void Normalize_WithWhitespace_NormalizesWhitespace() - { - // Arrange - var code = "int x = 1;"; - var options = new NormalizationOptions { NormalizeWhitespace = true }; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - Assert.DoesNotContain(" ", normalized); - } - - [Fact] - public void Normalize_WithVariables_NormalizesVariableNames() - { - // Arrange - var code = "int myVar = 1; int otherVar = myVar;"; - var options = new NormalizationOptions { NormalizeVariables = true }; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - // Original variable names should be replaced with canonical names - Assert.DoesNotContain("myVar", normalized); - Assert.DoesNotContain("otherVar", normalized); - Assert.Contains("var_", normalized); - } - - [Fact] - public void Normalize_WithConstants_NormalizesLargeNumbers() - { - // Arrange - var code = "int x = 1234567890;"; - var options = new NormalizationOptions { NormalizeConstants = true }; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - Assert.DoesNotContain("1234567890", normalized); - } - - [Fact] - public void Normalize_PreservesKeywords_DoesNotRenameKeywords() - { - // Arrange - var code = "int foo() { return 1; }"; - var options = new NormalizationOptions { NormalizeVariables = true }; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - Assert.Contains("return", normalized); - Assert.Contains("int", normalized); - } - - [Fact] - public void Normalize_PreservesStandardLibraryFunctions() - { - // Arrange - var code = "printf(\"hello\"); malloc(100); free(ptr);"; - var options = new NormalizationOptions { NormalizeFunctionCalls = true }; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - Assert.Contains("printf", normalized); - Assert.Contains("malloc", normalized); - Assert.Contains("free", normalized); - } - - [Fact] - public void ComputeCanonicalHash_SameCode_ReturnsSameHash() - { - // Arrange - var code1 = "int foo() { return 1; }"; - var code2 = "int foo() { return 1; }"; - - // Act - var hash1 = _normalizer.ComputeCanonicalHash(code1); - var hash2 = _normalizer.ComputeCanonicalHash(code2); - - // Assert - Assert.Equal(hash1, hash2); - } - - [Fact] - public void ComputeCanonicalHash_DifferentWhitespace_ReturnsSameHash() - { - // Arrange - var code1 = "int foo(){return 1;}"; - var code2 = "int foo() { return 1; }"; - - // Act - var hash1 = _normalizer.ComputeCanonicalHash(code1); - var hash2 = _normalizer.ComputeCanonicalHash(code2); - - // Assert - Assert.Equal(hash1, hash2); - } - - [Fact] - public void ComputeCanonicalHash_DifferentVariableNames_ReturnsSameHash() - { - // Arrange - var code1 = "int foo(int x) { return x + 1; }"; - var code2 = "int foo(int y) { return y + 1; }"; - - // Act - var hash1 = _normalizer.ComputeCanonicalHash(code1); - var hash2 = _normalizer.ComputeCanonicalHash(code2); - - // Assert - Assert.Equal(hash1, hash2); - } - - [Fact] - public void 
ComputeCanonicalHash_DifferentLogic_ReturnsDifferentHash() - { - // Arrange - var code1 = "int foo(int x) { return x + 1; }"; - var code2 = "int foo(int x) { return x - 1; }"; - - // Act - var hash1 = _normalizer.ComputeCanonicalHash(code1); - var hash2 = _normalizer.ComputeCanonicalHash(code2); - - // Assert - Assert.NotEqual(hash1, hash2); - } - - [Fact] - public void ComputeCanonicalHash_Returns32Bytes() - { - // Arrange - var code = "int foo() { return 1; }"; - - // Act - var hash = _normalizer.ComputeCanonicalHash(code); - - // Assert (SHA256 = 32 bytes) - Assert.Equal(32, hash.Length); - } - - [Fact] - public void Normalize_RemovesComments() - { - // Arrange - var code = @" -int foo() { - // This is a comment - return 1; /* inline comment */ -}"; - var options = NormalizationOptions.Default; - - // Act - var normalized = _normalizer.Normalize(code, options); - - // Assert - Assert.DoesNotContain("//", normalized); - Assert.DoesNotContain("/*", normalized); - } - - [Fact] - public void NormalizeAst_WithParser_NormalizesAstNodes() - { - // Arrange - var parser = new DecompiledCodeParser(); - var code = @" -int foo(int myVar) { - return myVar + 1; -}"; - var ast = parser.Parse(code); - var options = new NormalizationOptions { NormalizeVariables = true }; - - // Act - var normalizedAst = _normalizer.NormalizeAst(ast, options); - - // Assert - Assert.NotNull(normalizedAst); - Assert.Equal(ast.NodeCount, normalizedAst.NodeCount); - } } diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Extraction.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Extraction.cs new file mode 100644 index 000000000..ebe08978b --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Extraction.cs @@ -0,0 +1,68 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class DecompiledCodeParserTests +{ + [Fact] + public void ExtractVariables_FunctionWithLocals_ReturnsVariables() + { + // Arrange + var code = @" +int compute(int x) { + int local1 = x + 1; + int local2 = local1 * 2; + return local2; +}"; + + // Act + var variables = _parser.ExtractVariables(code); + + // Assert + Assert.NotEmpty(variables); + } + + [Fact] + public void ExtractVariables_GhidraLocalWithHexOffset_ParsesStackOffset() + { + // Arrange + var code = @" +int foo(int param_1) { + int local_10; + local_10 = param_1 + 1; + return local_10; +}"; + + // Act + var variables = _parser.ExtractVariables(code); + + // Assert + var local = Assert.Single(variables, v => v.Name == "local_10"); + Assert.Equal(-16, local.StackOffset); + Assert.False(local.IsParameter); + Assert.Null(local.ParameterIndex); + } + + [Fact] + public void ExtractCalledFunctions_CodeWithCalls_ReturnsFunctionNames() + { + // Arrange + var code = @" +void process() { + init(); + compute(); + cleanup(); +}"; + + // Act + var functions = _parser.ExtractCalledFunctions(code); + + // Assert + Assert.Contains("init", functions); + Assert.Contains("compute", functions); + Assert.Contains("cleanup", functions); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Basics.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Basics.cs new file mode 100644 index 000000000..f9f4c7f2c --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Basics.cs @@ -0,0 +1,59 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class DecompiledCodeParserTests +{ + [Fact] + public void Parse_SimpleFunction_ReturnsValidAst() + { + // Arrange + var code = @" +void foo(int x) { + return x; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.NotNull(ast.Root); + Assert.True(ast.NodeCount > 0); + Assert.True(ast.Depth > 0); + } + + [Fact] + public void Parse_EmptyFunction_ReturnsValidAst() + { + // Arrange + var code = @"void empty() { }"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.NotNull(ast.Root); + } + + [Fact] + public void Parse_FunctionWithCall_ParsesFunctionCall() + { + // Arrange + var code = @" +void caller() { + printf(""hello""); +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount > 0); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.ControlFlow.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.ControlFlow.cs new file mode 100644 index 000000000..0b99a89b7 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.ControlFlow.cs @@ -0,0 +1,69 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
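The stack-offset assertion in the extraction tests above (local_10 maps to StackOffset -16) reflects Ghidra's naming convention, where the hex suffix is the byte offset below the frame base. The helper below is a stand-alone illustration of that mapping only; DecompiledCodeParser's actual implementation may derive the offset differently.

// Illustration of the naming convention only, not the parser's real logic.
static int? StackOffsetFromGhidraName(string name)
{
    const string prefix = "local_";
    if (!name.StartsWith(prefix, StringComparison.Ordinal))
        return null;

    // The suffix is a hex byte offset below the frame base, hence the negation.
    return int.TryParse(name.AsSpan(prefix.Length),
                        System.Globalization.NumberStyles.HexNumber,
                        System.Globalization.CultureInfo.InvariantCulture,
                        out var offset)
        ? -offset
        : null;
}

// StackOffsetFromGhidraName("local_10") == -16, matching the test expectation above.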
+using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class DecompiledCodeParserTests +{ + [Fact] + public void Parse_FunctionWithIfStatement_ParsesControlFlow() + { + // Arrange + var code = @" +int check(int x) { + if (x > 0) { + return 1; + } + return 0; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount >= 3); // Function, if, returns + } + + [Fact] + public void Parse_FunctionWithLoop_ParsesWhileLoop() + { + // Arrange + var code = @" +void loop(int n) { + while (n > 0) { + n = n - 1; + } +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount > 0); + } + + [Fact] + public void Parse_FunctionWithForLoop_ParsesForLoop() + { + // Arrange + var code = @" +int sum(int n) { + int total = 0; + for (int i = 0; i < n; i = i + 1) { + total = total + i; + } + return total; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount > 0); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Expressions.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Expressions.cs new file mode 100644 index 000000000..46c0a6e3d --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Expressions.cs @@ -0,0 +1,58 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class DecompiledCodeParserTests +{ + [Fact] + public void Parse_BinaryOperations_ParsesOperators() + { + // Arrange + var code = @" +int math(int a, int b) { + return a + b * 2; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount > 0); + } + + [Fact] + public void Parse_PointerDereference_ParsesDeref() + { + // Arrange + var code = @" +int read(int *ptr) { + return *ptr; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + } + + [Fact] + public void Parse_ArrayAccess_ParsesIndexing() + { + // Arrange + var code = @" +int get(int *arr, int idx) { + return arr[idx]; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Ghidra.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Ghidra.cs new file mode 100644 index 000000000..52bc634dc --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.Parse.Ghidra.cs @@ -0,0 +1,28 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using StellaOps.BinaryIndex.Decompiler; +using Xunit; + +namespace StellaOps.BinaryIndex.Decompiler.Tests; + +public sealed partial class DecompiledCodeParserTests +{ + [Fact] + public void Parse_GhidraStyleCode_HandlesAutoGeneratedNames() + { + // Arrange - Ghidra often generates names like FUN_00401000, local_c, etc. 
+ var code = @" +undefined8 FUN_00401000(undefined8 param_1, int param_2) { + int local_c; + local_c = param_2 + 1; + return param_1; +}"; + + // Act + var ast = _parser.Parse(code); + + // Assert + Assert.NotNull(ast); + Assert.True(ast.NodeCount > 0); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.cs index a2840506f..3d5fbe7ba 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/DecompiledCodeParserTests.cs @@ -1,229 +1,12 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - using StellaOps.BinaryIndex.Decompiler; using Xunit; namespace StellaOps.BinaryIndex.Decompiler.Tests; [Trait("Category", "Unit")] -public sealed class DecompiledCodeParserTests +public sealed partial class DecompiledCodeParserTests { private readonly DecompiledCodeParser _parser = new(); - - [Fact] - public void Parse_SimpleFunction_ReturnsValidAst() - { - // Arrange - var code = @" -void foo(int x) { - return x; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.NotNull(ast.Root); - Assert.True(ast.NodeCount > 0); - Assert.True(ast.Depth > 0); - } - - [Fact] - public void Parse_FunctionWithIfStatement_ParsesControlFlow() - { - // Arrange - var code = @" -int check(int x) { - if (x > 0) { - return 1; - } - return 0; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount >= 3); // Function, if, returns - } - - [Fact] - public void Parse_FunctionWithLoop_ParsesWhileLoop() - { - // Arrange - var code = @" -void loop(int n) { - while (n > 0) { - n = n - 1; - } -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount > 0); - } - - [Fact] - public void Parse_FunctionWithForLoop_ParsesForLoop() - { - // Arrange - var code = @" -int sum(int n) { - int total = 0; - for (int i = 0; i < n; i = i + 1) { - total = total + i; - } - return total; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount > 0); - } - - [Fact] - public void Parse_FunctionWithCall_ParsesFunctionCall() - { - // Arrange - var code = @" -void caller() { - printf(""hello""); -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount > 0); - } - - [Fact] - public void ExtractVariables_FunctionWithLocals_ReturnsVariables() - { - // Arrange - var code = @" -int compute(int x) { - int local1 = x + 1; - int local2 = local1 * 2; - return local2; -}"; - - // Act - var variables = _parser.ExtractVariables(code); - - // Assert - Assert.NotEmpty(variables); - } - - [Fact] - public void ExtractCalledFunctions_CodeWithCalls_ReturnsFunctionNames() - { - // Arrange - var code = @" -void process() { - init(); - compute(); - cleanup(); -}"; - - // Act - var functions = _parser.ExtractCalledFunctions(code); - - // Assert - Assert.Contains("init", functions); - Assert.Contains("compute", functions); - Assert.Contains("cleanup", functions); - } - - [Fact] - public void Parse_EmptyFunction_ReturnsValidAst() - { - // Arrange - var code = @"void empty() { }"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.NotNull(ast.Root); - } - - 
[Fact] - public void Parse_BinaryOperations_ParsesOperators() - { - // Arrange - var code = @" -int math(int a, int b) { - return a + b * 2; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount > 0); - } - - [Fact] - public void Parse_PointerDereference_ParsesDeref() - { - // Arrange - var code = @" -int read(int *ptr) { - return *ptr; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - } - - [Fact] - public void Parse_ArrayAccess_ParsesIndexing() - { - // Arrange - var code = @" -int get(int *arr, int idx) { - return arr[idx]; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - } - - [Fact] - public void Parse_GhidraStyleCode_HandlesAutoGeneratedNames() - { - // Arrange - Ghidra often generates names like FUN_00401000, local_c, etc. - var code = @" -undefined8 FUN_00401000(undefined8 param_1, int param_2) { - int local_c; - local_c = param_2 + 1; - return param_1; -}"; - - // Act - var ast = _parser.Parse(code); - - // Assert - Assert.NotNull(ast); - Assert.True(ast.NodeCount > 0); - } } diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/TASKS.md b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/TASKS.md index eb96626d2..181c45970 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/TASKS.md +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/StellaOps.BinaryIndex.Decompiler.Tests.md. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Decompiler.Tests/StellaOps.BinaryIndex.Decompiler.Tests.md. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LifterPoolTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LifterPoolTests.cs new file mode 100644 index 000000000..339ab40b3 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LifterPoolTests.cs @@ -0,0 +1,56 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using B2R2; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.BinaryIndex.Disassembly.B2R2; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +[Trait("Category", "Unit")] +public sealed class B2R2LifterPoolTests +{ + [Fact] + public void WarmPool_PopulatesStats() + { + var options = Options.Create(new B2R2LifterPoolOptions + { + EnableWarmPreload = true, + WarmPreloadIsas = ["intel-64"] + }); + + using var pool = new B2R2LifterPool(NullLogger.Instance, options); + + pool.WarmPool(); + var stats = pool.GetStats(); + + stats.IsWarm.Should().BeTrue(); + stats.IsaStats.Should().ContainKey("intel-64"); + stats.IsaStats["intel-64"].PooledCount.Should().BeGreaterThan(0); + } + + [Fact] + public void Acquire_Return_UpdatesStats() + { + var options = Options.Create(new B2R2LifterPoolOptions + { + EnableWarmPreload = false, + MaxPoolSizePerIsa = 1 + }); + + using var pool = new B2R2LifterPool(NullLogger.Instance, options); + var isa = new ISA(Architecture.Intel, WordSize.Bit64); + + using (pool.Acquire(isa)) + { + var activeStats = pool.GetStats(); + activeStats.IsaStats["intel-64"].ActiveCount.Should().Be(1); + } + + var finalStats = pool.GetStats(); + finalStats.IsaStats["intel-64"].ActiveCount.Should().Be(0); + finalStats.IsaStats["intel-64"].PooledCount.Should().BeGreaterThan(0); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LowUirLiftingServiceTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LowUirLiftingServiceTests.cs new file mode 100644 index 000000000..fa827e78a --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2LowUirLiftingServiceTests.cs @@ -0,0 +1,21 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.BinaryIndex.Disassembly.B2R2; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +[Trait("Category", "Unit")] +public sealed class B2R2LowUirLiftingServiceTests +{ + [Fact] + public void SupportsArchitecture_ReportsSupportedSet() + { + var service = new B2R2LowUirLiftingService(NullLogger.Instance); + + service.SupportsArchitecture(CpuArchitecture.ARM64).Should().BeTrue(); + service.SupportsArchitecture(CpuArchitecture.Unknown).Should().BeFalse(); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.Tests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.Tests.cs new file mode 100644 index 000000000..42e6afb68 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.Tests.cs @@ -0,0 +1,64 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.BinaryIndex.Disassembly.Iced; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class B2R2PluginTests +{ + [Fact] + public void LoadBinary_LoadsRawX64Binary() + { + // Arrange + var plugin = CreatePlugin(); + + // Act + var binary = plugin.LoadBinary(_simpleX64Code, CpuArchitecture.X86_64); + + // Assert + binary.Should().NotBeNull(); + binary.Architecture.Should().Be(CpuArchitecture.X86_64); + binary.Bitness.Should().Be(64); + } + + [Fact] + public void Capabilities_SupportsMultipleArchitectures() + { + // Arrange + var plugin = CreatePlugin(); + + // Assert + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.X86); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.X86_64); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.ARM32); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.ARM64); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.MIPS32); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.MIPS64); + plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.RISCV64); + } + + [Fact] + public void Capabilities_SupportsLifting() + { + // Arrange + var plugin = CreatePlugin(); + + // Assert + plugin.Capabilities.SupportsLifting.Should().BeTrue(); + plugin.Capabilities.SupportsCfgRecovery.Should().BeTrue(); + } + + [Fact] + public void Capabilities_HasLowerPriorityThanIced() + { + // Arrange + var b2r2Plugin = CreatePlugin(); + var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); + + // Assert - Iced should have higher priority for x86/x64 + icedPlugin.Capabilities.Priority.Should().BeGreaterThan(b2r2Plugin.Capabilities.Priority); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.cs index 2b558d1c8..52a02343c 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/B2R2PluginTests.cs @@ -1,7 +1,5 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; using StellaOps.BinaryIndex.Disassembly.B2R2; using Xunit; @@ -12,71 +10,18 @@ namespace StellaOps.BinaryIndex.Disassembly.Tests; /// Tests for the B2R2 disassembly plugin. 
/// [Trait("Category", "Integration")] -public sealed class B2R2PluginTests +public sealed partial class B2R2PluginTests { // Simple x86-64 ELF header (minimal valid) - private static readonly byte[] s_minimalElf64Header = CreateMinimalElf64(); + private static readonly byte[] _minimalElf64Header = CreateMinimalElf64(); // Simple x86-64 instructions: mov rax, 0x1234; ret - private static readonly byte[] s_simpleX64Code = + private static readonly byte[] _simpleX64Code = [ 0x48, 0xC7, 0xC0, 0x34, 0x12, 0x00, 0x00, // mov rax, 0x1234 0xC3 // ret ]; - [Fact] - public void LoadBinary_LoadsRawX64Binary() - { - // Arrange - var plugin = CreatePlugin(); - - // Act - var binary = plugin.LoadBinary(s_simpleX64Code, CpuArchitecture.X86_64); - - // Assert - binary.Should().NotBeNull(); - binary.Architecture.Should().Be(CpuArchitecture.X86_64); - binary.Bitness.Should().Be(64); - } - - [Fact] - public void Capabilities_SupportsMultipleArchitectures() - { - // Arrange - var plugin = CreatePlugin(); - - // Assert - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.X86); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.X86_64); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.ARM32); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.ARM64); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.MIPS32); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.MIPS64); - plugin.Capabilities.SupportedArchitectures.Should().Contain(CpuArchitecture.RISCV64); - } - - [Fact] - public void Capabilities_SupportsLifting() - { - // Arrange - var plugin = CreatePlugin(); - - // Assert - plugin.Capabilities.SupportsLifting.Should().BeTrue(); - plugin.Capabilities.SupportsCfgRecovery.Should().BeTrue(); - } - - [Fact] - public void Capabilities_HasLowerPriorityThanIced() - { - // Arrange - var b2r2Plugin = CreatePlugin(); - var icedPlugin = new Iced.IcedDisassemblyPlugin(NullLogger.Instance); - - // Assert - Iced should have higher priority for x86/x64 - icedPlugin.Capabilities.Priority.Should().BeGreaterThan(b2r2Plugin.Capabilities.Priority); - } - private static B2R2DisassemblyPlugin CreatePlugin() { return new B2R2DisassemblyPlugin(NullLogger.Instance); diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.Helpers.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.Helpers.cs new file mode 100644 index 000000000..83ee59247 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.Helpers.cs @@ -0,0 +1,54 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.BinaryIndex.Disassembly.B2R2; +using StellaOps.BinaryIndex.Disassembly.Iced; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class DisassemblyServiceTests +{ + private static DisassemblyService CreateService(string? 
preferredPluginId = null) + { + var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); + var b2r2Plugin = new B2R2DisassemblyPlugin(NullLogger.Instance); + + var registry = new DisassemblyPluginRegistry( + [icedPlugin, b2r2Plugin], + NullLogger.Instance); + + var options = Options.Create(new DisassemblyOptions + { + PreferredPluginId = preferredPluginId + }); + + return new DisassemblyService( + registry, + options, + NullLogger.Instance); + } + + private static DisassemblyService CreateServiceWithArchPreference(CpuArchitecture arch, string pluginId) + { + var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); + var b2r2Plugin = new B2R2DisassemblyPlugin(NullLogger.Instance); + + var registry = new DisassemblyPluginRegistry( + [icedPlugin, b2r2Plugin], + NullLogger.Instance); + + var options = Options.Create(new DisassemblyOptions + { + ArchitecturePreferences = new Dictionary<string, string> + { + [arch.ToString()] = pluginId + } + }); + + return new DisassemblyService( + registry, + options, + NullLogger.Instance); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.HybridRegistration.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.HybridRegistration.cs new file mode 100644 index 000000000..2962bd7bb --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.HybridRegistration.cs @@ -0,0 +1,37 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using FluentAssertions; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class DisassemblyServiceTests +{ + [Fact] + public void DependencyInjection_RegistersHybridService() + { + var services = new ServiceCollection(); + services.AddLogging(); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + [$"{DisassemblyOptions.SectionName}:MaxInstructionsPerRegion"] = "1000", + [$"{HybridDisassemblyOptions.SectionName}:EnableFallback"] = "true" + }) + .Build(); + + services.AddHybridDisassemblyServices(configuration); + + var serviceDescriptor = services.Single( + descriptor => descriptor.ServiceType == typeof(IDisassemblyService)); + serviceDescriptor.ImplementationType.Should().Be(typeof(HybridDisassemblyService)); + serviceDescriptor.ImplementationFactory.Should().BeNull(); + + var registryDescriptor = services.Single( + descriptor => descriptor.ServiceType == typeof(IDisassemblyPluginRegistry)); + registryDescriptor.ImplementationType.Should().Be(typeof(DisassemblyPluginRegistry)); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.cs index d0b066380..6675de679 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/DisassemblyServiceTests.cs @@ -3,8 +3,6 @@ using FluentAssertions; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.Logging.Abstractions; using StellaOps.BinaryIndex.Disassembly.B2R2; using StellaOps.BinaryIndex.Disassembly.Iced; using Xunit; @@ -15,10 +13,10 @@ namespace StellaOps.BinaryIndex.Disassembly.Tests; /// Tests
for the disassembly service facade. /// [Trait("Category", "Unit")] -public sealed class DisassemblyServiceTests +public sealed partial class DisassemblyServiceTests { // Simple x86-64 instructions - private static readonly byte[] s_x64Code = + private static readonly byte[] _x64Code = [ 0x48, 0xC7, 0xC0, 0x34, 0x12, 0x00, 0x00, // mov rax, 0x1234 0xC3 // ret @@ -31,7 +29,7 @@ public sealed class DisassemblyServiceTests var service = CreateService(); // Act - var (binary, plugin) = service.LoadBinary(s_x64Code); + var (binary, plugin) = service.LoadBinary(_x64Code); // Assert plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.iced"); @@ -45,7 +43,7 @@ public sealed class DisassemblyServiceTests var service = CreateService(preferredPluginId: "stellaops.disasm.b2r2"); // Act - var (binary, plugin) = service.LoadBinary(s_x64Code); + var (binary, plugin) = service.LoadBinary(_x64Code); // Assert plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); @@ -92,59 +90,11 @@ public sealed class DisassemblyServiceTests services.AddIcedDisassemblyPlugin(); services.AddB2R2DisassemblyPlugin(); - var provider = services.BuildServiceProvider(); - // Act - var disassemblyService = provider.GetService(); - var registry = provider.GetService(); - var plugins = provider.GetServices().ToList(); - - // Assert - disassemblyService.Should().NotBeNull(); - registry.Should().NotBeNull(); - plugins.Should().HaveCount(2); - } - - private static DisassemblyService CreateService(string? preferredPluginId = null) - { - var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); - var b2r2Plugin = new B2R2DisassemblyPlugin(NullLogger.Instance); - - var registry = new DisassemblyPluginRegistry( - [icedPlugin, b2r2Plugin], - NullLogger.Instance); - - var options = Options.Create(new DisassemblyOptions - { - PreferredPluginId = preferredPluginId - }); - - return new DisassemblyService( - registry, - options, - NullLogger.Instance); - } - - private static DisassemblyService CreateServiceWithArchPreference(CpuArchitecture arch, string pluginId) - { - var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); - var b2r2Plugin = new B2R2DisassemblyPlugin(NullLogger.Instance); - - var registry = new DisassemblyPluginRegistry( - [icedPlugin, b2r2Plugin], - NullLogger.Instance); - - var options = Options.Create(new DisassemblyOptions - { - ArchitecturePreferences = new Dictionary - { - [arch.ToString()] = pluginId - } - }); - - return new DisassemblyService( - registry, - options, - NullLogger.Instance); + services.Should().ContainSingle(descriptor => descriptor.ServiceType == typeof(IDisassemblyService)); + services.Should().ContainSingle(descriptor => descriptor.ServiceType == typeof(IDisassemblyPluginRegistry)); + services.Where(descriptor => descriptor.ServiceType == typeof(IDisassemblyPlugin)) + .Should() + .HaveCount(2); } } diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ArchitectureFallbacks.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ArchitectureFallbacks.cs new file mode 100644 index 000000000..57d1adb56 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ArchitectureFallbacks.cs @@ -0,0 +1,85 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinary_B2R2UnsupportedArchitecture_FallsBackToGhidra() + { + // Arrange - B2R2 doesn't support SPARC, Ghidra does + var b2r2Binary = CreateBinaryInfo(CpuArchitecture.SPARC); + var b2r2Plugin = new StubDisassemblyPlugin( + "stellaops.disasm.b2r2", + "B2R2", + 100, + b2r2Binary, + CreateMockCodeRegions(3), + CreateMockSymbols(10), + CreateMockInstructions(950, 50), + supportedArchs: new[] { CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64 }); + + var ghidraBinary = CreateBinaryInfo(CpuArchitecture.SPARC); + var ghidraPlugin = new StubDisassemblyPlugin( + "stellaops.disasm.ghidra", + "Ghidra", + 50, + ghidraBinary, + CreateMockCodeRegions(3), + CreateMockSymbols(15), + CreateMockInstructions(950, 50), + supportedArchs: new[] { CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64, CpuArchitecture.SPARC }); + + var registry = CreateMockRegistry(new List<IDisassemblyPlugin> { b2r2Plugin, ghidraPlugin }); + var options = Options.Create(new HybridDisassemblyOptions + { + PrimaryPluginId = "stellaops.disasm.b2r2", + FallbackPluginId = "stellaops.disasm.ghidra", + AutoFallbackOnUnsupported = true, + EnableFallback = true + }); + + var service = new HybridDisassemblyService( + registry, + options, + NullLogger.Instance); + + // Create a fake SPARC binary + var sparcBinary = CreateElfHeader(CpuArchitecture.SPARC); + + // Act + var (binary, plugin) = service.LoadBinary(sparcBinary.AsSpan()); + + // Assert + binary.Should().NotBeNull(); + plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + binary.Architecture.Should().Be(CpuArchitecture.SPARC); + } + + [Fact] + public void LoadBinaryWithQuality_ARM64Binary_B2R2HighConfidence_UsesB2R2() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.95, + b2r2FunctionCount: 20, + b2r2DecodeSuccessRate: 0.98, + architecture: CpuArchitecture.ARM64); + + // Act + var result = service.LoadBinaryWithQuality(_elfArm64Header); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + result.UsedFallback.Should().BeFalse(); + result.Binary.Architecture.Should().Be(CpuArchitecture.ARM64); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2CompleteFailure.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2CompleteFailure.cs new file mode 100644 index 000000000..17f4080ee --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2CompleteFailure.cs @@ -0,0 +1,58 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinaryWithQuality_B2R2ThrowsException_FallsBackToGhidra() + { + // Arrange + var b2r2Binary = CreateBinaryInfo(CpuArchitecture.X86_64); + var b2r2Plugin = new ThrowingPlugin("stellaops.disasm.b2r2", "B2R2", 100, b2r2Binary); + + var (ghidraStub, ghidraBinary) = CreateStubPlugin( + "stellaops.disasm.ghidra", + "Ghidra", + priority: 50, + confidence: 0.85); + + var registry = CreateMockRegistry(new List<IDisassemblyPlugin> { b2r2Plugin, ghidraStub }); + var service = CreateService(registry); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + // When plugin throws, confidence becomes 0 and fallback reason reflects low confidence + result.FallbackReason.Should().Contain("confidence"); + } + + [Fact] + public void LoadBinaryWithQuality_B2R2ReturnsZeroConfidence_FallsBackToGhidra() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.0, // Complete failure + b2r2FunctionCount: 0, + b2r2DecodeSuccessRate: 0.0, + ghidraConfidence: 0.85); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + result.Confidence.Should().BeGreaterThan(0.0); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2FallbackScenarios.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2FallbackScenarios.cs new file mode 100644 index 000000000..5e7030fe3 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.B2R2FallbackScenarios.cs @@ -0,0 +1,98 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinaryWithQuality_B2R2MeetsThreshold_ReturnsB2R2Result() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.9, + b2r2FunctionCount: 10, + b2r2DecodeSuccessRate: 0.95); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + result.UsedFallback.Should().BeFalse(); + result.Confidence.Should().BeGreaterThanOrEqualTo(0.7); + } + + [Fact] + public void LoadBinaryWithQuality_B2R2LowConfidence_FallsBackToGhidra() + { + // Arrange + // Create B2R2 with low decode rate which results in low confidence + // Confidence = decodeRate*0.5 + symbolScore*0.3 + regionScore*0.2 + // With decodeRate=0.4, symbolCount=2 (score=0.2), regions=3 (score=0.6): + // confidence = 0.4*0.5 + 0.2*0.3 + 0.6*0.2 = 0.2 + 0.06 + 0.12 = 0.38 (below 0.7) + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.38, // Below 0.7 threshold (not actually used, calculated from params) + b2r2FunctionCount: 2, + b2r2DecodeSuccessRate: 0.4, + ghidraConfidence: 0.85, + ghidraFunctionCount: 15, + ghidraDecodeSuccessRate: 0.95); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + result.FallbackReason.Should().Contain("confidence"); + } + + [Fact] + public void LoadBinaryWithQuality_B2R2InsufficientFunctions_FallsBackToGhidra() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.9, + b2r2FunctionCount: 0, // Below MinFunctionCount threshold + b2r2DecodeSuccessRate: 0.95, + ghidraConfidence: 0.85, + ghidraFunctionCount: 15); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + result.Symbols.Should().HaveCount(15); + } + + [Fact] + public void LoadBinaryWithQuality_B2R2LowDecodeRate_FallsBackToGhidra() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.9, + b2r2FunctionCount: 10, + b2r2DecodeSuccessRate: 0.6, // Below 0.8 threshold + ghidraConfidence: 0.85, + ghidraDecodeSuccessRate: 0.95); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + result.DecodeSuccessRate.Should().BeGreaterThanOrEqualTo(0.8); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.GhidraUnavailable.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.GhidraUnavailable.cs new file mode 100644 index 000000000..89d331e20 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.GhidraUnavailable.cs @@ -0,0 +1,66 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinaryWithQuality_GhidraUnavailable_ReturnsB2R2ResultEvenIfPoor() + { + // Arrange + var (b2r2Plugin, b2r2Binary) = CreateStubPlugin( + "stellaops.disasm.b2r2", + "B2R2", + priority: 100, + confidence: 0.5); + + var registry = CreateMockRegistry(new List<IDisassemblyPlugin> { b2r2Plugin }); + var service = CreateService(registry); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert - Should return B2R2 result since Ghidra is not available + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + result.UsedFallback.Should().BeFalse(); + // Confidence will be calculated based on mock data, not the input parameter + } + + [Fact] + public void LoadBinaryWithQuality_NoPluginAvailable_ThrowsException() + { + // Arrange + var registry = CreateMockRegistry(new List<IDisassemblyPlugin>()); + var service = CreateService(registry); + + // Act & Assert + var act = () => service.LoadBinaryWithQuality(_simpleX64Code); + act.Should().Throw<InvalidOperationException>() + .WithMessage("*No disassembly plugin available*"); + } + + [Fact] + public void LoadBinaryWithQuality_FallbackDisabled_ReturnsB2R2ResultEvenIfPoor() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.5, + b2r2FunctionCount: 0, + b2r2DecodeSuccessRate: 0.6, + enableFallback: false); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + result.UsedFallback.Should().BeFalse(); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MetricsLogging.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MetricsLogging.cs new file mode 100644 index 000000000..2e7b37c34 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MetricsLogging.cs @@ -0,0 +1,54 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinaryWithQuality_CalculatesConfidenceCorrectly() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.85, + b2r2FunctionCount: 10, + b2r2DecodeSuccessRate: 0.95); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Confidence.Should().BeGreaterThanOrEqualTo(0.0); + result.Confidence.Should().BeLessThanOrEqualTo(1.0); + result.TotalInstructions.Should().BeGreaterThan(0); + result.DecodedInstructions.Should().BeGreaterThan(0); + result.DecodeSuccessRate.Should().BeGreaterThanOrEqualTo(0.9); + } + + [Fact] + public void LoadBinaryWithQuality_GhidraBetterThanB2R2_UsesGhidra() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.6, + b2r2FunctionCount: 5, + b2r2DecodeSuccessRate: 0.75, + ghidraConfidence: 0.95, + ghidraFunctionCount: 25, + ghidraDecodeSuccessRate: 0.98); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + result.Confidence.Should().BeGreaterThan(0.6); + result.Symbols.Should().HaveCount(25); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MockData.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MockData.cs new file mode 100644 index 000000000..543b85d05 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.MockData.cs @@ -0,0 +1,87 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + private static BinaryInfo CreateBinaryInfo(CpuArchitecture architecture) + { + return new BinaryInfo( + Format: BinaryFormat.ELF, + Architecture: architecture, + Bitness: architecture == CpuArchitecture.X86 ? 
32 : 64, + Endianness: Endianness.Little, + Abi: "gnu", + EntryPoint: 0x1000, + BuildId: "abc123", + Metadata: new Dictionary<string, string>(), + Handle: new object()); + } + + private static List<CodeRegion> CreateMockCodeRegions(int count) + { + var regions = new List<CodeRegion>(); + for (int i = 0; i < count; i++) + { + regions.Add(new CodeRegion( + Name: $".text{i}", + VirtualAddress: (ulong)(0x1000 + i * 0x1000), + FileOffset: (ulong)(0x1000 + i * 0x1000), + Size: 0x1000, + IsExecutable: true, + IsReadable: true, + IsWritable: false)); + } + return regions; + } + + private static List<SymbolInfo> CreateMockSymbols(int count) + { + var symbols = new List<SymbolInfo>(); + for (int i = 0; i < count; i++) + { + symbols.Add(new SymbolInfo( + Name: $"function_{i}", + Address: (ulong)(0x1000 + i * 0x10), + Size: 0x10, + Type: SymbolType.Function, + Binding: SymbolBinding.Global, + Section: ".text")); + } + return symbols; + } + + private static List<DisassembledInstruction> CreateMockInstructions(int validCount, int invalidCount) + { + var instructions = new List<DisassembledInstruction>(); + + // Add valid instructions + for (int i = 0; i < validCount; i++) + { + instructions.Add(new DisassembledInstruction( + Address: (ulong)(0x1000 + i * 4), + RawBytes: ImmutableArray.Create<byte>(0x48, 0xC7, 0xC0, 0x00), + Mnemonic: "mov", + OperandsText: "rax, 0", + Kind: InstructionKind.Move, + Operands: ImmutableArray.Empty)); + } + + // Add invalid instructions + for (int i = 0; i < invalidCount; i++) + { + instructions.Add(new DisassembledInstruction( + Address: (ulong)(0x1000 + validCount * 4 + i * 4), + RawBytes: ImmutableArray.Create<byte>(0xFF, 0xFF, 0xFF, 0xFF), + Mnemonic: "??", + OperandsText: "", + Kind: InstructionKind.Unknown, + Operands: ImmutableArray.Empty)); + } + + return instructions; + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.PreferredPlugin.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.PreferredPlugin.cs new file mode 100644 index 000000000..38280c4fd --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.PreferredPlugin.cs @@ -0,0 +1,57 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinary_PreferredPluginSpecified_UsesPreferredPlugin() + { + // Arrange + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.9, + b2r2FunctionCount: 10, + b2r2DecodeSuccessRate: 0.95); + + // Act - Explicitly prefer Ghidra even though B2R2 is higher priority + var (binary, plugin) = service.LoadBinary(_simpleX64Code, "stellaops.disasm.ghidra"); + + // Assert + binary.Should().NotBeNull(); + plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + } + + [Fact] + public void LoadBinary_NoPrimaryConfigured_AutoSelectsHighestPriority() + { + // Arrange + var (b2r2Stub, b2r2Binary) = CreateStubPlugin("stellaops.disasm.b2r2", "B2R2", 100); + var (ghidraStub, ghidraBinary) = CreateStubPlugin("stellaops.disasm.ghidra", "Ghidra", 50); + + var registry = CreateMockRegistry(new List { b2r2Stub, ghidraStub }); + var options = Options.Create(new HybridDisassemblyOptions + { + PrimaryPluginId = null, // No primary configured + EnableFallback = false // Disabled fallback for this test + }); + + var service = new HybridDisassemblyService( + registry, + options, + NullLogger.Instance); + + // Act + var (binary, plugin) = service.LoadBinary(_simpleX64Code); + + // Assert - Should select B2R2 (priority 100) over Ghidra (priority 50) + binary.Should().NotBeNull(); + plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.QualityThresholds.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.QualityThresholds.cs new file mode 100644 index 000000000..d451b5553 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.QualityThresholds.cs @@ -0,0 +1,85 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. 
+using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + + [Fact] + public void LoadBinaryWithQuality_CustomThresholds_RespectsConfiguration() + { + // Arrange + // Create B2R2 with parameters that result in confidence below custom threshold 0.65 + // With decodeRate=0.5, symbolCount=2 (score=0.2), regions=3 (score=0.6): + // confidence = 0.5*0.5 + 0.2*0.3 + 0.6*0.2 = 0.25 + 0.06 + 0.12 = 0.43 (below 0.65) + var (b2r2Stub, b2r2Binary) = CreateStubPlugin( + "stellaops.disasm.b2r2", + "B2R2", + priority: 100, + confidence: 0.43, // Not used, calculated from other params + functionCount: 2, + decodeSuccessRate: 0.5); + + var (ghidraStub, ghidraBinary) = CreateStubPlugin( + "stellaops.disasm.ghidra", + "Ghidra", + priority: 50, + confidence: 0.8, + functionCount: 15, + decodeSuccessRate: 0.95); + + var registry = CreateMockRegistry(new List<IDisassemblyPlugin> { b2r2Stub, ghidraStub }); + + var options = Options.Create(new HybridDisassemblyOptions + { + PrimaryPluginId = "stellaops.disasm.b2r2", + FallbackPluginId = "stellaops.disasm.ghidra", + MinConfidenceThreshold = 0.65, // Custom threshold + MinFunctionCount = 3, // Custom threshold + MinDecodeSuccessRate = 0.8, // Custom threshold + EnableFallback = true + }); + + var service = new HybridDisassemblyService( + registry, + options, + NullLogger.Instance); + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert - Should fallback due to threshold checks + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); + result.UsedFallback.Should().BeTrue(); + } + + [Fact] + public void LoadBinaryWithQuality_AllThresholdsExactlyMet_AcceptsB2R2() + { + // Arrange + // Confidence calculation: decodeRate*0.5 + symbolScore*0.3 + regionScore*0.2 + // For confidence >= 0.7: + // - decodeRate = 0.8 -> 0.8 * 0.5 = 0.4 + // - symbols = 6 -> symbolScore = 0.6 -> 0.6 * 0.3 = 0.18 + // - regions = 3 -> regionScore = 0.6 -> 0.6 * 0.2 = 0.12 + // - total = 0.4 + 0.18 + 0.12 = 0.7 (exactly at threshold) + var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( + b2r2Confidence: 0.7, // Not actually used - confidence is calculated + b2r2FunctionCount: 6, // Results in symbolScore = 0.6 + b2r2DecodeSuccessRate: 0.8); // Results in decodeRate = 0.8 + + // Act + var result = service.LoadBinaryWithQuality(_simpleX64Code); + + // Assert - Should accept B2R2 when exactly at thresholds + result.Should().NotBeNull(); + result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + result.UsedFallback.Should().BeFalse(); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.RegistryAndElf.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.RegistryAndElf.cs new file mode 100644 index 000000000..8731848f7 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.RegistryAndElf.cs @@ -0,0 +1,85 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Moq; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + private static IDisassemblyPluginRegistry CreateMockRegistry(IReadOnlyList<IDisassemblyPlugin> plugins) + { + var registry = new Mock<IDisassemblyPluginRegistry>(); + registry.Setup(r => r.Plugins).Returns(plugins); + + registry.Setup(r => r.FindPlugin(It.IsAny<CpuArchitecture>(), It.IsAny<BinaryFormat>())) + .Returns((CpuArchitecture arch, BinaryFormat format) => + plugins + .Where(p => p.Capabilities.CanHandle(arch, format)) + .OrderByDescending(p => p.Capabilities.Priority) + .FirstOrDefault()); + + registry.Setup(r => r.GetPlugin(It.IsAny<string>())) + .Returns((string id) => plugins.FirstOrDefault(p => p.Capabilities.PluginId == id)); + + return registry.Object; + } + + private static HybridDisassemblyService CreateService( + IDisassemblyPluginRegistry registry, + bool enableFallback = true) + { + var options = Options.Create(new HybridDisassemblyOptions + { + PrimaryPluginId = "stellaops.disasm.b2r2", + FallbackPluginId = "stellaops.disasm.ghidra", + MinConfidenceThreshold = 0.7, + MinFunctionCount = 1, + MinDecodeSuccessRate = 0.8, + AutoFallbackOnUnsupported = true, + EnableFallback = enableFallback, + PluginTimeoutSeconds = 120 + }); + + return new HybridDisassemblyService( + registry, + options, + NullLogger.Instance); + } + + private static byte[] CreateElfHeader(CpuArchitecture architecture) + { + var elf = new byte[64]; + + // ELF magic + elf[0] = 0x7F; + elf[1] = (byte)'E'; + elf[2] = (byte)'L'; + elf[3] = (byte)'F'; + + // Class: 64-bit + elf[4] = 2; + + // Data: little endian + elf[5] = 1; + + // Version + elf[6] = 1; + + // Type: Executable + elf[16] = 2; + elf[17] = 0; + + // Machine: set based on architecture + ushort machine = architecture switch + { + CpuArchitecture.X86_64 => 0x3E, + CpuArchitecture.ARM64 => 0xB7, + CpuArchitecture.ARM32 => 0x28, + CpuArchitecture.MIPS32 => 0x08, + CpuArchitecture.SPARC => 0x02, + _ => 0x3E + }; + + elf[18] = (byte)(machine & 0xFF); + elf[19] = (byte)((machine >> 8) & 0xFF); + + // Version + elf[20] = 1; + + return elf; + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ServiceStubs.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ServiceStubs.cs new file mode 100644 index 000000000..ab2914f15 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ServiceStubs.cs @@ -0,0 +1,71 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root.
+ +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + private static (IDisassemblyPlugin B2R2, IDisassemblyPlugin Ghidra, HybridDisassemblyService Service) + CreateServiceWithStubs( + double b2r2Confidence = 0.9, + int b2r2FunctionCount = 10, + double b2r2DecodeSuccessRate = 0.95, + double ghidraConfidence = 0.85, + int ghidraFunctionCount = 15, + double ghidraDecodeSuccessRate = 0.95, + bool enableFallback = true, + CpuArchitecture architecture = CpuArchitecture.X86_64) + { + var (b2r2Plugin, _) = CreateStubPlugin( + "stellaops.disasm.b2r2", + "B2R2", + priority: 100, + confidence: b2r2Confidence, + functionCount: b2r2FunctionCount, + decodeSuccessRate: b2r2DecodeSuccessRate, + architecture: architecture); + + var (ghidraPlugin, _) = CreateStubPlugin( + "stellaops.disasm.ghidra", + "Ghidra", + priority: 50, + confidence: ghidraConfidence, + functionCount: ghidraFunctionCount, + decodeSuccessRate: ghidraDecodeSuccessRate, + architecture: architecture); + + var registry = CreateMockRegistry(new List<IDisassemblyPlugin> { b2r2Plugin, ghidraPlugin }); + var service = CreateService(registry, enableFallback); + + return (b2r2Plugin, ghidraPlugin, service); + } + + private static (IDisassemblyPlugin Plugin, BinaryInfo Binary) CreateStubPlugin( + string pluginId, + string name, + int priority, + double confidence = 0.85, + int functionCount = 10, + double decodeSuccessRate = 0.95, + CpuArchitecture architecture = CpuArchitecture.X86_64) + { + var binary = CreateBinaryInfo(architecture); + var codeRegions = CreateMockCodeRegions(3); + var symbols = CreateMockSymbols(functionCount); + var totalInstructions = 1000; + var decodedInstructions = (int)(totalInstructions * decodeSuccessRate); + var instructions = CreateMockInstructions(decodedInstructions, totalInstructions - decodedInstructions); + + var stubPlugin = new StubDisassemblyPlugin( + pluginId, + name, + priority, + binary, + codeRegions, + symbols, + instructions); + + return (stubPlugin, binary); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.StubPlugin.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.StubPlugin.cs new file mode 100644 index 000000000..d3b9e925a --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.StubPlugin.cs @@ -0,0 +1,62 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + /// <summary> + /// Stub implementation of IDisassemblyPlugin for testing. + /// We need this because Moq cannot mock methods with ReadOnlySpan parameters. + /// </summary> + private sealed class StubDisassemblyPlugin : IDisassemblyPlugin + { + private readonly BinaryInfo _binary; + private readonly List<CodeRegion> _codeRegions; + private readonly List<SymbolInfo> _symbols; + private readonly List<DisassembledInstruction> _instructions; + + public DisassemblyCapabilities Capabilities { get; } + + public StubDisassemblyPlugin( + string pluginId, + string name, + int priority, + BinaryInfo binary, + List<CodeRegion> codeRegions, + List<SymbolInfo> symbols, + List<DisassembledInstruction> instructions, + IEnumerable<CpuArchitecture>?
supportedArchs = null) + { + _binary = binary; + _codeRegions = codeRegions; + _symbols = symbols; + _instructions = instructions; + + Capabilities = new DisassemblyCapabilities + { + PluginId = pluginId, + Name = name, + Version = "1.0", + SupportedArchitectures = (supportedArchs ?? new[] { + CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM32, + CpuArchitecture.ARM64, CpuArchitecture.MIPS32 + }).ToImmutableHashSet(), + SupportedFormats = ImmutableHashSet.Create(BinaryFormat.ELF, BinaryFormat.PE, BinaryFormat.Raw), + Priority = priority, + SupportsLifting = true, + SupportsCfgRecovery = true + }; + } + + public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => _binary; + public BinaryInfo LoadBinary(ReadOnlySpan<byte> bytes, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => _binary; + public IEnumerable<CodeRegion> GetCodeRegions(BinaryInfo binary) => _codeRegions; + public IEnumerable<SymbolInfo> GetSymbols(BinaryInfo binary) => _symbols; + public IEnumerable<DisassembledInstruction> Disassemble(BinaryInfo binary, CodeRegion region) => _instructions; + public IEnumerable<DisassembledInstruction> Disassemble(BinaryInfo binary, ulong startAddress, ulong length) => _instructions; + public IEnumerable<DisassembledInstruction> DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) => _instructions; + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ThrowingPlugin.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ThrowingPlugin.cs new file mode 100644 index 000000000..76919e435 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.ThrowingPlugin.cs @@ -0,0 +1,53 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using System.Collections.Immutable; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class HybridDisassemblyServiceTests +{ + /// <summary> + /// Plugin that throws exceptions for testing failure scenarios. + /// </summary> + private sealed class ThrowingPlugin : IDisassemblyPlugin + { + public DisassemblyCapabilities Capabilities { get; } + + public ThrowingPlugin(string pluginId, string name, int priority, BinaryInfo binary) + { + Capabilities = new DisassemblyCapabilities + { + PluginId = pluginId, + Name = name, + Version = "1.0", + SupportedArchitectures = ImmutableHashSet.Create(CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64), + SupportedFormats = ImmutableHashSet.Create(BinaryFormat.ELF, BinaryFormat.PE, BinaryFormat.Raw), + Priority = priority, + SupportsLifting = true, + SupportsCfgRecovery = true + }; + } + + public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => + throw new InvalidOperationException("Plugin failed to parse binary"); + + public BinaryInfo LoadBinary(ReadOnlySpan<byte> bytes, CpuArchitecture? archHint = null, BinaryFormat?
formatHint = null) => + throw new InvalidOperationException("Plugin failed to parse binary"); + + public IEnumerable<CodeRegion> GetCodeRegions(BinaryInfo binary) => + throw new InvalidOperationException("Plugin failed"); + + public IEnumerable<SymbolInfo> GetSymbols(BinaryInfo binary) => + throw new InvalidOperationException("Plugin failed"); + + public IEnumerable<DisassembledInstruction> Disassemble(BinaryInfo binary, CodeRegion region) => + throw new InvalidOperationException("Plugin failed"); + + public IEnumerable<DisassembledInstruction> Disassemble(BinaryInfo binary, ulong startAddress, ulong length) => + throw new InvalidOperationException("Plugin failed"); + + public IEnumerable<DisassembledInstruction> DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) => + throw new InvalidOperationException("Plugin failed"); + } + +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.cs index 8974c1476..dd8e03c88 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/HybridDisassemblyServiceTests.cs @@ -1,12 +1,5 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - -using System.Collections.Immutable; -using FluentAssertions; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Moq; using Xunit; namespace StellaOps.BinaryIndex.Disassembly.Tests; @@ -16,791 +9,18 @@ namespace StellaOps.BinaryIndex.Disassembly.Tests; /// Tests B2R2 -> Ghidra fallback scenarios, quality thresholds, and plugin selection. /// [Trait("Category", "Integration")] -public sealed class HybridDisassemblyServiceTests +public sealed partial class HybridDisassemblyServiceTests { // Simple x86-64 instructions: mov rax, 0x1234; ret - private static readonly byte[] s_simpleX64Code = + private static readonly byte[] _simpleX64Code = [ 0x48, 0xC7, 0xC0, 0x34, 0x12, 0x00, 0x00, // mov rax, 0x1234 0xC3 // ret ]; // ELF magic header for x86-64 - private static readonly byte[] s_elfX64Header = CreateElfHeader(CpuArchitecture.X86_64); + private static readonly byte[] _elfX64Header = CreateElfHeader(CpuArchitecture.X86_64); // ELF magic header for ARM64 - private static readonly byte[] s_elfArm64Header = CreateElfHeader(CpuArchitecture.ARM64); + private static readonly byte[] _elfArm64Header = CreateElfHeader(CpuArchitecture.ARM64); - - #region B2R2 -> Ghidra Fallback Scenarios - - [Fact] - public void LoadBinaryWithQuality_B2R2MeetsThreshold_ReturnsB2R2Result() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.9, - b2r2FunctionCount: 10, - b2r2DecodeSuccessRate: 0.95); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - result.UsedFallback.Should().BeFalse(); - result.Confidence.Should().BeGreaterThanOrEqualTo(0.7); - } - - [Fact] - public void LoadBinaryWithQuality_B2R2LowConfidence_FallsBackToGhidra() - { - // Arrange - // Create B2R2 with low decode rate which results in low confidence - // Confidence = decodeRate*0.5 + symbolScore*0.3 + regionScore*0.2 - // With decodeRate=0.4, symbolCount=2 (score=0.2), regions=3 (score=0.6): - // confidence = 0.4*0.5 + 0.2*0.3 + 0.6*0.2 = 0.2 + 0.06 + 0.12 = 0.38 (below 0.7) - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.38, //
Below 0.7 threshold (not actually used, calculated from params) - b2r2FunctionCount: 2, - b2r2DecodeSuccessRate: 0.4, - ghidraConfidence: 0.85, - ghidraFunctionCount: 15, - ghidraDecodeSuccessRate: 0.95); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - result.FallbackReason.Should().Contain("confidence"); - } - - [Fact] - public void LoadBinaryWithQuality_B2R2InsufficientFunctions_FallsBackToGhidra() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.9, - b2r2FunctionCount: 0, // Below MinFunctionCount threshold - b2r2DecodeSuccessRate: 0.95, - ghidraConfidence: 0.85, - ghidraFunctionCount: 15); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - result.Symbols.Should().HaveCount(15); - } - - [Fact] - public void LoadBinaryWithQuality_B2R2LowDecodeRate_FallsBackToGhidra() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.9, - b2r2FunctionCount: 10, - b2r2DecodeSuccessRate: 0.6, // Below 0.8 threshold - ghidraConfidence: 0.85, - ghidraDecodeSuccessRate: 0.95); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - result.DecodeSuccessRate.Should().BeGreaterThanOrEqualTo(0.8); - } - - #endregion - - #region B2R2 Complete Failure - - [Fact] - public void LoadBinaryWithQuality_B2R2ThrowsException_FallsBackToGhidra() - { - // Arrange - var b2r2Binary = CreateBinaryInfo(CpuArchitecture.X86_64); - var b2r2Plugin = new ThrowingPlugin("stellaops.disasm.b2r2", "B2R2", 100, b2r2Binary); - - var (ghidraStub, ghidraBinary) = CreateStubPlugin( - "stellaops.disasm.ghidra", - "Ghidra", - priority: 50, - confidence: 0.85); - - var registry = CreateMockRegistry(new List { b2r2Plugin, ghidraStub }); - var service = CreateService(registry); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - // When plugin throws, confidence becomes 0 and fallback reason reflects low confidence - result.FallbackReason.Should().Contain("confidence"); - } - - [Fact] - public void LoadBinaryWithQuality_B2R2ReturnsZeroConfidence_FallsBackToGhidra() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.0, // Complete failure - b2r2FunctionCount: 0, - b2r2DecodeSuccessRate: 0.0, - ghidraConfidence: 0.85); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - result.Confidence.Should().BeGreaterThan(0.0); - } - - #endregion - - #region Ghidra Unavailable - - [Fact] - public void LoadBinaryWithQuality_GhidraUnavailable_ReturnsB2R2ResultEvenIfPoor() - { - // Arrange - var (b2r2Plugin, b2r2Binary) = CreateStubPlugin( - "stellaops.disasm.b2r2", - "B2R2", - 
priority: 100, - confidence: 0.5); - - var registry = CreateMockRegistry(new List { b2r2Plugin }); - var service = CreateService(registry); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - Should return B2R2 result since Ghidra is not available - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - result.UsedFallback.Should().BeFalse(); - // Confidence will be calculated based on mock data, not the input parameter - } - - [Fact] - public void LoadBinaryWithQuality_NoPluginAvailable_ThrowsException() - { - // Arrange - var registry = CreateMockRegistry(new List()); - var service = CreateService(registry); - - // Act & Assert - var act = () => service.LoadBinaryWithQuality(s_simpleX64Code); - act.Should().Throw() - .WithMessage("*No disassembly plugin available*"); - } - - [Fact] - public void LoadBinaryWithQuality_FallbackDisabled_ReturnsB2R2ResultEvenIfPoor() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.5, - b2r2FunctionCount: 0, - b2r2DecodeSuccessRate: 0.6, - enableFallback: false); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - result.UsedFallback.Should().BeFalse(); - } - - #endregion - - #region Architecture-Specific Fallbacks - - [Fact] - public void LoadBinary_B2R2UnsupportedArchitecture_FallsBackToGhidra() - { - // Arrange - B2R2 doesn't support SPARC, Ghidra does - var b2r2Binary = CreateBinaryInfo(CpuArchitecture.SPARC); - var b2r2Plugin = new StubDisassemblyPlugin( - "stellaops.disasm.b2r2", - "B2R2", - 100, - b2r2Binary, - CreateMockCodeRegions(3), - CreateMockSymbols(10), - CreateMockInstructions(950, 50), - supportedArchs: new[] { CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64 }); - - var ghidraBinary = CreateBinaryInfo(CpuArchitecture.SPARC); - var ghidraPlugin = new StubDisassemblyPlugin( - "stellaops.disasm.ghidra", - "Ghidra", - 50, - ghidraBinary, - CreateMockCodeRegions(3), - CreateMockSymbols(15), - CreateMockInstructions(950, 50), - supportedArchs: new[] { CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64, CpuArchitecture.SPARC }); - - var registry = CreateMockRegistry(new List { b2r2Plugin, ghidraPlugin }); - var options = Options.Create(new HybridDisassemblyOptions - { - PrimaryPluginId = "stellaops.disasm.b2r2", - FallbackPluginId = "stellaops.disasm.ghidra", - AutoFallbackOnUnsupported = true, - EnableFallback = true - }); - - var service = new HybridDisassemblyService( - registry, - options, - NullLogger.Instance); - - // Create a fake SPARC binary - var sparcBinary = CreateElfHeader(CpuArchitecture.SPARC); - - // Act - var (binary, plugin) = service.LoadBinary(sparcBinary.AsSpan()); - - // Assert - binary.Should().NotBeNull(); - plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - binary.Architecture.Should().Be(CpuArchitecture.SPARC); - } - - [Fact] - public void LoadBinaryWithQuality_ARM64Binary_B2R2HighConfidence_UsesB2R2() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.95, - b2r2FunctionCount: 20, - b2r2DecodeSuccessRate: 0.98, - architecture: CpuArchitecture.ARM64); - - // Act - var result = service.LoadBinaryWithQuality(s_elfArm64Header); - - // Assert - result.Should().NotBeNull(); - 
result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - result.UsedFallback.Should().BeFalse(); - result.Binary.Architecture.Should().Be(CpuArchitecture.ARM64); - } - - #endregion - - #region Quality Threshold Logic - - [Fact] - public void LoadBinaryWithQuality_CustomThresholds_RespectsConfiguration() - { - // Arrange - // Create B2R2 with parameters that result in confidence below custom threshold 0.65 - // With decodeRate=0.5, symbolCount=2 (score=0.2), regions=3 (score=0.6): - // confidence = 0.5*0.5 + 0.2*0.3 + 0.6*0.2 = 0.25 + 0.06 + 0.12 = 0.43 (below 0.65) - var (b2r2Stub, b2r2Binary) = CreateStubPlugin( - "stellaops.disasm.b2r2", - "B2R2", - priority: 100, - confidence: 0.43, // Not used, calculated from other params - functionCount: 2, - decodeSuccessRate: 0.5); - - var (ghidraStub, ghidraBinary) = CreateStubPlugin( - "stellaops.disasm.ghidra", - "Ghidra", - priority: 50, - confidence: 0.8, - functionCount: 15, - decodeSuccessRate: 0.95); - - var registry = CreateMockRegistry(new List { b2r2Stub, ghidraStub }); - - var options = Options.Create(new HybridDisassemblyOptions - { - PrimaryPluginId = "stellaops.disasm.b2r2", - FallbackPluginId = "stellaops.disasm.ghidra", - MinConfidenceThreshold = 0.65, // Custom threshold - MinFunctionCount = 3, // Custom threshold - MinDecodeSuccessRate = 0.8, // Custom threshold - EnableFallback = true - }); - - var service = new HybridDisassemblyService( - registry, - options, - NullLogger.Instance); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - Should fallback due to threshold checks - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - } - - [Fact] - public void LoadBinaryWithQuality_AllThresholdsExactlyMet_AcceptsB2R2() - { - // Arrange - // Confidence calculation: decodeRate*0.5 + symbolScore*0.3 + regionScore*0.2 - // For confidence >= 0.7: - // - decodeRate = 0.8 -> 0.8 * 0.5 = 0.4 - // - symbols = 6 -> symbolScore = 0.6 -> 0.6 * 0.3 = 0.18 - // - regions = 3 -> regionScore = 0.6 -> 0.6 * 0.2 = 0.12 - // - total = 0.4 + 0.18 + 0.12 = 0.7 (exactly at threshold) - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.7, // Not actually used - confidence is calculated - b2r2FunctionCount: 6, // Results in symbolScore = 0.6 - b2r2DecodeSuccessRate: 0.8); // Results in decodeRate = 0.8 - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - Should accept B2R2 when exactly at thresholds - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - result.UsedFallback.Should().BeFalse(); - } - - #endregion - - #region Metrics and Logging - - [Fact] - public void LoadBinaryWithQuality_CalculatesConfidenceCorrectly() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.85, - b2r2FunctionCount: 10, - b2r2DecodeSuccessRate: 0.95); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Confidence.Should().BeGreaterThanOrEqualTo(0.0); - result.Confidence.Should().BeLessThanOrEqualTo(1.0); - result.TotalInstructions.Should().BeGreaterThan(0); - result.DecodedInstructions.Should().BeGreaterThan(0); - result.DecodeSuccessRate.Should().BeGreaterThanOrEqualTo(0.9); - } - - [Fact] - public void LoadBinaryWithQuality_GhidraBetterThanB2R2_UsesGhidra() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = 
CreateServiceWithStubs( - b2r2Confidence: 0.6, - b2r2FunctionCount: 5, - b2r2DecodeSuccessRate: 0.75, - ghidraConfidence: 0.95, - ghidraFunctionCount: 25, - ghidraDecodeSuccessRate: 0.98); - - // Act - var result = service.LoadBinaryWithQuality(s_simpleX64Code); - - // Assert - result.Should().NotBeNull(); - result.Plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - result.UsedFallback.Should().BeTrue(); - result.Confidence.Should().BeGreaterThan(0.6); - result.Symbols.Should().HaveCount(25); - } - - #endregion - - #region Preferred Plugin Selection - - [Fact] - public void LoadBinary_PreferredPluginSpecified_UsesPreferredPlugin() - { - // Arrange - var (b2r2Plugin, ghidraPlugin, service) = CreateServiceWithStubs( - b2r2Confidence: 0.9, - b2r2FunctionCount: 10, - b2r2DecodeSuccessRate: 0.95); - - // Act - Explicitly prefer Ghidra even though B2R2 is higher priority - var (binary, plugin) = service.LoadBinary(s_simpleX64Code, "stellaops.disasm.ghidra"); - - // Assert - binary.Should().NotBeNull(); - plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.ghidra"); - } - - [Fact] - public void LoadBinary_NoPrimaryConfigured_AutoSelectsHighestPriority() - { - // Arrange - var (b2r2Stub, b2r2Binary) = CreateStubPlugin("stellaops.disasm.b2r2", "B2R2", 100); - var (ghidraStub, ghidraBinary) = CreateStubPlugin("stellaops.disasm.ghidra", "Ghidra", 50); - - var registry = CreateMockRegistry(new List { b2r2Stub, ghidraStub }); - var options = Options.Create(new HybridDisassemblyOptions - { - PrimaryPluginId = null, // No primary configured - EnableFallback = false // Disabled fallback for this test - }); - - var service = new HybridDisassemblyService( - registry, - options, - NullLogger.Instance); - - // Act - var (binary, plugin) = service.LoadBinary(s_simpleX64Code); - - // Assert - Should select B2R2 (priority 100) over Ghidra (priority 50) - binary.Should().NotBeNull(); - plugin.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - } - - #endregion - - #region Helper Methods - - private static (IDisassemblyPlugin B2R2, IDisassemblyPlugin Ghidra, HybridDisassemblyService Service) - CreateServiceWithStubs( - double b2r2Confidence = 0.9, - int b2r2FunctionCount = 10, - double b2r2DecodeSuccessRate = 0.95, - double ghidraConfidence = 0.85, - int ghidraFunctionCount = 15, - double ghidraDecodeSuccessRate = 0.95, - bool enableFallback = true, - CpuArchitecture architecture = CpuArchitecture.X86_64) - { - var (b2r2Plugin, _) = CreateStubPlugin( - "stellaops.disasm.b2r2", - "B2R2", - priority: 100, - confidence: b2r2Confidence, - functionCount: b2r2FunctionCount, - decodeSuccessRate: b2r2DecodeSuccessRate, - architecture: architecture); - - var (ghidraPlugin, _) = CreateStubPlugin( - "stellaops.disasm.ghidra", - "Ghidra", - priority: 50, - confidence: ghidraConfidence, - functionCount: ghidraFunctionCount, - decodeSuccessRate: ghidraDecodeSuccessRate, - architecture: architecture); - - var registry = CreateMockRegistry(new List { b2r2Plugin, ghidraPlugin }); - var service = CreateService(registry, enableFallback); - - return (b2r2Plugin, ghidraPlugin, service); - } - - private static (IDisassemblyPlugin Plugin, BinaryInfo Binary) CreateStubPlugin( - string pluginId, - string name, - int priority, - double confidence = 0.85, - int functionCount = 10, - double decodeSuccessRate = 0.95, - CpuArchitecture architecture = CpuArchitecture.X86_64) - { - var binary = CreateBinaryInfo(architecture); - var codeRegions = CreateMockCodeRegions(3); - var symbols = 
CreateMockSymbols(functionCount); - var totalInstructions = 1000; - var decodedInstructions = (int)(totalInstructions * decodeSuccessRate); - var instructions = CreateMockInstructions(decodedInstructions, totalInstructions - decodedInstructions); - - var stubPlugin = new StubDisassemblyPlugin( - pluginId, - name, - priority, - binary, - codeRegions, - symbols, - instructions); - - return (stubPlugin, binary); - } - - /// - /// Stub implementation of IDisassemblyPlugin for testing. - /// We need this because Moq cannot mock methods with ReadOnlySpan parameters. - /// - private sealed class StubDisassemblyPlugin : IDisassemblyPlugin - { - private readonly BinaryInfo _binary; - private readonly List _codeRegions; - private readonly List _symbols; - private readonly List _instructions; - - public DisassemblyCapabilities Capabilities { get; } - - public StubDisassemblyPlugin( - string pluginId, - string name, - int priority, - BinaryInfo binary, - List codeRegions, - List symbols, - List instructions, - IEnumerable? supportedArchs = null) - { - _binary = binary; - _codeRegions = codeRegions; - _symbols = symbols; - _instructions = instructions; - - Capabilities = new DisassemblyCapabilities - { - PluginId = pluginId, - Name = name, - Version = "1.0", - SupportedArchitectures = (supportedArchs ?? new[] { - CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM32, - CpuArchitecture.ARM64, CpuArchitecture.MIPS32 - }).ToImmutableHashSet(), - SupportedFormats = ImmutableHashSet.Create(BinaryFormat.ELF, BinaryFormat.PE, BinaryFormat.Raw), - Priority = priority, - SupportsLifting = true, - SupportsCfgRecovery = true - }; - } - - public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => _binary; - public BinaryInfo LoadBinary(ReadOnlySpan bytes, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => _binary; - public IEnumerable GetCodeRegions(BinaryInfo binary) => _codeRegions; - public IEnumerable GetSymbols(BinaryInfo binary) => _symbols; - public IEnumerable Disassemble(BinaryInfo binary, CodeRegion region) => _instructions; - public IEnumerable Disassemble(BinaryInfo binary, ulong startAddress, ulong length) => _instructions; - public IEnumerable DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) => _instructions; - } - - /// - /// Plugin that throws exceptions for testing failure scenarios. - /// - private sealed class ThrowingPlugin : IDisassemblyPlugin - { - public DisassemblyCapabilities Capabilities { get; } - - public ThrowingPlugin(string pluginId, string name, int priority, BinaryInfo binary) - { - Capabilities = new DisassemblyCapabilities - { - PluginId = pluginId, - Name = name, - Version = "1.0", - SupportedArchitectures = ImmutableHashSet.Create(CpuArchitecture.X86, CpuArchitecture.X86_64, CpuArchitecture.ARM64), - SupportedFormats = ImmutableHashSet.Create(BinaryFormat.ELF, BinaryFormat.PE, BinaryFormat.Raw), - Priority = priority, - SupportsLifting = true, - SupportsCfgRecovery = true - }; - } - - public BinaryInfo LoadBinary(Stream stream, CpuArchitecture? archHint = null, BinaryFormat? formatHint = null) => - throw new InvalidOperationException("Plugin failed to parse binary"); - - public BinaryInfo LoadBinary(ReadOnlySpan bytes, CpuArchitecture? archHint = null, BinaryFormat? 
formatHint = null) => - throw new InvalidOperationException("Plugin failed to parse binary"); - - public IEnumerable GetCodeRegions(BinaryInfo binary) => - throw new InvalidOperationException("Plugin failed"); - - public IEnumerable GetSymbols(BinaryInfo binary) => - throw new InvalidOperationException("Plugin failed"); - - public IEnumerable Disassemble(BinaryInfo binary, CodeRegion region) => - throw new InvalidOperationException("Plugin failed"); - - public IEnumerable Disassemble(BinaryInfo binary, ulong startAddress, ulong length) => - throw new InvalidOperationException("Plugin failed"); - - public IEnumerable DisassembleSymbol(BinaryInfo binary, SymbolInfo symbol) => - throw new InvalidOperationException("Plugin failed"); - } - - private static BinaryInfo CreateBinaryInfo(CpuArchitecture architecture) - { - return new BinaryInfo( - Format: BinaryFormat.ELF, - Architecture: architecture, - Bitness: architecture == CpuArchitecture.X86 ? 32 : 64, - Endianness: Endianness.Little, - Abi: "gnu", - EntryPoint: 0x1000, - BuildId: "abc123", - Metadata: new Dictionary(), - Handle: new object()); - } - - private static List CreateMockCodeRegions(int count) - { - var regions = new List(); - for (int i = 0; i < count; i++) - { - regions.Add(new CodeRegion( - Name: $".text{i}", - VirtualAddress: (ulong)(0x1000 + i * 0x1000), - FileOffset: (ulong)(0x1000 + i * 0x1000), - Size: 0x1000, - IsExecutable: true, - IsReadable: true, - IsWritable: false)); - } - return regions; - } - - private static List CreateMockSymbols(int count) - { - var symbols = new List(); - for (int i = 0; i < count; i++) - { - symbols.Add(new SymbolInfo( - Name: $"function_{i}", - Address: (ulong)(0x1000 + i * 0x10), - Size: 0x10, - Type: SymbolType.Function, - Binding: SymbolBinding.Global, - Section: ".text")); - } - return symbols; - } - - private static List CreateMockInstructions(int validCount, int invalidCount) - { - var instructions = new List(); - - // Add valid instructions - for (int i = 0; i < validCount; i++) - { - instructions.Add(new DisassembledInstruction( - Address: (ulong)(0x1000 + i * 4), - RawBytes: ImmutableArray.Create(0x48, 0xC7, 0xC0, 0x00), - Mnemonic: "mov", - OperandsText: "rax, 0", - Kind: InstructionKind.Move, - Operands: ImmutableArray.Empty)); - } - - // Add invalid instructions - for (int i = 0; i < invalidCount; i++) - { - instructions.Add(new DisassembledInstruction( - Address: (ulong)(0x1000 + validCount * 4 + i * 4), - RawBytes: ImmutableArray.Create(0xFF, 0xFF, 0xFF, 0xFF), - Mnemonic: "??", - OperandsText: "", - Kind: InstructionKind.Unknown, - Operands: ImmutableArray.Empty)); - } - - return instructions; - } - - private static IDisassemblyPluginRegistry CreateMockRegistry(IReadOnlyList plugins) - { - var registry = new Mock(); - registry.Setup(r => r.Plugins).Returns(plugins); - - registry.Setup(r => r.FindPlugin(It.IsAny(), It.IsAny())) - .Returns((CpuArchitecture arch, BinaryFormat format) => - plugins - .Where(p => p.Capabilities.CanHandle(arch, format)) - .OrderByDescending(p => p.Capabilities.Priority) - .FirstOrDefault()); - - registry.Setup(r => r.GetPlugin(It.IsAny())) - .Returns((string id) => plugins.FirstOrDefault(p => p.Capabilities.PluginId == id)); - - return registry.Object; - } - - private static HybridDisassemblyService CreateService( - IDisassemblyPluginRegistry registry, - bool enableFallback = true) - { - var options = Options.Create(new HybridDisassemblyOptions - { - PrimaryPluginId = "stellaops.disasm.b2r2", - FallbackPluginId = "stellaops.disasm.ghidra", - 
MinConfidenceThreshold = 0.7, - MinFunctionCount = 1, - MinDecodeSuccessRate = 0.8, - AutoFallbackOnUnsupported = true, - EnableFallback = enableFallback, - PluginTimeoutSeconds = 120 - }); - - return new HybridDisassemblyService( - registry, - options, - NullLogger.Instance); - } - - private static byte[] CreateElfHeader(CpuArchitecture architecture) - { - var elf = new byte[64]; - - // ELF magic - elf[0] = 0x7F; - elf[1] = (byte)'E'; - elf[2] = (byte)'L'; - elf[3] = (byte)'F'; - - // Class: 64-bit - elf[4] = 2; - - // Data: little endian - elf[5] = 1; - - // Version - elf[6] = 1; - - // Type: Executable - elf[16] = 2; - elf[17] = 0; - - // Machine: set based on architecture - ushort machine = architecture switch - { - CpuArchitecture.X86_64 => 0x3E, - CpuArchitecture.ARM64 => 0xB7, - CpuArchitecture.ARM32 => 0x28, - CpuArchitecture.MIPS32 => 0x08, - CpuArchitecture.SPARC => 0x02, - _ => 0x3E - }; - - elf[18] = (byte)(machine & 0xFF); - elf[19] = (byte)((machine >> 8) & 0xFF); - - // Version - elf[20] = 1; - - return elf; - } - - #endregion + private static readonly byte[] _elfArm64Header = CreateElfHeader(CpuArchitecture.ARM64); } diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.DisassemblyTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.DisassemblyTests.cs new file mode 100644 index 000000000..781612e02 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.DisassemblyTests.cs @@ -0,0 +1,82 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class IcedPluginTests +{ + [Fact] + public void Disassemble_DisassemblesX64Code() + { + // Arrange + var plugin = CreatePlugin(); + var binary = plugin.LoadBinary(_simpleX64Code, CpuArchitecture.X86_64, BinaryFormat.Raw); + var region = new CodeRegion(".text", 0, 0, (ulong)_simpleX64Code.Length, true, true, false); + + // Act + var instructions = plugin.Disassemble(binary, region).ToList(); + + // Assert + instructions.Should().HaveCount(2); + + instructions[0].Mnemonic.Should().Be("Mov"); + instructions[0].Address.Should().Be(0UL); + instructions[0].Kind.Should().Be(InstructionKind.Move); + instructions[0].RawBytes.Length.Should().Be(7); + + instructions[1].Mnemonic.Should().Be("Ret"); + instructions[1].Address.Should().Be(7UL); + instructions[1].Kind.Should().Be(InstructionKind.Return); + } + + [Fact] + public void Disassemble_ClassifiesInstructionKinds() + { + // Arrange + var plugin = CreatePlugin(); + // add rax, rbx; sub rcx, rdx; jmp 0x10; call 0x20; nop; ret + var code = new byte[] + { + 0x48, 0x01, 0xD8, // add rax, rbx + 0x48, 0x29, 0xD1, // sub rcx, rdx + 0xEB, 0x00, // jmp short $+2 + 0xE8, 0x00, 0x00, 0x00, 0x00, // call rel32 + 0x90, // nop + 0xC3 // ret + }; + + var binary = plugin.LoadBinary(code, CpuArchitecture.X86_64, BinaryFormat.Raw); + var region = new CodeRegion(".text", 0, 0, (ulong)code.Length, true, true, false); + + // Act + var instructions = plugin.Disassemble(binary, region).ToList(); + + // Assert + instructions.Should().HaveCountGreaterThanOrEqualTo(6); + instructions[0].Kind.Should().Be(InstructionKind.Arithmetic); // add + instructions[1].Kind.Should().Be(InstructionKind.Arithmetic); // sub + instructions[2].Kind.Should().Be(InstructionKind.Branch); // jmp + 
instructions[3].Kind.Should().Be(InstructionKind.Call); // call + instructions[4].Kind.Should().Be(InstructionKind.Nop); // nop + instructions[5].Kind.Should().Be(InstructionKind.Return); // ret + } + + [Fact] + public void GetCodeRegions_ReturnsRawRegionForRawFormat() + { + // Arrange + var plugin = CreatePlugin(); + var binary = plugin.LoadBinary(_simpleX64Code, CpuArchitecture.X86_64, BinaryFormat.Raw); + + // Act + var regions = plugin.GetCodeRegions(binary).ToList(); + + // Assert + regions.Should().HaveCount(1); + regions[0].Name.Should().Be(".text"); + regions[0].Size.Should().Be((ulong)_simpleX64Code.Length); + regions[0].IsExecutable.Should().BeTrue(); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.LoadBinaryTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.LoadBinaryTests.cs new file mode 100644 index 000000000..3d18e1b8b --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.LoadBinaryTests.cs @@ -0,0 +1,53 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class IcedPluginTests +{ + [Fact] + public void LoadBinary_DetectsElfFormat() + { + // Arrange + var plugin = CreatePlugin(); + + // Act + var binary = plugin.LoadBinary(_minimalElf64); + + // Assert + binary.Format.Should().Be(BinaryFormat.ELF); + binary.Architecture.Should().Be(CpuArchitecture.X86_64); + binary.Bitness.Should().Be(64); + binary.Endianness.Should().Be(Endianness.Little); + } + + [Fact] + public void LoadBinary_DetectsPeFormat() + { + // Arrange + var plugin = CreatePlugin(); + + // Act + var binary = plugin.LoadBinary(_minimalPe64); + + // Assert + binary.Format.Should().Be(BinaryFormat.PE); + binary.Architecture.Should().Be(CpuArchitecture.X86_64); + } + + [Fact] + public void LoadBinary_RawBytesDefaultsToRaw() + { + // Arrange + var plugin = CreatePlugin(); + var randomBytes = new byte[] { 0x01, 0x02, 0x03, 0x04 }; + + // Act + var binary = plugin.LoadBinary(randomBytes); + + // Assert + binary.Format.Should().Be(BinaryFormat.Raw); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.cs index bb8c6c206..f276c077d 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/IcedPluginTests.cs @@ -1,7 +1,5 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; using StellaOps.BinaryIndex.Disassembly.Iced; using Xunit; @@ -12,10 +10,10 @@ namespace StellaOps.BinaryIndex.Disassembly.Tests; /// Tests for the Iced disassembly plugin. 
/// [Trait("Category", "Unit")] -public sealed class IcedPluginTests +public sealed partial class IcedPluginTests { // Simple x86-64 ELF header (minimal) - private static readonly byte[] s_minimalElf64 = + private static readonly byte[] _minimalElf64 = [ 0x7F, (byte)'E', (byte)'L', (byte)'F', // Magic 0x02, // 64-bit @@ -33,7 +31,14 @@ public sealed class IcedPluginTests // DOS Header: 64 bytes (including e_lfanew at offset 0x3C) // PE Signature at offset 0x40: "PE\0\0" // Machine field at offset 0x44: 0x8664 for x86-64 - private static readonly byte[] s_minimalPe64 = CreateMinimalPe64(); + private static readonly byte[] _minimalPe64 = CreateMinimalPe64(); + + // Simple x86-64 instructions: mov rax, 0x1234; ret + private static readonly byte[] _simpleX64Code = + [ + 0x48, 0xC7, 0xC0, 0x34, 0x12, 0x00, 0x00, // mov rax, 0x1234 + 0xC3 // ret + ]; private static byte[] CreateMinimalPe64() { @@ -56,130 +61,6 @@ public sealed class IcedPluginTests return pe; } - // Simple x86-64 instructions: mov rax, 0x1234; ret - private static readonly byte[] s_simpleX64Code = - [ - 0x48, 0xC7, 0xC0, 0x34, 0x12, 0x00, 0x00, // mov rax, 0x1234 - 0xC3 // ret - ]; - - [Fact] - public void LoadBinary_DetectsElfFormat() - { - // Arrange - var plugin = CreatePlugin(); - - // Act - var binary = plugin.LoadBinary(s_minimalElf64); - - // Assert - binary.Format.Should().Be(BinaryFormat.ELF); - binary.Architecture.Should().Be(CpuArchitecture.X86_64); - binary.Bitness.Should().Be(64); - binary.Endianness.Should().Be(Endianness.Little); - } - - [Fact] - public void LoadBinary_DetectsPeFormat() - { - // Arrange - var plugin = CreatePlugin(); - - // Act - var binary = plugin.LoadBinary(s_minimalPe64); - - // Assert - binary.Format.Should().Be(BinaryFormat.PE); - binary.Architecture.Should().Be(CpuArchitecture.X86_64); - } - - [Fact] - public void LoadBinary_RawBytesDefaultsToRaw() - { - // Arrange - var plugin = CreatePlugin(); - var randomBytes = new byte[] { 0x01, 0x02, 0x03, 0x04 }; - - // Act - var binary = plugin.LoadBinary(randomBytes); - - // Assert - binary.Format.Should().Be(BinaryFormat.Raw); - } - - [Fact] - public void Disassemble_DisassemblesX64Code() - { - // Arrange - var plugin = CreatePlugin(); - var binary = plugin.LoadBinary(s_simpleX64Code, CpuArchitecture.X86_64, BinaryFormat.Raw); - var region = new CodeRegion(".text", 0, 0, (ulong)s_simpleX64Code.Length, true, true, false); - - // Act - var instructions = plugin.Disassemble(binary, region).ToList(); - - // Assert - instructions.Should().HaveCount(2); - - instructions[0].Mnemonic.Should().Be("Mov"); - instructions[0].Address.Should().Be(0UL); - instructions[0].Kind.Should().Be(InstructionKind.Move); - instructions[0].RawBytes.Length.Should().Be(7); - - instructions[1].Mnemonic.Should().Be("Ret"); - instructions[1].Address.Should().Be(7UL); - instructions[1].Kind.Should().Be(InstructionKind.Return); - } - - [Fact] - public void Disassemble_ClassifiesInstructionKinds() - { - // Arrange - var plugin = CreatePlugin(); - // add rax, rbx; sub rcx, rdx; jmp 0x10; call 0x20; nop; ret - var code = new byte[] - { - 0x48, 0x01, 0xD8, // add rax, rbx - 0x48, 0x29, 0xD1, // sub rcx, rdx - 0xEB, 0x00, // jmp short $+2 - 0xE8, 0x00, 0x00, 0x00, 0x00, // call rel32 - 0x90, // nop - 0xC3 // ret - }; - - var binary = plugin.LoadBinary(code, CpuArchitecture.X86_64, BinaryFormat.Raw); - var region = new CodeRegion(".text", 0, 0, (ulong)code.Length, true, true, false); - - // Act - var instructions = plugin.Disassemble(binary, region).ToList(); - - // Assert - 
instructions.Should().HaveCountGreaterThanOrEqualTo(6); - instructions[0].Kind.Should().Be(InstructionKind.Arithmetic); // add - instructions[1].Kind.Should().Be(InstructionKind.Arithmetic); // sub - instructions[2].Kind.Should().Be(InstructionKind.Branch); // jmp - instructions[3].Kind.Should().Be(InstructionKind.Call); // call - instructions[4].Kind.Should().Be(InstructionKind.Nop); // nop - instructions[5].Kind.Should().Be(InstructionKind.Return); // ret - } - - [Fact] - public void GetCodeRegions_ReturnsRawRegionForRawFormat() - { - // Arrange - var plugin = CreatePlugin(); - var binary = plugin.LoadBinary(s_simpleX64Code, CpuArchitecture.X86_64, BinaryFormat.Raw); - - // Act - var regions = plugin.GetCodeRegions(binary).ToList(); - - // Assert - regions.Should().HaveCount(1); - regions[0].Name.Should().Be(".text"); - regions[0].Size.Should().Be((ulong)s_simpleX64Code.Length); - regions[0].IsExecutable.Should().BeTrue(); - } - private static IcedDisassemblyPlugin CreatePlugin() { return new IcedDisassemblyPlugin(NullLogger.Instance); diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.Tests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.Tests.cs new file mode 100644 index 000000000..a8f8d3476 --- /dev/null +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.Tests.cs @@ -0,0 +1,93 @@ +// Copyright (c) StellaOps. All rights reserved. +// Licensed under BUSL-1.1. See LICENSE in the project root. +using FluentAssertions; +using Xunit; + +namespace StellaOps.BinaryIndex.Disassembly.Tests; + +public sealed partial class PluginRegistryTests +{ + [Fact] + public void Registry_FindsPluginByArchitectureAndFormat() + { + // Arrange + var registry = CreateRegistry(); + + // Act + var x64Plugin = registry.FindPlugin(CpuArchitecture.X86_64, BinaryFormat.ELF); + var armPlugin = registry.FindPlugin(CpuArchitecture.ARM64, BinaryFormat.ELF); + + // Assert + x64Plugin.Should().NotBeNull(); + x64Plugin!.Capabilities.PluginId.Should().Be("stellaops.disasm.iced"); // Higher priority for x86/x64 + + armPlugin.Should().NotBeNull(); + armPlugin!.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); // Only B2R2 supports ARM + } + + [Fact] + public void Registry_ReturnsNullForUnsupportedCombination() + { + // Arrange + var registry = CreateRegistry(); + + // Act + var plugin = registry.FindPlugin(CpuArchitecture.WASM, BinaryFormat.ELF); + + // Assert - WASM arch is only supported by B2R2, but WASM format not ELF + // Actually B2R2 supports WASM format, but the combination may not be valid + // Let's test with something truly unsupported + } + + [Fact] + public void Registry_FindsPluginById() + { + // Arrange + var registry = CreateRegistry(); + + // Act + var icedPlugin = registry.GetPlugin("stellaops.disasm.iced"); + var b2r2Plugin = registry.GetPlugin("stellaops.disasm.b2r2"); + var unknownPlugin = registry.GetPlugin("stellaops.disasm.unknown"); + + // Assert + icedPlugin.Should().NotBeNull(); + icedPlugin!.Capabilities.Name.Should().Contain("Iced"); + + b2r2Plugin.Should().NotBeNull(); + b2r2Plugin!.Capabilities.Name.Should().Contain("B2R2"); + + unknownPlugin.Should().BeNull(); + } + + [Fact] + public void Registry_PluginsOrderedByPriority() + { + // Arrange + var registry = CreateRegistry(); + + // Act + var plugins = registry.Plugins; + + // Assert - Iced has higher priority (100) than B2R2 (50) + plugins.Should().HaveCount(2); + 
plugins[0].Capabilities.PluginId.Should().Be("stellaops.disasm.iced"); + plugins[1].Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + } + + [Fact] + public void Registry_FindPluginsForArchitecture_ReturnsMultiple() + { + // Arrange + var registry = CreateRegistry(); + + // Act - both Iced and B2R2 support x86_64 + var x64Plugins = registry.FindPluginsForArchitecture(CpuArchitecture.X86_64).ToList(); + var armPlugins = registry.FindPluginsForArchitecture(CpuArchitecture.ARM64).ToList(); + + // Assert + x64Plugins.Should().HaveCount(2); + armPlugins.Should().HaveCount(1); + armPlugins[0].Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); + } +} diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.cs b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.cs index 55d1fe7c6..755c0a5e7 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.cs +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/PluginRegistryTests.cs @@ -1,8 +1,5 @@ // Copyright (c) StellaOps. All rights reserved. // Licensed under BUSL-1.1. See LICENSE in the project root. - -using FluentAssertions; -using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; using StellaOps.BinaryIndex.Disassembly.B2R2; using StellaOps.BinaryIndex.Disassembly.Iced; @@ -14,92 +11,8 @@ namespace StellaOps.BinaryIndex.Disassembly.Tests; /// Tests for the plugin registry functionality. /// [Trait("Category", "Unit")] -public sealed class PluginRegistryTests +public sealed partial class PluginRegistryTests { - [Fact] - public void Registry_FindsPluginByArchitectureAndFormat() - { - // Arrange - var registry = CreateRegistry(); - - // Act - var x64Plugin = registry.FindPlugin(CpuArchitecture.X86_64, BinaryFormat.ELF); - var armPlugin = registry.FindPlugin(CpuArchitecture.ARM64, BinaryFormat.ELF); - - // Assert - x64Plugin.Should().NotBeNull(); - x64Plugin!.Capabilities.PluginId.Should().Be("stellaops.disasm.iced"); // Higher priority for x86/x64 - - armPlugin.Should().NotBeNull(); - armPlugin!.Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); // Only B2R2 supports ARM - } - - [Fact] - public void Registry_ReturnsNullForUnsupportedCombination() - { - // Arrange - var registry = CreateRegistry(); - - // Act - var plugin = registry.FindPlugin(CpuArchitecture.WASM, BinaryFormat.ELF); - - // Assert - WASM arch is only supported by B2R2, but WASM format not ELF - // Actually B2R2 supports WASM format, but the combination may not be valid - // Let's test with something truly unsupported - } - - [Fact] - public void Registry_FindsPluginById() - { - // Arrange - var registry = CreateRegistry(); - - // Act - var icedPlugin = registry.GetPlugin("stellaops.disasm.iced"); - var b2r2Plugin = registry.GetPlugin("stellaops.disasm.b2r2"); - var unknownPlugin = registry.GetPlugin("stellaops.disasm.unknown"); - - // Assert - icedPlugin.Should().NotBeNull(); - icedPlugin!.Capabilities.Name.Should().Contain("Iced"); - - b2r2Plugin.Should().NotBeNull(); - b2r2Plugin!.Capabilities.Name.Should().Contain("B2R2"); - - unknownPlugin.Should().BeNull(); - } - - [Fact] - public void Registry_PluginsOrderedByPriority() - { - // Arrange - var registry = CreateRegistry(); - - // Act - var plugins = registry.Plugins; - - // Assert - Iced has higher priority (100) than B2R2 (50) - plugins.Should().HaveCount(2); - plugins[0].Capabilities.PluginId.Should().Be("stellaops.disasm.iced"); - 
plugins[1].Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - } - - [Fact] - public void Registry_FindPluginsForArchitecture_ReturnsMultiple() - { - // Arrange - var registry = CreateRegistry(); - - // Act - both Iced and B2R2 support x86_64 - var x64Plugins = registry.FindPluginsForArchitecture(CpuArchitecture.X86_64).ToList(); - var armPlugins = registry.FindPluginsForArchitecture(CpuArchitecture.ARM64).ToList(); - - // Assert - x64Plugins.Should().HaveCount(2); - armPlugins.Should().HaveCount(1); - armPlugins[0].Capabilities.PluginId.Should().Be("stellaops.disasm.b2r2"); - } - private static DisassemblyPluginRegistry CreateRegistry() { var icedPlugin = new IcedDisassemblyPlugin(NullLogger.Instance); diff --git a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/TASKS.md b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/TASKS.md index 2e6ac10e3..c64a178b6 100644 --- a/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/TASKS.md +++ b/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0744-T | DONE | Revalidated 2026-01-07. | | AUDIT-0744-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/BinaryIndex/__Tests/StellaOps.BinaryIndex.Disassembly.Tests/StellaOps.BinaryIndex.Disassembly.Tests.md (2026-02-04). | diff --git a/src/Concelier/StellaOps.Concelier.WebService/Program.cs b/src/Concelier/StellaOps.Concelier.WebService/Program.cs index 8fb44366a..e0bf26a63 100644 --- a/src/Concelier/StellaOps.Concelier.WebService/Program.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Program.cs @@ -491,6 +491,9 @@ builder.Services.AddConcelierPostgresStorage(pgOptions => pgOptions.MigrationsPath = postgresOptions.MigrationsPath; }); +// Register in-memory lease store (single-instance dev mode). +builder.Services.AddSingleton(); + builder.Services.AddOptions() .Bind(builder.Configuration.GetSection("advisoryObservationEvents")) .PostConfigure(options => diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/InMemoryLeaseStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/InMemoryLeaseStore.cs new file mode 100644 index 000000000..12464c23b --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/InMemoryLeaseStore.cs @@ -0,0 +1,65 @@ +namespace StellaOps.Concelier.Core.Jobs; + +/// +/// In-memory lease store for single-instance deployments. +/// For multi-instance deployments, replace with a Postgres-backed implementation. 
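Editor's note on the store declared just below: it keeps leases in a process-local dictionary, so it only guards against overlapping runs within a single host process, matching the "single-instance dev mode" registration in Program.cs above. As a rough usage sketch of the acquire/heartbeat/release cycle (the job key, holder id, and durations here are invented for illustration and are not taken from the codebase; only the ILeaseStore method shapes mirror the implementation below):

    // Hypothetical single-instance job runner driving ILeaseStore.
    var store = new InMemoryLeaseStore();
    var holder = $"{Environment.MachineName}:{Environment.ProcessId}";

    // Try to take the lease; null means another holder still owns an unexpired lease.
    var lease = await store.TryAcquireAsync(
        "jobs:advisory-refresh", holder, TimeSpan.FromMinutes(5), DateTimeOffset.UtcNow, CancellationToken.None);

    if (lease is not null)
    {
        try
        {
            // ... do the work, renewing the lease periodically so it does not expire mid-run ...
            await store.HeartbeatAsync(
                "jobs:advisory-refresh", holder, TimeSpan.FromMinutes(5), DateTimeOffset.UtcNow, CancellationToken.None);
        }
        finally
        {
            // Release so the next scheduled run (or another holder) can acquire immediately.
            await store.ReleaseAsync("jobs:advisory-refresh", holder, CancellationToken.None);
        }
    }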
+/// +public sealed class InMemoryLeaseStore : ILeaseStore +{ + private readonly object _lock = new(); + private readonly Dictionary _leases = new(); + + public Task TryAcquireAsync( + string key, + string holder, + TimeSpan leaseDuration, + DateTimeOffset now, + CancellationToken cancellationToken) + { + lock (_lock) + { + if (_leases.TryGetValue(key, out var existing) && existing.TtlAt > now && existing.Holder != holder) + { + return Task.FromResult(null); + } + + var lease = new JobLease(key, holder, now, now, leaseDuration, now.Add(leaseDuration)); + _leases[key] = lease; + return Task.FromResult(lease); + } + } + + public Task HeartbeatAsync( + string key, + string holder, + TimeSpan leaseDuration, + DateTimeOffset now, + CancellationToken cancellationToken) + { + lock (_lock) + { + if (_leases.TryGetValue(key, out var existing) && existing.Holder == holder) + { + var lease = new JobLease(key, holder, existing.AcquiredAt, now, leaseDuration, now.Add(leaseDuration)); + _leases[key] = lease; + return Task.FromResult(lease); + } + + return Task.FromResult(null); + } + } + + public Task ReleaseAsync(string key, string holder, CancellationToken cancellationToken) + { + lock (_lock) + { + if (_leases.TryGetValue(key, out var existing) && existing.Holder == holder) + { + _leases.Remove(key); + return Task.FromResult(true); + } + } + + return Task.FromResult(false); + } +} diff --git a/src/Doctor/StellaOps.Doctor.Scheduler/Program.cs b/src/Doctor/StellaOps.Doctor.Scheduler/Program.cs index 1f02fe464..36ebeb514 100644 --- a/src/Doctor/StellaOps.Doctor.Scheduler/Program.cs +++ b/src/Doctor/StellaOps.Doctor.Scheduler/Program.cs @@ -11,8 +11,9 @@ using StellaOps.Doctor.Scheduler; using StellaOps.Doctor.Scheduler.Models; using StellaOps.Doctor.Scheduler.Options; using StellaOps.Doctor.Scheduler.Services; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); // Configure options builder.Services.Configure( @@ -42,8 +43,11 @@ builder.Services.AddSingleton(); // Add background worker builder.Services.AddHostedService(); -var host = builder.Build(); -await host.RunAsync(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync(); // Placeholder implementations for development file sealed class InMemoryScheduleRepository : IScheduleRepository diff --git a/src/Doctor/StellaOps.Doctor.Scheduler/StellaOps.Doctor.Scheduler.csproj b/src/Doctor/StellaOps.Doctor.Scheduler/StellaOps.Doctor.Scheduler.csproj index cd9842656..7060330f6 100644 --- a/src/Doctor/StellaOps.Doctor.Scheduler/StellaOps.Doctor.Scheduler.csproj +++ b/src/Doctor/StellaOps.Doctor.Scheduler/StellaOps.Doctor.Scheduler.csproj @@ -10,6 +10,10 @@ Scheduled Doctor health check runs with alerting and trending + + + + @@ -19,6 +23,7 @@ + diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj index 581182b53..9c407282e 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj @@ -7,6 +7,7 @@ enable preview true + false diff --git 
a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json index 73437f4fd..259b042b1 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json @@ -1,10 +1,13 @@ { - Logging: { - LogLevel: { - Default: Information, - Microsoft.AspNetCore: Warning + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" } }, - EvidenceLocker: { - Database: { - ConnectionString: Host=localhost + "EvidenceLocker": { + "Database": { + "ConnectionString": "Host=localhost" + } + } +} diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json index 2e0ce0dab..b5c6dc5cf 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json @@ -1,21 +1,24 @@ { - Logging: { - LogLevel: { - Default: Information, - Microsoft.AspNetCore: Warning + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" } }, - Authority: { - ResourceServer: { - Authority: https://authority.localtest.me, - Audiences: [ - api://evidence-locker + "Authority": { + "ResourceServer": { + "Authority": "https://authority.localtest.me", + "Audiences": [ + "api://evidence-locker" ], - RequiredTenants: [ - tenant-default + "RequiredTenants": [ + "tenant-default" ] } }, - EvidenceLocker: { - Database: { - ConnectionString: Host=localhost + "EvidenceLocker": { + "Database": { + "ConnectionString": "Host=localhost" + } + } +} diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs index a7f97b3c0..8ed9479cd 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs @@ -1,10 +1,13 @@ using StellaOps.EvidenceLocker.Infrastructure.DependencyInjection; using StellaOps.EvidenceLocker.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Services.AddEvidenceLockerInfrastructure(builder.Configuration); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -await host.RunAsync(); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync(); diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj index f1df43ec6..75f3e171a 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj @@ -14,10 +14,14 @@ enable preview true + false - - + + + + + @@ -35,8 
+39,11 @@ - - + + + + + diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json index eef3c60f4..7479123b8 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json @@ -1,10 +1,13 @@ { - Logging: { - LogLevel: { - Default: Information, - Microsoft.Hosting.Lifetime: Information + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" } }, - EvidenceLocker: { - Database: { - ConnectionString: Host=localhost + "EvidenceLocker": { + "Database": { + "ConnectionString": "Host=localhost" + } + } +} diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json index eef3c60f4..7479123b8 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json @@ -1,10 +1,13 @@ { - Logging: { - LogLevel: { - Default: Information, - Microsoft.Hosting.Lifetime: Information + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" } }, - EvidenceLocker: { - Database: { - ConnectionString: Host=localhost + "EvidenceLocker": { + "Database": { + "ConnectionString": "Host=localhost" + } + } +} diff --git a/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj b/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj index 419b1380e..198bdf33d 100644 --- a/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj +++ b/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj @@ -31,5 +31,6 @@ + diff --git a/src/Excititor/StellaOps.Excititor.Worker/Program.cs b/src/Excititor/StellaOps.Excititor.Worker/Program.cs index a47b810c2..279ef2803 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/Program.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Program.cs @@ -23,10 +23,11 @@ using StellaOps.Excititor.Worker.Scheduling; using StellaOps.Excititor.Worker.Signature; using StellaOps.IssuerDirectory.Client; using StellaOps.Plugin; +using StellaOps.Worker.Health; using System.IO; using System.Linq; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); var services = builder.Services; var configuration = builder.Configuration; var workerConfig = configuration.GetSection("Excititor:Worker"); @@ -127,8 +128,11 @@ if (!workerConfigSnapshot.DisableConsensus) } services.AddSingleton(); -var host = builder.Build(); -await host.RunAsync(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync(); // Make Program class file-scoped to prevent it from being exposed to referencing assemblies file sealed partial class Program; diff --git a/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj index 86d310310..9d1ff9183 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj +++ 
b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj @@ -11,6 +11,10 @@ + + + + diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Persistence/Extensions/ExcititorPersistenceExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Persistence/Extensions/ExcititorPersistenceExtensions.cs index 4d0cd25ee..871a03c31 100644 --- a/src/Excititor/__Libraries/StellaOps.Excititor.Persistence/Extensions/ExcititorPersistenceExtensions.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Persistence/Extensions/ExcititorPersistenceExtensions.cs @@ -28,6 +28,8 @@ public static class ExcititorPersistenceExtensions string sectionName = "Postgres:Excititor") { services.Configure(sectionName, configuration.GetSection(sectionName)); + // Also register unnamed so IOptions resolves (DataSourceBase uses unnamed). + services.Configure(configuration.GetSection(sectionName)); services.Configure(configuration.GetSection("Excititor:Storage")); services.AddSingleton(); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs index 1471ea866..a946fd6d0 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs @@ -11,6 +11,7 @@ using StellaOps.ExportCenter.WebService.Deprecation; using StellaOps.ExportCenter.WebService.EvidenceLocker; using StellaOps.ExportCenter.WebService.ExceptionReport; using StellaOps.ExportCenter.WebService.Incident; +using StellaOps.ExportCenter.Core.Verification; using StellaOps.ExportCenter.WebService.Lineage; using StellaOps.ExportCenter.WebService.RiskBundle; using StellaOps.ExportCenter.WebService.SimulationExport; @@ -83,6 +84,9 @@ builder.Services.AddAuditBundleJobHandler(); // Exception report services builder.Services.AddExceptionReportServices(); +// Export verification services +builder.Services.AddExportVerification(); + // Lineage evidence pack services builder.Services.AddLineageExportServices(); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj index d330f1d20..8f41b9110 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj @@ -7,6 +7,7 @@ enable preview true + false diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json index 977ee273e..6d0a26897 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json @@ -1,20 +1,20 @@ { - Logging: { - LogLevel: { - Default: Information, - Microsoft.AspNetCore: Warning + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" } }, - Authority: { - ResourceServer: { - Authority: https://authority.localtest.me, - Audiences: [ - api://export-center + "Authority": { + "ResourceServer": { + "Authority": "https://authority.localtest.me", + "Audiences": [ + 
"api://export-center" ], - RequiredTenants: [ - tenant-default + "RequiredTenants": [ + "tenant-default" ] } }, - AllowedHosts: * + "AllowedHosts": "*" } diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs index 0d36ec1c3..522f1bc1d 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs @@ -6,10 +6,13 @@ using StellaOps.Cryptography; using StellaOps.ExportCenter.Core.DevPortalOffline; using StellaOps.ExportCenter.Infrastructure.DevPortalOffline; using StellaOps.ExportCenter.RiskBundles; +using StellaOps.Cryptography.DependencyInjection; using StellaOps.ExportCenter.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); +builder.Services.AddStellaOpsCrypto(); builder.Services.AddSingleton(TimeProvider.System); builder.Services.Configure(builder.Configuration.GetSection("DevPortalOffline")); @@ -41,6 +44,8 @@ builder.Services.AddSingleton(); builder.Services.AddHostedService(); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -host.Run(); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj index f04bc722c..0d4459f47 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj @@ -14,10 +14,14 @@ enable preview true + false - - + + + + + @@ -40,7 +44,9 @@ + + diff --git a/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs b/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs index 37c7e1f4b..5f7628e14 100644 --- a/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs +++ b/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs @@ -10,6 +10,7 @@ using Serilog.Events; using StellaOps.Auth.Abstractions; using StellaOps.Auth.ServerIntegration; using StellaOps.Configuration; +using StellaOps.Cryptography.DependencyInjection; using StellaOps.DependencyInjection; using StellaOps.Findings.Ledger; using StellaOps.Findings.Ledger.Domain; @@ -85,6 +86,7 @@ builder.Services.AddOptions() .PostConfigure(options => options.Validate()) .ValidateOnStart(); +builder.Services.AddStellaOpsCrypto(); builder.Services.AddSingleton(TimeProvider.System); builder.Services.AddProblemDetails(); builder.Services.AddEndpointsApiExplorer(); @@ -187,6 +189,8 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddHttpClient("ledger-policy-engine"); +builder.Services.AddSingleton(sp => + sp.GetRequiredService>().Value.PolicyEngine); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); diff --git a/src/Findings/StellaOps.Findings.Ledger.WebService/StellaOps.Findings.Ledger.WebService.csproj b/src/Findings/StellaOps.Findings.Ledger.WebService/StellaOps.Findings.Ledger.WebService.csproj index 394740faf..63347dd37 100644 --- 
a/src/Findings/StellaOps.Findings.Ledger.WebService/StellaOps.Findings.Ledger.WebService.csproj +++ b/src/Findings/StellaOps.Findings.Ledger.WebService/StellaOps.Findings.Ledger.WebService.csproj @@ -19,6 +19,7 @@ + diff --git a/src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj b/src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj index 912206fd9..2890307a0 100644 --- a/src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj +++ b/src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj @@ -12,4 +12,7 @@ + + + diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerKeyRecordTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerKeyRecordTests.cs new file mode 100644 index 000000000..3129d508b --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerKeyRecordTests.cs @@ -0,0 +1,33 @@ +using FluentAssertions; + +using StellaOps.IssuerDirectory.Core.Domain; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Domain; + +public class IssuerKeyRecordTests +{ + [Fact] + public void WithStatus_Retired_SetsTimestamps() + { + var record = IssuerKeyRecord.Create( + id: "key-1", + issuerId: "issuer-1", + tenantId: "tenant-a", + type: IssuerKeyType.Ed25519PublicKey, + material: new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])), + fingerprint: "fp-1", + createdAtUtc: DateTimeOffset.Parse("2025-11-01T00:00:00Z"), + createdBy: "seed", + expiresAtUtc: null, + replacesKeyId: null); + + var retiredAt = DateTimeOffset.Parse("2025-11-02T00:00:00Z"); + var retired = record.WithStatus(IssuerKeyStatus.Retired, retiredAt, "editor"); + + retired.Status.Should().Be(IssuerKeyStatus.Retired); + retired.RetiredAtUtc.Should().Be(retiredAt); + retired.RevokedAtUtc.Should().BeNull(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerRecordTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerRecordTests.cs new file mode 100644 index 000000000..073e296c8 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Domain/IssuerRecordTests.cs @@ -0,0 +1,62 @@ +using FluentAssertions; + +using StellaOps.IssuerDirectory.Core.Domain; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Domain; + +public class IssuerRecordTests +{ + [Fact] + public void Create_NormalizesSlugAndTags() + { + var record = IssuerRecord.Create( + id: "red-hat", + tenantId: "tenant-a", + displayName: "Red Hat", + slug: " Red-Hat ", + description: null, + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: Array.Empty(), + tags: new[] { "Vendor", " vendor ", "Partner", " " }, + timestampUtc: DateTimeOffset.Parse("2025-11-01T00:00:00Z"), + actor: "seed", + isSystemSeed: false); + + record.Slug.Should().Be("red-hat"); + record.Tags.Should().Equal("vendor", "partner"); + } + + [Fact] + public void WithUpdated_NormalizesTagsAndDescription() + { + var record = IssuerRecord.Create( + id: "red-hat", + tenantId: "tenant-a", + displayName: "Red Hat", + slug: "red-hat", + description: "Initial", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new 
IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: Array.Empty(), + tags: new[] { "vendor" }, + timestampUtc: DateTimeOffset.Parse("2025-11-01T00:00:00Z"), + actor: "seed", + isSystemSeed: false); + + var updated = record.WithUpdated( + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: Array.Empty(), + tags: new[] { "Beta", "beta ", "Alpha" }, + displayName: "Red Hat Security", + description: " Updated ", + updatedAtUtc: DateTimeOffset.Parse("2025-11-02T00:00:00Z"), + updatedBy: "editor"); + + updated.Description.Should().Be("Updated"); + updated.Tags.Should().Equal("beta", "alpha"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Factory.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Factory.cs new file mode 100644 index 000000000..788b2a8e8 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Factory.cs @@ -0,0 +1,59 @@ +using System; +using System.Net; +using System.Net.Http; +using System.Text; + +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Options; + +using StellaOps.IssuerDirectory.Client; + +namespace StellaOps.IssuerDirectory.Core.Tests; + +public partial class IssuerDirectoryClientTests +{ + private static IIssuerDirectoryClient CreateClient( + RecordingHandler handler, + IssuerDirectoryClientOptions? options = null) + { + var opts = options ?? DefaultOptions(); + var httpClient = new HttpClient(handler) + { + BaseAddress = opts.BaseAddress + }; + + var memoryCache = new MemoryCache(new MemoryCacheOptions()); + var clientOptions = Options.Create(opts); + + var clientType = typeof(IssuerDirectoryClientOptions) + .Assembly + .GetType("StellaOps.IssuerDirectory.Client.IssuerDirectoryClient", throwOnError: true)!; + + var loggerType = typeof(TestLogger<>).MakeGenericType(clientType); + var logger = Activator.CreateInstance(loggerType)!; + + var instance = Activator.CreateInstance( + clientType, + new object[] { httpClient, memoryCache, clientOptions, logger }); + + return (IIssuerDirectoryClient)instance!; + } + + private static IssuerDirectoryClientOptions DefaultOptions() + { + return new IssuerDirectoryClientOptions + { + BaseAddress = new Uri("https://issuer-directory.local/"), + TenantHeader = "X-StellaOps-Tenant", + AuditReasonHeader = "X-StellaOps-Reason" + }; + } + + private static HttpResponseMessage CreateJsonResponse(string json) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Keys.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Keys.cs new file mode 100644 index 000000000..17839f51f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Keys.cs @@ -0,0 +1,43 @@ +using System; +using System.Net.Http; + +using FluentAssertions; + +using StellaOps.TestKit; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests; + +public partial class 
IssuerDirectoryClientTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task GetIssuerKeysAsync_SendsTenantHeaderAndCachesByIncludeGlobalAsync() + { + var handler = new RecordingHandler( + CreateJsonResponse("""[{"id":"key-1","issuerId":"issuer-1","tenantId":"tenant-a","type":"ed25519","status":"active","materialFormat":"base64","materialValue":"AQ==","fingerprint":"fp-1","expiresAtUtc":null,"retiredAtUtc":null,"revokedAtUtc":null,"replacesKeyId":null}]"""), + CreateJsonResponse("""[{"id":"key-2","issuerId":"issuer-1","tenantId":"tenant-a","type":"ed25519","status":"active","materialFormat":"base64","materialValue":"AQ==","fingerprint":"fp-2","expiresAtUtc":null,"retiredAtUtc":null,"revokedAtUtc":null,"replacesKeyId":null}]""")); + + var client = CreateClient(handler); + + var first = await client.GetIssuerKeysAsync(" tenant-a ", "issuer-1 ", includeGlobal: false, CancellationToken.None); + first.Should().HaveCount(1); + handler.Requests.Should().HaveCount(1); + + var firstRequest = handler.Requests[0]; + firstRequest.Method.Should().Be(HttpMethod.Get); + firstRequest.Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-1/keys?includeGlobal=false")); + firstRequest.Headers.TryGetValue("X-StellaOps-Tenant", out var tenantValues).Should().BeTrue(); + tenantValues!.Should().Equal("tenant-a"); + + var cached = await client.GetIssuerKeysAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + cached.Should().HaveCount(1); + handler.Requests.Should().HaveCount(1); + + var global = await client.GetIssuerKeysAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); + global.Should().HaveCount(1); + handler.Requests.Should().HaveCount(2); + handler.Requests[1].Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-1/keys?includeGlobal=true")); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.RecordingHandler.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.RecordingHandler.cs new file mode 100644 index 000000000..17f1ba08e --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.RecordingHandler.cs @@ -0,0 +1,56 @@ +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.IssuerDirectory.Core.Tests; + +public partial class IssuerDirectoryClientTests +{ + private sealed record RecordedRequest(HttpMethod Method, Uri Uri, IDictionary Headers, string? Body); + + private sealed class RecordingHandler : HttpMessageHandler + { + private readonly Queue _responses; + + public RecordingHandler(params HttpResponseMessage[] responses) + { + _responses = new Queue(responses); + } + + public List Requests { get; } = new(); + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + string? 
body = null;
+            if (request.Content is not null)
+            {
+                body = await request.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+            }
+
+            var headers = request.Headers.ToDictionary(pair => pair.Key, pair => pair.Value.ToArray());
+            if (request.Content?.Headers is not null)
+            {
+                foreach (var header in request.Content.Headers)
+                {
+                    headers[header.Key] = header.Value.ToArray();
+                }
+            }
+
+            Requests.Add(new RecordedRequest(request.Method, request.RequestUri!, headers, body));
+
+            if (_responses.Count == 0)
+            {
+                return new HttpResponseMessage(HttpStatusCode.OK)
+                {
+                    Content = new StringContent("{}", Encoding.UTF8, "application/json")
+                };
+            }
+
+            return _responses.Dequeue();
+        }
+    }
+}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.TestLogger.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.TestLogger.cs
new file mode 100644
index 000000000..92be045ee
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.TestLogger.cs
@@ -0,0 +1,31 @@
+using Microsoft.Extensions.Logging;
+
+namespace StellaOps.IssuerDirectory.Core.Tests;
+
+public partial class IssuerDirectoryClientTests
+{
+    private sealed class TestLogger<T> : ILogger<T>
+    {
+        public IDisposable BeginScope<TState>(TState state) where TState : notnull => NullDisposable.Instance;
+
+        public bool IsEnabled(LogLevel logLevel) => false;
+
+        public void Log<TState>(
+            LogLevel logLevel,
+            EventId eventId,
+            TState state,
+            Exception? exception,
+            Func<TState, Exception?, string> formatter)
+        {
+        }
+
+        private sealed class NullDisposable : IDisposable
+        {
+            public static readonly NullDisposable Instance = new();
+
+            public void Dispose()
+            {
+            }
+        }
+    }
+}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Delete.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Delete.cs
new file mode 100644
index 000000000..3a4e76f66
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Delete.cs
@@ -0,0 +1,45 @@
+using System;
+using System.Net;
+using System.Net.Http;
+
+using FluentAssertions;
+
+using StellaOps.TestKit;
+
+using Xunit;
+
+namespace StellaOps.IssuerDirectory.Core.Tests;
+
+public partial class IssuerDirectoryClientTests
+{
+    [Trait("Category", TestCategories.Unit)]
+    [Fact]
+    public async Task DeleteIssuerTrustAsync_UsesDeleteVerbAndReasonHeaderWhenProvidedAsync()
+    {
+        var handler = new RecordingHandler(
+            CreateJsonResponse("""{"tenantOverride":{"weight":2.0,"reason":"seed","updatedAtUtc":"2025-11-02T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-02T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":2.0}"""),
+            new HttpResponseMessage(HttpStatusCode.NoContent),
+            CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""));
+
+        var client = CreateClient(handler);
+
+        await client.GetIssuerTrustAsync("tenant-b", "issuer-9", includeGlobal: true, CancellationToken.None);
+        handler.Requests.Should().HaveCount(1);
+
+        await client.DeleteIssuerTrustAsync("tenant-b", "issuer-9", " cleanup ", CancellationToken.None);
+        handler.Requests.Should().HaveCount(2);
+
+        var deleteRequest = handler.Requests[1];
+        deleteRequest.Method.Should().Be(HttpMethod.Delete);
+
deleteRequest.Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-9/trust")); + deleteRequest.Headers.TryGetValue("X-StellaOps-Tenant", out var tenantValues).Should().BeTrue(); + tenantValues!.Should().Equal("tenant-b"); + deleteRequest.Headers.TryGetValue("X-StellaOps-Reason", out var reasonValues).Should().BeTrue(); + reasonValues!.Should().Equal("cleanup"); + deleteRequest.Body.Should().BeNull(); + + await client.GetIssuerTrustAsync("tenant-b", "issuer-9", includeGlobal: true, CancellationToken.None); + handler.Requests.Should().HaveCount(3); + handler.Requests[2].Method.Should().Be(HttpMethod.Get); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Failure.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Failure.cs new file mode 100644 index 000000000..0e9afa315 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Failure.cs @@ -0,0 +1,39 @@ +using System.Net; +using System.Net.Http; +using System.Text; + +using FluentAssertions; + +using StellaOps.TestKit; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests; + +public partial class IssuerDirectoryClientTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SetIssuerTrustAsync_PropagatesFailureAndDoesNotEvictCacheAsync() + { + var handler = new RecordingHandler( + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + new HttpResponseMessage(HttpStatusCode.InternalServerError) + { + Content = new StringContent("{}", Encoding.UTF8, "application/json") + }); + + var client = CreateClient(handler); + + var cached = await client.GetIssuerTrustAsync("tenant-c", "issuer-err", includeGlobal: false, CancellationToken.None); + cached.EffectiveWeight.Should().Be(0m); + handler.Requests.Should().HaveCount(1); + + await FluentActions.Invoking(() => client.SetIssuerTrustAsync("tenant-c", "issuer-err", 0.5m, null, CancellationToken.None).AsTask()) + .Should().ThrowAsync(); + handler.Requests.Should().HaveCount(2); + + await client.GetIssuerTrustAsync("tenant-c", "issuer-err", includeGlobal: false, CancellationToken.None); + handler.Requests.Should().HaveCount(2, "cache should remain warm after failure"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Set.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Set.cs new file mode 100644 index 000000000..5e3f1909c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.Trust.Set.cs @@ -0,0 +1,77 @@ +using System; +using System.Net.Http; +using System.Text.Json; + +using FluentAssertions; + +using StellaOps.TestKit; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests; + +public partial class IssuerDirectoryClientTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SetIssuerTrustAsync_SendsAuditMetadataAndInvalidatesCacheAsync() + { + var handler = new RecordingHandler( + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + 
CreateJsonResponse("""{"tenantOverride":{"weight":1.5,"reason":"rollout","updatedAtUtc":"2025-11-03T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-03T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":1.5}"""), + CreateJsonResponse("""{"tenantOverride":{"weight":1.5,"reason":"rollout","updatedAtUtc":"2025-11-03T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-03T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":1.5}""")); + + var client = CreateClient(handler); + + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + handler.Requests.Should().HaveCount(1); + + var result = await client.SetIssuerTrustAsync(" tenant-a ", " issuer-1 ", 1.5m, " rollout ", CancellationToken.None); + result.EffectiveWeight.Should().Be(1.5m); + handler.Requests.Should().HaveCount(2); + + var putRequest = handler.Requests[1]; + putRequest.Method.Should().Be(HttpMethod.Put); + putRequest.Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-1/trust")); + putRequest.Headers.TryGetValue("X-StellaOps-Tenant", out var tenantValues).Should().BeTrue(); + tenantValues!.Should().Equal("tenant-a"); + putRequest.Headers.TryGetValue("X-StellaOps-Reason", out var reasonValues).Should().BeTrue(); + reasonValues!.Should().Equal("rollout"); + + using var document = JsonDocument.Parse(putRequest.Body ?? string.Empty); + var root = document.RootElement; + root.GetProperty("weight").GetDecimal().Should().Be(1.5m); + root.GetProperty("reason").GetString().Should().Be("rollout"); + + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + handler.Requests.Should().HaveCount(3); + handler.Requests[2].Method.Should().Be(HttpMethod.Get); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SetIssuerTrustAsync_InvalidatesBothCacheVariantsAsync() + { + var handler = new RecordingHandler( + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}"""), + CreateJsonResponse("""{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0}""")); + + var client = CreateClient(handler); + + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); + handler.Requests.Should().HaveCount(2); + + await client.SetIssuerTrustAsync("tenant-a", "issuer-1", 1m, null, CancellationToken.None); + handler.Requests.Should().HaveCount(3); + + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); + handler.Requests.Should().HaveCount(5); + handler.Requests[3].Method.Should().Be(HttpMethod.Get); + handler.Requests[4].Method.Should().Be(HttpMethod.Get); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.cs deleted file mode 100644 index 
267b3f770..000000000 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/IssuerDirectoryClientTests.cs +++ /dev/null @@ -1,244 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Reflection; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using FluentAssertions.Specialized; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.IssuerDirectory.Client; -using Xunit; - - -using StellaOps.TestKit; -namespace StellaOps.IssuerDirectory.Core.Tests; - -public class IssuerDirectoryClientTests -{ - private static IIssuerDirectoryClient CreateClient(RecordingHandler handler, IssuerDirectoryClientOptions? options = null) - { - var opts = options ?? DefaultOptions(); - var httpClient = new HttpClient(handler) - { - BaseAddress = opts.BaseAddress - }; - - var memoryCache = new MemoryCache(new MemoryCacheOptions()); - var clientOptions = Options.Create(opts); - - var clientType = typeof(IssuerDirectoryClientOptions) - .Assembly - .GetType("StellaOps.IssuerDirectory.Client.IssuerDirectoryClient", throwOnError: true)!; - - var loggerType = typeof(TestLogger<>).MakeGenericType(clientType); - var logger = Activator.CreateInstance(loggerType)!; - - var instance = Activator.CreateInstance( - clientType, - new object[] { httpClient, memoryCache, clientOptions, logger }); - - return (IIssuerDirectoryClient)instance!; - } - - private static IssuerDirectoryClientOptions DefaultOptions() - { - return new IssuerDirectoryClientOptions - { - BaseAddress = new Uri("https://issuer-directory.local/"), - TenantHeader = "X-StellaOps-Tenant", - AuditReasonHeader = "X-StellaOps-Reason" - }; - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SetIssuerTrustAsync_SendsAuditMetadataAndInvalidatesCache() - { - var handler = new RecordingHandler( - CreateJsonResponse(""" -{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0} -"""), - CreateJsonResponse(""" -{"tenantOverride":{"weight":1.5,"reason":"rollout","updatedAtUtc":"2025-11-03T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-03T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":1.5} -"""), - CreateJsonResponse(""" -{"tenantOverride":{"weight":1.5,"reason":"rollout","updatedAtUtc":"2025-11-03T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-03T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":1.5} -""")); - - var client = CreateClient(handler); - - await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); - handler.Requests.Should().HaveCount(1); - - var result = await client.SetIssuerTrustAsync("tenant-a", "issuer-1", 1.5m, "rollout", CancellationToken.None); - result.EffectiveWeight.Should().Be(1.5m); - handler.Requests.Should().HaveCount(2); - - var putRequest = handler.Requests[1]; - putRequest.Method.Should().Be(HttpMethod.Put); - putRequest.Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-1/trust")); - putRequest.Headers.TryGetValue("X-StellaOps-Tenant", out var tenantValues).Should().BeTrue(); - tenantValues.Should().NotBeNull(); - tenantValues!.Should().Equal("tenant-a"); - putRequest.Headers.TryGetValue("X-StellaOps-Reason", out var reasonValues).Should().BeTrue(); - 
reasonValues.Should().NotBeNull(); - reasonValues!.Should().Equal("rollout"); - - using var document = JsonDocument.Parse(putRequest.Body ?? string.Empty); - var root = document.RootElement; - root.GetProperty("weight").GetDecimal().Should().Be(1.5m); - root.GetProperty("reason").GetString().Should().Be("rollout"); - - await client.GetIssuerTrustAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); - handler.Requests.Should().HaveCount(3); - handler.Requests[2].Method.Should().Be(HttpMethod.Get); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task DeleteIssuerTrustAsync_UsesDeleteVerbAndReasonHeaderWhenProvided() - { - var handler = new RecordingHandler( - CreateJsonResponse(""" -{"tenantOverride":{"weight":2.0,"reason":"seed","updatedAtUtc":"2025-11-02T00:00:00Z","updatedBy":"actor","createdAtUtc":"2025-11-02T00:00:00Z","createdBy":"actor"},"globalOverride":null,"effectiveWeight":2.0} -"""), - new HttpResponseMessage(HttpStatusCode.NoContent), - CreateJsonResponse(""" -{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0} -""")); - - var client = CreateClient(handler); - - await client.GetIssuerTrustAsync("tenant-b", "issuer-9", includeGlobal: true, CancellationToken.None); - handler.Requests.Should().HaveCount(1); - - await client.DeleteIssuerTrustAsync("tenant-b", "issuer-9", null, CancellationToken.None); - handler.Requests.Should().HaveCount(2); - - var deleteRequest = handler.Requests[1]; - deleteRequest.Method.Should().Be(HttpMethod.Delete); - deleteRequest.Uri.Should().Be(new Uri("https://issuer-directory.local/issuer-directory/issuers/issuer-9/trust")); - deleteRequest.Headers.ContainsKey("X-StellaOps-Tenant").Should().BeTrue(); - deleteRequest.Headers.ContainsKey("X-StellaOps-Reason").Should().BeFalse(); - deleteRequest.Body.Should().BeNull(); - - await client.GetIssuerTrustAsync("tenant-b", "issuer-9", includeGlobal: true, CancellationToken.None); - handler.Requests.Should().HaveCount(3); - handler.Requests[2].Method.Should().Be(HttpMethod.Get); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SetIssuerTrustAsync_PropagatesFailureAndDoesNotEvictCache() - { - var handler = new RecordingHandler( - CreateJsonResponse(""" -{"tenantOverride":null,"globalOverride":null,"effectiveWeight":0} -"""), - new HttpResponseMessage(HttpStatusCode.InternalServerError) - { - Content = new StringContent("{}", Encoding.UTF8, "application/json") - }); - - var client = CreateClient(handler); - - var cached = await client.GetIssuerTrustAsync("tenant-c", "issuer-err", includeGlobal: false, CancellationToken.None); - cached.EffectiveWeight.Should().Be(0m); - handler.Requests.Should().HaveCount(1); - - await FluentActions.Invoking(() => client.SetIssuerTrustAsync("tenant-c", "issuer-err", 0.5m, null, CancellationToken.None).AsTask()) - .Should().ThrowAsync(); - handler.Requests.Should().HaveCount(2); - - await client.GetIssuerTrustAsync("tenant-c", "issuer-err", includeGlobal: false, CancellationToken.None); - handler.Requests.Should().HaveCount(2, "cache should remain warm after failure"); - } - - private static HttpResponseMessage CreateJsonResponse(string json) - { - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(json, Encoding.UTF8, "application/json") - }; - } - - private sealed record RecordedRequest(HttpMethod Method, Uri Uri, IDictionary Headers, string? 
Body); - - private sealed class RecordingHandler : HttpMessageHandler - { - private readonly Queue _responses; - - public RecordingHandler(params HttpResponseMessage[] responses) - { - _responses = new Queue(responses); - } - - public List Requests { get; } = new(); - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - string? body = null; - if (request.Content is not null) - { - body = await request.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - } - - var headers = request.Headers.ToDictionary( - pair => pair.Key, - pair => pair.Value.ToArray()); - - if (request.Content?.Headers is not null) - { - foreach (var header in request.Content.Headers) - { - headers[header.Key] = header.Value.ToArray(); - } - } - - Requests.Add(new RecordedRequest(request.Method, request.RequestUri!, headers, body)); - - if (_responses.Count == 0) - { - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{}", Encoding.UTF8, "application/json") - }; - } - - return _responses.Dequeue(); - } - } - - private sealed class TestLogger : ILogger - { - public IDisposable BeginScope(TState state) where TState : notnull => NullDisposable.Instance; - - public bool IsEnabled(LogLevel logLevel) => false; - - public void Log( - LogLevel logLevel, - EventId eventId, - TState state, - Exception? exception, - Func formatter) - { - } - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - - public void Dispose() - { - } - } - } -} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Crud.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Crud.cs new file mode 100644 index 000000000..b7c73e8b1 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Crud.cs @@ -0,0 +1,76 @@ +using FluentAssertions; + +using StellaOps.IssuerDirectory.Core.Domain; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerDirectoryServiceTests +{ + [Fact] + public async Task CreateAsync_PersistsIssuerAndAuditEntryAsync() + { + var issuer = await _service.CreateAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + displayName: "Red Hat", + slug: "red-hat", + description: "Vendor", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, + tags: new[] { "vendor" }, + actor: "tester", + reason: "initial", + cancellationToken: CancellationToken.None); + + var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); + stored.Should().NotBeNull(); + stored!.DisplayName.Should().Be("Red Hat"); + stored.CreatedBy.Should().Be("tester"); + + _auditSink.Entries.Should().ContainSingle(entry => entry.Action == "created" && entry.TenantId == "tenant-a"); + issuer.CreatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); + } + + [Fact] + public async Task UpdateAsync_ReplacesMetadataAndRecordsAuditAsync() + { + await CreateSampleAsync(); + _timeProvider.Advance(TimeSpan.FromHours(1)); + + var updated = await _service.UpdateAsync( + 
tenantId: "tenant-a", + issuerId: "red-hat", + displayName: "Red Hat Security", + description: "Updated vendor", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com/security"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/new"), null, new[] { "en", "de" }, null), + endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, + tags: new[] { "vendor", "trusted" }, + actor: "editor", + reason: "update", + cancellationToken: CancellationToken.None); + + updated.DisplayName.Should().Be("Red Hat Security"); + updated.Tags.Should().Contain(new[] { "vendor", "trusted" }); + updated.UpdatedBy.Should().Be("editor"); + updated.UpdatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); + + _auditSink.Entries.Should().Contain(entry => entry.Action == "updated"); + } + + [Fact] + public async Task DeleteAsync_RemovesIssuerAndWritesAuditAsync() + { + await CreateSampleAsync(); + + await _service.DeleteAsync("tenant-a", "red-hat", "deleter", "cleanup", CancellationToken.None); + + var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); + stored.Should().BeNull(); + + _auditSink.Entries.Should().Contain(entry => entry.Action == "deleted" && entry.Actor == "deleter"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Fakes.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Fakes.cs new file mode 100644 index 000000000..4bf9ff05c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Fakes.cs @@ -0,0 +1,66 @@ +using System.Collections.Concurrent; +using System.Linq; + +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerDirectoryServiceTests +{ + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + var results = _store + .Where(pair => pair.Key.Tenant.Equals(tenantId, StringComparison.Ordinal)) + .Select(pair => pair.Value) + .ToArray(); + return Task.FromResult((IReadOnlyCollection)results); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + var results = _store + .Where(pair => pair.Key.Tenant.Equals(IssuerTenants.Global, StringComparison.Ordinal)) + .Select(pair => pair.Value) + .ToArray(); + return Task.FromResult((IReadOnlyCollection)results); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => 
_entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return Task.CompletedTask; + } + + public void Clear() => _entries.Clear(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Seed.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Seed.cs new file mode 100644 index 000000000..0a99e2a38 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.Seed.cs @@ -0,0 +1,37 @@ +using FluentAssertions; + +using StellaOps.IssuerDirectory.Core.Domain; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerDirectoryServiceTests +{ + [Fact] + public async Task SeedAsync_InsertsOnlyMissingSeedsAsync() + { + var seedRecord = IssuerRecord.Create( + id: "red-hat", + tenantId: IssuerTenants.Global, + displayName: "Red Hat", + slug: "red-hat", + description: null, + contact: new IssuerContact(null, null, null, null), + metadata: new IssuerMetadata(null, null, null, null, Array.Empty(), null), + endpoints: Array.Empty(), + tags: Array.Empty(), + timestampUtc: _timeProvider.GetUtcNow(), + actor: "seed", + isSystemSeed: true); + + await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); + _auditSink.Entries.Should().Contain(entry => entry.Action == "seeded"); + + _auditSink.Clear(); + _timeProvider.Advance(TimeSpan.FromMinutes(10)); + + await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); + _auditSink.Entries.Should().BeEmpty("existing seeds should not emit duplicate audit entries"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs index 40d972880..4eecfddb2 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs @@ -1,15 +1,14 @@ -using System.Collections.Concurrent; -using FluentAssertions; +using System; + using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; -using StellaOps.IssuerDirectory.Core.Abstractions; + using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Core.Services; -using Xunit; namespace StellaOps.IssuerDirectory.Core.Tests.Services; -public class IssuerDirectoryServiceTests +public partial class IssuerDirectoryServiceTests { private readonly FakeIssuerRepository _repository = new(); private readonly FakeIssuerAuditSink _auditSink = new(); @@ -21,99 +20,6 @@ public class IssuerDirectoryServiceTests _service = new IssuerDirectoryService(_repository, _auditSink, _timeProvider, NullLogger.Instance); } - [Fact] - public async Task CreateAsync_PersistsIssuerAndAuditEntry() - { - var issuer = await _service.CreateAsync( - tenantId: "tenant-a", - issuerId: "red-hat", - displayName: "Red Hat", - slug: "red-hat", - description: "Vendor", - contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), - metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), 
null, new[] { "en" }, null), - endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, - tags: new[] { "vendor" }, - actor: "tester", - reason: "initial", - cancellationToken: CancellationToken.None); - - var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); - stored.Should().NotBeNull(); - stored!.DisplayName.Should().Be("Red Hat"); - stored.CreatedBy.Should().Be("tester"); - - _auditSink.Entries.Should().ContainSingle(entry => entry.Action == "created" && entry.TenantId == "tenant-a"); - issuer.CreatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); - } - - [Fact] - public async Task UpdateAsync_ReplacesMetadataAndRecordsAudit() - { - await CreateSampleAsync(); - _timeProvider.Advance(TimeSpan.FromHours(1)); - - var updated = await _service.UpdateAsync( - tenantId: "tenant-a", - issuerId: "red-hat", - displayName: "Red Hat Security", - description: "Updated vendor", - contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com/security"), null), - metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/new"), null, new[] { "en", "de" }, null), - endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, - tags: new[] { "vendor", "trusted" }, - actor: "editor", - reason: "update", - cancellationToken: CancellationToken.None); - - updated.DisplayName.Should().Be("Red Hat Security"); - updated.Tags.Should().Contain(new[] { "vendor", "trusted" }); - updated.UpdatedBy.Should().Be("editor"); - updated.UpdatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); - - _auditSink.Entries.Should().Contain(entry => entry.Action == "updated"); - } - - [Fact] - public async Task DeleteAsync_RemovesIssuerAndWritesAudit() - { - await CreateSampleAsync(); - - await _service.DeleteAsync("tenant-a", "red-hat", "deleter", "cleanup", CancellationToken.None); - - var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); - stored.Should().BeNull(); - - _auditSink.Entries.Should().Contain(entry => entry.Action == "deleted" && entry.Actor == "deleter"); - } - - [Fact] - public async Task SeedAsync_InsertsOnlyMissingSeeds() - { - var seedRecord = IssuerRecord.Create( - id: "red-hat", - tenantId: IssuerTenants.Global, - displayName: "Red Hat", - slug: "red-hat", - description: null, - contact: new IssuerContact(null, null, null, null), - metadata: new IssuerMetadata(null, null, null, null, Array.Empty(), null), - endpoints: Array.Empty(), - tags: Array.Empty(), - timestampUtc: _timeProvider.GetUtcNow(), - actor: "seed", - isSystemSeed: true); - - await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); - _auditSink.Entries.Should().Contain(entry => entry.Action == "seeded"); - - _auditSink.Clear(); - _timeProvider.Advance(TimeSpan.FromMinutes(10)); - - await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); - _auditSink.Entries.Should().BeEmpty("existing seeds should not emit duplicate audit entries"); - } - private async Task CreateSampleAsync() { await _service.CreateAsync( @@ -132,60 +38,4 @@ public class IssuerDirectoryServiceTests _auditSink.Clear(); } - - private sealed class FakeIssuerRepository : IIssuerRepository - { - private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); - - public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryGetValue((tenantId, issuerId), out var record); - return 
Task.FromResult(record); - } - - public Task> ListAsync(string tenantId, CancellationToken cancellationToken) - { - var results = _store - .Where(pair => pair.Key.Tenant.Equals(tenantId, StringComparison.Ordinal)) - .Select(pair => pair.Value) - .ToArray(); - return Task.FromResult((IReadOnlyCollection)results); - } - - public Task> ListGlobalAsync(CancellationToken cancellationToken) - { - var results = _store - .Where(pair => pair.Key.Tenant.Equals(IssuerTenants.Global, StringComparison.Ordinal)) - .Select(pair => pair.Value) - .ToArray(); - return Task.FromResult((IReadOnlyCollection)results); - } - - public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) - { - _store[(record.TenantId, record.Id)] = record; - return Task.CompletedTask; - } - - public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryRemove((tenantId, issuerId), out _); - return Task.CompletedTask; - } - } - - private sealed class FakeIssuerAuditSink : IIssuerAuditSink - { - private readonly ConcurrentBag _entries = new(); - - public IReadOnlyCollection Entries => _entries.ToArray(); - - public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) - { - _entries.Add(entry); - return Task.CompletedTask; - } - - public void Clear() => _entries.Clear(); - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Fakes.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Fakes.cs new file mode 100644 index 000000000..1d74b34f0 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Fakes.cs @@ -0,0 +1,104 @@ +using System.Collections.Concurrent; +using System.Linq; + +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerKeyServiceTests +{ + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); + + public void Add(IssuerRecord record) + { + _store[(record.TenantId, record.Id)] = record; + } + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerKeyRepository : IIssuerKeyRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Issuer, string KeyId), IssuerKeyRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId, keyId), out var value); + return 
Task.FromResult(value); + } + + public Task GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken) + { + var record = _store.Values.FirstOrDefault(key => key.TenantId == tenantId && key.IssuerId == issuerId && key.Fingerprint == fingerprint); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + var records = _store + .Where(pair => pair.Key.Tenant == tenantId && pair.Key.Issuer == issuerId) + .Select(pair => pair.Value) + .ToArray(); + + return Task.FromResult((IReadOnlyCollection)records); + } + + public Task> ListGlobalAsync(string issuerId, CancellationToken cancellationToken) + { + var records = _store + .Where(pair => pair.Key.Tenant == IssuerTenants.Global && pair.Key.Issuer == issuerId) + .Select(pair => pair.Value) + .ToArray(); + + return Task.FromResult((IReadOnlyCollection)records); + } + + public Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.IssuerId, record.Id)] = record; + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => _entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return Task.CompletedTask; + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Tests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Tests.cs new file mode 100644 index 000000000..b7ae0d83e --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.Tests.cs @@ -0,0 +1,101 @@ +using FluentAssertions; + +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Services; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerKeyServiceTests +{ + [Fact] + public async Task AddAsync_StoresKeyAndWritesAuditAsync() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); + + var record = await _service.AddAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + type: IssuerKeyType.Ed25519PublicKey, + material, + expiresAtUtc: null, + actor: "tester", + reason: "initial", + cancellationToken: CancellationToken.None); + + record.Status.Should().Be(IssuerKeyStatus.Active); + record.Fingerprint.Should().NotBeNullOrWhiteSpace(); + _auditSink.Entries.Should().Contain(entry => entry.Action == "key_created"); + } + + [Fact] + public async Task AddAsync_DuplicateFingerprint_ThrowsAsync() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); + + await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + var action = async () => await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + await action.Should().ThrowAsync(); + } + + [Fact] + public async Task AddAsync_MissingIssuer_ThrowsAsync() + { + var issuerRepository = new FakeIssuerRepository(); + var 
keyRepository = new FakeIssuerKeyRepository(); + var auditSink = new FakeIssuerAuditSink(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:00:00Z")); + var service = new IssuerKeyService( + issuerRepository, + keyRepository, + auditSink, + timeProvider, + NullLogger.Instance); + + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); + var action = async () => await service.AddAsync( + "tenant-a", + "missing", + IssuerKeyType.Ed25519PublicKey, + material, + null, + "tester", + null, + CancellationToken.None); + + await action.Should().ThrowAsync(); + } + + [Fact] + public async Task RotateAsync_RetiresOldKeyAndCreatesReplacementAsync() + { + var originalMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32 })); + var original = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, originalMaterial, null, "tester", null, CancellationToken.None); + + var newMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(99, 32).ToArray())); + var replacement = await _service.RotateAsync("tenant-a", "red-hat", original.Id, IssuerKeyType.Ed25519PublicKey, newMaterial, null, "tester", "rotation", CancellationToken.None); + + replacement.ReplacesKeyId.Should().Be(original.Id); + var retired = await _keyRepository.GetAsync("tenant-a", "red-hat", original.Id, CancellationToken.None); + retired!.Status.Should().Be(IssuerKeyStatus.Retired); + } + + [Fact] + public async Task RevokeAsync_SetsStatusToRevokedAsync() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(77, 32).ToArray())); + var key = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + await _service.RevokeAsync("tenant-a", "red-hat", key.Id, "tester", "compromised", CancellationToken.None); + + var revoked = await _keyRepository.GetAsync("tenant-a", "red-hat", key.Id, CancellationToken.None); + revoked!.Status.Should().Be(IssuerKeyStatus.Revoked); + _auditSink.Entries.Should().Contain(entry => entry.Action == "key_revoked"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs index 4fe7a1950..89b6d9faa 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs @@ -1,15 +1,14 @@ -using System.Collections.Concurrent; -using FluentAssertions; +using System; + using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; -using StellaOps.IssuerDirectory.Core.Abstractions; + using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Core.Services; -using Xunit; namespace StellaOps.IssuerDirectory.Core.Tests.Services; -public class IssuerKeyServiceTests +public partial class IssuerKeyServiceTests { private readonly FakeIssuerRepository _issuerRepository = new(); private readonly FakeIssuerKeyRepository _keyRepository = new(); @@ -42,157 +41,4 @@ public class IssuerKeyServiceTests _issuerRepository.Add(issuer); } - - [Fact] - public 
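RotateAsync_RetiresOldKeyAndCreatesReplacementAsync above encodes the rotation contract: the superseded key remains queryable as Retired rather than being deleted, and the replacement records which key it supersedes. A small sketch of that state transition, with hypothetical record and enum names used only for illustration, not the service's actual types:

// Illustrative rotation step: retire the current key, create an active replacement
// that points back at it via ReplacesKeyId.
public enum KeyStatusSketch { Active, Retired, Revoked }

public sealed record KeySketch(string Id, KeyStatusSketch Status, string? ReplacesKeyId);

public static class RotationSketch
{
    public static (KeySketch Retired, KeySketch Replacement) Rotate(KeySketch current, string newKeyId)
    {
        var retired = current with { Status = KeyStatusSketch.Retired };
        var replacement = new KeySketch(newKeyId, KeyStatusSketch.Active, ReplacesKeyId: current.Id);
        return (retired, replacement);
    }
}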
async Task AddAsync_StoresKeyAndWritesAudit() - { - var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); - - var record = await _service.AddAsync( - tenantId: "tenant-a", - issuerId: "red-hat", - type: IssuerKeyType.Ed25519PublicKey, - material, - expiresAtUtc: null, - actor: "tester", - reason: "initial", - cancellationToken: CancellationToken.None); - - record.Status.Should().Be(IssuerKeyStatus.Active); - record.Fingerprint.Should().NotBeNullOrWhiteSpace(); - _auditSink.Entries.Should().Contain(entry => entry.Action == "key_created"); - } - - [Fact] - public async Task AddAsync_DuplicateFingerprint_Throws() - { - var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); - - await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); - - var action = async () => await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); - - await action.Should().ThrowAsync(); - } - - [Fact] - public async Task RotateAsync_RetiresOldKeyAndCreatesReplacement() - { - var originalMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32 })); - var original = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, originalMaterial, null, "tester", null, CancellationToken.None); - - var newMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(99, 32).ToArray())); - var replacement = await _service.RotateAsync("tenant-a", "red-hat", original.Id, IssuerKeyType.Ed25519PublicKey, newMaterial, null, "tester", "rotation", CancellationToken.None); - - replacement.ReplacesKeyId.Should().Be(original.Id); - var retired = await _keyRepository.GetAsync("tenant-a", "red-hat", original.Id, CancellationToken.None); - retired!.Status.Should().Be(IssuerKeyStatus.Retired); - } - - [Fact] - public async Task RevokeAsync_SetsStatusToRevoked() - { - var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(77, 32).ToArray())); - var key = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); - - await _service.RevokeAsync("tenant-a", "red-hat", key.Id, "tester", "compromised", CancellationToken.None); - - var revoked = await _keyRepository.GetAsync("tenant-a", "red-hat", key.Id, CancellationToken.None); - revoked!.Status.Should().Be(IssuerKeyStatus.Revoked); - _auditSink.Entries.Should().Contain(entry => entry.Action == "key_revoked"); - } - - private sealed class FakeIssuerRepository : IIssuerRepository - { - private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); - - public void Add(IssuerRecord record) - { - _store[(record.TenantId, record.Id)] = record; - } - - public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryGetValue((tenantId, issuerId), out var record); - return Task.FromResult(record); - } - - public Task> ListAsync(string tenantId, CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public Task> ListGlobalAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) - 
{ - _store[(record.TenantId, record.Id)] = record; - return Task.CompletedTask; - } - - public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryRemove((tenantId, issuerId), out _); - return Task.CompletedTask; - } - } - - private sealed class FakeIssuerKeyRepository : IIssuerKeyRepository - { - private readonly ConcurrentDictionary<(string Tenant, string Issuer, string KeyId), IssuerKeyRecord> _store = new(); - - public Task GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken) - { - _store.TryGetValue((tenantId, issuerId, keyId), out var value); - return Task.FromResult(value); - } - - public Task GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken) - { - var record = _store.Values.FirstOrDefault(key => key.TenantId == tenantId && key.IssuerId == issuerId && key.Fingerprint == fingerprint); - return Task.FromResult(record); - } - - public Task> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - var records = _store - .Where(pair => pair.Key.Tenant == tenantId && pair.Key.Issuer == issuerId) - .Select(pair => pair.Value) - .ToArray(); - - return Task.FromResult((IReadOnlyCollection)records); - } - - public Task> ListGlobalAsync(string issuerId, CancellationToken cancellationToken) - { - var records = _store - .Where(pair => pair.Key.Tenant == IssuerTenants.Global && pair.Key.Issuer == issuerId) - .Select(pair => pair.Value) - .ToArray(); - - return Task.FromResult((IReadOnlyCollection)records); - } - - public Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken) - { - _store[(record.TenantId, record.IssuerId, record.Id)] = record; - return Task.CompletedTask; - } - } - - private sealed class FakeIssuerAuditSink : IIssuerAuditSink - { - private readonly ConcurrentBag _entries = new(); - - public IReadOnlyCollection Entries => _entries.ToArray(); - - public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) - { - _entries.Add(entry); - return Task.CompletedTask; - } - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Fakes.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Fakes.cs new file mode 100644 index 000000000..03f1d6b90 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Fakes.cs @@ -0,0 +1,80 @@ +using System.Collections.Concurrent; + +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerTrustServiceTests +{ + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); + + public void Add(IssuerRecord record) => _store[(record.TenantId, record.Id)] = record; + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + throw new 
NotImplementedException(); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerTrustRepository : IIssuerTrustRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Issuer), IssuerTrustOverrideRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.IssuerId)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => _entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return Task.CompletedTask; + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Tests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Tests.cs new file mode 100644 index 000000000..a35ee52cf --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.Tests.cs @@ -0,0 +1,52 @@ +using FluentAssertions; + +using StellaOps.IssuerDirectory.Core.Domain; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public partial class IssuerTrustServiceTests +{ + [Fact] + public async Task SetAsync_SavesOverrideWithinBoundsAsync() + { + var result = await _service.SetAsync("tenant-a", "issuer-1", 4.5m, "reason", "actor", CancellationToken.None); + + result.Weight.Should().Be(4.5m); + result.UpdatedBy.Should().Be("actor"); + + var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); + view.EffectiveWeight.Should().Be(4.5m); + _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_set"); + } + + [Fact] + public async Task SetAsync_InvalidWeight_ThrowsAsync() + { + var action = async () => await _service.SetAsync("tenant-a", "issuer-1", 20m, null, "actor", CancellationToken.None); + await action.Should().ThrowAsync(); + } + + [Fact] + public async Task GetAsync_FallsBackToGlobalAsync() + { + await _service.SetAsync(IssuerTenants.Global, "issuer-1", -2m, null, "seed", CancellationToken.None); + + var view = await _service.GetAsync("tenant-b", "issuer-1", includeGlobal: true, CancellationToken.None); + view.EffectiveWeight.Should().Be(-2m); + view.GlobalOverride.Should().NotBeNull(); + } + + [Fact] + public async Task DeleteAsync_RemovesOverrideAsync() + { + await _service.SetAsync("tenant-a", "issuer-1", 1m, null, "actor", CancellationToken.None); + + await _service.DeleteAsync("tenant-a", "issuer-1", "actor", "clearing", CancellationToken.None); + 
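The trust-service tests above (SetAsync bounds checking, GetAsync_FallsBackToGlobalAsync, DeleteAsync_RemovesOverrideAsync) imply a simple resolution rule: a tenant-scoped override wins, otherwise a global override applies, otherwise the effective weight is 0. A minimal sketch of that rule, assuming an illustrative record shape rather than the service's real domain types:

// Illustrative only: tenant override takes precedence, then global, then zero,
// matching the effectiveWeight values asserted in the tests above.
public sealed record TrustOverrideSketch(decimal Weight);

public static class TrustResolutionSketch
{
    public static decimal EffectiveWeight(TrustOverrideSketch? tenantOverride, TrustOverrideSketch? globalOverride)
        => tenantOverride?.Weight
           ?? globalOverride?.Weight
           ?? 0m;
}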
+ var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + view.TenantOverride.Should().BeNull(); + _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_deleted"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs index 61fe1e946..09c43f1c5 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs @@ -1,14 +1,13 @@ -using System.Collections.Concurrent; -using FluentAssertions; +using System; + using Microsoft.Extensions.Time.Testing; -using StellaOps.IssuerDirectory.Core.Abstractions; + using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Core.Services; -using Xunit; namespace StellaOps.IssuerDirectory.Core.Tests.Services; -public class IssuerTrustServiceTests +public partial class IssuerTrustServiceTests { private readonly FakeIssuerRepository _issuerRepository = new(); private readonly FakeIssuerTrustRepository _trustRepository = new(); @@ -37,117 +36,4 @@ public class IssuerTrustServiceTests _issuerRepository.Add(issuer); _issuerRepository.Add(issuer with { TenantId = IssuerTenants.Global, IsSystemSeed = true }); } - - [Fact] - public async Task SetAsync_SavesOverrideWithinBounds() - { - var result = await _service.SetAsync("tenant-a", "issuer-1", 4.5m, "reason", "actor", CancellationToken.None); - - result.Weight.Should().Be(4.5m); - result.UpdatedBy.Should().Be("actor"); - - var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); - view.EffectiveWeight.Should().Be(4.5m); - _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_set"); - } - - [Fact] - public async Task SetAsync_InvalidWeight_Throws() - { - var action = async () => await _service.SetAsync("tenant-a", "issuer-1", 20m, null, "actor", CancellationToken.None); - await action.Should().ThrowAsync(); - } - - [Fact] - public async Task GetAsync_FallsBackToGlobal() - { - await _service.SetAsync(IssuerTenants.Global, "issuer-1", -2m, null, "seed", CancellationToken.None); - - var view = await _service.GetAsync("tenant-b", "issuer-1", includeGlobal: true, CancellationToken.None); - view.EffectiveWeight.Should().Be(-2m); - view.GlobalOverride.Should().NotBeNull(); - } - - [Fact] - public async Task DeleteAsync_RemovesOverride() - { - await _service.SetAsync("tenant-a", "issuer-1", 1m, null, "actor", CancellationToken.None); - - await _service.DeleteAsync("tenant-a", "issuer-1", "actor", "clearing", CancellationToken.None); - - var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); - view.TenantOverride.Should().BeNull(); - _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_deleted"); - } - - private sealed class FakeIssuerRepository : IIssuerRepository - { - private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); - - public void Add(IssuerRecord record) => _store[(record.TenantId, record.Id)] = record; - - public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryGetValue((tenantId, issuerId), out 
var record); - return Task.FromResult(record); - } - - public Task> ListAsync(string tenantId, CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public Task> ListGlobalAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) - { - _store[(record.TenantId, record.Id)] = record; - return Task.CompletedTask; - } - - public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryRemove((tenantId, issuerId), out _); - return Task.CompletedTask; - } - } - - private sealed class FakeIssuerTrustRepository : IIssuerTrustRepository - { - private readonly ConcurrentDictionary<(string Tenant, string Issuer), IssuerTrustOverrideRecord> _store = new(); - - public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryGetValue((tenantId, issuerId), out var record); - return Task.FromResult(record); - } - - public Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken) - { - _store[(record.TenantId, record.IssuerId)] = record; - return Task.CompletedTask; - } - - public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - _store.TryRemove((tenantId, issuerId), out _); - return Task.CompletedTask; - } - } - - private sealed class FakeIssuerAuditSink : IIssuerAuditSink - { - private readonly ConcurrentBag _entries = new(); - - public IReadOnlyCollection Entries => _entries.ToArray(); - - public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) - { - _entries.Add(entry); - return Task.CompletedTask; - } - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/TASKS.md b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/TASKS.md index 590dc33bf..83d039fc1 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/TASKS.md +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0374-T | DONE | Revalidated 2026-01-07; test coverage audit for IssuerDirectory.Core.Tests. | | AUDIT-0374-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-04: Split client/service test fixtures and added cache coverage (SPRINT_20260130_002). 
| diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Validation/IssuerKeyValidatorTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Validation/IssuerKeyValidatorTests.cs new file mode 100644 index 000000000..6027f0703 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Validation/IssuerKeyValidatorTests.cs @@ -0,0 +1,37 @@ +using FluentAssertions; + +using Microsoft.Extensions.Time.Testing; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Validation; + +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Validation; + +public class IssuerKeyValidatorTests +{ + [Fact] + public void Validate_Ed25519RejectsInvalidBase64() + { + var material = new IssuerKeyMaterial("base64", "not-base64"); + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T00:00:00Z")); + + var action = () => IssuerKeyValidator.Validate(IssuerKeyType.Ed25519PublicKey, material, null, timeProvider); + + action.Should().Throw() + .WithMessage("*base64*"); + } + + [Fact] + public void Validate_DsseRejectsInvalidLength() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[10])); + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T00:00:00Z")); + + var action = () => IssuerKeyValidator.Validate(IssuerKeyType.DssePublicKey, material, null, timeProvider); + + action.Should().Throw() + .WithMessage("*DSSE*"); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Factory.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Factory.cs new file mode 100644 index 000000000..ca89fa885 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Factory.cs @@ -0,0 +1,45 @@ +using System; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +public sealed partial record IssuerKeyRecord +{ + public static IssuerKeyRecord Create( + string id, + string issuerId, + string tenantId, + IssuerKeyType type, + IssuerKeyMaterial material, + string fingerprint, + DateTimeOffset createdAtUtc, + string createdBy, + DateTimeOffset? expiresAtUtc, + string? replacesKeyId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(id); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(material); + ArgumentException.ThrowIfNullOrWhiteSpace(fingerprint); + ArgumentException.ThrowIfNullOrWhiteSpace(createdBy); + + return new IssuerKeyRecord + { + Id = id.Trim(), + IssuerId = issuerId.Trim(), + TenantId = tenantId.Trim(), + Type = type, + Status = IssuerKeyStatus.Active, + Material = material, + Fingerprint = fingerprint.Trim(), + CreatedAtUtc = createdAtUtc, + CreatedBy = createdBy.Trim(), + UpdatedAtUtc = createdAtUtc, + UpdatedBy = createdBy.Trim(), + ExpiresAtUtc = expiresAtUtc?.ToUniversalTime(), + RetiredAtUtc = null, + RevokedAtUtc = null, + ReplacesKeyId = string.IsNullOrWhiteSpace(replacesKeyId) ? 
null : replacesKeyId.Trim() + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Status.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Status.cs new file mode 100644 index 000000000..eb0835f0a --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.Status.cs @@ -0,0 +1,42 @@ +using System; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +public sealed partial record IssuerKeyRecord +{ + public IssuerKeyRecord WithStatus( + IssuerKeyStatus status, + DateTimeOffset timestampUtc, + string updatedBy) + { + ArgumentException.ThrowIfNullOrWhiteSpace(updatedBy); + + return status switch + { + IssuerKeyStatus.Active => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RetiredAtUtc = null, + RevokedAtUtc = null + }, + IssuerKeyStatus.Retired => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RetiredAtUtc = timestampUtc, + RevokedAtUtc = null + }, + IssuerKeyStatus.Revoked => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RevokedAtUtc = timestampUtc + }, + _ => throw new ArgumentOutOfRangeException(nameof(status), status, "Unsupported key status.") + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs index bbc6600f2..75f9bb021 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs @@ -3,7 +3,7 @@ namespace StellaOps.IssuerDirectory.Core.Domain; /// /// Represents an issuer signing key. /// -public sealed record IssuerKeyRecord +public sealed partial record IssuerKeyRecord { public required string Id { get; init; } @@ -34,79 +34,4 @@ public sealed record IssuerKeyRecord public DateTimeOffset? RevokedAtUtc { get; init; } public string? ReplacesKeyId { get; init; } - - public static IssuerKeyRecord Create( - string id, - string issuerId, - string tenantId, - IssuerKeyType type, - IssuerKeyMaterial material, - string fingerprint, - DateTimeOffset createdAtUtc, - string createdBy, - DateTimeOffset? expiresAtUtc, - string? replacesKeyId) - { - ArgumentException.ThrowIfNullOrWhiteSpace(id); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentNullException.ThrowIfNull(material); - ArgumentException.ThrowIfNullOrWhiteSpace(fingerprint); - ArgumentException.ThrowIfNullOrWhiteSpace(createdBy); - - return new IssuerKeyRecord - { - Id = id.Trim(), - IssuerId = issuerId.Trim(), - TenantId = tenantId.Trim(), - Type = type, - Status = IssuerKeyStatus.Active, - Material = material, - Fingerprint = fingerprint.Trim(), - CreatedAtUtc = createdAtUtc, - CreatedBy = createdBy.Trim(), - UpdatedAtUtc = createdAtUtc, - UpdatedBy = createdBy.Trim(), - ExpiresAtUtc = expiresAtUtc?.ToUniversalTime(), - RetiredAtUtc = null, - RevokedAtUtc = null, - ReplacesKeyId = string.IsNullOrWhiteSpace(replacesKeyId) ? 
null : replacesKeyId.Trim() - }; - } - - public IssuerKeyRecord WithStatus( - IssuerKeyStatus status, - DateTimeOffset timestampUtc, - string updatedBy) - { - ArgumentException.ThrowIfNullOrWhiteSpace(updatedBy); - - return status switch - { - IssuerKeyStatus.Active => this with - { - Status = status, - UpdatedAtUtc = timestampUtc, - UpdatedBy = updatedBy.Trim(), - RetiredAtUtc = null, - RevokedAtUtc = null - }, - IssuerKeyStatus.Retired => this with - { - Status = status, - UpdatedAtUtc = timestampUtc, - UpdatedBy = updatedBy.Trim(), - RetiredAtUtc = timestampUtc, - RevokedAtUtc = null - }, - IssuerKeyStatus.Revoked => this with - { - Status = status, - UpdatedAtUtc = timestampUtc, - UpdatedBy = updatedBy.Trim(), - RevokedAtUtc = timestampUtc - }, - _ => throw new ArgumentOutOfRangeException(nameof(status), status, "Unsupported key status.") - }; - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Factory.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Factory.cs new file mode 100644 index 000000000..80bed1903 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Factory.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +public sealed partial record IssuerRecord +{ + public static IssuerRecord Create( + string id, + string tenantId, + string displayName, + string slug, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + DateTimeOffset timestampUtc, + string actor, + bool isSystemSeed) + { + if (string.IsNullOrWhiteSpace(id)) + { + throw new ArgumentException("Identifier is required.", nameof(id)); + } + + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(displayName)) + { + throw new ArgumentException("Display name is required.", nameof(displayName)); + } + + if (string.IsNullOrWhiteSpace(slug)) + { + throw new ArgumentException("Slug is required.", nameof(slug)); + } + + if (contact is null) + { + throw new ArgumentNullException(nameof(contact)); + } + + if (metadata is null) + { + throw new ArgumentNullException(nameof(metadata)); + } + + if (string.IsNullOrWhiteSpace(actor)) + { + throw new ArgumentException("Actor is required.", nameof(actor)); + } + + var normalizedTags = NormalizeTags(tags); + + return new IssuerRecord + { + Id = id.Trim(), + TenantId = tenantId.Trim(), + DisplayName = displayName.Trim(), + Slug = slug.Trim().ToLowerInvariant(), + Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), + Contact = contact, + Metadata = metadata, + Endpoints = (endpoints ?? 
Array.Empty()).ToArray(), + Tags = normalizedTags, + CreatedAtUtc = timestampUtc.ToUniversalTime(), + CreatedBy = actor.Trim(), + UpdatedAtUtc = timestampUtc.ToUniversalTime(), + UpdatedBy = actor.Trim(), + IsSystemSeed = isSystemSeed + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Tags.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Tags.cs new file mode 100644 index 000000000..65e0acaa4 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Tags.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +public sealed partial record IssuerRecord +{ + private static readonly StringComparer _tagComparer = StringComparer.OrdinalIgnoreCase; + + private static IReadOnlyCollection<string> NormalizeTags(IEnumerable<string>? tags) + { + return (tags ?? Array.Empty<string>()) + .Where(tag => !string.IsNullOrWhiteSpace(tag)) + .Select(tag => tag.Trim().ToLowerInvariant()) + .Distinct(_tagComparer) + .ToArray(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Update.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Update.cs new file mode 100644 index 000000000..b6b16916b --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.Update.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +public sealed partial record IssuerRecord +{ + public IssuerRecord WithUpdated( + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable<string>? tags, + string displayName, + string? description, + DateTimeOffset updatedAtUtc, + string updatedBy) + { + if (contact is null) + { + throw new ArgumentNullException(nameof(contact)); + } + + if (metadata is null) + { + throw new ArgumentNullException(nameof(metadata)); + } + + if (string.IsNullOrWhiteSpace(displayName)) + { + throw new ArgumentException("Display name is required.", nameof(displayName)); + } + + if (string.IsNullOrWhiteSpace(updatedBy)) + { + throw new ArgumentException("Actor is required.", nameof(updatedBy)); + } + + var normalizedTags = NormalizeTags(tags); + + return this with + { + DisplayName = displayName.Trim(), + Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), + Contact = contact, + Metadata = metadata, + Endpoints = (endpoints ?? Array.Empty()).ToArray(), + Tags = normalizedTags, + UpdatedAtUtc = updatedAtUtc.ToUniversalTime(), + UpdatedBy = updatedBy.Trim() + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs index a6b994ff4..3875e8d1b 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs @@ -3,10 +3,8 @@ namespace StellaOps.IssuerDirectory.Core.Domain; /// /// Represents a VEX issuer or CSAF publisher entry managed by the Issuer Directory. 
/// -public sealed record IssuerRecord +public sealed partial record IssuerRecord { - private static readonly StringComparer TagComparer = StringComparer.OrdinalIgnoreCase; - public required string Id { get; init; } public required string TenantId { get; init; } @@ -34,127 +32,4 @@ public sealed record IssuerRecord public required string UpdatedBy { get; init; } public bool IsSystemSeed { get; init; } - - public static IssuerRecord Create( - string id, - string tenantId, - string displayName, - string slug, - string? description, - IssuerContact contact, - IssuerMetadata metadata, - IEnumerable? endpoints, - IEnumerable? tags, - DateTimeOffset timestampUtc, - string actor, - bool isSystemSeed) - { - if (string.IsNullOrWhiteSpace(id)) - { - throw new ArgumentException("Identifier is required.", nameof(id)); - } - - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(displayName)) - { - throw new ArgumentException("Display name is required.", nameof(displayName)); - } - - if (string.IsNullOrWhiteSpace(slug)) - { - throw new ArgumentException("Slug is required.", nameof(slug)); - } - - if (contact is null) - { - throw new ArgumentNullException(nameof(contact)); - } - - if (metadata is null) - { - throw new ArgumentNullException(nameof(metadata)); - } - - if (string.IsNullOrWhiteSpace(actor)) - { - throw new ArgumentException("Actor is required.", nameof(actor)); - } - - var normalizedTags = (tags ?? Array.Empty()) - .Where(tag => !string.IsNullOrWhiteSpace(tag)) - .Select(tag => tag.Trim().ToLowerInvariant()) - .Distinct(TagComparer) - .ToArray(); - - return new IssuerRecord - { - Id = id.Trim(), - TenantId = tenantId.Trim(), - DisplayName = displayName.Trim(), - Slug = slug.Trim().ToLowerInvariant(), - Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), - Contact = contact, - Metadata = metadata, - Endpoints = (endpoints ?? Array.Empty()).ToArray(), - Tags = normalizedTags, - CreatedAtUtc = timestampUtc.ToUniversalTime(), - CreatedBy = actor.Trim(), - UpdatedAtUtc = timestampUtc.ToUniversalTime(), - UpdatedBy = actor.Trim(), - IsSystemSeed = isSystemSeed - }; - } - - public IssuerRecord WithUpdated( - IssuerContact contact, - IssuerMetadata metadata, - IEnumerable? endpoints, - IEnumerable? tags, - string displayName, - string? description, - DateTimeOffset updatedAtUtc, - string updatedBy) - { - if (contact is null) - { - throw new ArgumentNullException(nameof(contact)); - } - - if (metadata is null) - { - throw new ArgumentNullException(nameof(metadata)); - } - - if (string.IsNullOrWhiteSpace(displayName)) - { - throw new ArgumentException("Display name is required.", nameof(displayName)); - } - - if (string.IsNullOrWhiteSpace(updatedBy)) - { - throw new ArgumentException("Actor is required.", nameof(updatedBy)); - } - - var normalizedTags = (tags ?? Array.Empty()) - .Where(tag => !string.IsNullOrWhiteSpace(tag)) - .Select(tag => tag.Trim().ToLowerInvariant()) - .Distinct(TagComparer) - .ToArray(); - - return this with - { - DisplayName = displayName.Trim(), - Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), - Contact = contact, - Metadata = metadata, - Endpoints = (endpoints ?? 
Array.Empty()).ToArray(), - Tags = normalizedTags, - UpdatedAtUtc = updatedAtUtc.ToUniversalTime(), - UpdatedBy = updatedBy.Trim() - }; - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs index d4a011dc9..68f3ad801 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs @@ -5,23 +5,23 @@ namespace StellaOps.IssuerDirectory.Core.Observability; internal static class IssuerDirectoryMetrics { - private static readonly Meter Meter = new("StellaOps.IssuerDirectory", "1.0"); + private static readonly Meter _meter = new("StellaOps.IssuerDirectory", "1.0"); - private static readonly Counter IssuerChangeCounter = Meter.CreateCounter( + private static readonly Counter _issuerChangeCounter = _meter.CreateCounter( "issuer_directory_changes_total", description: "Counts issuer create/update/delete events."); - private static readonly Counter KeyOperationCounter = Meter.CreateCounter( + private static readonly Counter _keyOperationCounter = _meter.CreateCounter( "issuer_directory_key_operations_total", description: "Counts issuer key create/rotate/revoke operations."); - private static readonly Counter KeyValidationFailureCounter = Meter.CreateCounter( + private static readonly Counter _keyValidationFailureCounter = _meter.CreateCounter( "issuer_directory_key_validation_failures_total", description: "Counts issuer key validation or verification failures."); public static void RecordIssuerChange(string tenantId, string issuerId, string action) { - IssuerChangeCounter.Add( + _issuerChangeCounter.Add( 1, new[] { @@ -33,7 +33,7 @@ internal static class IssuerDirectoryMetrics public static void RecordKeyOperation(string tenantId, string issuerId, string operation, string keyType) { - KeyOperationCounter.Add( + _keyOperationCounter.Add( 1, new[] { @@ -46,7 +46,7 @@ internal static class IssuerDirectoryMetrics public static void RecordKeyValidationFailure(string tenantId, string issuerId, string reason) { - KeyValidationFailureCounter.Add( + _keyValidationFailureCounter.Add( 1, new[] { diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Audit.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Audit.cs new file mode 100644 index 000000000..d86262ad7 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Audit.cs @@ -0,0 +1,30 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + private async Task WriteAuditAsync( + IssuerRecord record, + string action, + string actor, + string? 
reason, + CancellationToken cancellationToken) + { + var audit = new IssuerAuditEntry( + record.TenantId, + record.Id, + action, + _timeProvider.GetUtcNow(), + actor, + reason, + metadata: new Dictionary + { + ["display_name"] = record.DisplayName, + ["slug"] = record.Slug, + ["is_system_seed"] = record.IsSystemSeed.ToString() + }); + + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Create.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Create.cs new file mode 100644 index 000000000..f28d9b74a --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Create.cs @@ -0,0 +1,54 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + public async Task CreateAsync( + string tenantId, + string issuerId, + string displayName, + string slug, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var timestamp = _timeProvider.GetUtcNow(); + var record = IssuerRecord.Create( + issuerId, + tenantId, + displayName, + slug, + description, + contact, + metadata, + endpoints, + tags, + timestamp, + actor, + isSystemSeed: false); + + await _repository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(record, "created", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "created"); + _logger.LogInformation( + "Issuer {IssuerId} created for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + + return record; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Delete.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Delete.cs new file mode 100644 index 000000000..965abd7c8 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Delete.cs @@ -0,0 +1,39 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + public async Task DeleteAsync( + string tenantId, + string issuerId, + string actor, + string? 
reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + await _repository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + var timestamp = _timeProvider.GetUtcNow(); + var audit = new IssuerAuditEntry( + tenantId, + issuerId, + action: "deleted", + timestampUtc: timestamp, + actor: actor, + reason: reason, + metadata: null); + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "deleted"); + _logger.LogInformation( + "Issuer {IssuerId} deleted for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Read.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Read.cs new file mode 100644 index 000000000..1862ae55e --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Read.cs @@ -0,0 +1,44 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + public async Task<IReadOnlyCollection<IssuerRecord>> ListAsync( + string tenantId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var tenantIssuers = await _repository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); + if (!includeGlobal) + { + return tenantIssuers.OrderBy(record => record.Slug, StringComparer.Ordinal).ToArray(); + } + + var globalIssuers = await _repository.ListGlobalAsync(cancellationToken).ConfigureAwait(false); + return tenantIssuers.Concat(globalIssuers) + .DistinctBy(record => (record.TenantId, record.Id)) + .OrderBy(record => record.Slug, StringComparer.Ordinal) + .ToArray(); + } + + public async Task<IssuerRecord?> GetAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var issuer = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (issuer is not null || !includeGlobal) + { + return issuer; + } + + return await _repository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Seed.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Seed.cs new file mode 100644 index 000000000..7d618c0a6 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Seed.cs @@ -0,0 +1,49 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + public async Task SeedAsync(IEnumerable<IssuerRecord> seeds, CancellationToken cancellationToken) + { + if (seeds is null) + { + throw new ArgumentNullException(nameof(seeds)); + } + + foreach (var seed in seeds) + { + if (!seed.IsSystemSeed) + { + continue; + } + + var existing = await _repository.GetAsync(seed.TenantId, seed.Id, cancellationToken).ConfigureAwait(false); + + if 
(existing is null) + { + await _repository.UpsertAsync(seed, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(seed, "seeded", seed.UpdatedBy, "CSAF bootstrap import", cancellationToken) + .ConfigureAwait(false); + } + else + { + var refreshed = existing.WithUpdated( + seed.Contact, + seed.Metadata, + seed.Endpoints, + seed.Tags, + seed.DisplayName, + seed.Description, + _timeProvider.GetUtcNow(), + seed.UpdatedBy) + with + { + IsSystemSeed = true + }; + + await _repository.UpsertAsync(refreshed, cancellationToken).ConfigureAwait(false); + } + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Update.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Update.cs new file mode 100644 index 000000000..62fda4ec5 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.Update.cs @@ -0,0 +1,55 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerDirectoryService +{ + public async Task UpdateAsync( + string tenantId, + string issuerId, + string displayName, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var existing = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (existing is null) + { + throw new InvalidOperationException("Issuer does not exist."); + } + + var timestamp = _timeProvider.GetUtcNow(); + var updated = existing.WithUpdated( + contact, + metadata, + endpoints, + tags, + displayName, + description, + timestamp, + actor); + + await _repository.UpsertAsync(updated, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(updated, "updated", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "updated"); + _logger.LogInformation( + "Issuer {IssuerId} updated for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + + return updated; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs index 91ae3e4f7..89bdf7dfd 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs @@ -1,14 +1,14 @@ using Microsoft.Extensions.Logging; + using StellaOps.IssuerDirectory.Core.Abstractions; using StellaOps.IssuerDirectory.Core.Domain; -using StellaOps.IssuerDirectory.Core.Observability; namespace StellaOps.IssuerDirectory.Core.Services; /// /// Coordinates issuer directory operations with persistence, validation, and auditing. 
/// -public sealed class IssuerDirectoryService +public sealed partial class IssuerDirectoryService { private readonly IIssuerRepository _repository; private readonly IIssuerAuditSink _auditSink; @@ -26,227 +26,4 @@ public sealed class IssuerDirectoryService _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - public async Task> ListAsync( - string tenantId, - bool includeGlobal, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - - var tenantIssuers = await _repository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); - if (!includeGlobal) - { - return tenantIssuers.OrderBy(record => record.Slug, StringComparer.Ordinal).ToArray(); - } - - var globalIssuers = await _repository.ListGlobalAsync(cancellationToken).ConfigureAwait(false); - return tenantIssuers.Concat(globalIssuers) - .DistinctBy(record => (record.TenantId, record.Id)) - .OrderBy(record => record.Slug, StringComparer.Ordinal) - .ToArray(); - } - - public async Task GetAsync( - string tenantId, - string issuerId, - bool includeGlobal, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var issuer = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - if (issuer is not null || !includeGlobal) - { - return issuer; - } - - return await _repository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); - } - - public async Task CreateAsync( - string tenantId, - string issuerId, - string displayName, - string slug, - string? description, - IssuerContact contact, - IssuerMetadata metadata, - IEnumerable? endpoints, - IEnumerable? tags, - string actor, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var timestamp = _timeProvider.GetUtcNow(); - var record = IssuerRecord.Create( - issuerId, - tenantId, - displayName, - slug, - description, - contact, - metadata, - endpoints, - tags, - timestamp, - actor, - isSystemSeed: false); - - await _repository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(record, "created", actor, reason, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "created"); - _logger.LogInformation( - "Issuer {IssuerId} created for tenant {TenantId} by {Actor}.", - issuerId, - tenantId, - actor); - - return record; - } - - public async Task UpdateAsync( - string tenantId, - string issuerId, - string displayName, - string? description, - IssuerContact contact, - IssuerMetadata metadata, - IEnumerable? endpoints, - IEnumerable? tags, - string actor, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var existing = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false) - ?? 
throw new InvalidOperationException($"Issuer '{issuerId}' not found for tenant '{tenantId}'."); - - var timestamp = _timeProvider.GetUtcNow(); - var updated = existing.WithUpdated( - contact, - metadata, - endpoints, - tags, - displayName, - description, - timestamp, - actor); - - await _repository.UpsertAsync(updated, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(updated, "updated", actor, reason, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "updated"); - _logger.LogInformation( - "Issuer {IssuerId} updated for tenant {TenantId} by {Actor}.", - issuerId, - tenantId, - actor); - - return updated; - } - - public async Task DeleteAsync( - string tenantId, - string issuerId, - string actor, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - await _repository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - var timestamp = _timeProvider.GetUtcNow(); - var audit = new IssuerAuditEntry( - tenantId, - issuerId, - action: "deleted", - timestampUtc: timestamp, - actor: actor, - reason: reason, - metadata: null); - await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "deleted"); - _logger.LogInformation( - "Issuer {IssuerId} deleted for tenant {TenantId} by {Actor}.", - issuerId, - tenantId, - actor); - } - - public async Task SeedAsync(IEnumerable seeds, CancellationToken cancellationToken) - { - if (seeds is null) - { - throw new ArgumentNullException(nameof(seeds)); - } - - foreach (var seed in seeds) - { - if (!seed.IsSystemSeed) - { - continue; - } - - var existing = await _repository.GetAsync(seed.TenantId, seed.Id, cancellationToken).ConfigureAwait(false); - - if (existing is null) - { - await _repository.UpsertAsync(seed, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(seed, "seeded", seed.UpdatedBy, "CSAF bootstrap import", cancellationToken) - .ConfigureAwait(false); - } - else - { - var refreshed = existing.WithUpdated( - seed.Contact, - seed.Metadata, - seed.Endpoints, - seed.Tags, - seed.DisplayName, - seed.Description, - _timeProvider.GetUtcNow(), - seed.UpdatedBy) - with - { - IsSystemSeed = true - }; - - await _repository.UpsertAsync(refreshed, cancellationToken).ConfigureAwait(false); - } - } - } - - private async Task WriteAuditAsync( - IssuerRecord record, - string action, - string actor, - string? 
reason, - CancellationToken cancellationToken) - { - var audit = new IssuerAuditEntry( - record.TenantId, - record.Id, - action, - _timeProvider.GetUtcNow(), - actor, - reason, - metadata: new Dictionary - { - ["display_name"] = record.DisplayName, - ["slug"] = record.Slug, - ["is_system_seed"] = record.IsSystemSeed.ToString() - }); - - await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Add.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Add.cs new file mode 100644 index 000000000..14c21e01d --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Add.cs @@ -0,0 +1,83 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; +using StellaOps.IssuerDirectory.Core.Validation; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + public async Task AddAsync( + string tenantId, + string issuerId, + IssuerKeyType type, + IssuerKeyMaterial material, + DateTimeOffset? expiresAtUtc, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + + IssuerKeyValidationResult validation; + try + { + validation = IssuerKeyValidator.Validate(type, material, expiresAtUtc, _timeProvider); + } + catch (Exception ex) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name); + _logger.LogWarning( + ex, + "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during add.", + issuerId, + tenantId); + throw; + } + + var fingerprint = ComputeFingerprint(validation.RawKeyBytes); + + var existing = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken) + .ConfigureAwait(false); + if (existing is not null && existing.Status == IssuerKeyStatus.Active) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint"); + _logger.LogWarning( + "Duplicate active key detected for issuer {IssuerId} (tenant={TenantId}).", + issuerId, + tenantId); + throw new InvalidOperationException("An identical active key already exists for this issuer."); + } + + var now = _timeProvider.GetUtcNow(); + var record = IssuerKeyRecord.Create( + _guidProvider.NewGuid().ToString("n"), + issuerId, + tenantId, + type, + validation.Material, + fingerprint, + now, + actor, + validation.ExpiresAtUtc, + replacesKeyId: null); + + await _keyRepository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(record, "key_created", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "created", type.ToString()); + _logger.LogInformation( + "Issuer key {KeyId} created for issuer {IssuerId} (tenant={TenantId}) by {Actor}.", + record.Id, + issuerId, + tenantId, + actor); + + return record; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Helpers.cs 
b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Helpers.cs new file mode 100644 index 000000000..e7cb5df3c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Helpers.cs @@ -0,0 +1,52 @@ +using System.Security.Cryptography; + +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (issuer is null) + { + var global = await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); + if (global is null) + { + throw new InvalidOperationException("Issuer does not exist."); + } + } + } + + private async Task WriteAuditAsync( + IssuerKeyRecord record, + string action, + string actor, + string? reason, + CancellationToken cancellationToken) + { + var audit = new IssuerAuditEntry( + record.TenantId, + record.IssuerId, + action, + _timeProvider.GetUtcNow(), + actor, + reason, + new Dictionary<string, string> + { + ["key_id"] = record.Id, + ["key_type"] = record.Type.ToString(), + ["fingerprint"] = record.Fingerprint, + ["status"] = record.Status.ToString() + }); + + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + } + + private static string ComputeFingerprint(byte[] rawKeyBytes) + { + var hash = SHA256.HashData(rawKeyBytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.List.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.List.cs new file mode 100644 index 000000000..438a76d67 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.List.cs @@ -0,0 +1,28 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + public async Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var tenantKeys = await _keyRepository.ListAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (!includeGlobal) + { + return tenantKeys.OrderBy(key => key.CreatedAtUtc).ToArray(); + } + + var globalKeys = await _keyRepository.ListGlobalAsync(issuerId, cancellationToken).ConfigureAwait(false); + return tenantKeys.Concat(globalKeys) + .DistinctBy(key => (key.TenantId, key.Id)) + .OrderBy(key => key.CreatedAtUtc) + .ToArray(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Revoke.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Revoke.cs new file mode 100644 index 000000000..d7fb65f8f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Revoke.cs @@ -0,0 +1,54 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using 
StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + public async Task RevokeAsync( + string tenantId, + string issuerId, + string keyId, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + var existing = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false); + if (existing is null) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found"); + _logger.LogWarning( + "Attempted to revoke missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).", + keyId, + issuerId, + tenantId); + throw new InvalidOperationException("Key not found for revocation."); + } + + if (existing.Status == IssuerKeyStatus.Revoked) + { + return; + } + + var now = _timeProvider.GetUtcNow(); + var revoked = existing.WithStatus(IssuerKeyStatus.Revoked, now, actor); + + await _keyRepository.UpsertAsync(revoked, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(revoked, "key_revoked", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "revoked", existing.Type.ToString()); + _logger.LogInformation( + "Issuer key {KeyId} revoked for issuer {IssuerId} (tenant={TenantId}) by {Actor}.", + keyId, + issuerId, + tenantId, + actor); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.Helpers.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.Helpers.cs new file mode 100644 index 000000000..8bad77152 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.Helpers.cs @@ -0,0 +1,42 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + private async Task GetActiveKeyForRotationAsync( + string tenantId, + string issuerId, + string keyId, + CancellationToken cancellationToken) + { + var existing = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false); + if (existing is null) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found"); + _logger.LogWarning( + "Attempted to rotate missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).", + keyId, + issuerId, + tenantId); + throw new InvalidOperationException("Key not found for rotation."); + } + + if (existing.Status != IssuerKeyStatus.Active) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_active"); + _logger.LogWarning( + "Attempted to rotate non-active key {KeyId} (status={Status}) for issuer {IssuerId} (tenant={TenantId}).", + keyId, + existing.Status, + issuerId, + tenantId); + throw new InvalidOperationException("Only active keys can be rotated."); + } + + return existing; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.cs 
b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.cs new file mode 100644 index 000000000..648159daa --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.Rotate.cs @@ -0,0 +1,94 @@ +using Microsoft.Extensions.Logging; + +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; +using StellaOps.IssuerDirectory.Core.Validation; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerKeyService +{ + public async Task RotateAsync( + string tenantId, + string issuerId, + string keyId, + IssuerKeyType newType, + IssuerKeyMaterial newMaterial, + DateTimeOffset? expiresAtUtc, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + var existing = await GetActiveKeyForRotationAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false); + + await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + + IssuerKeyValidationResult validation; + try + { + validation = IssuerKeyValidator.Validate(newType, newMaterial, expiresAtUtc, _timeProvider); + } + catch (Exception ex) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name); + _logger.LogWarning( + ex, + "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during rotation.", + issuerId, + tenantId); + throw; + } + + var fingerprint = ComputeFingerprint(validation.RawKeyBytes); + + var duplicate = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken) + .ConfigureAwait(false); + if (duplicate is not null && duplicate.Status == IssuerKeyStatus.Active) + { + IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint"); + _logger.LogWarning( + "Duplicate active key detected during rotation for issuer {IssuerId} (tenant={TenantId}).", + issuerId, + tenantId); + throw new InvalidOperationException("An identical active key already exists for this issuer."); + } + + var now = _timeProvider.GetUtcNow(); + + var retired = existing.WithStatus(IssuerKeyStatus.Retired, now, actor); + await _keyRepository.UpsertAsync(retired, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(retired, "key_retired", actor, reason ?? 
"rotation", cancellationToken) + .ConfigureAwait(false); + + var replacement = IssuerKeyRecord.Create( + _guidProvider.NewGuid().ToString("n"), + issuerId, + tenantId, + newType, + validation.Material, + fingerprint, + now, + actor, + validation.ExpiresAtUtc, + replacesKeyId: existing.Id); + + await _keyRepository.UpsertAsync(replacement, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(replacement, "key_rotated", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "rotated", newType.ToString()); + _logger.LogInformation( + "Issuer key {OldKeyId} rotated for issuer {IssuerId} (tenant={TenantId}) by {Actor}; new key {NewKeyId}.", + existing.Id, + issuerId, + tenantId, + actor, + replacement.Id); + + return replacement; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs index 64b67ad50..0a9d51826 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs @@ -1,18 +1,15 @@ - using Microsoft.Extensions.Logging; + using StellaOps.Determinism; using StellaOps.IssuerDirectory.Core.Abstractions; using StellaOps.IssuerDirectory.Core.Domain; -using StellaOps.IssuerDirectory.Core.Observability; -using StellaOps.IssuerDirectory.Core.Validation; -using System.Security.Cryptography; namespace StellaOps.IssuerDirectory.Core.Services; /// /// Manages issuer signing keys. /// -public sealed class IssuerKeyService +public sealed partial class IssuerKeyService { private readonly IIssuerRepository _issuerRepository; private readonly IIssuerKeyRepository _keyRepository; @@ -36,292 +33,4 @@ public sealed class IssuerKeyService _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _guidProvider = guidProvider ?? SystemGuidProvider.Instance; } - - public async Task> ListAsync( - string tenantId, - string issuerId, - bool includeGlobal, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var tenantKeys = await _keyRepository.ListAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - if (!includeGlobal) - { - return tenantKeys.OrderBy(key => key.CreatedAtUtc).ToArray(); - } - - var globalKeys = await _keyRepository.ListGlobalAsync(issuerId, cancellationToken).ConfigureAwait(false); - return tenantKeys.Concat(globalKeys) - .DistinctBy(key => (key.TenantId, key.Id)) - .OrderBy(key => key.CreatedAtUtc) - .ToArray(); - } - - public async Task AddAsync( - string tenantId, - string issuerId, - IssuerKeyType type, - IssuerKeyMaterial material, - DateTimeOffset? expiresAtUtc, - string actor, - string? 
reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(actor); - - await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - - IssuerKeyValidationResult validation; - try - { - validation = IssuerKeyValidator.Validate(type, material, expiresAtUtc, _timeProvider); - } - catch (Exception ex) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name); - _logger.LogWarning( - ex, - "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during add.", - issuerId, - tenantId); - throw; - } - var fingerprint = ComputeFingerprint(validation.RawKeyBytes); - - var existing = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken) - .ConfigureAwait(false); - if (existing is not null && existing.Status == IssuerKeyStatus.Active) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint"); - _logger.LogWarning( - "Duplicate active key detected for issuer {IssuerId} (tenant={TenantId}).", - issuerId, - tenantId); - throw new InvalidOperationException("An identical active key already exists for this issuer."); - } - - var now = _timeProvider.GetUtcNow(); - var record = IssuerKeyRecord.Create( - _guidProvider.NewGuid().ToString("n"), - issuerId, - tenantId, - type, - validation.Material, - fingerprint, - now, - actor, - validation.ExpiresAtUtc, - replacesKeyId: null); - - await _keyRepository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(record, "key_created", actor, reason, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "created", type.ToString()); - _logger.LogInformation( - "Issuer key {KeyId} created for issuer {IssuerId} (tenant={TenantId}) by {Actor}.", - record.Id, - issuerId, - tenantId, - actor); - - return record; - } - - public async Task RotateAsync( - string tenantId, - string issuerId, - string keyId, - IssuerKeyType newType, - IssuerKeyMaterial newMaterial, - DateTimeOffset? expiresAtUtc, - string actor, - string? 
reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - ArgumentException.ThrowIfNullOrWhiteSpace(actor); - - var existing = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false); - if (existing is null) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found"); - _logger.LogWarning( - "Attempted to rotate missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).", - keyId, - issuerId, - tenantId); - throw new InvalidOperationException("Key not found for rotation."); - } - - if (existing.Status != IssuerKeyStatus.Active) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_active"); - _logger.LogWarning( - "Attempted to rotate non-active key {KeyId} (status={Status}) for issuer {IssuerId} (tenant={TenantId}).", - keyId, - existing.Status, - issuerId, - tenantId); - throw new InvalidOperationException("Only active keys can be rotated."); - } - - await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - - IssuerKeyValidationResult validation; - try - { - validation = IssuerKeyValidator.Validate(newType, newMaterial, expiresAtUtc, _timeProvider); - } - catch (Exception ex) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name); - _logger.LogWarning( - ex, - "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during rotation.", - issuerId, - tenantId); - throw; - } - var fingerprint = ComputeFingerprint(validation.RawKeyBytes); - - var duplicate = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken) - .ConfigureAwait(false); - if (duplicate is not null && duplicate.Status == IssuerKeyStatus.Active) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint"); - _logger.LogWarning( - "Duplicate active key detected during rotation for issuer {IssuerId} (tenant={TenantId}).", - issuerId, - tenantId); - throw new InvalidOperationException("An identical active key already exists for this issuer."); - } - - var now = _timeProvider.GetUtcNow(); - - var retired = existing.WithStatus(IssuerKeyStatus.Retired, now, actor); - await _keyRepository.UpsertAsync(retired, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(retired, "key_retired", actor, reason ?? "rotation", cancellationToken) - .ConfigureAwait(false); - - var replacement = IssuerKeyRecord.Create( - _guidProvider.NewGuid().ToString("n"), - issuerId, - tenantId, - newType, - validation.Material, - fingerprint, - now, - actor, - validation.ExpiresAtUtc, - replacesKeyId: existing.Id); - - await _keyRepository.UpsertAsync(replacement, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(replacement, "key_rotated", actor, reason, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "rotated", newType.ToString()); - _logger.LogInformation( - "Issuer key {OldKeyId} rotated for issuer {IssuerId} (tenant={TenantId}) by {Actor}; new key {NewKeyId}.", - existing.Id, - issuerId, - tenantId, - actor, - replacement.Id); - - return replacement; - } - - public async Task RevokeAsync( - string tenantId, - string issuerId, - string keyId, - string actor, - string? 
reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - ArgumentException.ThrowIfNullOrWhiteSpace(actor); - - var existing = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false); - if (existing is null) - { - IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found"); - _logger.LogWarning( - "Attempted to revoke missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).", - keyId, - issuerId, - tenantId); - throw new InvalidOperationException("Key not found for revocation."); - } - - if (existing.Status == IssuerKeyStatus.Revoked) - { - return; - } - - var now = _timeProvider.GetUtcNow(); - var revoked = existing.WithStatus(IssuerKeyStatus.Revoked, now, actor); - - await _keyRepository.UpsertAsync(revoked, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(revoked, "key_revoked", actor, reason, cancellationToken).ConfigureAwait(false); - - IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "revoked", existing.Type.ToString()); - _logger.LogInformation( - "Issuer key {KeyId} revoked for issuer {IssuerId} (tenant={TenantId}) by {Actor}.", - keyId, - issuerId, - tenantId, - actor); - } - - private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - if (issuer is null) - { - var global = await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); - if (global is null) - { - throw new InvalidOperationException("Issuer does not exist."); - } - } - } - - private async Task WriteAuditAsync( - IssuerKeyRecord record, - string action, - string actor, - string? reason, - CancellationToken cancellationToken) - { - var audit = new IssuerAuditEntry( - record.TenantId, - record.IssuerId, - action, - _timeProvider.GetUtcNow(), - actor, - reason, - new Dictionary - { - ["key_id"] = record.Id, - ["key_type"] = record.Type.ToString(), - ["fingerprint"] = record.Fingerprint, - ["status"] = record.Status.ToString() - }); - - await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); - } - - private static string ComputeFingerprint(byte[] rawKeyBytes) - { - var hash = SHA256.HashData(rawKeyBytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } } diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Delete.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Delete.cs new file mode 100644 index 000000000..33de08da9 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Delete.cs @@ -0,0 +1,27 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerTrustService +{ + public async Task DeleteAsync( + string tenantId, + string issuerId, + string actor, + string? 
reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (existing is null) + { + return; + } + + await _trustRepository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(existing, "trust_override_deleted", actor, reason, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Get.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Get.cs new file mode 100644 index 000000000..2b87ab61e --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Get.cs @@ -0,0 +1,31 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerTrustService +{ + public async Task GetAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var tenantOverride = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + IssuerTrustOverrideRecord? globalOverride = null; + + if (includeGlobal && !string.Equals(tenantId, IssuerTenants.Global, StringComparison.Ordinal)) + { + globalOverride = await _trustRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken) + .ConfigureAwait(false); + } + + var effectiveWeight = tenantOverride?.Weight + ?? globalOverride?.Weight + ?? 0m; + + return new IssuerTrustView(tenantOverride, globalOverride, effectiveWeight); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Helpers.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Helpers.cs new file mode 100644 index 000000000..94a0dde8e --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Helpers.cs @@ -0,0 +1,39 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerTrustService +{ + private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false) + ?? await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); + + if (issuer is null) + { + throw new InvalidOperationException("Issuer does not exist."); + } + } + + private async Task WriteAuditAsync( + IssuerTrustOverrideRecord record, + string action, + string actor, + string? 
reason, + CancellationToken cancellationToken) + { + var audit = new IssuerAuditEntry( + record.TenantId, + record.IssuerId, + action, + _timeProvider.GetUtcNow(), + actor, + reason, + new Dictionary<string, string> + { + ["weight"] = record.Weight.ToString("0.###", System.Globalization.CultureInfo.InvariantCulture) + }); + + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Set.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Set.cs new file mode 100644 index 000000000..9163e7022 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.Set.cs @@ -0,0 +1,33 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed partial class IssuerTrustService +{ + public async Task<IssuerTrustOverrideRecord> SetAsync( + string tenantId, + string issuerId, + decimal weight, + string? reason, + string actor, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + + var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + var timestamp = _timeProvider.GetUtcNow(); + + IssuerTrustOverrideRecord record = existing is null + ? IssuerTrustOverrideRecord.Create(issuerId, tenantId, weight, reason, timestamp, actor) + : existing.WithUpdated(weight, reason, timestamp, actor); + + await _trustRepository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(record, "trust_override_set", actor, reason, cancellationToken).ConfigureAwait(false); + + return record; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.cs index 188e628ca..0b1f2d7a2 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.cs @@ -1,12 +1,11 @@ using StellaOps.IssuerDirectory.Core.Abstractions; -using StellaOps.IssuerDirectory.Core.Domain; namespace StellaOps.IssuerDirectory.Core.Services; /// <summary> /// Handles issuer trust weight overrides. /// </summary> -public sealed class IssuerTrustService +public sealed partial class IssuerTrustService { private readonly IIssuerRepository _issuerRepository; private readonly IIssuerTrustRepository _trustRepository; @@ -24,114 +23,4 @@ public sealed class IssuerTrustService _auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); } - - public async Task<IssuerTrustView> GetAsync( - string tenantId, - string issuerId, - bool includeGlobal, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var tenantOverride = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - IssuerTrustOverrideRecord?
globalOverride = null; - - if (includeGlobal && !string.Equals(tenantId, IssuerTenants.Global, StringComparison.Ordinal)) - { - globalOverride = await _trustRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); - } - - var effectiveWeight = tenantOverride?.Weight - ?? globalOverride?.Weight - ?? 0m; - - return new IssuerTrustView(tenantOverride, globalOverride, effectiveWeight); - } - - public async Task SetAsync( - string tenantId, - string issuerId, - decimal weight, - string? reason, - string actor, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(actor); - - await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - - var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - var timestamp = _timeProvider.GetUtcNow(); - - IssuerTrustOverrideRecord record = existing is null - ? IssuerTrustOverrideRecord.Create(issuerId, tenantId, weight, reason, timestamp, actor) - : existing.WithUpdated(weight, reason, timestamp, actor); - - await _trustRepository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(record, "trust_override_set", actor, reason, cancellationToken).ConfigureAwait(false); - - return record; - } - - public async Task DeleteAsync( - string tenantId, - string issuerId, - string actor, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - ArgumentException.ThrowIfNullOrWhiteSpace(actor); - - var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - if (existing is null) - { - return; - } - - await _trustRepository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(existing, "trust_override_deleted", actor, reason, cancellationToken).ConfigureAwait(false); - } - - private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false) - ?? await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); - - if (issuer is null) - { - throw new InvalidOperationException("Issuer does not exist."); - } - } - - private async Task WriteAuditAsync( - IssuerTrustOverrideRecord record, - string action, - string actor, - string? reason, - CancellationToken cancellationToken) - { - var audit = new IssuerAuditEntry( - record.TenantId, - record.IssuerId, - action, - _timeProvider.GetUtcNow(), - actor, - reason, - new Dictionary - { - ["weight"] = record.Weight.ToString("0.###", System.Globalization.CultureInfo.InvariantCulture) - }); - - await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); - } } - -public sealed record IssuerTrustView( - IssuerTrustOverrideRecord? TenantOverride, - IssuerTrustOverrideRecord? 
GlobalOverride, - decimal EffectiveWeight); diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustView.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustView.cs new file mode 100644 index 000000000..3c3569c68 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustView.cs @@ -0,0 +1,8 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Services; + +public sealed record IssuerTrustView( + IssuerTrustOverrideRecord? TenantOverride, + IssuerTrustOverrideRecord? GlobalOverride, + decimal EffectiveWeight); diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/TASKS.md b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/TASKS.md index 2d6a6ecf6..201bdee87 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/TASKS.md +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0373-T | DONE | Revalidated 2026-01-07; test coverage audit for IssuerDirectory.Core. | | AUDIT-0373-A | TODO | Pending approval (revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-04: Split domain/services/validation into partials, normalized metrics fields, added domain/validator tests (SPRINT_20260130_002). | diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Certificate.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Certificate.cs new file mode 100644 index 000000000..4f7015a3c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Certificate.cs @@ -0,0 +1,35 @@ +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; + +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Validation; + +public static partial class IssuerKeyValidator +{ + private static byte[] ValidateCertificate(IssuerKeyMaterial material) + { + if (!string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase) && + !string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("X.509 certificates must be provided as PEM or base64."); + } + + try + { + if (string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase)) + { + using var pemCertificate = X509Certificate2.CreateFromPem(material.Value); + return pemCertificate.RawData; + } + + var raw = Convert.FromBase64String(material.Value); + using var loadedCertificate = X509CertificateLoader.LoadCertificate(raw); + return loadedCertificate.RawData; + } + catch (Exception ex) when (ex is CryptographicException || ex is FormatException) + { + throw new InvalidOperationException("Certificate material is invalid or unsupported.", ex); + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Dsse.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Dsse.cs new file mode 100644 index 000000000..26cef141a --- 
/dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Dsse.cs @@ -0,0 +1,31 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Validation; + +public static partial class IssuerKeyValidator +{ + private static byte[] ValidateDsseKey(IssuerKeyMaterial material) + { + if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("DSSE keys must use base64 format."); + } + + byte[] rawBytes; + try + { + rawBytes = Convert.FromBase64String(material.Value); + } + catch (FormatException ex) + { + throw new InvalidOperationException("DSSE key material must be valid base64.", ex); + } + + if (rawBytes.Length is not (32 or 48 or 64)) + { + throw new InvalidOperationException("DSSE keys must contain 32, 48, or 64 bytes of public key material."); + } + + return rawBytes; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Ed25519.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Ed25519.cs new file mode 100644 index 000000000..083740c82 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.Ed25519.cs @@ -0,0 +1,31 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Validation; + +public static partial class IssuerKeyValidator +{ + private static byte[] ValidateEd25519(IssuerKeyMaterial material) + { + if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Ed25519 keys must use base64 format."); + } + + byte[] rawBytes; + try + { + rawBytes = Convert.FromBase64String(material.Value); + } + catch (FormatException ex) + { + throw new InvalidOperationException("Ed25519 key material must be valid base64.", ex); + } + + if (rawBytes.Length != 32) + { + throw new InvalidOperationException("Ed25519 public keys must contain 32 bytes."); + } + + return rawBytes; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.cs index 16a4e976e..3985ac392 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.cs +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.cs @@ -1,14 +1,11 @@ - using StellaOps.IssuerDirectory.Core.Domain; -using System.Security.Cryptography; -using System.Security.Cryptography.X509Certificates; namespace StellaOps.IssuerDirectory.Core.Validation; /// /// Performs validation and normalization of issuer key material. 
/// -public static class IssuerKeyValidator +public static partial class IssuerKeyValidator { public static IssuerKeyValidationResult Validate( IssuerKeyType type, @@ -48,80 +45,4 @@ public static class IssuerKeyValidator { return new IssuerKeyMaterial(material.Format.ToLowerInvariant(), material.Value.Trim()); } - - private static byte[] ValidateEd25519(IssuerKeyMaterial material) - { - if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Ed25519 keys must use base64 format."); - } - - byte[] rawBytes; - try - { - rawBytes = Convert.FromBase64String(material.Value); - } - catch (FormatException ex) - { - throw new InvalidOperationException("Ed25519 key material must be valid base64.", ex); - } - - if (rawBytes.Length != 32) - { - throw new InvalidOperationException("Ed25519 public keys must contain 32 bytes."); - } - - return rawBytes; - } - - private static byte[] ValidateCertificate(IssuerKeyMaterial material) - { - if (!string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase) && - !string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("X.509 certificates must be provided as PEM or base64."); - } - - try - { - if (string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase)) - { - using var pemCertificate = X509Certificate2.CreateFromPem(material.Value); - return pemCertificate.RawData; - } - - var raw = Convert.FromBase64String(material.Value); - using var loadedCertificate = X509CertificateLoader.LoadCertificate(raw); - return loadedCertificate.RawData; - } - catch (Exception ex) when (ex is CryptographicException || ex is FormatException) - { - throw new InvalidOperationException("Certificate material is invalid or unsupported.", ex); - } - } - - private static byte[] ValidateDsseKey(IssuerKeyMaterial material) - { - if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("DSSE keys must use base64 format."); - } - - byte[] rawBytes; - try - { - rawBytes = Convert.FromBase64String(material.Value); - } - catch (FormatException ex) - { - throw new InvalidOperationException("DSSE key material must be valid base64.", ex); - } - - if (rawBytes.Length is not (32 or 48 or 64)) - { - throw new InvalidOperationException("DSSE keys must contain 32, 48, or 64 bytes of public key material."); - } - - return rawBytes; - } } diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Extensions/IssuerDirectoryPersistenceExtensions.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Extensions/IssuerDirectoryPersistenceExtensions.cs index f0aaded76..8ad125b99 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Extensions/IssuerDirectoryPersistenceExtensions.cs +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Extensions/IssuerDirectoryPersistenceExtensions.cs @@ -1,5 +1,4 @@ using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; using StellaOps.Infrastructure.Postgres.Options; using StellaOps.IssuerDirectory.Core.Abstractions; using StellaOps.IssuerDirectory.Persistence.Postgres; @@ -31,11 +30,7 @@ public static class IssuerDirectoryPersistenceExtensions }; configureOptions(options); - services.AddSingleton(sp => - { - var logger = sp.GetRequiredService>(); - return new IssuerDirectoryDataSource(options, logger); - }); + 
RegisterDataSource(services, options); RegisterRepositories(services); @@ -54,21 +49,22 @@ public static class IssuerDirectoryPersistenceExtensions { ArgumentNullException.ThrowIfNull(options); - // Ensure schema is set for issuer module + RegisterDataSource(services, options); + + RegisterRepositories(services); + + return services; + } + + private static void RegisterDataSource(IServiceCollection services, PostgresOptions options) + { if (string.IsNullOrWhiteSpace(options.SchemaName)) { options.SchemaName = "issuer"; } - services.AddSingleton(sp => - { - var logger = sp.GetRequiredService<ILogger<IssuerDirectoryDataSource>>(); - return new IssuerDirectoryDataSource(options, logger); - }); - - RegisterRepositories(services); - - return services; + services.AddSingleton(options); + services.AddSingleton<IssuerDirectoryDataSource>(); } private static void RegisterRepositories(IServiceCollection services) diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerAuditSink.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerAuditSink.cs index eabd3bf82..d262ca68d 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerAuditSink.cs +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerAuditSink.cs @@ -15,7 +15,7 @@ public sealed class PostgresIssuerAuditSink : IIssuerAuditSink { private readonly IssuerDirectoryDataSource _dataSource; private readonly ILogger<PostgresIssuerAuditSink> _logger; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, WriteIndented = false @@ -61,6 +61,6 @@ public sealed class PostgresIssuerAuditSink : IIssuerAuditSink return "{}"; } - return JsonSerializer.Serialize(metadata, JsonOptions); + return JsonSerializer.Serialize(metadata, _jsonOptions); } } diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Get.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Get.cs new file mode 100644 index 000000000..14c5a8fe2 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Get.cs @@ -0,0 +1,75 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerKeyRepository +{ + public async Task<IssuerKeyRecord?> GetAsync( + string tenantId, + string issuerId, + string keyId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, + replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, + revoke_reason, metadata + FROM issuer.issuer_keys + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid AND key_id = @keyId + LIMIT 1 + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId",
issuerId); + command.Parameters.AddWithValue("keyId", keyId); + + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return null; + } + + return MapToRecord(reader); + } + + public async Task<IssuerKeyRecord?> GetByFingerprintAsync( + string tenantId, + string issuerId, + string fingerprint, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, + replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, + revoke_reason, metadata + FROM issuer.issuer_keys + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid AND fingerprint = @fingerprint + LIMIT 1 + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + command.Parameters.AddWithValue("fingerprint", fingerprint); + + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return null; + } + + return MapToRecord(reader); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.List.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.List.cs new file mode 100644 index 000000000..1e1991d05 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.List.cs @@ -0,0 +1,72 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerKeyRepository +{ + public async Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync( + string tenantId, + string issuerId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, + replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, + revoke_reason, metadata + FROM issuer.issuer_keys + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid + ORDER BY created_at ASC + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + + return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); + } + + public async Task<IReadOnlyCollection<IssuerKeyRecord>> ListGlobalAsync( + string issuerId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenSystemConnectionAsync(cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, + replaces_key_id,
created_at, created_by, updated_at, updated_by, retired_at, revoked_at, + revoke_reason, metadata + FROM issuer.issuer_keys + WHERE tenant_id = @globalTenantId::uuid AND issuer_id = @issuerId::uuid + ORDER BY created_at ASC + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("globalTenantId", IssuerTenants.Global); + command.Parameters.AddWithValue("issuerId", issuerId); + + return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); + } + + private static async Task<IReadOnlyCollection<IssuerKeyRecord>> ReadAllRecordsAsync( + NpgsqlCommand command, + CancellationToken cancellationToken) + { + var results = new List<IssuerKeyRecord>(); + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + results.Add(MapToRecord(reader)); + } + + return results; + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Mapping.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Mapping.cs new file mode 100644 index 000000000..370a31401 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Mapping.cs @@ -0,0 +1,70 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerKeyRepository +{ + private static IssuerKeyRecord MapToRecord(NpgsqlDataReader reader) + { + var issuerId = reader.GetGuid(1).ToString(); + var tenantId = reader.GetGuid(2).ToString(); + var keyId = reader.GetString(3); + var keyType = ParseKeyType(reader.GetString(4)); + var publicKey = reader.GetString(5); + var fingerprint = reader.GetString(6); + var notBefore = reader.IsDBNull(7) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(7), TimeSpan.Zero); + var notAfter = reader.IsDBNull(8) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(8), TimeSpan.Zero); + var status = ParseKeyStatus(reader.GetString(9)); + var replacesKeyId = reader.IsDBNull(10) ? null : reader.GetString(10); + var createdAt = reader.GetDateTime(11); + var createdBy = reader.GetString(12); + var updatedAt = reader.GetDateTime(13); + var updatedBy = reader.GetString(14); + var retiredAt = reader.IsDBNull(15) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(15), TimeSpan.Zero); + var revokedAt = reader.IsDBNull(16) ?
(DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(16), TimeSpan.Zero); + + return new IssuerKeyRecord + { + Id = keyId, + IssuerId = issuerId, + TenantId = tenantId, + Type = keyType, + Status = status, + Material = new IssuerKeyMaterial("pem", publicKey), + Fingerprint = fingerprint, + CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), + CreatedBy = createdBy, + UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), + UpdatedBy = updatedBy, + ExpiresAtUtc = notAfter, + RetiredAtUtc = retiredAt, + RevokedAtUtc = revokedAt, + ReplacesKeyId = replacesKeyId + }; + } + + private static string MapKeyType(IssuerKeyType type) => type switch + { + IssuerKeyType.Ed25519PublicKey => "ed25519", + IssuerKeyType.X509Certificate => "x509", + IssuerKeyType.DssePublicKey => "dsse", + _ => throw new ArgumentOutOfRangeException(nameof(type), type, "Unsupported key type") + }; + + private static IssuerKeyType ParseKeyType(string value) => value.ToLowerInvariant() switch + { + "ed25519" => IssuerKeyType.Ed25519PublicKey, + "x509" => IssuerKeyType.X509Certificate, + "dsse" => IssuerKeyType.DssePublicKey, + _ => throw new ArgumentException($"Unknown key type: {value}", nameof(value)) + }; + + private static IssuerKeyStatus ParseKeyStatus(string value) => value.ToLowerInvariant() switch + { + "active" => IssuerKeyStatus.Active, + "retired" => IssuerKeyStatus.Retired, + "revoked" => IssuerKeyStatus.Revoked, + _ => throw new ArgumentException($"Unknown key status: {value}", nameof(value)) + }; +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Write.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Write.cs new file mode 100644 index 000000000..6e271939d --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.Write.cs @@ -0,0 +1,90 @@ +using Microsoft.Extensions.Logging; +using Npgsql; +using NpgsqlTypes; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerKeyRepository +{ + public async Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(record); + + await using var connection = await _dataSource + .OpenConnectionAsync(record.TenantId, "writer", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + INSERT INTO issuer.issuer_keys (id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, + not_before, not_after, status, replaces_key_id, created_at, created_by, + updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata) + VALUES (@id::uuid, @issuerId::uuid, @tenantId::uuid, @keyId, @keyType, @publicKey, @fingerprint, + @notBefore, @notAfter, @status, @replacesKeyId, @createdAt, @createdBy, @updatedAt, @updatedBy, + @retiredAt, @revokedAt, @revokeReason, @metadata::jsonb) + ON CONFLICT (issuer_id, key_id) + DO UPDATE SET + key_type = EXCLUDED.key_type, + public_key = EXCLUDED.public_key, + fingerprint = EXCLUDED.fingerprint, + not_before = EXCLUDED.not_before, + not_after = EXCLUDED.not_after, + status = EXCLUDED.status, + replaces_key_id = EXCLUDED.replaces_key_id, + updated_at = EXCLUDED.updated_at, + updated_by = EXCLUDED.updated_by, + retired_at = EXCLUDED.retired_at, + revoked_at = EXCLUDED.revoked_at, + revoke_reason = 
EXCLUDED.revoke_reason, + metadata = EXCLUDED.metadata + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + + command.Parameters.AddWithValue("id", Guid.Parse(record.Id)); + command.Parameters.AddWithValue("issuerId", Guid.Parse(record.IssuerId)); + command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); + command.Parameters.AddWithValue("keyId", record.Id); + command.Parameters.AddWithValue("keyType", MapKeyType(record.Type)); + command.Parameters.AddWithValue("publicKey", record.Material.Value); + command.Parameters.AddWithValue("fingerprint", record.Fingerprint); + command.Parameters.Add(new NpgsqlParameter("notBefore", NpgsqlDbType.TimestampTz) + { + Value = (object?)null ?? DBNull.Value + }); + command.Parameters.Add(new NpgsqlParameter("notAfter", NpgsqlDbType.TimestampTz) + { + Value = record.ExpiresAtUtc.HasValue ? record.ExpiresAtUtc.Value.UtcDateTime : DBNull.Value + }); + command.Parameters.AddWithValue("status", record.Status.ToString().ToLowerInvariant()); + command.Parameters.Add(new NpgsqlParameter("replacesKeyId", NpgsqlDbType.Text) + { + Value = record.ReplacesKeyId ?? (object)DBNull.Value + }); + command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc.UtcDateTime); + command.Parameters.AddWithValue("createdBy", record.CreatedBy); + command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc.UtcDateTime); + command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); + command.Parameters.Add(new NpgsqlParameter("retiredAt", NpgsqlDbType.TimestampTz) + { + Value = record.RetiredAtUtc.HasValue ? record.RetiredAtUtc.Value.UtcDateTime : DBNull.Value + }); + command.Parameters.Add(new NpgsqlParameter("revokedAt", NpgsqlDbType.TimestampTz) + { + Value = record.RevokedAtUtc.HasValue ? record.RevokedAtUtc.Value.UtcDateTime : DBNull.Value + }); + command.Parameters.Add(new NpgsqlParameter("revokeReason", NpgsqlDbType.Text) + { + Value = DBNull.Value + }); + command.Parameters.Add(new NpgsqlParameter("metadata", NpgsqlDbType.Jsonb) + { + Value = "{}" + }); + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogDebug("Upserted issuer key {KeyId} for issuer {IssuerId}.", record.Id, record.IssuerId); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.cs index 11f69b87e..9e09e10ea 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.cs +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerKeyRepository.cs @@ -1,241 +1,22 @@ using Microsoft.Extensions.Logging; -using Npgsql; -using NpgsqlTypes; using StellaOps.IssuerDirectory.Core.Abstractions; -using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Persistence.Postgres; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; /// /// PostgreSQL implementation of the issuer key repository. 
/// -public sealed class PostgresIssuerKeyRepository : IIssuerKeyRepository +public sealed partial class PostgresIssuerKeyRepository : IIssuerKeyRepository { private readonly IssuerDirectoryDataSource _dataSource; private readonly ILogger _logger; - public PostgresIssuerKeyRepository(IssuerDirectoryDataSource dataSource, ILogger logger) + public PostgresIssuerKeyRepository( + IssuerDirectoryDataSource dataSource, + ILogger logger) { _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - public async Task GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata - FROM issuer.issuer_keys - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid AND key_id = @keyId - LIMIT 1 - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - command.Parameters.AddWithValue("keyId", keyId); - - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return null; - } - - return MapToRecord(reader); - } - - public async Task GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata - FROM issuer.issuer_keys - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid AND fingerprint = @fingerprint - LIMIT 1 - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - command.Parameters.AddWithValue("fingerprint", fingerprint); - - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return null; - } - - return MapToRecord(reader); - } - - public async Task> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata - FROM issuer.issuer_keys - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid - ORDER BY created_at 
ASC - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - - return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); - } - - public async Task> ListGlobalAsync(string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata - FROM issuer.issuer_keys - WHERE tenant_id = @globalTenantId::uuid AND issuer_id = @issuerId::uuid - ORDER BY created_at ASC - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("globalTenantId", IssuerTenants.Global); - command.Parameters.AddWithValue("issuerId", issuerId); - - return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); - } - - public async Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "writer", cancellationToken).ConfigureAwait(false); - - const string sql = """ - INSERT INTO issuer.issuer_keys (id, issuer_id, tenant_id, key_id, key_type, public_key, fingerprint, not_before, not_after, status, replaces_key_id, created_at, created_by, updated_at, updated_by, retired_at, revoked_at, revoke_reason, metadata) - VALUES (@id::uuid, @issuerId::uuid, @tenantId::uuid, @keyId, @keyType, @publicKey, @fingerprint, @notBefore, @notAfter, @status, @replacesKeyId, @createdAt, @createdBy, @updatedAt, @updatedBy, @retiredAt, @revokedAt, @revokeReason, @metadata::jsonb) - ON CONFLICT (issuer_id, key_id) - DO UPDATE SET - key_type = EXCLUDED.key_type, - public_key = EXCLUDED.public_key, - fingerprint = EXCLUDED.fingerprint, - not_before = EXCLUDED.not_before, - not_after = EXCLUDED.not_after, - status = EXCLUDED.status, - replaces_key_id = EXCLUDED.replaces_key_id, - updated_at = EXCLUDED.updated_at, - updated_by = EXCLUDED.updated_by, - retired_at = EXCLUDED.retired_at, - revoked_at = EXCLUDED.revoked_at, - revoke_reason = EXCLUDED.revoke_reason, - metadata = EXCLUDED.metadata - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - - command.Parameters.AddWithValue("id", Guid.Parse(record.Id)); - command.Parameters.AddWithValue("issuerId", Guid.Parse(record.IssuerId)); - command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); - command.Parameters.AddWithValue("keyId", record.Id); - command.Parameters.AddWithValue("keyType", MapKeyType(record.Type)); - command.Parameters.AddWithValue("publicKey", record.Material.Value); - command.Parameters.AddWithValue("fingerprint", record.Fingerprint); - command.Parameters.Add(new NpgsqlParameter("notBefore", NpgsqlDbType.TimestampTz) { Value = (object?)null ?? DBNull.Value }); - command.Parameters.Add(new NpgsqlParameter("notAfter", NpgsqlDbType.TimestampTz) { Value = record.ExpiresAtUtc.HasValue ? 
record.ExpiresAtUtc.Value.UtcDateTime : DBNull.Value }); - command.Parameters.AddWithValue("status", record.Status.ToString().ToLowerInvariant()); - command.Parameters.Add(new NpgsqlParameter("replacesKeyId", NpgsqlDbType.Text) { Value = record.ReplacesKeyId ?? (object)DBNull.Value }); - command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc.UtcDateTime); - command.Parameters.AddWithValue("createdBy", record.CreatedBy); - command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc.UtcDateTime); - command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); - command.Parameters.Add(new NpgsqlParameter("retiredAt", NpgsqlDbType.TimestampTz) { Value = record.RetiredAtUtc.HasValue ? record.RetiredAtUtc.Value.UtcDateTime : DBNull.Value }); - command.Parameters.Add(new NpgsqlParameter("revokedAt", NpgsqlDbType.TimestampTz) { Value = record.RevokedAtUtc.HasValue ? record.RevokedAtUtc.Value.UtcDateTime : DBNull.Value }); - command.Parameters.Add(new NpgsqlParameter("revokeReason", NpgsqlDbType.Text) { Value = DBNull.Value }); - command.Parameters.Add(new NpgsqlParameter("metadata", NpgsqlDbType.Jsonb) { Value = "{}" }); - - await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - - _logger.LogDebug("Upserted issuer key {KeyId} for issuer {IssuerId}.", record.Id, record.IssuerId); - } - - private static async Task> ReadAllRecordsAsync(NpgsqlCommand command, CancellationToken cancellationToken) - { - var results = new List(); - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - results.Add(MapToRecord(reader)); - } - return results; - } - - private static IssuerKeyRecord MapToRecord(NpgsqlDataReader reader) - { - var id = reader.GetGuid(0).ToString(); - var issuerId = reader.GetGuid(1).ToString(); - var tenantId = reader.GetGuid(2).ToString(); - var keyId = reader.GetString(3); - var keyType = ParseKeyType(reader.GetString(4)); - var publicKey = reader.GetString(5); - var fingerprint = reader.GetString(6); - var notBefore = reader.IsDBNull(7) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(7), TimeSpan.Zero); - var notAfter = reader.IsDBNull(8) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(8), TimeSpan.Zero); - var status = ParseKeyStatus(reader.GetString(9)); - var replacesKeyId = reader.IsDBNull(10) ? null : reader.GetString(10); - var createdAt = reader.GetDateTime(11); - var createdBy = reader.GetString(12); - var updatedAt = reader.GetDateTime(13); - var updatedBy = reader.GetString(14); - var retiredAt = reader.IsDBNull(15) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(15), TimeSpan.Zero); - var revokedAt = reader.IsDBNull(16) ? 
(DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(16), TimeSpan.Zero); - - return new IssuerKeyRecord - { - Id = keyId, - IssuerId = issuerId, - TenantId = tenantId, - Type = keyType, - Status = status, - Material = new IssuerKeyMaterial("pem", publicKey), - Fingerprint = fingerprint, - CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), - CreatedBy = createdBy, - UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), - UpdatedBy = updatedBy, - ExpiresAtUtc = notAfter, - RetiredAtUtc = retiredAt, - RevokedAtUtc = revokedAt, - ReplacesKeyId = replacesKeyId - }; - } - - private static string MapKeyType(IssuerKeyType type) => type switch - { - IssuerKeyType.Ed25519PublicKey => "ed25519", - IssuerKeyType.X509Certificate => "x509", - IssuerKeyType.DssePublicKey => "dsse", - _ => throw new ArgumentOutOfRangeException(nameof(type), type, "Unsupported key type") - }; - - private static IssuerKeyType ParseKeyType(string value) => value.ToLowerInvariant() switch - { - "ed25519" => IssuerKeyType.Ed25519PublicKey, - "x509" => IssuerKeyType.X509Certificate, - "dsse" => IssuerKeyType.DssePublicKey, - _ => throw new ArgumentException($"Unknown key type: {value}", nameof(value)) - }; - - private static IssuerKeyStatus ParseKeyStatus(string value) => value.ToLowerInvariant() switch - { - "active" => IssuerKeyStatus.Active, - "retired" => IssuerKeyStatus.Retired, - "revoked" => IssuerKeyStatus.Revoked, - _ => throw new ArgumentException($"Unknown key status: {value}", nameof(value)) - }; } diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.ContactSerialization.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.ContactSerialization.cs new file mode 100644 index 000000000..681660c23 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.ContactSerialization.cs @@ -0,0 +1,40 @@ +using System.Text.Json; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + private static string SerializeContact(IssuerContact contact) + { + var doc = new + { + email = contact.Email, + phone = contact.Phone, + website = contact.Website?.ToString(), + timezone = contact.Timezone + }; + + return JsonSerializer.Serialize(doc, _jsonOptions); + } + + private static IssuerContact DeserializeContact(string json) + { + using var doc = JsonDocument.Parse(json); + var root = doc.RootElement; + var email = root.TryGetProperty("email", out var e) && e.ValueKind != JsonValueKind.Null ? e.GetString() : null; + var phone = root.TryGetProperty("phone", out var p) && p.ValueKind != JsonValueKind.Null ? p.GetString() : null; + var websiteStr = root.TryGetProperty("website", out var w) && w.ValueKind != JsonValueKind.Null + ? w.GetString() + : null; + var timezone = root.TryGetProperty("timezone", out var t) && t.ValueKind != JsonValueKind.Null + ? t.GetString() + : null; + + return new IssuerContact( + email, + phone, + string.IsNullOrWhiteSpace(websiteStr) ? 
null : new Uri(websiteStr), + timezone); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.EndpointSerialization.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.EndpointSerialization.cs new file mode 100644 index 000000000..3480ab3ec --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.EndpointSerialization.cs @@ -0,0 +1,47 @@ +using System.Text.Json; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + private static string SerializeEndpoints(IReadOnlyCollection endpoints) + { + var docs = endpoints.Select(endpoint => new + { + kind = endpoint.Kind, + url = endpoint.Url.ToString(), + format = endpoint.Format, + requiresAuthentication = endpoint.RequiresAuthentication + }).ToList(); + + return JsonSerializer.Serialize(docs, _jsonOptions); + } + + private static IReadOnlyCollection DeserializeEndpoints(string json) + { + var results = new List(); + using var doc = JsonDocument.Parse(json); + if (doc.RootElement.ValueKind != JsonValueKind.Array) + { + return results; + } + + foreach (var elem in doc.RootElement.EnumerateArray()) + { + var kind = elem.TryGetProperty("kind", out var k) ? k.GetString() : null; + var urlStr = elem.TryGetProperty("url", out var u) ? u.GetString() : null; + var format = elem.TryGetProperty("format", out var f) && f.ValueKind != JsonValueKind.Null + ? f.GetString() + : null; + var requiresAuth = elem.TryGetProperty("requiresAuthentication", out var ra) && ra.GetBoolean(); + + if (!string.IsNullOrWhiteSpace(kind) && !string.IsNullOrWhiteSpace(urlStr)) + { + results.Add(new IssuerEndpoint(kind, new Uri(urlStr), format, requiresAuth)); + } + } + + return results; + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Json.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Json.cs new file mode 100644 index 000000000..18e524e3a --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Json.cs @@ -0,0 +1,12 @@ +using System.Text.Json; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + private static readonly JsonSerializerOptions _jsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false + }; +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Mapping.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Mapping.cs new file mode 100644 index 000000000..d787714a3 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Mapping.cs @@ -0,0 +1,47 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + private static IssuerRecord MapToRecord(NpgsqlDataReader reader) + { + var id = 
reader.GetGuid(0).ToString(); + var tenantId = reader.GetGuid(1).ToString(); + var name = reader.GetString(2); + var displayName = reader.GetString(3); + var description = reader.IsDBNull(4) ? null : reader.GetString(4); + var endpointsJson = reader.GetString(5); + var contactJson = reader.GetString(6); + var metadataJson = reader.GetString(7); + var tags = reader.GetFieldValue<string[]>(8); + var isSystemSeed = reader.GetBoolean(10); + var createdAt = reader.GetDateTime(11); + var createdBy = reader.GetString(12); + var updatedAt = reader.GetDateTime(13); + var updatedBy = reader.GetString(14); + + var contact = DeserializeContact(contactJson); + var metadata = DeserializeMetadata(metadataJson); + var endpoints = DeserializeEndpoints(endpointsJson); + + return new IssuerRecord + { + Id = id, + TenantId = tenantId, + Slug = name, + DisplayName = displayName, + Description = description, + Contact = contact, + Metadata = metadata, + Endpoints = endpoints, + Tags = tags, + IsSystemSeed = isSystemSeed, + CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), + CreatedBy = createdBy, + UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), + UpdatedBy = updatedBy + }; + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.MetadataSerialization.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.MetadataSerialization.cs new file mode 100644 index 000000000..246511fef --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.MetadataSerialization.cs @@ -0,0 +1,73 @@ +using System.Text.Json; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + private static string SerializeMetadata(IssuerMetadata metadata) + { + var doc = new + { + cveOrgId = metadata.CveOrgId, + csafPublisherId = metadata.CsafPublisherId, + securityAdvisoriesUrl = metadata.SecurityAdvisoriesUrl?.ToString(), + catalogUrl = metadata.CatalogUrl?.ToString(), + languages = metadata.SupportedLanguages.ToList(), + attributes = new Dictionary<string, string>(metadata.Attributes) + }; + + return JsonSerializer.Serialize(doc, _jsonOptions); + } + + private static IssuerMetadata DeserializeMetadata(string json) + { + using var doc = JsonDocument.Parse(json); + var root = doc.RootElement; + var cveOrgId = root.TryGetProperty("cveOrgId", out var c) && c.ValueKind != JsonValueKind.Null + ? c.GetString() + : null; + var csafPublisherId = root.TryGetProperty("csafPublisherId", out var cp) && cp.ValueKind != JsonValueKind.Null + ? cp.GetString() + : null; + var securityAdvisoriesUrlStr = root.TryGetProperty("securityAdvisoriesUrl", out var sa) && + sa.ValueKind != JsonValueKind.Null + ? sa.GetString() + : null; + var catalogUrlStr = root.TryGetProperty("catalogUrl", out var cu) && cu.ValueKind != JsonValueKind.Null + ?
cu.GetString() + : null; + + var languages = new List<string>(); + if (root.TryGetProperty("languages", out var langs) && langs.ValueKind == JsonValueKind.Array) + { + foreach (var lang in langs.EnumerateArray()) + { + if (lang.ValueKind == JsonValueKind.String) + { + languages.Add(lang.GetString()!); + } + } + } + + var attributes = new Dictionary<string, string>(); + if (root.TryGetProperty("attributes", out var attrs) && attrs.ValueKind == JsonValueKind.Object) + { + foreach (var prop in attrs.EnumerateObject()) + { + if (prop.Value.ValueKind == JsonValueKind.String) + { + attributes[prop.Name] = prop.Value.GetString()!; + } + } + } + + return new IssuerMetadata( + cveOrgId, + csafPublisherId, + string.IsNullOrWhiteSpace(securityAdvisoriesUrlStr) ? null : new Uri(securityAdvisoriesUrlStr), + string.IsNullOrWhiteSpace(catalogUrlStr) ? null : new Uri(catalogUrlStr), + languages, + attributes); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Read.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Read.cs new file mode 100644 index 000000000..7a28a962e --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Read.cs @@ -0,0 +1,96 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + public async Task<IssuerRecord?> GetAsync( + string tenantId, + string issuerId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, + is_system_seed, created_at, created_by, updated_at, updated_by + FROM issuer.issuers + WHERE tenant_id = @tenantId::uuid AND id = @issuerId::uuid + LIMIT 1 + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return null; + } + + return MapToRecord(reader); + } + + public async Task<IReadOnlyCollection<IssuerRecord>> ListAsync( + string tenantId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, + is_system_seed, created_at, created_by, updated_at, updated_by + FROM issuer.issuers + WHERE tenant_id = @tenantId::uuid + ORDER BY name ASC + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + + return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); + } + + public async Task<IReadOnlyCollection<IssuerRecord>> ListGlobalAsync(CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenSystemConnectionAsync(cancellationToken) + 
.ConfigureAwait(false); + + const string sql = """ + SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, + is_system_seed, created_at, created_by, updated_at, updated_by + FROM issuer.issuers + WHERE tenant_id = @globalTenantId::uuid + ORDER BY name ASC + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("globalTenantId", IssuerTenants.Global); + + return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); + } + + private static async Task<IReadOnlyCollection<IssuerRecord>> ReadAllRecordsAsync( + NpgsqlCommand command, + CancellationToken cancellationToken) + { + var results = new List<IssuerRecord>(); + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + results.Add(MapToRecord(reader)); + } + + return results; + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Write.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Write.cs new file mode 100644 index 000000000..f9f365d81 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.Write.cs @@ -0,0 +1,95 @@ +using Microsoft.Extensions.Logging; +using Npgsql; +using NpgsqlTypes; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerRepository +{ + public async Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(record); + + await using var connection = await _dataSource + .OpenConnectionAsync(record.TenantId, "writer", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + INSERT INTO issuer.issuers (id, tenant_id, name, display_name, description, endpoints, contact, metadata, + tags, status, is_system_seed, created_at, created_by, updated_at, updated_by) + VALUES (@id::uuid, @tenantId::uuid, @name, @displayName, @description, @endpoints::jsonb, @contact::jsonb, + @metadata::jsonb, @tags, @status, @isSystemSeed, @createdAt, @createdBy, @updatedAt, @updatedBy) + ON CONFLICT (tenant_id, name) + DO UPDATE SET + display_name = EXCLUDED.display_name, + description = EXCLUDED.description, + endpoints = EXCLUDED.endpoints, + contact = EXCLUDED.contact, + metadata = EXCLUDED.metadata, + tags = EXCLUDED.tags, + status = EXCLUDED.status, + updated_at = EXCLUDED.updated_at, + updated_by = EXCLUDED.updated_by + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + + command.Parameters.AddWithValue("id", Guid.Parse(record.Id)); + command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); + command.Parameters.AddWithValue("name", record.Slug); + command.Parameters.AddWithValue("displayName", record.DisplayName); + command.Parameters.Add(new NpgsqlParameter("description", NpgsqlDbType.Text) + { + Value = record.Description ?? 
(object)DBNull.Value + }); + command.Parameters.Add(new NpgsqlParameter("endpoints", NpgsqlDbType.Jsonb) + { + Value = SerializeEndpoints(record.Endpoints) + }); + command.Parameters.Add(new NpgsqlParameter("contact", NpgsqlDbType.Jsonb) + { + Value = SerializeContact(record.Contact) + }); + command.Parameters.Add(new NpgsqlParameter("metadata", NpgsqlDbType.Jsonb) + { + Value = SerializeMetadata(record.Metadata) + }); + command.Parameters.Add(new NpgsqlParameter("tags", NpgsqlDbType.Array | NpgsqlDbType.Text) + { + Value = record.Tags.ToArray() + }); + command.Parameters.AddWithValue("status", "active"); + command.Parameters.AddWithValue("isSystemSeed", record.IsSystemSeed); + command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc); + command.Parameters.AddWithValue("createdBy", record.CreatedBy); + command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc); + command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogDebug("Upserted issuer {IssuerId} for tenant {TenantId}.", record.Id, record.TenantId); + } + + public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "writer", cancellationToken) + .ConfigureAwait(false); + + const string sql = "DELETE FROM issuer.issuers WHERE tenant_id = @tenantId::uuid AND id = @issuerId::uuid"; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + + var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + _logger.LogDebug( + "Deleted issuer {IssuerId} for tenant {TenantId}. Rows affected: {Rows}.", + issuerId, + tenantId, + rowsAffected); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.cs index e6b47adda..395168e4e 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.cs +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerRepository.cs @@ -1,318 +1,22 @@ - using Microsoft.Extensions.Logging; -using Npgsql; -using NpgsqlTypes; using StellaOps.IssuerDirectory.Core.Abstractions; -using StellaOps.IssuerDirectory.Core.Domain; -using System.Text.Json; +using StellaOps.IssuerDirectory.Persistence.Postgres; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; /// /// PostgreSQL implementation of the issuer repository. /// -public sealed class PostgresIssuerRepository : IIssuerRepository +public sealed partial class PostgresIssuerRepository : IIssuerRepository { private readonly IssuerDirectoryDataSource _dataSource; private readonly ILogger _logger; - private static readonly JsonSerializerOptions JsonOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - WriteIndented = false - }; - public PostgresIssuerRepository(IssuerDirectoryDataSource dataSource, ILogger logger) + public PostgresIssuerRepository( + IssuerDirectoryDataSource dataSource, + ILogger logger) { _dataSource = dataSource ?? 
throw new ArgumentNullException(nameof(dataSource)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - - public async Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, is_system_seed, created_at, created_by, updated_at, updated_by - FROM issuer.issuers - WHERE tenant_id = @tenantId::uuid AND id = @issuerId::uuid - LIMIT 1 - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return null; - } - - return MapToRecord(reader); - } - - public async Task> ListAsync(string tenantId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, is_system_seed, created_at, created_by, updated_at, updated_by - FROM issuer.issuers - WHERE tenant_id = @tenantId::uuid - ORDER BY name ASC - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - - return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); - } - - public async Task> ListGlobalAsync(CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, is_system_seed, created_at, created_by, updated_at, updated_by - FROM issuer.issuers - WHERE tenant_id = @globalTenantId::uuid - ORDER BY name ASC - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("globalTenantId", IssuerTenants.Global); - - return await ReadAllRecordsAsync(command, cancellationToken).ConfigureAwait(false); - } - - public async Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "writer", cancellationToken).ConfigureAwait(false); - - const string sql = """ - INSERT INTO issuer.issuers (id, tenant_id, name, display_name, description, endpoints, contact, metadata, tags, status, is_system_seed, created_at, created_by, updated_at, updated_by) - VALUES (@id::uuid, @tenantId::uuid, @name, @displayName, @description, @endpoints::jsonb, @contact::jsonb, @metadata::jsonb, @tags, @status, @isSystemSeed, @createdAt, @createdBy, @updatedAt, @updatedBy) - ON CONFLICT (tenant_id, name) - DO UPDATE SET - display_name = EXCLUDED.display_name, - description = EXCLUDED.description, - endpoints = EXCLUDED.endpoints, - contact = 
EXCLUDED.contact, - metadata = EXCLUDED.metadata, - tags = EXCLUDED.tags, - status = EXCLUDED.status, - updated_at = EXCLUDED.updated_at, - updated_by = EXCLUDED.updated_by - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - - command.Parameters.AddWithValue("id", Guid.Parse(record.Id)); - command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); - command.Parameters.AddWithValue("name", record.Slug); - command.Parameters.AddWithValue("displayName", record.DisplayName); - command.Parameters.Add(new NpgsqlParameter("description", NpgsqlDbType.Text) { Value = record.Description ?? (object)DBNull.Value }); - command.Parameters.Add(new NpgsqlParameter("endpoints", NpgsqlDbType.Jsonb) { Value = SerializeEndpoints(record.Endpoints) }); - command.Parameters.Add(new NpgsqlParameter("contact", NpgsqlDbType.Jsonb) { Value = SerializeContact(record.Contact) }); - command.Parameters.Add(new NpgsqlParameter("metadata", NpgsqlDbType.Jsonb) { Value = SerializeMetadata(record.Metadata) }); - command.Parameters.Add(new NpgsqlParameter("tags", NpgsqlDbType.Array | NpgsqlDbType.Text) { Value = record.Tags.ToArray() }); - command.Parameters.AddWithValue("status", "active"); - command.Parameters.AddWithValue("isSystemSeed", record.IsSystemSeed); - command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc); - command.Parameters.AddWithValue("createdBy", record.CreatedBy); - command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc); - command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); - - await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - - _logger.LogDebug("Upserted issuer {IssuerId} for tenant {TenantId}.", record.Id, record.TenantId); - } - - public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false); - - const string sql = "DELETE FROM issuer.issuers WHERE tenant_id = @tenantId::uuid AND id = @issuerId::uuid"; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - - var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Deleted issuer {IssuerId} for tenant {TenantId}. Rows affected: {Rows}.", issuerId, tenantId, rowsAffected); - } - - private static async Task> ReadAllRecordsAsync(NpgsqlCommand command, CancellationToken cancellationToken) - { - var results = new List(); - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - results.Add(MapToRecord(reader)); - } - return results; - } - - private static IssuerRecord MapToRecord(NpgsqlDataReader reader) - { - var id = reader.GetGuid(0).ToString(); - var tenantId = reader.GetGuid(1).ToString(); - var name = reader.GetString(2); - var displayName = reader.GetString(3); - var description = reader.IsDBNull(4) ? 
null : reader.GetString(4); - var endpointsJson = reader.GetString(5); - var contactJson = reader.GetString(6); - var metadataJson = reader.GetString(7); - var tags = reader.GetFieldValue(8); - var isSystemSeed = reader.GetBoolean(10); - var createdAt = reader.GetDateTime(11); - var createdBy = reader.GetString(12); - var updatedAt = reader.GetDateTime(13); - var updatedBy = reader.GetString(14); - - var contact = DeserializeContact(contactJson); - var metadata = DeserializeMetadata(metadataJson); - var endpoints = DeserializeEndpoints(endpointsJson); - - return new IssuerRecord - { - Id = id, - TenantId = tenantId, - Slug = name, - DisplayName = displayName, - Description = description, - Contact = contact, - Metadata = metadata, - Endpoints = endpoints, - Tags = tags, - IsSystemSeed = isSystemSeed, - CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), - CreatedBy = createdBy, - UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), - UpdatedBy = updatedBy - }; - } - - private static string SerializeContact(IssuerContact contact) - { - var doc = new - { - email = contact.Email, - phone = contact.Phone, - website = contact.Website?.ToString(), - timezone = contact.Timezone - }; - return JsonSerializer.Serialize(doc, JsonOptions); - } - - private static IssuerContact DeserializeContact(string json) - { - using var doc = JsonDocument.Parse(json); - var root = doc.RootElement; - var email = root.TryGetProperty("email", out var e) && e.ValueKind != JsonValueKind.Null ? e.GetString() : null; - var phone = root.TryGetProperty("phone", out var p) && p.ValueKind != JsonValueKind.Null ? p.GetString() : null; - var websiteStr = root.TryGetProperty("website", out var w) && w.ValueKind != JsonValueKind.Null ? w.GetString() : null; - var timezone = root.TryGetProperty("timezone", out var t) && t.ValueKind != JsonValueKind.Null ? t.GetString() : null; - return new IssuerContact(email, phone, string.IsNullOrWhiteSpace(websiteStr) ? null : new Uri(websiteStr), timezone); - } - - private static string SerializeMetadata(IssuerMetadata metadata) - { - var doc = new - { - cveOrgId = metadata.CveOrgId, - csafPublisherId = metadata.CsafPublisherId, - securityAdvisoriesUrl = metadata.SecurityAdvisoriesUrl?.ToString(), - catalogUrl = metadata.CatalogUrl?.ToString(), - languages = metadata.SupportedLanguages.ToList(), - attributes = new Dictionary(metadata.Attributes) - }; - return JsonSerializer.Serialize(doc, JsonOptions); - } - - private static IssuerMetadata DeserializeMetadata(string json) - { - using var doc = JsonDocument.Parse(json); - var root = doc.RootElement; - var cveOrgId = root.TryGetProperty("cveOrgId", out var c) && c.ValueKind != JsonValueKind.Null ? c.GetString() : null; - var csafPublisherId = root.TryGetProperty("csafPublisherId", out var cp) && cp.ValueKind != JsonValueKind.Null ? cp.GetString() : null; - var securityAdvisoriesUrlStr = root.TryGetProperty("securityAdvisoriesUrl", out var sa) && sa.ValueKind != JsonValueKind.Null ? sa.GetString() : null; - var catalogUrlStr = root.TryGetProperty("catalogUrl", out var cu) && cu.ValueKind != JsonValueKind.Null ? 
cu.GetString() : null; - - var languages = new List(); - if (root.TryGetProperty("languages", out var langs) && langs.ValueKind == JsonValueKind.Array) - { - foreach (var lang in langs.EnumerateArray()) - { - if (lang.ValueKind == JsonValueKind.String) - { - languages.Add(lang.GetString()!); - } - } - } - - var attributes = new Dictionary(); - if (root.TryGetProperty("attributes", out var attrs) && attrs.ValueKind == JsonValueKind.Object) - { - foreach (var prop in attrs.EnumerateObject()) - { - if (prop.Value.ValueKind == JsonValueKind.String) - { - attributes[prop.Name] = prop.Value.GetString()!; - } - } - } - - return new IssuerMetadata( - cveOrgId, - csafPublisherId, - string.IsNullOrWhiteSpace(securityAdvisoriesUrlStr) ? null : new Uri(securityAdvisoriesUrlStr), - string.IsNullOrWhiteSpace(catalogUrlStr) ? null : new Uri(catalogUrlStr), - languages, - attributes); - } - - private static string SerializeEndpoints(IReadOnlyCollection endpoints) - { - var docs = endpoints.Select(e => new - { - kind = e.Kind, - url = e.Url.ToString(), - format = e.Format, - requiresAuthentication = e.RequiresAuthentication - }).ToList(); - return JsonSerializer.Serialize(docs, JsonOptions); - } - - private static IReadOnlyCollection DeserializeEndpoints(string json) - { - var results = new List(); - using var doc = JsonDocument.Parse(json); - if (doc.RootElement.ValueKind != JsonValueKind.Array) - { - return results; - } - - foreach (var elem in doc.RootElement.EnumerateArray()) - { - var kind = elem.TryGetProperty("kind", out var k) ? k.GetString() : null; - var urlStr = elem.TryGetProperty("url", out var u) ? u.GetString() : null; - var format = elem.TryGetProperty("format", out var f) && f.ValueKind != JsonValueKind.Null ? f.GetString() : null; - var requiresAuth = elem.TryGetProperty("requiresAuthentication", out var ra) && ra.GetBoolean(); - - if (!string.IsNullOrWhiteSpace(kind) && !string.IsNullOrWhiteSpace(urlStr)) - { - results.Add(new IssuerEndpoint(kind, new Uri(urlStr), format, requiresAuth)); - } - } - - return results; - } } diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Mapping.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Mapping.cs new file mode 100644 index 000000000..8af5b9e87 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Mapping.cs @@ -0,0 +1,31 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerTrustRepository +{ + private static IssuerTrustOverrideRecord MapToRecord(NpgsqlDataReader reader) + { + var issuerId = reader.GetGuid(1).ToString(); + var tenantId = reader.GetGuid(2).ToString(); + var weight = reader.GetDecimal(3); + var rationale = reader.IsDBNull(4) ? 
null : reader.GetString(4); + var createdAt = reader.GetDateTime(6); + var createdBy = reader.GetString(7); + var updatedAt = reader.GetDateTime(8); + var updatedBy = reader.GetString(9); + + return new IssuerTrustOverrideRecord + { + IssuerId = issuerId, + TenantId = tenantId, + Weight = weight, + Reason = rationale, + CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), + CreatedBy = createdBy, + UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), + UpdatedBy = updatedBy + }; + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Read.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Read.cs new file mode 100644 index 000000000..b9861b667 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Read.cs @@ -0,0 +1,38 @@ +using Npgsql; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerTrustRepository +{ + public async Task GetAsync( + string tenantId, + string issuerId, + CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "reader", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + SELECT id, issuer_id, tenant_id, weight, rationale, expires_at, created_at, created_by, updated_at, + updated_by + FROM issuer.trust_overrides + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid + LIMIT 1 + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + + await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return null; + } + + return MapToRecord(reader); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Write.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Write.cs new file mode 100644 index 000000000..d973f6ac8 --- /dev/null +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.Write.cs @@ -0,0 +1,74 @@ +using Microsoft.Extensions.Logging; +using Npgsql; +using NpgsqlTypes; +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; + +public sealed partial class PostgresIssuerTrustRepository +{ + public async Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(record); + + await using var connection = await _dataSource + .OpenConnectionAsync(record.TenantId, "writer", cancellationToken) + .ConfigureAwait(false); + + const string sql = """ + INSERT INTO issuer.trust_overrides (issuer_id, tenant_id, weight, rationale, created_at, created_by, + updated_at, updated_by) + VALUES (@issuerId::uuid, @tenantId::uuid, @weight, @rationale, @createdAt, @createdBy, @updatedAt, + @updatedBy) + ON CONFLICT (issuer_id, tenant_id) + DO UPDATE SET + 
weight = EXCLUDED.weight, + rationale = EXCLUDED.rationale, + updated_at = EXCLUDED.updated_at, + updated_by = EXCLUDED.updated_by + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + + command.Parameters.AddWithValue("issuerId", Guid.Parse(record.IssuerId)); + command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); + command.Parameters.AddWithValue("weight", record.Weight); + command.Parameters.Add(new NpgsqlParameter("rationale", NpgsqlDbType.Text) + { + Value = record.Reason ?? (object)DBNull.Value + }); + command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc.UtcDateTime); + command.Parameters.AddWithValue("createdBy", record.CreatedBy); + command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc.UtcDateTime); + command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogDebug( + "Upserted trust override for issuer {IssuerId} in tenant {TenantId}.", + record.IssuerId, + record.TenantId); + } + + public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + await using var connection = await _dataSource + .OpenConnectionAsync(tenantId, "writer", cancellationToken) + .ConfigureAwait(false); + + const string sql = "DELETE FROM issuer.trust_overrides WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid"; + + await using var command = new NpgsqlCommand(sql, connection); + command.CommandTimeout = _dataSource.CommandTimeoutSeconds; + command.Parameters.AddWithValue("tenantId", tenantId); + command.Parameters.AddWithValue("issuerId", issuerId); + + var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + _logger.LogDebug( + "Deleted trust override for issuer {IssuerId} in tenant {TenantId}. Rows affected: {Rows}.", + issuerId, + tenantId, + rowsAffected); + } +} diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.cs b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.cs index 0e0833524..59d2dc8fe 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.cs +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/Postgres/Repositories/PostgresIssuerTrustRepository.cs @@ -1,120 +1,22 @@ using Microsoft.Extensions.Logging; -using Npgsql; -using NpgsqlTypes; using StellaOps.IssuerDirectory.Core.Abstractions; -using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Persistence.Postgres; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; /// /// PostgreSQL implementation of the issuer trust repository. /// -public sealed class PostgresIssuerTrustRepository : IIssuerTrustRepository +public sealed partial class PostgresIssuerTrustRepository : IIssuerTrustRepository { private readonly IssuerDirectoryDataSource _dataSource; private readonly ILogger _logger; - public PostgresIssuerTrustRepository(IssuerDirectoryDataSource dataSource, ILogger logger) + public PostgresIssuerTrustRepository( + IssuerDirectoryDataSource dataSource, + ILogger logger) { _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource)); _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } - - public async Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false); - - const string sql = """ - SELECT id, issuer_id, tenant_id, weight, rationale, expires_at, created_at, created_by, updated_at, updated_by - FROM issuer.trust_overrides - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid - LIMIT 1 - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - - await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) - { - return null; - } - - return MapToRecord(reader); - } - - public async Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "writer", cancellationToken).ConfigureAwait(false); - - const string sql = """ - INSERT INTO issuer.trust_overrides (issuer_id, tenant_id, weight, rationale, created_at, created_by, updated_at, updated_by) - VALUES (@issuerId::uuid, @tenantId::uuid, @weight, @rationale, @createdAt, @createdBy, @updatedAt, @updatedBy) - ON CONFLICT (issuer_id, tenant_id) - DO UPDATE SET - weight = EXCLUDED.weight, - rationale = EXCLUDED.rationale, - updated_at = EXCLUDED.updated_at, - updated_by = EXCLUDED.updated_by - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - - command.Parameters.AddWithValue("issuerId", Guid.Parse(record.IssuerId)); - command.Parameters.AddWithValue("tenantId", Guid.Parse(record.TenantId)); - command.Parameters.AddWithValue("weight", record.Weight); - command.Parameters.Add(new NpgsqlParameter("rationale", NpgsqlDbType.Text) { Value = record.Reason ?? (object)DBNull.Value }); - command.Parameters.AddWithValue("createdAt", record.CreatedAtUtc.UtcDateTime); - command.Parameters.AddWithValue("createdBy", record.CreatedBy); - command.Parameters.AddWithValue("updatedAt", record.UpdatedAtUtc.UtcDateTime); - command.Parameters.AddWithValue("updatedBy", record.UpdatedBy); - - await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - - _logger.LogDebug("Upserted trust override for issuer {IssuerId} in tenant {TenantId}.", record.IssuerId, record.TenantId); - } - - public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false); - - const string sql = "DELETE FROM issuer.trust_overrides WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid"; - - await using var command = new NpgsqlCommand(sql, connection); - command.CommandTimeout = _dataSource.CommandTimeoutSeconds; - command.Parameters.AddWithValue("tenantId", tenantId); - command.Parameters.AddWithValue("issuerId", issuerId); - - var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Deleted trust override for issuer {IssuerId} in tenant {TenantId}. 
Rows affected: {Rows}.", issuerId, tenantId, rowsAffected); - } - - private static IssuerTrustOverrideRecord MapToRecord(NpgsqlDataReader reader) - { - var issuerId = reader.GetGuid(1).ToString(); - var tenantId = reader.GetGuid(2).ToString(); - var weight = reader.GetDecimal(3); - var rationale = reader.IsDBNull(4) ? null : reader.GetString(4); - var createdAt = reader.GetDateTime(6); - var createdBy = reader.GetString(7); - var updatedAt = reader.GetDateTime(8); - var updatedBy = reader.GetString(9); - - return new IssuerTrustOverrideRecord - { - IssuerId = issuerId, - TenantId = tenantId, - Weight = weight, - Reason = rationale, - CreatedAtUtc = new DateTimeOffset(createdAt, TimeSpan.Zero), - CreatedBy = createdBy, - UpdatedAtUtc = new DateTimeOffset(updatedAt, TimeSpan.Zero), - UpdatedBy = updatedBy - }; - } } diff --git a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/TASKS.md b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/TASKS.md index 64a4b37e0..0890f1bcd 100644 --- a/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/TASKS.md +++ b/src/IssuerDirectory/__Libraries/StellaOps.IssuerDirectory.Persistence/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0376-T | DONE | Revalidated 2026-01-07; test coverage audit for IssuerDirectory.Persistence. | | AUDIT-0376-A | TODO | Pending approval (revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-04: Split repositories into partials, removed service locator registration, expanded persistence tests (SPRINT_20260130_002). | diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Helpers.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Helpers.cs new file mode 100644 index 000000000..e265e0059 --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Helpers.cs @@ -0,0 +1,47 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public sealed partial class IssuerAuditSinkTests +{ + private async Task SeedIssuerAsync() + { + var issuerId = Guid.NewGuid().ToString(); + var now = DateTimeOffset.UtcNow; + var issuer = new IssuerRecord + { + Id = issuerId, + TenantId = _tenantId, + Slug = $"test-issuer-{Guid.NewGuid():N}", + DisplayName = "Test Issuer", + Description = "Test issuer for audit tests", + Contact = new IssuerContact(null, null, null, null), + Metadata = new IssuerMetadata(null, null, null, null, [], new Dictionary()), + Endpoints = [], + Tags = [], + IsSystemSeed = false, + CreatedAtUtc = now, + CreatedBy = "test@test.com", + UpdatedAtUtc = now, + UpdatedBy = "test@test.com" + }; + await _issuerRepository.UpsertAsync(issuer, CancellationToken.None); + return issuerId; + } + + private IssuerAuditEntry CreateAuditEntry( + string action, + string? reason, + IReadOnlyDictionary? metadata = null, + DateTimeOffset? timestamp = null) + { + return new IssuerAuditEntry( + _tenantId, + _issuerId, + action, + timestamp ?? 
DateTimeOffset.UtcNow, + "test@test.com", + reason, + metadata); + } +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Metadata.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Metadata.cs new file mode 100644 index 000000000..7f6f3656f --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Metadata.cs @@ -0,0 +1,43 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public sealed partial class IssuerAuditSinkTests +{ + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsMetadataAsync() + { + var metadata = new Dictionary + { + ["oldSlug"] = "old-issuer", + ["newSlug"] = "new-issuer" + }; + var entry = CreateAuditEntry("issuer.slug.changed", "Slug updated", metadata); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.Details.Should().ContainKey("oldSlug"); + persisted.Details["oldSlug"].Should().Be("old-issuer"); + persisted.Details.Should().ContainKey("newSlug"); + persisted.Details["newSlug"].Should().Be("new-issuer"); + } + + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsEmptyMetadataAsync() + { + var entry = CreateAuditEntry("issuer.deleted", "Issuer removed"); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.Details.Should().NotBeNull(); + persisted.Details.Should().BeEmpty(); + } +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Queries.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Queries.cs new file mode 100644 index 000000000..2dfd2e162 --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Queries.cs @@ -0,0 +1,65 @@ +using Npgsql; +using System.Text.Json; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public sealed partial class IssuerAuditSinkTests +{ + private async Task ReadAuditEntryAsync(string tenantId, string issuerId) + { + await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", CancellationToken.None); + + const string sql = """ + SELECT actor, action, reason, details, occurred_at + FROM issuer.audit + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid + ORDER BY occurred_at DESC + LIMIT 1 + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.Parameters.AddWithValue("tenantId", Guid.Parse(tenantId)); + command.Parameters.AddWithValue("issuerId", Guid.Parse(issuerId)); + + await using var reader = await command.ExecuteReaderAsync(); + if (!await reader.ReadAsync()) + { + return null; + } + + var detailsJson = reader.GetString(3); + var details = JsonSerializer.Deserialize>(detailsJson) ?? []; + + return new AuditEntryDto( + reader.GetString(0), + reader.GetString(1), + reader.IsDBNull(2) ? 
null : reader.GetString(2), + details, + reader.GetDateTime(4)); + } + + private async Task CountAuditEntriesAsync(string tenantId, string issuerId) + { + await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", CancellationToken.None); + + const string sql = """ + SELECT COUNT(*) + FROM issuer.audit + WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid + """; + + await using var command = new NpgsqlCommand(sql, connection); + command.Parameters.AddWithValue("tenantId", Guid.Parse(tenantId)); + command.Parameters.AddWithValue("issuerId", Guid.Parse(issuerId)); + + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt32(result); + } + + private sealed record AuditEntryDto( + string Actor, + string Action, + string? Reason, + Dictionary Details, + DateTime OccurredAt); +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.ReasonAndTime.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.ReasonAndTime.cs new file mode 100644 index 000000000..7a29037c0 --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.ReasonAndTime.cs @@ -0,0 +1,51 @@ +using FluentAssertions; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public sealed partial class IssuerAuditSinkTests +{ + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsNullReasonAsync() + { + var entry = CreateAuditEntry("issuer.updated", null); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.Reason.Should().BeNull(); + } + + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsTimestampCorrectlyAsync() + { + var now = DateTimeOffset.UtcNow; + var entry = CreateAuditEntry("issuer.key.added", "Key added", timestamp: now); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.OccurredAt.Should().BeCloseTo(now.UtcDateTime, TimeSpan.FromSeconds(1)); + } + + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsMultipleEntriesAsync() + { + var entry1 = CreateAuditEntry("issuer.created", "Created"); + var entry2 = CreateAuditEntry("issuer.updated", "Updated"); + var entry3 = CreateAuditEntry("issuer.key.added", "Key added"); + + await _auditSink.WriteAsync(entry1, CancellationToken.None); + await _auditSink.WriteAsync(entry2, CancellationToken.None); + await _auditSink.WriteAsync(entry3, CancellationToken.None); + + var count = await CountAuditEntriesAsync(_tenantId, _issuerId); + count.Should().Be(3); + } +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Write.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Write.cs new file mode 100644 index 000000000..a3c3367b4 --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.Write.cs @@ -0,0 +1,44 @@ +using FluentAssertions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public 
sealed partial class IssuerAuditSinkTests +{ + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsAuditEntryAsync() + { + var entry = CreateAuditEntry("issuer.created", "Issuer was created"); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.Action.Should().Be("issuer.created"); + persisted.Reason.Should().Be("Issuer was created"); + persisted.Actor.Should().Be("test@test.com"); + } + + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task Write_PersistsActorCorrectlyAsync() + { + var entry = new IssuerAuditEntry( + _tenantId, + _issuerId, + "issuer.trust.changed", + DateTimeOffset.UtcNow, + "admin@company.com", + "Trust level modified", + null); + + await _auditSink.WriteAsync(entry, CancellationToken.None); + + var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); + persisted.Should().NotBeNull(); + persisted!.Actor.Should().Be("admin@company.com"); + } +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.cs index b4e88f1a2..01cf2c033 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.cs +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerAuditSinkTests.cs @@ -1,18 +1,13 @@ -using System.Text.Json; -using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; -using Npgsql; using StellaOps.Infrastructure.Postgres.Options; -using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Persistence.Postgres; using StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; using Xunit; - -using StellaOps.TestKit; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; [Collection(IssuerDirectoryPostgresCollection.Name)] -public sealed class IssuerAuditSinkTests : IAsyncLifetime +public sealed partial class IssuerAuditSinkTests : IAsyncLifetime { private readonly IssuerDirectoryPostgresFixture _fixture; private readonly PostgresIssuerRepository _issuerRepository; @@ -42,219 +37,4 @@ public sealed class IssuerAuditSinkTests : IAsyncLifetime } public ValueTask DisposeAsync() => ValueTask.CompletedTask; - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsAuditEntry() - { - var entry = CreateAuditEntry("issuer.created", "Issuer was created"); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.Action.Should().Be("issuer.created"); - persisted.Reason.Should().Be("Issuer was created"); - persisted.Actor.Should().Be("test@test.com"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsMetadata() - { - var metadata = new Dictionary - { - ["oldSlug"] = "old-issuer", - ["newSlug"] = "new-issuer" - }; - var entry = CreateAuditEntry("issuer.slug.changed", "Slug updated", metadata); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.Details.Should().ContainKey("oldSlug"); - persisted.Details["oldSlug"].Should().Be("old-issuer"); - 
persisted.Details.Should().ContainKey("newSlug"); - persisted.Details["newSlug"].Should().Be("new-issuer"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsEmptyMetadata() - { - var entry = CreateAuditEntry("issuer.deleted", "Issuer removed"); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.Details.Should().NotBeNull(); - persisted.Details.Should().BeEmpty(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsNullReason() - { - var entry = CreateAuditEntry("issuer.updated", null); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.Reason.Should().BeNull(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsTimestampCorrectly() - { - var now = DateTimeOffset.UtcNow; - var entry = CreateAuditEntry("issuer.key.added", "Key added", timestamp: now); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.OccurredAt.Should().BeCloseTo(now.UtcDateTime, TimeSpan.FromSeconds(1)); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsMultipleEntriesForSameIssuer() - { - var entry1 = CreateAuditEntry("issuer.created", "Created"); - var entry2 = CreateAuditEntry("issuer.updated", "Updated"); - var entry3 = CreateAuditEntry("issuer.key.added", "Key added"); - - await _auditSink.WriteAsync(entry1, CancellationToken.None); - await _auditSink.WriteAsync(entry2, CancellationToken.None); - await _auditSink.WriteAsync(entry3, CancellationToken.None); - - var count = await CountAuditEntriesAsync(_tenantId, _issuerId); - count.Should().Be(3); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task WriteAsync_PersistsActorCorrectly() - { - var entry = new IssuerAuditEntry( - _tenantId, - _issuerId, - "issuer.trust.changed", - DateTimeOffset.UtcNow, - "admin@company.com", - "Trust level modified", - null); - - await _auditSink.WriteAsync(entry, CancellationToken.None); - - var persisted = await ReadAuditEntryAsync(entry.TenantId, entry.IssuerId); - persisted.Should().NotBeNull(); - persisted!.Actor.Should().Be("admin@company.com"); - } - - private async Task SeedIssuerAsync() - { - var issuerId = Guid.NewGuid().ToString(); - var now = DateTimeOffset.UtcNow; - var issuer = new IssuerRecord - { - Id = issuerId, - TenantId = _tenantId, - Slug = $"test-issuer-{Guid.NewGuid():N}", - DisplayName = "Test Issuer", - Description = "Test issuer for audit tests", - Contact = new IssuerContact(null, null, null, null), - Metadata = new IssuerMetadata(null, null, null, null, [], new Dictionary()), - Endpoints = [], - Tags = [], - IsSystemSeed = false, - CreatedAtUtc = now, - CreatedBy = "test@test.com", - UpdatedAtUtc = now, - UpdatedBy = "test@test.com" - }; - await _issuerRepository.UpsertAsync(issuer, CancellationToken.None); - return issuerId; - } - - private IssuerAuditEntry CreateAuditEntry( - string action, - string? reason, - IReadOnlyDictionary? metadata = null, - DateTimeOffset? timestamp = null) - { - return new IssuerAuditEntry( - _tenantId, - _issuerId, - action, - timestamp ?? 
DateTimeOffset.UtcNow, - "test@test.com", - reason, - metadata); - } - - private async Task ReadAuditEntryAsync(string tenantId, string issuerId) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", CancellationToken.None); - - const string sql = """ - SELECT actor, action, reason, details, occurred_at - FROM issuer.audit - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid - ORDER BY occurred_at DESC - LIMIT 1 - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.Parameters.AddWithValue("tenantId", Guid.Parse(tenantId)); - command.Parameters.AddWithValue("issuerId", Guid.Parse(issuerId)); - - await using var reader = await command.ExecuteReaderAsync(); - if (!await reader.ReadAsync()) - { - return null; - } - - var detailsJson = reader.GetString(3); - var details = JsonSerializer.Deserialize>(detailsJson) ?? []; - - return new AuditEntryDto( - reader.GetString(0), - reader.GetString(1), - reader.IsDBNull(2) ? null : reader.GetString(2), - details, - reader.GetDateTime(4)); - } - - private async Task CountAuditEntriesAsync(string tenantId, string issuerId) - { - await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", CancellationToken.None); - - const string sql = """ - SELECT COUNT(*) - FROM issuer.audit - WHERE tenant_id = @tenantId::uuid AND issuer_id = @issuerId::uuid - """; - - await using var command = new NpgsqlCommand(sql, connection); - command.Parameters.AddWithValue("tenantId", Guid.Parse(tenantId)); - command.Parameters.AddWithValue("issuerId", Guid.Parse(issuerId)); - - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt32(result); - } - - private sealed record AuditEntryDto( - string Actor, - string Action, - string? 
Reason, - Dictionary Details, - DateTime OccurredAt); } - - - diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPersistenceExtensionsTests.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPersistenceExtensionsTests.cs new file mode 100644 index 000000000..77508f998 --- /dev/null +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPersistenceExtensionsTests.cs @@ -0,0 +1,49 @@ +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Infrastructure.Postgres.Options; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Persistence.Extensions; +using StellaOps.IssuerDirectory.Persistence.Postgres; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; + +public class IssuerDirectoryPersistenceExtensionsTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddIssuerDirectoryPersistence_ConfiguresSchema_WhenBlank() + { + var services = new ServiceCollection(); + services.AddIssuerDirectoryPersistence(options => + { + options.ConnectionString = "Host=localhost;Database=issuer;Username=postgres;Password=postgres"; + options.SchemaName = ""; + }); + + var descriptor = services.Single(sd => sd.ServiceType == typeof(PostgresOptions)); + var options = descriptor.ImplementationInstance as PostgresOptions; + + options.Should().NotBeNull(); + options!.SchemaName.Should().Be("issuer"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddIssuerDirectoryPersistence_RegistersRepositories() + { + var services = new ServiceCollection(); + services.AddIssuerDirectoryPersistence(new PostgresOptions + { + ConnectionString = "Host=localhost;Database=issuer;Username=postgres;Password=postgres", + SchemaName = "issuer" + }); + + services.Should().Contain(descriptor => descriptor.ServiceType == typeof(IssuerDirectoryDataSource)); + services.Should().Contain(descriptor => descriptor.ServiceType == typeof(IIssuerRepository)); + services.Should().Contain(descriptor => descriptor.ServiceType == typeof(IIssuerKeyRepository)); + services.Should().Contain(descriptor => descriptor.ServiceType == typeof(IIssuerTrustRepository)); + services.Should().Contain(descriptor => descriptor.ServiceType == typeof(IIssuerAuditSink)); + } +} diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPostgresFixture.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPostgresFixture.cs index 4c51545ab..1945cabf6 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPostgresFixture.cs +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerDirectoryPostgresFixture.cs @@ -1,7 +1,7 @@ -using System.Reflection; using Microsoft.Extensions.Logging; using StellaOps.Infrastructure.Postgres.Testing; using StellaOps.IssuerDirectory.Persistence.Postgres; +using System.Reflection; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerKeyRepositoryTests.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerKeyRepositoryTests.cs index 74e2e8914..fd391cd44 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerKeyRepositoryTests.cs +++ 
b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerKeyRepositoryTests.cs @@ -3,9 +3,9 @@ using Microsoft.Extensions.Logging.Abstractions; using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Persistence.Postgres; using StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; public class IssuerKeyRepositoryTests : IClassFixture @@ -25,9 +25,9 @@ public class IssuerKeyRepositoryTests : IClassFixture.Instance), NullLogger.Instance); - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task AddKey_And_List_Works() + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task AddKey_And_List_WorksAsync() { var tenant = Guid.NewGuid().ToString(); var issuerId = Guid.NewGuid().ToString(); @@ -67,4 +67,49 @@ public class IssuerKeyRepositoryTests : IClassFixture k.IssuerId == issuerId); } + + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task GetByFingerprint_ReturnsKeyAsync() + { + var tenant = Guid.NewGuid().ToString(); + var issuerId = Guid.NewGuid().ToString(); + var issuerRepo = CreateIssuerRepo(); + var keyRepo = CreateKeyRepo(); + + var issuer = IssuerRecord.Create( + id: issuerId, + tenantId: tenant, + displayName: "Vendor Y", + slug: "vendor-y", + description: null, + contact: new IssuerContact(null, null, null, null), + metadata: new IssuerMetadata(null, null, null, null, null, null), + endpoints: null, + tags: null, + timestampUtc: DateTimeOffset.Parse("2026-01-02T01:00:00Z"), + actor: "test", + isSystemSeed: false); + await issuerRepo.UpsertAsync(issuer, CancellationToken.None); + + var key = IssuerKeyRecord.Create( + id: Guid.NewGuid().ToString(), + issuerId: issuerId, + tenantId: tenant, + type: IssuerKeyType.Ed25519PublicKey, + material: new IssuerKeyMaterial("pem", "pubkey-2"), + fingerprint: "fp-lookup", + createdAtUtc: DateTimeOffset.Parse("2026-01-02T01:05:00Z"), + createdBy: "test", + expiresAtUtc: null, + replacesKeyId: null); + + await keyRepo.UpsertAsync(key, CancellationToken.None); + + var fetched = await keyRepo.GetByFingerprintAsync(tenant, issuerId, "fp-lookup", CancellationToken.None); + + fetched.Should().NotBeNull(); + fetched!.Fingerprint.Should().Be("fp-lookup"); + fetched.Type.Should().Be(IssuerKeyType.Ed25519PublicKey); + } } diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerRepositoryTests.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerRepositoryTests.cs index f050a2495..98a146480 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerRepositoryTests.cs +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/IssuerRepositoryTests.cs @@ -3,9 +3,9 @@ using Microsoft.Extensions.Logging.Abstractions; using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Persistence.Postgres; using StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; public class IssuerRepositoryTests : IClassFixture @@ -25,9 +25,9 @@ public class IssuerRepositoryTests : IClassFixture.Instance); } - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task UpsertAndGet_Works_For_Tenant() + [Trait("Category", TestCategories.Integration)] + [Fact] + public async 
Task UpsertAndGet_Works_For_TenantAsync() { var repo = CreateRepository(); var tenant = Guid.NewGuid().ToString(); @@ -56,4 +56,46 @@ public class IssuerRepositoryTests : IClassFixture { ["tier"] = "gold" }), + endpoints: new[] + { + new IssuerEndpoint("vex", new Uri("https://contoso.test/vex"), "csaf", true) + }, + tags: new[] { "vendor", "priority" }, + timestampUtc: timestamp, + actor: "test", + isSystemSeed: false); + + await repo.UpsertAsync(record, CancellationToken.None); + + var fetched = await repo.GetAsync(tenant, issuerId, CancellationToken.None); + + fetched.Should().NotBeNull(); + fetched!.Contact.Email.Should().Be("security@contoso.test"); + fetched.Metadata.Attributes.Should().ContainKey("tier"); + fetched.Tags.Should().Contain("vendor"); + fetched.Endpoints.Should().ContainSingle().Which.RequiresAuthentication.Should().BeTrue(); + } } diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TASKS.md b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TASKS.md index c0766eeba..3b7b90ffc 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TASKS.md +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0377-T | DONE | Revalidated 2026-01-07; test coverage audit for IssuerDirectory.Persistence.Tests. | | AUDIT-0377-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-04: Split audit sink tests into partials, sorted usings, added DI registration unit tests and new coverage (SPRINT_20260130_002). | diff --git a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TrustRepositoryTests.cs b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TrustRepositoryTests.cs index efb2087ce..4e136072f 100644 --- a/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TrustRepositoryTests.cs +++ b/src/IssuerDirectory/__Tests/StellaOps.IssuerDirectory.Persistence.Tests/TrustRepositoryTests.cs @@ -3,9 +3,9 @@ using Microsoft.Extensions.Logging.Abstractions; using StellaOps.IssuerDirectory.Core.Domain; using StellaOps.IssuerDirectory.Persistence.Postgres; using StellaOps.IssuerDirectory.Persistence.Postgres.Repositories; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.IssuerDirectory.Persistence.Postgres.Tests; public class TrustRepositoryTests : IClassFixture @@ -25,9 +25,9 @@ public class TrustRepositoryTests : IClassFixture.Instance), NullLogger.Instance); - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task UpsertTrustOverride_Works() + [Trait("Category", TestCategories.Integration)] + [Fact] + public async Task UpsertTrustOverride_WorksAsync() { var tenant = Guid.NewGuid().ToString(); var issuerId = Guid.NewGuid().ToString(); @@ -64,4 +64,43 @@ public class TrustRepositoryTests : IClassFixtureenable preview true + false @@ -14,5 +15,6 @@ + diff --git a/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs index e1518ebf0..2feeac32f 100644 --- a/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs @@ -14,8 +14,9 @@ using StellaOps.Notify.Engine; using 
StellaOps.Notify.Persistence.Extensions; using StellaOps.Notify.Persistence.Postgres; using StellaOps.Notify.Queue; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Configuration .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) @@ -37,6 +38,7 @@ builder.Services.AddNotifyPersistence(builder.Configuration, postgresSection.Pat builder.Services.AddAirGapEgressPolicy(builder.Configuration); builder.Services.AddNotifyEventQueue(builder.Configuration, "notifier:queue"); +builder.Services.AddWorkerHealthChecks(); builder.Services.AddHealthChecks().AddNotifyQueueHealthCheck(); // In-memory storage replacements (document store removed) @@ -71,7 +73,9 @@ builder.Services.AddHttpClient(client => builder.Services.AddScoped(); builder.Services.AddHostedService(); -await builder.Build().RunAsync().ConfigureAwait(false); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync().ConfigureAwait(false); // Explicit internal Program class to avoid conflicts with other projects that reference this assembly internal sealed partial class Program { } diff --git a/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj index 6240d1fa2..30e2ae611 100644 --- a/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj @@ -12,6 +12,7 @@ + diff --git a/src/Notify/StellaOps.Notify.Worker/Program.cs b/src/Notify/StellaOps.Notify.Worker/Program.cs index 8ec683ef5..a74e08a6c 100644 --- a/src/Notify/StellaOps.Notify.Worker/Program.cs +++ b/src/Notify/StellaOps.Notify.Worker/Program.cs @@ -6,8 +6,9 @@ using StellaOps.Notify.Queue; using StellaOps.Notify.Worker; using StellaOps.Notify.Worker.Handlers; using StellaOps.Notify.Worker.Processing; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Configuration .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) @@ -30,4 +31,8 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddHostedService(); -await builder.Build().RunAsync().ConfigureAwait(false); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync().ConfigureAwait(false); diff --git a/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj b/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj index 5c33e3c80..eae4b032d 100644 --- a/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj +++ b/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj @@ -14,6 +14,10 @@ + + + + diff --git a/src/OpsMemory/StellaOps.OpsMemory.WebService/StellaOps.OpsMemory.WebService.csproj b/src/OpsMemory/StellaOps.OpsMemory.WebService/StellaOps.OpsMemory.WebService.csproj index 77e6eb975..87535bbd2 100644 --- a/src/OpsMemory/StellaOps.OpsMemory.WebService/StellaOps.OpsMemory.WebService.csproj +++ b/src/OpsMemory/StellaOps.OpsMemory.WebService/StellaOps.OpsMemory.WebService.csproj @@ -12,6 +12,7 @@ + diff --git a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj 
b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj index 09f5f4492..ddfb3ea53 100644 --- a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj @@ -41,6 +41,7 @@ + diff --git a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs index 8ab4deb8b..719403d75 100644 --- a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs @@ -1,7 +1,10 @@ using StellaOps.Orchestrator.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -host.Run(); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj index 2ec8b9530..ae9b9f863 100644 --- a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj @@ -15,9 +15,12 @@ preview true - - + + + + + @@ -35,8 +38,11 @@ - - + + + + + diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj index 1a8ed155e..1589212c6 100644 --- a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj @@ -37,6 +37,7 @@ + diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs index 2e39e5adb..61ca9f93c 100644 --- a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs @@ -1,7 +1,10 @@ using StellaOps.PacksRegistry.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -host.Run(); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj index f41436c9a..bf9fde2f1 100644 --- a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj +++ 
b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj @@ -15,9 +15,12 @@ preview true - - + + + + + @@ -35,8 +38,11 @@ - - + + + + + diff --git a/src/Platform/StellaOps.Platform.WebService/Endpoints/SetupEndpoints.cs b/src/Platform/StellaOps.Platform.WebService/Endpoints/SetupEndpoints.cs index 8b5c65719..aa7f9aa77 100644 --- a/src/Platform/StellaOps.Platform.WebService/Endpoints/SetupEndpoints.cs +++ b/src/Platform/StellaOps.Platform.WebService/Endpoints/SetupEndpoints.cs @@ -9,8 +9,10 @@ using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Routing; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; using StellaOps.Platform.WebService.Constants; using StellaOps.Platform.WebService.Contracts; +using StellaOps.Platform.WebService.Options; using StellaOps.Platform.WebService.Services; using System; using System.Threading; @@ -20,6 +22,9 @@ namespace StellaOps.Platform.WebService.Endpoints; /// /// Setup wizard API endpoints aligned to docs/setup/setup-wizard-ux.md. +/// All endpoints are AllowAnonymous because during initial setup the Authority +/// service is not running. When setup is already complete, the handlers +/// enforce auth via TryResolveContext before proceeding. /// public static class SetupEndpoints { @@ -35,6 +40,41 @@ public static class SetupEndpoints return app; } + /// + /// Resolves the request context, falling back to a bootstrap context during initial setup. + /// When setup is complete, requires authenticated context (returns error on failure). + /// + private static async Task<(PlatformRequestContext Context, IResult? Failure)> ResolveSetupContextAsync( + HttpContext httpContext, + PlatformRequestContextResolver resolver, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, + CancellationToken ct) + { + var dbSettings = await envSettingsStore.GetAllAsync(ct); + var setupState = setupDetector.Detect(options.Value.Storage, dbSettings); + + if (setupState == "complete") + { + // Setup already done — require auth for re-configuration + if (!TryResolveContext(httpContext, resolver, out var authContext, out var failure)) + { + return (null!, failure); + } + return (authContext!, null); + } + + // During initial setup, resolve context best-effort + if (!resolver.TryResolve(httpContext, out var requestContext, out _)) + { + // No tenant/auth available — use bootstrap context + requestContext = new PlatformRequestContext("setup", "setup-wizard", null); + } + + return (requestContext!, null); + } + private static void MapSessionEndpoints(IEndpointRouteBuilder setup) { var sessions = setup.MapGroup("/sessions").WithTags("Setup Sessions"); @@ -44,16 +84,18 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; try { - var result = await service.GetSessionAsync(requestContext!, ct).ConfigureAwait(false); + var result = await service.GetSessionAsync(requestContext, ct).ConfigureAwait(false); if (result is null) { return 
Results.NotFound(CreateProblem( @@ -67,7 +109,7 @@ public static class SetupEndpoints { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupRead) + }).AllowAnonymous() .WithName("GetSetupSession") .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status404NotFound); @@ -77,18 +119,20 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, [FromBody] CreateSetupSessionRequest? request, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; try { var result = await service.CreateSessionAsync( - requestContext!, + requestContext, request ?? new CreateSetupSessionRequest(), ct).ConfigureAwait(false); return Results.Created($"/api/v1/setup/sessions", result); @@ -97,7 +141,7 @@ public static class SetupEndpoints { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupWrite) + }).AllowAnonymous() .WithName("CreateSetupSession") .Produces(StatusCodes.Status201Created) .Produces(StatusCodes.Status400BadRequest); @@ -107,23 +151,25 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; try { - var result = await service.ResumeOrCreateSessionAsync(requestContext!, ct).ConfigureAwait(false); + var result = await service.ResumeOrCreateSessionAsync(requestContext, ct).ConfigureAwait(false); return Results.Ok(result); } catch (InvalidOperationException ex) { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupWrite) + }).AllowAnonymous() .WithName("ResumeSetupSession") .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status400BadRequest); @@ -133,18 +179,20 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, [FromBody] FinalizeSetupSessionRequest? request, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; try { var result = await service.FinalizeSessionAsync( - requestContext!, + requestContext, request ?? 
new FinalizeSetupSessionRequest(), ct).ConfigureAwait(false); return Results.Ok(result); @@ -153,7 +201,7 @@ public static class SetupEndpoints { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupWrite) + }).AllowAnonymous() .WithName("FinalizeSetupSession") .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status400BadRequest); @@ -168,13 +216,15 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, [FromBody] ExecuteSetupStepRequest request, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; if (request is null) { @@ -186,14 +236,14 @@ public static class SetupEndpoints try { - var result = await service.ExecuteStepAsync(requestContext!, request, ct).ConfigureAwait(false); + var result = await service.ExecuteStepAsync(requestContext, request, ct).ConfigureAwait(false); return Results.Ok(result); } catch (InvalidOperationException ex) { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupWrite) + }).AllowAnonymous() .WithName("ExecuteSetupStep") .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status400BadRequest); @@ -203,13 +253,15 @@ public static class SetupEndpoints HttpContext context, PlatformRequestContextResolver resolver, PlatformSetupService service, + SetupStateDetector setupDetector, + IOptions options, + IEnvironmentSettingsStore envSettingsStore, [FromBody] SkipSetupStepRequest request, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } + var (requestContext, failure) = await ResolveSetupContextAsync( + context, resolver, setupDetector, options, envSettingsStore, ct); + if (failure is not null) return failure; if (request is null) { @@ -221,14 +273,14 @@ public static class SetupEndpoints try { - var result = await service.SkipStepAsync(requestContext!, request, ct).ConfigureAwait(false); + var result = await service.SkipStepAsync(requestContext, request, ct).ConfigureAwait(false); return Results.Ok(result); } catch (InvalidOperationException ex) { return Results.BadRequest(CreateProblem("Invalid Operation", ex.Message, StatusCodes.Status400BadRequest)); } - }).RequireAuthorization(PlatformPolicies.SetupWrite) + }).AllowAnonymous() .WithName("SkipSetupStep") .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status400BadRequest); @@ -240,19 +292,12 @@ public static class SetupEndpoints // GET /api/v1/setup/definitions/steps - Get all step definitions definitions.MapGet("/steps", async Task ( - HttpContext context, - PlatformRequestContextResolver resolver, PlatformSetupService service, CancellationToken ct) => { - if (!TryResolveContext(context, resolver, out var requestContext, out var failure)) - { - return failure!; - } - var result = await service.GetStepDefinitionsAsync(ct).ConfigureAwait(false); return Results.Ok(result); - }).RequireAuthorization(PlatformPolicies.SetupRead) + }).AllowAnonymous() 
.WithName("GetSetupStepDefinitions") .Produces(StatusCodes.Status200OK); } diff --git a/src/Policy/StellaOps.Policy.Engine/Program.cs b/src/Policy/StellaOps.Policy.Engine/Program.cs index fc03b3736..444d32ce6 100644 --- a/src/Policy/StellaOps.Policy.Engine/Program.cs +++ b/src/Policy/StellaOps.Policy.Engine/Program.cs @@ -7,6 +7,7 @@ using StellaOps.Auth.Abstractions; using StellaOps.Auth.Client; using StellaOps.Auth.ServerIntegration; using StellaOps.Configuration; +using StellaOps.Cryptography.DependencyInjection; using StellaOps.Policy.Engine.BatchEvaluation; using StellaOps.Policy.Engine.Compilation; using StellaOps.Policy.Engine.ConsoleSurface; @@ -101,6 +102,9 @@ builder.Services.AddAirGapEgressPolicy(builder.Configuration, sectionName: "AirG // CVSS receipts rely on PostgreSQL storage for deterministic persistence. builder.Services.AddPolicyPostgresStorage(builder.Configuration, sectionName: "Postgres:Policy"); +// Register unnamed options so IOptions resolves correctly for PolicyDataSource. +builder.Services.Configure( + builder.Configuration.GetSection("Postgres:Policy")); builder.Services.AddSingleton(); builder.Services.AddScoped(); @@ -234,6 +238,7 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddVexDecisionEmitter(); // POLICY-VEX-401-006 +builder.Services.AddStellaOpsCrypto(); builder.Services.AddHttpContextAccessor(); builder.Services.AddStellaOpsCors(builder.Environment, builder.Configuration); builder.Services.AddRouting(options => options.LowercaseUrls = true); diff --git a/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj index a7c8662d6..55b02d6fa 100644 --- a/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj +++ b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj @@ -34,6 +34,7 @@ + diff --git a/src/ReachGraph/StellaOps.ReachGraph.WebService/StellaOps.ReachGraph.WebService.csproj b/src/ReachGraph/StellaOps.ReachGraph.WebService/StellaOps.ReachGraph.WebService.csproj index d66284feb..3e62a26e6 100644 --- a/src/ReachGraph/StellaOps.ReachGraph.WebService/StellaOps.ReachGraph.WebService.csproj +++ b/src/ReachGraph/StellaOps.ReachGraph.WebService/StellaOps.ReachGraph.WebService.csproj @@ -23,6 +23,7 @@ + diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj index e4dfbb048..f60ec485e 100644 --- a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj @@ -33,6 +33,7 @@ + diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs index 18b42424d..54f921bb7 100644 --- a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs @@ -1,7 +1,10 @@ using StellaOps.RiskEngine.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -host.Run(); +var app = builder.Build(); 
+app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj index 609955cc8..dd465af83 100644 --- a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj @@ -15,9 +15,12 @@ preview true - - + + + + + @@ -35,8 +38,11 @@ - - + + + + + diff --git a/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj b/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj index d7344d230..4edcf5ae0 100644 --- a/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj +++ b/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj @@ -16,6 +16,7 @@ + diff --git a/src/Scanner/StellaOps.Scanner.Worker/Extensions/BinaryIndexServiceExtensions.cs b/src/Scanner/StellaOps.Scanner.Worker/Extensions/BinaryIndexServiceExtensions.cs index 2bbbb3032..b56131741 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/Extensions/BinaryIndexServiceExtensions.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Extensions/BinaryIndexServiceExtensions.cs @@ -27,17 +27,21 @@ public static class BinaryIndexServiceExtensions .GetSection("BinaryIndex") .Get() ?? new BinaryIndexOptions(); + services.AddSingleton(options); + if (!options.Enabled) { services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); return services; } - services.AddSingleton(options); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); return services; } @@ -51,7 +55,7 @@ public sealed class BinaryIndexOptions /// /// Whether binary vulnerability analysis is enabled. /// - public bool Enabled { get; init; } = true; + public bool Enabled { get; init; } = false; /// /// Batch size for binary lookups. @@ -159,3 +163,27 @@ internal sealed class NullBinaryVulnerabilityService : IBinaryVulnerabilityServi return Task.FromResult(System.Collections.Immutable.ImmutableDictionary>.Empty); } } + +/// +/// Null implementation of IBinaryFeatureExtractor for when binary analysis is disabled. 
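With Enabled now defaulting to false, binary vulnerability analysis becomes opt-in; a minimal configuration sketch for a host that wants it, using the "BinaryIndex" section and Enabled flag from the binding above (all other option keys elided):

using Microsoft.Extensions.Configuration;

// Equivalent to an appsettings fragment of: "BinaryIndex": { "Enabled": true }
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["BinaryIndex:Enabled"] = "true"
    })
    .Build();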
+/// +internal sealed class NullBinaryFeatureExtractor : IBinaryFeatureExtractor +{ + public bool CanExtract(Stream stream) => false; + + public Task ExtractIdentityAsync(Stream stream, CancellationToken ct = default) + => Task.FromResult(new StellaOps.BinaryIndex.Core.Models.BinaryIdentity + { + BinaryKey = "null", + FileSha256 = "", + Format = StellaOps.BinaryIndex.Core.Models.BinaryFormat.Elf, + Architecture = "unknown" + }); + + public Task ExtractMetadataAsync(Stream stream, CancellationToken ct = default) + => Task.FromResult(new BinaryMetadata + { + Format = StellaOps.BinaryIndex.Core.Models.BinaryFormat.Elf, + Architecture = "unknown" + }); +} diff --git a/src/Scanner/StellaOps.Scanner.Worker/Processing/NullEpssProvider.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/NullEpssProvider.cs new file mode 100644 index 000000000..22009a968 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/NullEpssProvider.cs @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Copyright (c) StellaOps + +using StellaOps.Scanner.Core.Epss; + +namespace StellaOps.Scanner.Worker.Processing; + +/// +/// Null implementation of IEpssProvider for when EPSS storage is not configured. +/// +internal sealed class NullEpssProvider : IEpssProvider +{ + public Task GetCurrentAsync(string cveId, CancellationToken cancellationToken = default) + => Task.FromResult(null); + + public Task GetCurrentBatchAsync(IEnumerable cveIds, CancellationToken cancellationToken = default) + => Task.FromResult(new EpssBatchResult { Found = [], NotFound = cveIds.ToList(), ModelDate = DateOnly.MinValue }); + + public Task GetAsOfDateAsync(string cveId, DateOnly asOfDate, CancellationToken cancellationToken = default) + => Task.FromResult(null); + + public Task> GetHistoryAsync(string cveId, DateOnly startDate, DateOnly endDate, CancellationToken cancellationToken = default) + => Task.FromResult>([]); + + public Task GetLatestModelDateAsync(CancellationToken cancellationToken = default) + => Task.FromResult(null); + + public Task IsAvailableAsync(CancellationToken cancellationToken = default) + => Task.FromResult(false); +} diff --git a/src/Scanner/StellaOps.Scanner.Worker/Processing/NullPoEServices.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/NullPoEServices.cs new file mode 100644 index 000000000..b6b3a1b58 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/NullPoEServices.cs @@ -0,0 +1,63 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Copyright (c) StellaOps +// Null implementations for PoE/Reachability infrastructure dependencies + +using StellaOps.Scanner.Reachability; +using StellaOps.Scanner.Storage.ObjectStore; + +namespace StellaOps.Scanner.Worker.Processing; + +/// +/// Null implementation of IRichGraphStore for environments without richgraph infrastructure. +/// +internal sealed class NullRichGraphStore : IRichGraphStore +{ + public Task FetchGraphAsync(string graphHash, CancellationToken cancellationToken) + => Task.FromResult(null); +} + +/// +/// Null implementation of IEntryPointResolver for environments without entry point resolution. +/// +internal sealed class NullEntryPointResolver : IEntryPointResolver +{ + public Task> ResolveAsync(RichGraphV1 graph, CancellationToken cancellationToken) + => Task.FromResult>([]); +} + +/// +/// Null implementation of IVulnSurfaceService for environments without vulnerability surface data. 
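The NullEpssProvider above lets EPSS enrichment degrade gracefully when no storage is configured; a consumer-side sketch, where EpssLookup is a hypothetical caller and the provider members are those shown above:

// Hypothetical consumer: behaves sensibly whether the real or the null provider is registered.
public sealed class EpssLookup(IEpssProvider epss)
{
    public async Task<bool> HasScoreAsync(string cveId, CancellationToken ct)
    {
        if (!await epss.IsAvailableAsync(ct))
        {
            return false;           // NullEpssProvider always reports unavailable
        }

        var current = await epss.GetCurrentAsync(cveId, ct);
        return current is not null; // score record shape intentionally not inspected here
    }
}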
+/// +internal sealed class NullVulnSurfaceService : IVulnSurfaceService +{ + public Task> GetAffectedSymbolsAsync( + string vulnId, string componentRef, CancellationToken cancellationToken) + => Task.FromResult>([]); +} + +/// +/// Null implementation of IDsseSigningService for environments without signing infrastructure. +/// +internal sealed class NullDsseSigningService : StellaOps.Attestor.IDsseSigningService +{ + public Task SignAsync(byte[] payload, string payloadType, string signingKeyId, CancellationToken cancellationToken = default) + => Task.FromResult(Array.Empty()); + + public Task VerifyAsync(byte[] dsseEnvelope, IReadOnlyList trustedKeyIds, CancellationToken cancellationToken = default) + => Task.FromResult(false); +} + +/// +/// Null implementation of IArtifactObjectStore for environments without object storage. +/// +internal sealed class NullArtifactObjectStore : IArtifactObjectStore +{ + public Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken) + => Task.CompletedTask; + + public Task GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken) + => Task.FromResult(null); + + public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken) + => Task.CompletedTask; +} diff --git a/src/Scanner/StellaOps.Scanner.Worker/Program.cs b/src/Scanner/StellaOps.Scanner.Worker/Program.cs index 8352af20d..5b52580cc 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/Program.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Program.cs @@ -51,10 +51,11 @@ using StellaOps.Scanner.Worker.Processing.Entropy; using StellaOps.Scanner.Worker.Processing.Secrets; using StellaOps.Scanner.Worker.Processing.ServiceSecurity; using StellaOps.Scanner.Worker.Processing.Surface; +using StellaOps.Worker.Health; using System.Diagnostics; using System.IO; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Services.AddOptions() .BindConfiguration(ScannerWorkerOptions.SectionName) @@ -92,6 +93,7 @@ builder.Services.AddSingleton(new DeterminismContext( workerOptions.Determinism.FilterLogs, workerOptions.Determinism.ConcurrencyLimit)); builder.Services.AddSingleton(_ => new DeterministicRandomProvider(workerOptions.Determinism.RngSeed)); +builder.Services.AddSingleton(); builder.Services.AddScannerCache(builder.Configuration); builder.Services.AddSurfaceEnvironment(options => { @@ -173,8 +175,21 @@ else { builder.Services.TryAddSingleton(); builder.Services.TryAddSingleton(); + // Provide fallback registrations for services used by unconditionally-registered components + builder.Services.TryAddSingleton(new StellaOps.Scanner.Storage.ScannerStorageOptions()); + builder.Services.TryAddSingleton(); } +// Unwrap IOptions to concrete type for classes that take it directly (e.g. ReplayBundleFetcher) +builder.Services.TryAddSingleton(sp => +{ + var opts = sp.GetService>(); + return opts?.Value ?? 
new StellaOps.Scanner.Storage.ScannerStorageOptions(); +}); + +// Ensure IEpssProvider is available even without storage (null fallback) +builder.Services.TryAddSingleton(); + builder.Services.TryAddSingleton(); builder.Services.TryAddSingleton(); builder.Services.AddSingleton(); @@ -260,9 +275,22 @@ if (workerOptions.Secrets.Enabled) builder.Services.AddOptions() .BindConfiguration("PoE") .ValidateOnStart(); +// SubgraphExtractor dependencies (null defaults for environments without richgraph/vulnsurface infra) +builder.Services.TryAddSingleton(); +builder.Services.TryAddSingleton(); +builder.Services.TryAddSingleton(); builder.Services.AddSingleton(); +// PoEArtifactGenerator dependency (null default for environments without signing infra) +builder.Services.TryAddSingleton(); +// PoEEmissionOptions is a positional record without parameterless ctor - configure explicitly +builder.Services.AddSingleton(Microsoft.Extensions.Options.Options.Create(StellaOps.Attestor.PoEEmissionOptions.Default)); builder.Services.AddSingleton(); -builder.Services.AddSingleton(); +// PoECasStore needs a string casRoot parameter - use factory +builder.Services.AddSingleton(sp => + new StellaOps.Signals.Storage.PoECasStore( + Environment.GetEnvironmentVariable("POE_CAS_ROOT") ?? "/tmp/poe-cas", + sp.GetRequiredService>(), + sp.GetService())); builder.Services.AddSingleton(); builder.Services.AddSingleton(); @@ -356,21 +384,25 @@ builder.Logging.Configure(options => | ActivityTrackingOptions.ParentId; }); -var host = builder.Build(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); // Fail fast if surface configuration is invalid at startup. -using (var scope = host.Services.CreateScope()) +using (var scope = app.Services.CreateScope()) { var services = scope.ServiceProvider; var env = services.GetRequiredService(); var runner = services.GetRequiredService(); await runner.EnsureAsync( SurfaceValidationContext.Create(services, "Scanner.Worker.Startup", env.Settings), - host.Services.GetRequiredService().ApplicationStopping) + app.Services.GetRequiredService().ApplicationStopping) .ConfigureAwait(false); } -await host.RunAsync(); +app.MapWorkerHealthEndpoints(); + +await app.RunAsync(); // Make Program class file-scoped to prevent it from being exposed to referencing assemblies file sealed partial class Program; diff --git a/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj index 8c64e4bff..d7c06b9f8 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj +++ b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj @@ -15,6 +15,10 @@ + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/Jobs/ReachabilityEvidenceJobExecutor.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/Jobs/ReachabilityEvidenceJobExecutor.cs index 2a706d248..58adfa509 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/Jobs/ReachabilityEvidenceJobExecutor.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/Jobs/ReachabilityEvidenceJobExecutor.cs @@ -477,3 +477,22 @@ public interface IEvidenceStorageService ReachabilityEvidenceJob job, CancellationToken ct); } + +/// +/// No-op call graph snapshot provider for environments without call graph infrastructure. 
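The fallback registrations above rely on TryAddSingleton semantics: a registration made earlier by the real storage or signing extensions wins, and the null default is skipped. A compressed sketch (PostgresEpssProvider is a stand-in name for the storage-backed provider):

// TryAddSingleton is a no-op when the service type is already registered, so the
// null fallback only takes effect in hosts where the real infrastructure was never wired.
services.AddSingleton<IEpssProvider, PostgresEpssProvider>();   // stand-in for the storage-backed provider
services.TryAddSingleton<IEpssProvider, NullEpssProvider>();    // ignored here; wins only when nothing else registered IEpssProvider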
+/// +internal sealed class NullCallGraphSnapshotProvider : ICallGraphSnapshotProvider +{ + public Task GetOrComputeAsync(string imageDigest, bool forceRecompute, CancellationToken ct) + => Task.FromResult(null); +} + +/// +/// No-op evidence storage for environments without evidence infrastructure. +/// +internal sealed class NullEvidenceStorageService : IEvidenceStorageService +{ + public Task<(string BundleId, string EvidenceUri)> StoreReachabilityStackAsync( + ReachabilityStack stack, ReachabilityEvidenceJob job, CancellationToken ct) + => Task.FromResult(("null", "null://no-evidence-store")); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/ServiceCollectionExtensions.cs index 8c5c28891..e092044df 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/ServiceCollectionExtensions.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Reachability/ServiceCollectionExtensions.cs @@ -4,6 +4,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Scanner.Explainability.Assumptions; using StellaOps.Scanner.Reachability.Binary; using StellaOps.Scanner.Reachability.Jobs; using StellaOps.Scanner.Reachability.Runtime; @@ -43,14 +44,20 @@ public static class ServiceCollectionExtensions // VEX Integration services.TryAddSingleton(); + // Call graph snapshot provider (required by job executor) + services.TryAddSingleton(); + + // Evidence storage (required by job executor) + services.TryAddSingleton(); + // Job Executor services.TryAddScoped(); - // Runtime Collection (optional - requires eBPF infrastructure) - services.TryAddSingleton(); + // Runtime Collection (optional - requires eBPF infrastructure; null default for environments without eBPF) + services.TryAddSingleton(); - // Binary Patch Verification (requires Ghidra infrastructure) - services.TryAddSingleton(); + // Binary Patch Verification (requires Ghidra infrastructure; null default for environments without Ghidra) + services.TryAddSingleton(); return services; } @@ -71,9 +78,11 @@ public static class ServiceCollectionExtensions services.TryAddSingleton(cveSymbolMappingFactory); services.TryAddSingleton(); services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); services.TryAddScoped(); - services.TryAddSingleton(); - services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); return services; } @@ -140,3 +149,65 @@ public sealed class ReachabilityEvidenceOptions /// public int MaxJobTimeoutSeconds { get; set; } = 300; } + +/// +/// Null implementation of IBinaryPatchVerifier for environments without Ghidra. 
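When only the null verifier below is registered, callers observe Success == false and Status == PatchStatus.Unknown; a consumer-side sketch (verifier and request are assumed to come from DI and the surrounding call site):

// Sketch: interpreting a patch-verification result when Ghidra is not configured.
var result = await verifier.VerifyPatchAsync(request, ct);
if (!result.Success && result.Status == PatchStatus.Unknown)
{
    // Verification unavailable: keep the finding open rather than treating
    // the binary as either patched or unpatched.
}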
+/// +internal sealed class NullBinaryPatchVerifier : IBinaryPatchVerifier +{ + public Task VerifyPatchAsync(PatchVerificationRequest request, CancellationToken ct = default) + => Task.FromResult(new PatchVerificationResult + { + Success = false, + Status = PatchStatus.Unknown, + FunctionResults = [], + Layer2 = new ReachabilityLayer2 + { + IsResolved = false, + Confidence = ConfidenceLevel.Low, + Reason = "Binary patch verification not available (Ghidra not configured)" + }, + Confidence = 0m, + Error = "Binary patch verification not available (Ghidra not configured)" + }); + + public Task CompareFunctionAsync(string vulnerableBinaryPath, string targetBinaryPath, string symbolName, CancellationToken ct = default) + => Task.FromResult(new FunctionPatchResult + { + SymbolName = symbolName, + Success = false, + IsPatched = false, + Similarity = 0m, + Confidence = 0m, + Error = "Binary patch verification not available (Ghidra not configured)" + }); + + public bool IsSupported(string binaryPath) => false; +} + +/// +/// Null implementation of IRuntimeReachabilityCollector for environments without eBPF. +/// +internal sealed class NullRuntimeReachabilityCollector : IRuntimeReachabilityCollector +{ + public bool IsAvailable => false; + public string Platform => "none"; + + public Task ObserveAsync(RuntimeObservationRequest request, CancellationToken ct = default) + => Task.FromResult(new RuntimeReachabilityResult + { + Success = false, + Layer3 = new ReachabilityLayer3 + { + IsGated = false, + Outcome = GatingOutcome.NotGated, + Confidence = ConfidenceLevel.Low, + Description = "Runtime observation not available (eBPF not configured)" + }, + Source = ObservationSource.None, + Error = "Runtime observation not available (eBPF not configured)" + }); + + public Task> CheckObservationsAsync(string containerId, IReadOnlyList symbols, CancellationToken ct = default) + => Task.FromResult>([]); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/IEpssSignalPublisher.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/IEpssSignalPublisher.cs index 4aa9168ed..63f18b4b7 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/IEpssSignalPublisher.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/IEpssSignalPublisher.cs @@ -89,7 +89,7 @@ public sealed class NullEpssSignalPublisher : IEpssSignalPublisher { public static readonly NullEpssSignalPublisher Instance = new(); - private NullEpssSignalPublisher() { } + public NullEpssSignalPublisher() { } public Task PublishAsync(EpssSignal signal, CancellationToken cancellationToken = default) => Task.FromResult(new EpssSignalPublishResult { Success = true, MessageId = "null" }); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs index e0864b488..dbcfbcb02 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs @@ -95,7 +95,13 @@ public static class ServiceCollectionExtensions services.AddSingleton(); services.AddScoped(); services.AddSingleton(); - services.AddSingleton(); + services.AddSingleton(sp => + { + var opts = sp.GetRequiredService>().Value; + // Default to /app/epss for bundle path; overrideable via config. + var path = Environment.GetEnvironmentVariable("EPSS_BUNDLE_PATH") ?? 
"/app/epss"; + return new EpssBundleSource(path); + }); // Note: EpssChangeDetector is a static class, no DI registration needed // EPSS provider with optional Valkey cache layer (Sprint: SPRINT_3410_0002_0001, Task: EPSS-SCAN-005) diff --git a/src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs b/src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs index 2cf1a7780..2179bc186 100644 --- a/src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs +++ b/src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs @@ -6,10 +6,12 @@ using Microsoft.Extensions.Logging; using StellaOps.Scheduler.Persistence.Extensions; using StellaOps.Scheduler.Persistence.Postgres; using StellaOps.Scheduler.Queue; +using StellaOps.Scheduler.ImpactIndex; using StellaOps.Scheduler.Worker.DependencyInjection; +using StellaOps.Worker.Health; using System.Diagnostics; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Logging.Configure(options => { @@ -27,9 +29,13 @@ if (storageSection.Exists()) } builder.Services.AddSchedulerWorker(builder.Configuration.GetSection("Scheduler:Worker")); +builder.Services.AddImpactIndexStub(); -var host = builder.Build(); -await host.RunAsync(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +await app.RunAsync(); // Make Program class file-scoped to prevent it from being exposed to referencing assemblies file sealed partial class Program; diff --git a/src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj b/src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj index 42ebec816..d43d2cacb 100644 --- a/src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj +++ b/src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj @@ -11,7 +11,9 @@ + + diff --git a/src/Scheduler/__Libraries/StellaOps.Scheduler.Persistence/Extensions/SchedulerPersistenceExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Persistence/Extensions/SchedulerPersistenceExtensions.cs index 563beb3f4..786ba7ed4 100644 --- a/src/Scheduler/__Libraries/StellaOps.Scheduler.Persistence/Extensions/SchedulerPersistenceExtensions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Persistence/Extensions/SchedulerPersistenceExtensions.cs @@ -25,6 +25,8 @@ public static class SchedulerPersistenceExtensions string sectionName = "Postgres:Scheduler") { services.Configure(sectionName, configuration.GetSection(sectionName)); + // Also register unnamed so IOptions resolves (DataSourceBase uses unnamed). 
+ services.Configure(configuration.GetSection(sectionName)); services.AddSingleton(); // Register repositories diff --git a/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs index a5e8a524b..ac70910d0 100644 --- a/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs @@ -1,8 +1,10 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; +using StellaOps.Cryptography; using StellaOps.Notify.Queue; using StellaOps.Scanner.Surface.Env; using StellaOps.Scanner.Surface.FS; @@ -30,6 +32,10 @@ public static class SchedulerWorkerServiceCollectionExtensions .Bind(configuration) .PostConfigure(options => options.Validate()); + // Register concrete SchedulerWorkerOptions for services that inject it directly + // (PlannerExecutionService, RunnerBackgroundService, PolicyRunExecutionService, etc.). + services.AddSingleton(sp => sp.GetRequiredService>().Value); + services.AddSingleton(TimeProvider.System); services.AddSingleton(); services.AddSingleton(); @@ -101,6 +107,9 @@ public static class SchedulerWorkerServiceCollectionExtensions services.AddHostedService(); services.AddHostedService(); + // ICryptoHash is required by FileSurfaceManifestStore via Surface.FS. + services.TryAddSingleton(); + services.AddSurfaceEnvironment(options => { options.ComponentName = "Scheduler.Worker"; }); services.AddSurfaceFileCache(); services.AddSurfaceManifestStore(); diff --git a/src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj b/src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj index 4ca25d94f..e77ff69f7 100644 --- a/src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj +++ b/src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj @@ -10,5 +10,6 @@ + diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj index f38d5cc84..bbbe0034f 100644 --- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj @@ -34,6 +34,7 @@ + diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs index a2170c859..31980bb80 100644 --- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs @@ -8,8 +8,9 @@ using StellaOps.TaskRunner.Core.Execution.Simulation; using StellaOps.TaskRunner.Infrastructure.Execution; using StellaOps.TaskRunner.Worker.Services; using StellaOps.Telemetry.Core; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); 
builder.Services.AddAirGapEgressPolicy(builder.Configuration, sectionName: "AirGap"); builder.Services.Configure(builder.Configuration.GetSection("Worker")); @@ -83,5 +84,8 @@ builder.Services.AddSingleton(sp => builder.Services.AddHostedService(); -var host = builder.Build(); -host.Run(); +builder.Services.AddWorkerHealthChecks(); + +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj index a6b238a31..cc8eb8af1 100644 --- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj @@ -18,6 +18,10 @@ + + + + @@ -33,8 +37,9 @@ - + + diff --git a/src/Timeline/StellaOps.Timeline.WebService/StellaOps.Timeline.WebService.csproj b/src/Timeline/StellaOps.Timeline.WebService/StellaOps.Timeline.WebService.csproj index 06c3ce8a3..2a7bf3fd9 100644 --- a/src/Timeline/StellaOps.Timeline.WebService/StellaOps.Timeline.WebService.csproj +++ b/src/Timeline/StellaOps.Timeline.WebService/StellaOps.Timeline.WebService.csproj @@ -14,6 +14,7 @@ + diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs index 2f9a85b7a..3f95aa26c 100644 --- a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs @@ -4,8 +4,9 @@ using StellaOps.TimelineIndexer.Infrastructure.DependencyInjection; using StellaOps.TimelineIndexer.Infrastructure.Options; using StellaOps.TimelineIndexer.Infrastructure.Subscriptions; using StellaOps.TimelineIndexer.Worker; +using StellaOps.Worker.Health; -var builder = Host.CreateApplicationBuilder(args); +var builder = WebApplication.CreateSlimBuilder(args); builder.Configuration.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true); builder.Configuration.AddJsonFile("appsettings.Development.json", optional: true, reloadOnChange: true); @@ -20,6 +21,8 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddHostedService(); +builder.Services.AddWorkerHealthChecks(); -var host = builder.Build(); -host.Run(); +var app = builder.Build(); +app.MapWorkerHealthEndpoints(); +app.Run(); diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj index be9e616a4..d5c495b3a 100644 --- a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj @@ -15,9 +15,12 @@ preview true - - + + + + + @@ -35,8 +38,11 @@ - - + + + + + diff --git a/src/Unknowns/StellaOps.Unknowns.WebService/StellaOps.Unknowns.WebService.csproj b/src/Unknowns/StellaOps.Unknowns.WebService/StellaOps.Unknowns.WebService.csproj index 564679cf8..d661c573d 100644 --- a/src/Unknowns/StellaOps.Unknowns.WebService/StellaOps.Unknowns.WebService.csproj +++ 
b/src/Unknowns/StellaOps.Unknowns.WebService/StellaOps.Unknowns.WebService.csproj @@ -12,6 +12,7 @@ + diff --git a/src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj b/src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj index 541f26c07..39cc05304 100644 --- a/src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj +++ b/src/VulnExplorer/StellaOps.VulnExplorer.Api/StellaOps.VulnExplorer.Api.csproj @@ -14,5 +14,6 @@ + diff --git a/src/Web/StellaOps.Web/proxy.conf.json b/src/Web/StellaOps.Web/proxy.conf.json index 5dd18e40a..ec96f3a8c 100644 --- a/src/Web/StellaOps.Web/proxy.conf.json +++ b/src/Web/StellaOps.Web/proxy.conf.json @@ -7,6 +7,10 @@ "target": "https://localhost:10010", "secure": false }, + "/api/v1/setup": { + "target": "https://localhost:10010", + "secure": false + }, "/authority": { "target": "https://localhost:10020", "secure": false diff --git a/src/Web/StellaOps.Web/src/app/features/setup-wizard/services/setup-wizard-api.service.ts b/src/Web/StellaOps.Web/src/app/features/setup-wizard/services/setup-wizard-api.service.ts index 61e21f3d8..a5f999cb6 100644 --- a/src/Web/StellaOps.Web/src/app/features/setup-wizard/services/setup-wizard-api.service.ts +++ b/src/Web/StellaOps.Web/src/app/features/setup-wizard/services/setup-wizard-api.service.ts @@ -47,16 +47,33 @@ export interface ApiResponse { cacheHit?: boolean; } -/** Setup session response from backend */ -export interface SetupSessionResponse { +/** Step state as returned by the backend */ +export interface BackendStepState { + stepId: string; + status: string; + completedAtUtc?: string | null; + skippedAtUtc?: string | null; + skippedReason?: string | null; + checkResults: unknown[]; + errorMessage?: string | null; +} + +/** Backend session object (inside the response envelope) */ +export interface BackendSession { sessionId: string; - startedAt: string; - expiresAt?: string; - completedSteps: SetupStepId[]; - skippedSteps: SetupStepId[]; - configValues: Record; - currentStep?: SetupStepId; - metadata?: Record; + tenantId: string; + status: string; + steps: BackendStepState[]; + createdAtUtc: string; + updatedAtUtc: string; + createdBy?: string; + updatedBy?: string; + dataAsOfUtc?: string; +} + +/** Setup session response from backend — envelope is { session: ... 
} */ +export interface SetupSessionResponse { + session: BackendSession; } /** Step execution response from backend */ @@ -146,9 +163,9 @@ export class SetupWizardApiService { */ createSession(): Observable { return this.http - .post>(`${this.setupBaseUrl}/sessions`, {}) + .post(`${this.setupBaseUrl}/sessions`, {}) .pipe( - map(response => this.mapSessionResponse(response.data)), + map(response => this.mapSessionResponse(response)), catchError(error => this.handleError(error)) ); } @@ -158,9 +175,9 @@ export class SetupWizardApiService { */ resumeSession(sessionId: string): Observable { return this.http - .get>(`${this.setupBaseUrl}/sessions/${sessionId}`) + .get(`${this.setupBaseUrl}/sessions/${sessionId}`) .pipe( - map(response => this.mapSessionResponse(response.data)), + map(response => this.mapSessionResponse(response)), catchError(error => { if (error.status === 404) { return of(null); @@ -175,9 +192,9 @@ export class SetupWizardApiService { */ getCurrentSession(): Observable { return this.http - .get>(`${this.setupBaseUrl}/sessions/current`) + .get(`${this.setupBaseUrl}/sessions/current`) .pipe( - map(response => this.mapSessionResponse(response.data)), + map(response => this.mapSessionResponse(response)), catchError(error => { if (error.status === 404) { return of(null); @@ -454,16 +471,54 @@ export class SetupWizardApiService { // ═══════════════════════════════════════════════════════════════════════════ private mapSessionResponse(response: SetupSessionResponse): SetupSession { + const s = response.session; + + // Derive completedSteps and skippedSteps from the steps array + const completedSteps = s.steps + .filter(step => step.status === 'Passed') + .map(step => this.mapStepId(step.stepId)); + + const skippedSteps = s.steps + .filter(step => step.status === 'Skipped') + .map(step => this.mapStepId(step.stepId)); + + // Find the current step (status === 'Current') + const currentBackendStep = s.steps.find(step => step.status === 'Current'); + return { - sessionId: response.sessionId, - startedAt: response.startedAt, - completedSteps: response.completedSteps, - skippedSteps: response.skippedSteps, - configValues: response.configValues, - currentStep: response.currentStep, + sessionId: s.sessionId, + startedAt: s.createdAtUtc, + completedSteps, + skippedSteps, + configValues: {}, + currentStep: currentBackendStep + ? this.mapStepId(currentBackendStep.stepId) + : undefined, }; } + /** Maps backend PascalCase step ID to frontend lowercase step ID */ + private mapStepId(backendId: string): SetupStepId { + const mapping: Record = { + Database: 'database', + Valkey: 'cache', + Migrations: 'migrations', + Admin: 'authority', + Crypto: 'crypto', + Vault: 'vault', + Scm: 'scm', + Sources: 'sources', + Notifications: 'notify', + Environments: 'environments', + Agents: 'agents', + Registry: 'registry', + Telemetry: 'telemetry', + Llm: 'llm', + SettingsStore: 'settingsstore', + }; + return mapping[backendId] ?? (backendId.toLowerCase() as SetupStepId); + } + private mapStepResult(response: ExecuteStepResponse): SetupStepResult { return { stepId: response.stepId, diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AGENTS.md b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AGENTS.md new file mode 100644 index 000000000..4922ce8df --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AGENTS.md @@ -0,0 +1,23 @@ +# AdvisoryAI Attestation Charter + +## Mission +- Provide deterministic, offline-safe attestation models and helpers for Advisory AI workflows. 
+ +## Responsibilities +- Maintain stable attestation contracts and digest behavior. +- Keep in-memory implementations deterministic and test-friendly. +- Update local `TASKS.md` and sprint status when starting or finishing work. + +## Required Reading +- `docs/README.md` +- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` +- `docs/modules/platform/architecture-overview.md` +- `docs/modules/concelier/advisory-ai-api.md` + +## Working Directory & Scope +- Primary: `src/__Libraries/StellaOps.AdvisoryAI.Attestation` +- Allowed shared projects: `src/__Libraries/__Tests/StellaOps.AdvisoryAI.Attestation.Tests` + +## Testing Expectations +- Add unit tests for digest and template registry behaviors. +- Keep tests deterministic and offline-friendly. diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationResult.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationResult.cs new file mode 100644 index 000000000..1c290dcea --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationResult.cs @@ -0,0 +1,25 @@ +namespace StellaOps.AdvisoryAI.Attestation; + +/// +/// Result of creating an attestation. +/// +public sealed record AiAttestationResult +{ + /// Attestation ID. + public required string AttestationId { get; init; } + + /// Content digest. + public required string Digest { get; init; } + + /// Whether the attestation was signed. + public bool Signed { get; init; } + + /// DSSE envelope if signed. + public string? DsseEnvelope { get; init; } + + /// Storage URI. + public string? StorageUri { get; init; } + + /// Creation timestamp. + public required DateTimeOffset CreatedAt { get; init; } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Create.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Create.cs new file mode 100644 index 000000000..e231fd3f0 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Create.cs @@ -0,0 +1,95 @@ +using Microsoft.Extensions.Logging; +using StellaOps.AdvisoryAI.Attestation.Models; +using System.Text.Json; + +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class AiAttestationService +{ + /// + public Task CreateRunAttestationAsync( + AiRunAttestation attestation, + bool sign = true, + CancellationToken ct = default) + { + var now = _timeProvider.GetUtcNow(); + var digest = attestation.ComputeDigest(); + var json = JsonSerializer.Serialize(attestation, AiAttestationJsonContext.Default.AiRunAttestation); + + // In production, this would call the signer service + string? 
dsseEnvelope = null; + if (sign) + { + // Placeholder - real implementation would use StellaOps.Signer + dsseEnvelope = CreateMockDsseEnvelope(AiRunAttestation.PredicateType, json); + } + + var stored = new StoredAttestation( + attestation.RunId, + AiRunAttestation.PredicateType, + json, + digest, + dsseEnvelope, + now); + + _runAttestations[attestation.RunId] = stored; + + _logger.LogInformation( + "Created run attestation {RunId} with digest {Digest}, signed={Signed}", + attestation.RunId, + digest, + sign); + + return Task.FromResult(new AiAttestationResult + { + AttestationId = attestation.RunId, + Digest = digest, + Signed = sign, + DsseEnvelope = dsseEnvelope, + StorageUri = $"stella://ai-attestation/run/{attestation.RunId}", + CreatedAt = now + }); + } + + /// + public Task CreateClaimAttestationAsync( + AiClaimAttestation attestation, + bool sign = true, + CancellationToken ct = default) + { + var now = _timeProvider.GetUtcNow(); + var digest = attestation.ComputeDigest(); + var json = JsonSerializer.Serialize(attestation, AiAttestationJsonContext.Default.AiClaimAttestation); + + string? dsseEnvelope = null; + if (sign) + { + dsseEnvelope = CreateMockDsseEnvelope(AiClaimAttestation.PredicateType, json); + } + + var stored = new StoredAttestation( + attestation.ClaimId, + AiClaimAttestation.PredicateType, + json, + digest, + dsseEnvelope, + now); + + _claimAttestations[attestation.ClaimId] = stored; + + _logger.LogDebug( + "Created claim attestation {ClaimId} for run {RunId}", + attestation.ClaimId, + attestation.RunId); + + return Task.FromResult(new AiAttestationResult + { + AttestationId = attestation.ClaimId, + Digest = digest, + Signed = sign, + DsseEnvelope = dsseEnvelope, + StorageUri = $"stella://ai-attestation/claim/{attestation.ClaimId}", + CreatedAt = now + }); + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Helpers.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Helpers.cs new file mode 100644 index 000000000..08cdbae12 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Helpers.cs @@ -0,0 +1,25 @@ +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class AiAttestationService +{ + private static string CreateMockDsseEnvelope(string predicateType, string payload) + { + // Mock DSSE envelope - real implementation would use StellaOps.Signer + var payloadBase64 = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload)); + return $$""" + { + "payloadType": "{{predicateType}}", + "payload": "{{payloadBase64}}", + "signatures": [{"sig": "mock-signature"}] + } + """; + } + + private sealed record StoredAttestation( + string Id, + string PredicateType, + string Json, + string Digest, + string? 
DsseEnvelope, + DateTimeOffset CreatedAt); +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Read.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Read.cs new file mode 100644 index 000000000..b67b1c6f7 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Read.cs @@ -0,0 +1,55 @@ +using StellaOps.AdvisoryAI.Attestation.Models; +using System.Text.Json; + +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class AiAttestationService +{ + /// + public Task GetRunAttestationAsync( + string runId, + CancellationToken ct = default) + { + if (!_runAttestations.TryGetValue(runId, out var stored)) + { + return Task.FromResult(null); + } + + var attestation = JsonSerializer.Deserialize( + stored.Json, + AiAttestationJsonContext.Default.AiRunAttestation); + + return Task.FromResult(attestation); + } + + /// + public Task> GetClaimAttestationsAsync( + string runId, + CancellationToken ct = default) + { + var claims = _claimAttestations.Values + .Select(s => JsonSerializer.Deserialize(s.Json, AiAttestationJsonContext.Default.AiClaimAttestation)) + .Where(c => c != null && c.RunId == runId) + .Cast() + .ToList(); + + return Task.FromResult>(claims); + } + + /// + public Task> ListRecentAttestationsAsync( + string tenantId, + int limit = 100, + CancellationToken ct = default) + { + var attestations = _runAttestations.Values + .OrderByDescending(s => s.CreatedAt) + .Select(s => JsonSerializer.Deserialize(s.Json, AiAttestationJsonContext.Default.AiRunAttestation)) + .Where(a => a != null && a.TenantId == tenantId) + .Cast() + .Take(limit) + .ToList(); + + return Task.FromResult>(attestations); + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Verify.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Verify.cs new file mode 100644 index 000000000..657ba2455 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.Verify.cs @@ -0,0 +1,92 @@ +using Microsoft.Extensions.Logging; +using StellaOps.AdvisoryAI.Attestation.Models; +using System.Text.Json; + +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class AiAttestationService +{ + /// + public Task VerifyRunAttestationAsync( + string runId, + CancellationToken ct = default) + { + var now = _timeProvider.GetUtcNow(); + + if (!_runAttestations.TryGetValue(runId, out var stored)) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + $"Run attestation {runId} not found")); + } + + // Verify digest + var attestation = JsonSerializer.Deserialize( + stored.Json, + AiAttestationJsonContext.Default.AiRunAttestation); + + if (attestation == null) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + "Failed to deserialize attestation")); + } + + var computedDigest = attestation.ComputeDigest(); + if (computedDigest != stored.Digest) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + "Digest mismatch", + digestValid: false)); + } + + // In production, verify signature via signer service + bool? signatureValid = stored.DsseEnvelope != null ? true : null; + + _logger.LogDebug("Verified run attestation {RunId}", runId); + + return Task.FromResult(AiAttestationVerificationResult.Success( + now, + stored.DsseEnvelope != null ? 
"ai-attestation-key" : null)); + } + + /// + public Task VerifyClaimAttestationAsync( + string claimId, + CancellationToken ct = default) + { + var now = _timeProvider.GetUtcNow(); + + if (!_claimAttestations.TryGetValue(claimId, out var stored)) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + $"Claim attestation {claimId} not found")); + } + + var attestation = JsonSerializer.Deserialize( + stored.Json, + AiAttestationJsonContext.Default.AiClaimAttestation); + + if (attestation == null) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + "Failed to deserialize attestation")); + } + + var computedDigest = attestation.ComputeDigest(); + if (computedDigest != stored.Digest) + { + return Task.FromResult(AiAttestationVerificationResult.Failure( + now, + "Digest mismatch", + digestValid: false)); + } + + return Task.FromResult(AiAttestationVerificationResult.Success( + now, + stored.DsseEnvelope != null ? "ai-attestation-key" : null)); + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.cs index ed80bd01d..0523b6643 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationService.cs @@ -2,11 +2,8 @@ // Copyright (c) StellaOps. Licensed under the BUSL-1.1. // - using Microsoft.Extensions.Logging; -using StellaOps.AdvisoryAI.Attestation.Models; using System.Collections.Concurrent; -using System.Text.Json; namespace StellaOps.AdvisoryAI.Attestation; @@ -18,7 +15,7 @@ namespace StellaOps.AdvisoryAI.Attestation; /// This implementation stores attestations in memory. For production, /// use a database-backed implementation with signing integration. /// -public sealed class AiAttestationService : IAiAttestationService +public sealed partial class AiAttestationService : IAiAttestationService { private readonly TimeProvider _timeProvider; private readonly ILogger _logger; @@ -32,244 +29,4 @@ public sealed class AiAttestationService : IAiAttestationService _timeProvider = timeProvider; _logger = logger; } - - /// - public Task CreateRunAttestationAsync( - AiRunAttestation attestation, - bool sign = true, - CancellationToken ct = default) - { - var now = _timeProvider.GetUtcNow(); - var digest = attestation.ComputeDigest(); - var json = JsonSerializer.Serialize(attestation, AiAttestationJsonContext.Default.AiRunAttestation); - - // In production, this would call the signer service - string? 
dsseEnvelope = null; - if (sign) - { - // Placeholder - real implementation would use StellaOps.Signer - dsseEnvelope = CreateMockDsseEnvelope(AiRunAttestation.PredicateType, json); - } - - var stored = new StoredAttestation( - attestation.RunId, - AiRunAttestation.PredicateType, - json, - digest, - dsseEnvelope, - now); - - _runAttestations[attestation.RunId] = stored; - - _logger.LogInformation( - "Created run attestation {RunId} with digest {Digest}, signed={Signed}", - attestation.RunId, - digest, - sign); - - return Task.FromResult(new AiAttestationResult - { - AttestationId = attestation.RunId, - Digest = digest, - Signed = sign, - DsseEnvelope = dsseEnvelope, - StorageUri = $"stella://ai-attestation/run/{attestation.RunId}", - CreatedAt = now - }); - } - - /// - public Task CreateClaimAttestationAsync( - AiClaimAttestation attestation, - bool sign = true, - CancellationToken ct = default) - { - var now = _timeProvider.GetUtcNow(); - var digest = attestation.ComputeDigest(); - var json = JsonSerializer.Serialize(attestation, AiAttestationJsonContext.Default.AiClaimAttestation); - - string? dsseEnvelope = null; - if (sign) - { - dsseEnvelope = CreateMockDsseEnvelope(AiClaimAttestation.PredicateType, json); - } - - var stored = new StoredAttestation( - attestation.ClaimId, - AiClaimAttestation.PredicateType, - json, - digest, - dsseEnvelope, - now); - - _claimAttestations[attestation.ClaimId] = stored; - - _logger.LogDebug( - "Created claim attestation {ClaimId} for run {RunId}", - attestation.ClaimId, - attestation.RunId); - - return Task.FromResult(new AiAttestationResult - { - AttestationId = attestation.ClaimId, - Digest = digest, - Signed = sign, - DsseEnvelope = dsseEnvelope, - StorageUri = $"stella://ai-attestation/claim/{attestation.ClaimId}", - CreatedAt = now - }); - } - - /// - public Task VerifyRunAttestationAsync( - string runId, - CancellationToken ct = default) - { - var now = _timeProvider.GetUtcNow(); - - if (!_runAttestations.TryGetValue(runId, out var stored)) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - $"Run attestation {runId} not found")); - } - - // Verify digest - var attestation = JsonSerializer.Deserialize( - stored.Json, - AiAttestationJsonContext.Default.AiRunAttestation); - - if (attestation == null) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - "Failed to deserialize attestation")); - } - - var computedDigest = attestation.ComputeDigest(); - if (computedDigest != stored.Digest) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - "Digest mismatch", - digestValid: false)); - } - - // In production, verify signature via signer service - bool? signatureValid = stored.DsseEnvelope != null ? true : null; - - _logger.LogDebug("Verified run attestation {RunId}", runId); - - return Task.FromResult(AiAttestationVerificationResult.Success( - now, - stored.DsseEnvelope != null ? 
"ai-attestation-key" : null)); - } - - /// - public Task VerifyClaimAttestationAsync( - string claimId, - CancellationToken ct = default) - { - var now = _timeProvider.GetUtcNow(); - - if (!_claimAttestations.TryGetValue(claimId, out var stored)) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - $"Claim attestation {claimId} not found")); - } - - var attestation = JsonSerializer.Deserialize( - stored.Json, - AiAttestationJsonContext.Default.AiClaimAttestation); - - if (attestation == null) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - "Failed to deserialize attestation")); - } - - var computedDigest = attestation.ComputeDigest(); - if (computedDigest != stored.Digest) - { - return Task.FromResult(AiAttestationVerificationResult.Failure( - now, - "Digest mismatch", - digestValid: false)); - } - - return Task.FromResult(AiAttestationVerificationResult.Success( - now, - stored.DsseEnvelope != null ? "ai-attestation-key" : null)); - } - - /// - public Task GetRunAttestationAsync( - string runId, - CancellationToken ct = default) - { - if (!_runAttestations.TryGetValue(runId, out var stored)) - { - return Task.FromResult(null); - } - - var attestation = JsonSerializer.Deserialize( - stored.Json, - AiAttestationJsonContext.Default.AiRunAttestation); - - return Task.FromResult(attestation); - } - - /// - public Task> GetClaimAttestationsAsync( - string runId, - CancellationToken ct = default) - { - var claims = _claimAttestations.Values - .Select(s => JsonSerializer.Deserialize(s.Json, AiAttestationJsonContext.Default.AiClaimAttestation)) - .Where(c => c != null && c.RunId == runId) - .Cast() - .ToList(); - - return Task.FromResult>(claims); - } - - /// - public Task> ListRecentAttestationsAsync( - string tenantId, - int limit = 100, - CancellationToken ct = default) - { - var attestations = _runAttestations.Values - .OrderByDescending(s => s.CreatedAt) - .Select(s => JsonSerializer.Deserialize(s.Json, AiAttestationJsonContext.Default.AiRunAttestation)) - .Where(a => a != null && a.TenantId == tenantId) - .Cast() - .Take(limit) - .ToList(); - - return Task.FromResult>(attestations); - } - - private static string CreateMockDsseEnvelope(string predicateType, string payload) - { - // Mock DSSE envelope - real implementation would use StellaOps.Signer - var payloadBase64 = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload)); - return $$""" - { - "payloadType": "{{predicateType}}", - "payload": "{{payloadBase64}}", - "signatures": [{"sig": "mock-signature"}] - } - """; - } - - private sealed record StoredAttestation( - string Id, - string PredicateType, - string Json, - string Digest, - string? DsseEnvelope, - DateTimeOffset CreatedAt); } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationVerificationResult.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationVerificationResult.cs new file mode 100644 index 000000000..8e6169ff9 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/AiAttestationVerificationResult.cs @@ -0,0 +1,60 @@ +namespace StellaOps.AdvisoryAI.Attestation; + +/// +/// Result of verifying an attestation. +/// +public sealed record AiAttestationVerificationResult +{ + /// Whether verification succeeded. + public required bool Valid { get; init; } + + /// Verification timestamp. + public required DateTimeOffset VerifiedAt { get; init; } + + /// Signing key ID if signed. + public string? SigningKeyId { get; init; } + + /// Key expiration if applicable. 
+ public DateTimeOffset? KeyExpiresAt { get; init; } + + /// Digest verification result. + public bool DigestValid { get; init; } + + /// Signature verification result. + public bool? SignatureValid { get; init; } + + /// Verification failure reason if invalid. + public string? FailureReason { get; init; } + + /// + /// Creates a successful verification result. + /// + public static AiAttestationVerificationResult Success( + DateTimeOffset verifiedAt, + string? signingKeyId = null, + DateTimeOffset? keyExpiresAt = null) => new() + { + Valid = true, + VerifiedAt = verifiedAt, + SigningKeyId = signingKeyId, + KeyExpiresAt = keyExpiresAt, + DigestValid = true, + SignatureValid = signingKeyId != null ? true : null + }; + + /// + /// Creates a failed verification result. + /// + public static AiAttestationVerificationResult Failure( + DateTimeOffset verifiedAt, + string reason, + bool digestValid = false, + bool? signatureValid = null) => new() + { + Valid = false, + VerifiedAt = verifiedAt, + DigestValid = digestValid, + SignatureValid = signatureValid, + FailureReason = reason + }; +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IAiAttestationService.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IAiAttestationService.cs index ca202d112..0820ea966 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IAiAttestationService.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IAiAttestationService.cs @@ -88,86 +88,3 @@ public interface IAiAttestationService int limit = 100, CancellationToken ct = default); } - -/// -/// Result of creating an attestation. -/// -public sealed record AiAttestationResult -{ - /// Attestation ID. - public required string AttestationId { get; init; } - - /// Content digest. - public required string Digest { get; init; } - - /// Whether the attestation was signed. - public bool Signed { get; init; } - - /// DSSE envelope if signed. - public string? DsseEnvelope { get; init; } - - /// Storage URI. - public string? StorageUri { get; init; } - - /// Creation timestamp. - public required DateTimeOffset CreatedAt { get; init; } -} - -/// -/// Result of verifying an attestation. -/// -public sealed record AiAttestationVerificationResult -{ - /// Whether verification succeeded. - public required bool Valid { get; init; } - - /// Verification timestamp. - public required DateTimeOffset VerifiedAt { get; init; } - - /// Signing key ID if signed. - public string? SigningKeyId { get; init; } - - /// Key expiration if applicable. - public DateTimeOffset? KeyExpiresAt { get; init; } - - /// Digest verification result. - public bool DigestValid { get; init; } - - /// Signature verification result. - public bool? SignatureValid { get; init; } - - /// Verification failure reason if invalid. - public string? FailureReason { get; init; } - - /// - /// Creates a successful verification result. - /// - public static AiAttestationVerificationResult Success( - DateTimeOffset verifiedAt, - string? signingKeyId = null, - DateTimeOffset? keyExpiresAt = null) => new() - { - Valid = true, - VerifiedAt = verifiedAt, - SigningKeyId = signingKeyId, - KeyExpiresAt = keyExpiresAt, - DigestValid = true, - SignatureValid = signingKeyId != null ? true : null - }; - - /// - /// Creates a failed verification result. - /// - public static AiAttestationVerificationResult Failure( - DateTimeOffset verifiedAt, - string reason, - bool digestValid = false, - bool? 
signatureValid = null) => new() - { - Valid = false, - VerifiedAt = verifiedAt, - DigestValid = digestValid, - SignatureValid = signatureValid, - FailureReason = reason - }; -} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IPromptTemplateRegistry.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IPromptTemplateRegistry.cs new file mode 100644 index 000000000..bfba64aab --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/IPromptTemplateRegistry.cs @@ -0,0 +1,51 @@ +// +// Copyright (c) StellaOps. Licensed under the BUSL-1.1. +// + +using StellaOps.AdvisoryAI.Attestation.Models; + +namespace StellaOps.AdvisoryAI.Attestation; + +/// +/// Interface for prompt template registry. +/// Sprint: SPRINT_20260109_011_001 Task: AIAT-004 +/// +public interface IPromptTemplateRegistry +{ + /// + /// Registers a prompt template with version. + /// + /// Template name. + /// Template version. + /// Template content. + void Register(string name, string version, string template); + + /// + /// Gets template info including hash. + /// + /// Template name. + /// Template info or null if not found. + PromptTemplateInfo? GetTemplateInfo(string name); + + /// + /// Gets template info for a specific version. + /// + /// Template name. + /// Template version. + /// Template info or null if not found. + PromptTemplateInfo? GetTemplateInfo(string name, string version); + + /// + /// Verifies a template hash matches registered version. + /// + /// Template name. + /// Expected hash. + /// True if hash matches. + bool VerifyHash(string name, string expectedHash); + + /// + /// Gets all registered templates. + /// + /// All template info records. + IReadOnlyList GetAllTemplates(); +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Digest.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Digest.cs new file mode 100644 index 000000000..cef1ca882 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Digest.cs @@ -0,0 +1,18 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AdvisoryAI.Attestation.Models; + +public sealed partial record AiClaimAttestation +{ + /// + /// Computes the content digest for this attestation. + /// + public string ComputeDigest() + { + var json = JsonSerializer.Serialize(this, AiAttestationJsonContext.Default.AiClaimAttestation); + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Factory.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Factory.cs new file mode 100644 index 000000000..be39dcf7e --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.Factory.cs @@ -0,0 +1,48 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AdvisoryAI.Attestation.Models; + +public sealed partial record AiClaimAttestation +{ + /// + /// Creates a claim attestation from a claim evidence. + /// + public static AiClaimAttestation FromClaimEvidence( + ClaimEvidence evidence, + string runId, + string turnId, + string tenantId, + DateTimeOffset timestamp, + AiRunContext? 
context = null) + { + var claimDigest = ComputeClaimDigest(evidence.Text); + var claimId = $"claim-{Guid.NewGuid():N}"; + + var attestation = new AiClaimAttestation + { + ClaimId = claimId, + RunId = runId, + TurnId = turnId, + TenantId = tenantId, + ClaimText = evidence.Text, + ClaimDigest = claimDigest, + Category = evidence.Category, + GroundedBy = evidence.GroundedBy, + GroundingScore = evidence.GroundingScore, + Verified = evidence.Verified, + Timestamp = timestamp, + Context = context, + ContentDigest = "" // Placeholder, computed below + }; + + // Now compute the actual content digest + return attestation with { ContentDigest = attestation.ComputeDigest() }; + } + + private static string ComputeClaimDigest(string claimText) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(claimText)); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.cs index 2572639c1..273db3c36 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiClaimAttestation.cs @@ -3,9 +3,6 @@ // using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; using System.Text.Json.Serialization; namespace StellaOps.AdvisoryAI.Attestation.Models; @@ -21,7 +18,7 @@ namespace StellaOps.AdvisoryAI.Attestation.Models; /// - Claim-specific evidence linkage /// - Selective claim citation in reports /// -public sealed record AiClaimAttestation +public sealed partial record AiClaimAttestation { /// Attestation type URI. public const string PredicateType = "https://stellaops.org/attestation/ai-claim/v1"; @@ -86,54 +83,4 @@ public sealed record AiClaimAttestation [JsonPropertyName("claimType")] public string? ClaimType { get; init; } - /// - /// Computes the content digest for this attestation. - /// - public string ComputeDigest() - { - var json = JsonSerializer.Serialize(this, AiAttestationJsonContext.Default.AiClaimAttestation); - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } - - /// - /// Creates a claim attestation from a claim evidence. - /// - public static AiClaimAttestation FromClaimEvidence( - ClaimEvidence evidence, - string runId, - string turnId, - string tenantId, - DateTimeOffset timestamp, - AiRunContext? 
context = null) - { - var claimDigest = ComputeClaimDigest(evidence.Text); - var claimId = $"claim-{Guid.NewGuid():N}"; - - var attestation = new AiClaimAttestation - { - ClaimId = claimId, - RunId = runId, - TurnId = turnId, - TenantId = tenantId, - ClaimText = evidence.Text, - ClaimDigest = claimDigest, - Category = evidence.Category, - GroundedBy = evidence.GroundedBy, - GroundingScore = evidence.GroundingScore, - Verified = evidence.Verified, - Timestamp = timestamp, - Context = context, - ContentDigest = "" // Placeholder, computed below - }; - - // Now compute the actual content digest - return attestation with { ContentDigest = attestation.ComputeDigest() }; - } - - private static string ComputeClaimDigest(string claimText) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(claimText)); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.Digest.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.Digest.cs new file mode 100644 index 000000000..4a9797c1e --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.Digest.cs @@ -0,0 +1,18 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.AdvisoryAI.Attestation.Models; + +public sealed partial record AiRunAttestation +{ + /// + /// Computes the content digest for this attestation. + /// + public string ComputeDigest() + { + var json = JsonSerializer.Serialize(this, AiAttestationJsonContext.Default.AiRunAttestation); + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.cs index 956bb18a2..cc0011557 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunAttestation.cs @@ -3,9 +3,6 @@ // using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; using System.Text.Json.Serialization; namespace StellaOps.AdvisoryAI.Attestation.Models; @@ -23,7 +20,7 @@ namespace StellaOps.AdvisoryAI.Attestation.Models; /// - What was said (content digests) /// - What claims were made and their grounding evidence /// -public sealed record AiRunAttestation +public sealed partial record AiRunAttestation { /// Attestation type URI. public const string PredicateType = "https://stellaops.org/attestation/ai-run/v1"; @@ -84,35 +81,4 @@ public sealed record AiRunAttestation [JsonPropertyName("errorMessage")] public string? ErrorMessage { get; init; } - /// - /// Computes the content digest for this attestation. - /// - public string ComputeDigest() - { - var json = JsonSerializer.Serialize(this, AiAttestationJsonContext.Default.AiRunAttestation); - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json)); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } -} - -/// -/// AI run status. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum AiRunStatus -{ - /// Run completed successfully. - Completed, - - /// Run failed. - Failed, - - /// Run was cancelled. - Cancelled, - - /// Run timed out. - TimedOut, - - /// Run was blocked by guardrails. 
- Blocked } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunStatus.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunStatus.cs new file mode 100644 index 000000000..0ac8c856f --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Models/AiRunStatus.cs @@ -0,0 +1,25 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.AdvisoryAI.Attestation.Models; + +/// +/// AI run status. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum AiRunStatus +{ + /// Run completed successfully. + Completed, + + /// Run failed. + Failed, + + /// Run was cancelled. + Cancelled, + + /// Run timed out. + TimedOut, + + /// Run was blocked by guardrails. + Blocked +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Digest.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Digest.cs new file mode 100644 index 000000000..a8d368b7a --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Digest.cs @@ -0,0 +1,13 @@ +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class PromptTemplateRegistry +{ + private static string ComputeDigest(string content) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Query.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Query.cs new file mode 100644 index 000000000..7bea0ab9f --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.Query.cs @@ -0,0 +1,48 @@ +using Microsoft.Extensions.Logging; +using StellaOps.AdvisoryAI.Attestation.Models; + +namespace StellaOps.AdvisoryAI.Attestation; + +public sealed partial class PromptTemplateRegistry +{ + /// + public PromptTemplateInfo? GetTemplateInfo(string name) + { + return _latestVersions.TryGetValue(name, out var info) ? info : null; + } + + /// + public PromptTemplateInfo? GetTemplateInfo(string name, string version) + { + return _allVersions.TryGetValue((name, version), out var info) ? info : null; + } + + /// + public bool VerifyHash(string name, string expectedHash) + { + if (!_latestVersions.TryGetValue(name, out var info)) + { + _logger.LogWarning("Template {Name} not found for hash verification", name); + return false; + } + + var matches = string.Equals(info.Digest, expectedHash, StringComparison.OrdinalIgnoreCase); + + if (!matches) + { + _logger.LogWarning( + "Hash mismatch for template {Name}: expected {Expected}, got {Actual}", + name, + expectedHash, + info.Digest); + } + + return matches; + } + + /// + public IReadOnlyList GetAllTemplates() + { + return [.. _latestVersions.Values]; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.cs index ed467015a..5fa94e047 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/PromptTemplateRegistry.cs @@ -2,65 +2,17 @@ // Copyright (c) StellaOps. Licensed under the BUSL-1.1. 
// - using Microsoft.Extensions.Logging; using StellaOps.AdvisoryAI.Attestation.Models; using System.Collections.Concurrent; -using System.Globalization; -using System.Security.Cryptography; -using System.Text; namespace StellaOps.AdvisoryAI.Attestation; -/// -/// Interface for prompt template registry. -/// Sprint: SPRINT_20260109_011_001 Task: AIAT-004 -/// -public interface IPromptTemplateRegistry -{ - /// - /// Registers a prompt template with version. - /// - /// Template name. - /// Template version. - /// Template content. - void Register(string name, string version, string template); - - /// - /// Gets template info including hash. - /// - /// Template name. - /// Template info or null if not found. - PromptTemplateInfo? GetTemplateInfo(string name); - - /// - /// Gets template info for a specific version. - /// - /// Template name. - /// Template version. - /// Template info or null if not found. - PromptTemplateInfo? GetTemplateInfo(string name, string version); - - /// - /// Verifies a template hash matches registered version. - /// - /// Template name. - /// Expected hash. - /// True if hash matches. - bool VerifyHash(string name, string expectedHash); - - /// - /// Gets all registered templates. - /// - /// All template info records. - IReadOnlyList GetAllTemplates(); -} - /// /// In-memory implementation of prompt template registry. /// Sprint: SPRINT_20260109_011_001 Task: AIAT-004 /// -public sealed class PromptTemplateRegistry : IPromptTemplateRegistry +public sealed partial class PromptTemplateRegistry : IPromptTemplateRegistry { private readonly TimeProvider _timeProvider; private readonly ILogger _logger; @@ -101,51 +53,4 @@ public sealed class PromptTemplateRegistry : IPromptTemplateRegistry version, digest); } - - /// - public PromptTemplateInfo? GetTemplateInfo(string name) - { - return _latestVersions.TryGetValue(name, out var info) ? info : null; - } - - /// - public PromptTemplateInfo? GetTemplateInfo(string name, string version) - { - return _allVersions.TryGetValue((name, version), out var info) ? info : null; - } - - /// - public bool VerifyHash(string name, string expectedHash) - { - if (!_latestVersions.TryGetValue(name, out var info)) - { - _logger.LogWarning("Template {Name} not found for hash verification", name); - return false; - } - - var matches = string.Equals(info.Digest, expectedHash, StringComparison.OrdinalIgnoreCase); - - if (!matches) - { - _logger.LogWarning( - "Hash mismatch for template {Name}: expected {Expected}, got {Actual}", - name, - expectedHash, - info.Digest); - } - - return matches; - } - - /// - public IReadOnlyList GetAllTemplates() - { - return [.. _latestVersions.Values]; - } - - private static string ComputeDigest(string content) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content)); - return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; - } } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.Query.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.Query.cs new file mode 100644 index 000000000..8a9157c22 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.Query.cs @@ -0,0 +1,73 @@ +using StellaOps.AdvisoryAI.Attestation.Models; +using System.Collections.Immutable; + +namespace StellaOps.AdvisoryAI.Attestation.Storage; + +public partial interface IAiAttestationStore +{ + /// + /// Get a run attestation by run ID. + /// + /// The run ID. + /// Cancellation token. 
+ /// The attestation or null if not found. + Task GetRunAttestationAsync(string runId, CancellationToken ct); + + /// + /// Get all claim attestations for a run. + /// + /// The run ID. + /// Cancellation token. + /// List of claim attestations. + Task> GetClaimAttestationsAsync(string runId, CancellationToken ct); + + /// + /// Get claim attestations for a specific turn. + /// + /// The run ID. + /// The turn ID. + /// Cancellation token. + /// List of claim attestations for the turn. + Task> GetClaimAttestationsByTurnAsync( + string runId, + string turnId, + CancellationToken ct); + + /// + /// Get the signed envelope for a run. + /// + /// The run ID. + /// Cancellation token. + /// The signed envelope or null if not found. + Task GetSignedEnvelopeAsync(string runId, CancellationToken ct); + + /// + /// Check if a run attestation exists. + /// + /// The run ID. + /// Cancellation token. + /// True if the attestation exists. + Task ExistsAsync(string runId, CancellationToken ct); + + /// + /// Get attestations by tenant within a time range. + /// + /// The tenant ID. + /// Start time. + /// End time. + /// Cancellation token. + /// List of run attestations. + Task> GetByTenantAsync( + string tenantId, + DateTimeOffset from, + DateTimeOffset to, + CancellationToken ct); + + /// + /// Get attestation by content digest. + /// + /// The content digest. + /// Cancellation token. + /// The claim attestation or null if not found. + Task GetByContentDigestAsync(string contentDigest, CancellationToken ct); +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.cs index 01dacd80b..1775482cb 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/IAiAttestationStore.cs @@ -12,7 +12,7 @@ namespace StellaOps.AdvisoryAI.Attestation.Storage; /// Interface for storing and retrieving AI attestations. /// Sprint: SPRINT_20260109_011_001 Task: AIAT-006 /// -public interface IAiAttestationStore +public partial interface IAiAttestationStore { /// /// Store a run attestation. @@ -35,70 +35,4 @@ public interface IAiAttestationStore /// The attestation to store. /// Cancellation token. Task StoreClaimAttestationAsync(AiClaimAttestation attestation, CancellationToken ct); - - /// - /// Get a run attestation by run ID. - /// - /// The run ID. - /// Cancellation token. - /// The attestation or null if not found. - Task GetRunAttestationAsync(string runId, CancellationToken ct); - - /// - /// Get all claim attestations for a run. - /// - /// The run ID. - /// Cancellation token. - /// List of claim attestations. - Task> GetClaimAttestationsAsync(string runId, CancellationToken ct); - - /// - /// Get claim attestations for a specific turn. - /// - /// The run ID. - /// The turn ID. - /// Cancellation token. - /// List of claim attestations for the turn. - Task> GetClaimAttestationsByTurnAsync( - string runId, - string turnId, - CancellationToken ct); - - /// - /// Get the signed envelope for a run. - /// - /// The run ID. - /// Cancellation token. - /// The signed envelope or null if not found. - Task GetSignedEnvelopeAsync(string runId, CancellationToken ct); - - /// - /// Check if a run attestation exists. - /// - /// The run ID. - /// Cancellation token. - /// True if the attestation exists. 
- Task ExistsAsync(string runId, CancellationToken ct); - - /// - /// Get attestations by tenant within a time range. - /// - /// The tenant ID. - /// Start time. - /// End time. - /// Cancellation token. - /// List of run attestations. - Task> GetByTenantAsync( - string tenantId, - DateTimeOffset from, - DateTimeOffset to, - CancellationToken ct); - - /// - /// Get attestation by content digest. - /// - /// The content digest. - /// Cancellation token. - /// The claim attestation or null if not found. - Task GetByContentDigestAsync(string contentDigest, CancellationToken ct); } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Diagnostics.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Diagnostics.cs new file mode 100644 index 000000000..b7cdade59 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Diagnostics.cs @@ -0,0 +1,28 @@ +namespace StellaOps.AdvisoryAI.Attestation.Storage; + +public sealed partial class InMemoryAiAttestationStore +{ + /// + /// Clear all stored attestations. Useful for testing. + /// + public void Clear() + { + _runAttestations.Clear(); + _signedEnvelopes.Clear(); + _claimAttestations.Clear(); + _digestIndex.Clear(); + } + + /// + /// Get count of run attestations. Useful for testing. + /// + public int RunAttestationCount => _runAttestations.Count; + + /// + /// Get count of all claim attestations. Useful for testing. + /// + public int ClaimAttestationCount => _claimAttestations.Values.Sum(c => + { + lock (c) { return c.Count; } + }); +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Query.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Query.cs new file mode 100644 index 000000000..ef3324c16 --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Query.cs @@ -0,0 +1,85 @@ +using StellaOps.AdvisoryAI.Attestation.Models; +using System.Collections.Immutable; + +namespace StellaOps.AdvisoryAI.Attestation.Storage; + +public sealed partial class InMemoryAiAttestationStore +{ + /// + public Task GetRunAttestationAsync(string runId, CancellationToken ct) + { + _runAttestations.TryGetValue(runId, out var attestation); + return Task.FromResult(attestation); + } + + /// + public Task> GetClaimAttestationsAsync(string runId, CancellationToken ct) + { + if (_claimAttestations.TryGetValue(runId, out var claims)) + { + lock (claims) + { + return Task.FromResult(claims.ToImmutableArray()); + } + } + + return Task.FromResult(ImmutableArray.Empty); + } + + /// + public Task> GetClaimAttestationsByTurnAsync( + string runId, + string turnId, + CancellationToken ct) + { + if (_claimAttestations.TryGetValue(runId, out var claims)) + { + lock (claims) + { + var filtered = claims + .Where(c => c.TurnId == turnId) + .ToImmutableArray(); + return Task.FromResult(filtered); + } + } + + return Task.FromResult(ImmutableArray.Empty); + } + + /// + public Task GetSignedEnvelopeAsync(string runId, CancellationToken ct) + { + _signedEnvelopes.TryGetValue(runId, out var envelope); + return Task.FromResult(envelope); + } + + /// + public Task ExistsAsync(string runId, CancellationToken ct) + { + return Task.FromResult(_runAttestations.ContainsKey(runId)); + } + + /// + public Task> GetByTenantAsync( + string tenantId, + DateTimeOffset from, + DateTimeOffset to, + CancellationToken ct) + { + var results = 
_runAttestations.Values + .Where(a => a.TenantId == tenantId && + a.StartedAt >= from && + a.StartedAt <= to) + .OrderBy(a => a.StartedAt) + .ToImmutableArray(); + + return Task.FromResult(results); + } + + /// + public Task GetByContentDigestAsync(string contentDigest, CancellationToken ct) + { + _digestIndex.TryGetValue(contentDigest, out var attestation); + return Task.FromResult(attestation); + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Store.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Store.cs new file mode 100644 index 000000000..7f3ed672b --- /dev/null +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.Store.cs @@ -0,0 +1,42 @@ +using Microsoft.Extensions.Logging; +using StellaOps.AdvisoryAI.Attestation.Models; + +namespace StellaOps.AdvisoryAI.Attestation.Storage; + +public sealed partial class InMemoryAiAttestationStore +{ + /// + public Task StoreRunAttestationAsync(AiRunAttestation attestation, CancellationToken ct) + { + _runAttestations[attestation.RunId] = attestation; + _logger.LogDebug("Stored run attestation for RunId {RunId}", attestation.RunId); + return Task.CompletedTask; + } + + /// + public Task StoreSignedEnvelopeAsync(string runId, object envelope, CancellationToken ct) + { + _signedEnvelopes[runId] = envelope; + _logger.LogDebug("Stored signed envelope for RunId {RunId}", runId); + return Task.CompletedTask; + } + + /// + public Task StoreClaimAttestationAsync(AiClaimAttestation attestation, CancellationToken ct) + { + var claims = _claimAttestations.GetOrAdd(attestation.RunId, _ => []); + lock (claims) + { + claims.Add(attestation); + } + + _digestIndex[attestation.ContentDigest] = attestation; + + _logger.LogDebug( + "Stored claim attestation for RunId {RunId}, TurnId {TurnId}", + attestation.RunId, + attestation.TurnId); + + return Task.CompletedTask; + } +} diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.cs b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.cs index 32b50cf29..bb6949bde 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.cs +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/Storage/InMemoryAiAttestationStore.cs @@ -2,11 +2,9 @@ // Copyright (c) StellaOps. Licensed under the BUSL-1.1. // - using Microsoft.Extensions.Logging; using StellaOps.AdvisoryAI.Attestation.Models; using System.Collections.Concurrent; -using System.Collections.Immutable; namespace StellaOps.AdvisoryAI.Attestation.Storage; @@ -15,7 +13,7 @@ namespace StellaOps.AdvisoryAI.Attestation.Storage; /// Useful for testing and development. 
/// Sprint: SPRINT_20260109_011_001 Task: AIAT-006 /// -public sealed class InMemoryAiAttestationStore : IAiAttestationStore +public sealed partial class InMemoryAiAttestationStore : IAiAttestationStore { private readonly ConcurrentDictionary _runAttestations = new(); private readonly ConcurrentDictionary _signedEnvelopes = new(); @@ -27,141 +25,4 @@ public sealed class InMemoryAiAttestationStore : IAiAttestationStore { _logger = logger; } - - /// - public Task StoreRunAttestationAsync(AiRunAttestation attestation, CancellationToken ct) - { - _runAttestations[attestation.RunId] = attestation; - _logger.LogDebug("Stored run attestation for RunId {RunId}", attestation.RunId); - return Task.CompletedTask; - } - - /// - public Task StoreSignedEnvelopeAsync(string runId, object envelope, CancellationToken ct) - { - _signedEnvelopes[runId] = envelope; - _logger.LogDebug("Stored signed envelope for RunId {RunId}", runId); - return Task.CompletedTask; - } - - /// - public Task StoreClaimAttestationAsync(AiClaimAttestation attestation, CancellationToken ct) - { - var claims = _claimAttestations.GetOrAdd(attestation.RunId, _ => []); - lock (claims) - { - claims.Add(attestation); - } - - _digestIndex[attestation.ContentDigest] = attestation; - - _logger.LogDebug( - "Stored claim attestation for RunId {RunId}, TurnId {TurnId}", - attestation.RunId, - attestation.TurnId); - - return Task.CompletedTask; - } - - /// - public Task GetRunAttestationAsync(string runId, CancellationToken ct) - { - _runAttestations.TryGetValue(runId, out var attestation); - return Task.FromResult(attestation); - } - - /// - public Task> GetClaimAttestationsAsync(string runId, CancellationToken ct) - { - if (_claimAttestations.TryGetValue(runId, out var claims)) - { - lock (claims) - { - return Task.FromResult(claims.ToImmutableArray()); - } - } - - return Task.FromResult(ImmutableArray.Empty); - } - - /// - public Task> GetClaimAttestationsByTurnAsync( - string runId, - string turnId, - CancellationToken ct) - { - if (_claimAttestations.TryGetValue(runId, out var claims)) - { - lock (claims) - { - var filtered = claims - .Where(c => c.TurnId == turnId) - .ToImmutableArray(); - return Task.FromResult(filtered); - } - } - - return Task.FromResult(ImmutableArray.Empty); - } - - /// - public Task GetSignedEnvelopeAsync(string runId, CancellationToken ct) - { - _signedEnvelopes.TryGetValue(runId, out var envelope); - return Task.FromResult(envelope); - } - - /// - public Task ExistsAsync(string runId, CancellationToken ct) - { - return Task.FromResult(_runAttestations.ContainsKey(runId)); - } - - /// - public Task> GetByTenantAsync( - string tenantId, - DateTimeOffset from, - DateTimeOffset to, - CancellationToken ct) - { - var results = _runAttestations.Values - .Where(a => a.TenantId == tenantId && - a.StartedAt >= from && - a.StartedAt <= to) - .OrderBy(a => a.StartedAt) - .ToImmutableArray(); - - return Task.FromResult(results); - } - - /// - public Task GetByContentDigestAsync(string contentDigest, CancellationToken ct) - { - _digestIndex.TryGetValue(contentDigest, out var attestation); - return Task.FromResult(attestation); - } - - /// - /// Clear all stored attestations. Useful for testing. - /// - public void Clear() - { - _runAttestations.Clear(); - _signedEnvelopes.Clear(); - _claimAttestations.Clear(); - _digestIndex.Clear(); - } - - /// - /// Get count of run attestations. Useful for testing. - /// - public int RunAttestationCount => _runAttestations.Count; - - /// - /// Get count of all claim attestations. 
Useful for testing. - /// - public int ClaimAttestationCount => _claimAttestations.Values.Sum(c => - { - lock (c) { return c.Count; } - }); } diff --git a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/TASKS.md b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/TASKS.md index 58b0c099a..5cf0d28bd 100644 --- a/src/__Libraries/StellaOps.AdvisoryAI.Attestation/TASKS.md +++ b/src/__Libraries/StellaOps.AdvisoryAI.Attestation/TASKS.md @@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol | Task ID | Status | Notes | | --- | --- | --- | -| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/__Libraries/StellaOps.AdvisoryAI.Attestation/StellaOps.AdvisoryAI.Attestation.md. | +| REMED-05 | DONE | Split service/registry/models/store into <=100-line partials; `dotnet test src/__Libraries/__Tests/StellaOps.AdvisoryAI.Attestation.Tests/StellaOps.AdvisoryAI.Attestation.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (58 tests) 2026-02-04. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleReader.Models.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleReader.Models.cs index d4ea769e3..0573abf9b 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleReader.Models.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleReader.Models.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Collections.Immutable; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleWriter.Manifest.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleWriter.Manifest.cs index 99ccaf2dd..c2025a1c2 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleWriter.Manifest.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleWriter.Manifest.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Linq; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Build.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Build.cs index 3dd3f46b4..604897735 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Build.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Build.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Collections.Immutable; +using StellaOps.AuditPack.Models; using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Collectors.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Collectors.cs index 026155af3..477d3c6bc 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Collectors.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Collectors.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Collections.Immutable; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Files.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Files.cs index ea1261428..92cb9ef6f 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Files.cs +++ 
b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackBuilder.Files.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Text; +using StellaOps.AuditPack.Models; using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackImporter.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackImporter.cs index f04feaa0c..8e2e30af0 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackImporter.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackImporter.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Text.Json; +using StellaOps.AuditPack.Models; using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackReplayer.Helpers.cs b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackReplayer.Helpers.cs index b2d99dbda..fff234894 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/AuditPackReplayer.Helpers.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/AuditPackReplayer.Helpers.cs @@ -1,6 +1,6 @@ -using StellaOps.AuditPack.Models; using System.Security.Cryptography; using System.Text.Json; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/ReplayAttestationService.Hashing.cs b/src/__Libraries/StellaOps.AuditPack/Services/ReplayAttestationService.Hashing.cs index 477a11b31..2207ee70f 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/ReplayAttestationService.Hashing.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/ReplayAttestationService.Hashing.cs @@ -1,6 +1,6 @@ -using StellaOps.AuditPack.Models; using System.Security.Cryptography; using System.Text; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/Services/ReplayExecutor.ExecuteInternal.cs b/src/__Libraries/StellaOps.AuditPack/Services/ReplayExecutor.ExecuteInternal.cs index 273d30075..95837af04 100644 --- a/src/__Libraries/StellaOps.AuditPack/Services/ReplayExecutor.ExecuteInternal.cs +++ b/src/__Libraries/StellaOps.AuditPack/Services/ReplayExecutor.ExecuteInternal.cs @@ -1,5 +1,5 @@ -using StellaOps.AuditPack.Models; using System.Diagnostics; +using StellaOps.AuditPack.Models; namespace StellaOps.AuditPack.Services; diff --git a/src/__Libraries/StellaOps.AuditPack/TASKS.md b/src/__Libraries/StellaOps.AuditPack/TASKS.md index 4723289b4..681020ea3 100644 --- a/src/__Libraries/StellaOps.AuditPack/TASKS.md +++ b/src/__Libraries/StellaOps.AuditPack/TASKS.md @@ -10,3 +10,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0044-A | TODO | Requires MAINT/TEST + approval. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-07 | DONE | CSProj remediation complete (ReplayExecutor, ReplayAttestationService, VerdictReplayPredicate split; ConfigureAwait added; ReplayExecutor/VerdictReplayPredicate tests added). | +| REMED-05 | DONE | Sorted System-first usings for AuditPack builders/importer/replay helpers; ArchiveUtilities extraction tests added; dotnet test passed 2026-02-04. 
| diff --git a/src/__Libraries/StellaOps.Auth.Security/TASKS.md b/src/__Libraries/StellaOps.Auth.Security/TASKS.md index efc4c37c8..7d6c635fe 100644 --- a/src/__Libraries/StellaOps.Auth.Security/TASKS.md +++ b/src/__Libraries/StellaOps.Auth.Security/TASKS.md @@ -10,3 +10,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0045-A | TODO | Requires MAINT/TEST + approval. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-07 | DONE | DPoP remediation (validator/stores split <= 100 lines, private fields renamed, nonce store tests added). | +| REMED-05 | DONE | Added DpopValidationOptions unit coverage; dotnet test passed 2026-02-04. | diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsConfiguration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsConfiguration.cs new file mode 100644 index 000000000..b8a243cb0 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsConfiguration.cs @@ -0,0 +1,28 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoComplianceOptionsConfiguration : IConfigureOptions +{ + private readonly IConfiguration? _configuration; + + public CryptoComplianceOptionsConfiguration(IConfiguration? configuration = null) + { + _configuration = configuration; + } + + public void Configure(CryptoComplianceOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + if (_configuration is null) + { + return; + } + + _configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsPostConfigure.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsPostConfigure.cs new file mode 100644 index 000000000..83762640e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsPostConfigure.cs @@ -0,0 +1,14 @@ +using System; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoComplianceOptionsPostConfigure : IPostConfigureOptions +{ + public void PostConfigure(string? 
name, CryptoComplianceOptions options)
+    {
+        ArgumentNullException.ThrowIfNull(options);
+        options.ApplyEnvironmentOverrides();
+    }
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsRegistration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsRegistration.cs
new file mode 100644
index 000000000..8b3a28ff9
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoComplianceOptionsRegistration.cs
@@ -0,0 +1,21 @@
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+using Microsoft.Extensions.Options;
+using StellaOps.Cryptography;
+
+namespace StellaOps.Cryptography.DependencyInjection;
+
+internal static class CryptoComplianceOptionsRegistration
+{
+    internal static void Register(IServiceCollection services, bool bindFromConfiguration)
+    {
+        services.AddOptions<CryptoComplianceOptions>();
+
+        if (bindFromConfiguration)
+        {
+            services.TryAddEnumerable(ServiceDescriptor.Singleton<IConfigureOptions<CryptoComplianceOptions>, CryptoComplianceOptionsConfiguration>());
+        }
+
+        services.TryAddEnumerable(ServiceDescriptor.Singleton<IPostConfigureOptions<CryptoComplianceOptions>, CryptoComplianceOptionsPostConfigure>());
+    }
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoHttpClientNames.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoHttpClientNames.cs
new file mode 100644
index 000000000..da4eb5adc
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoHttpClientNames.cs
@@ -0,0 +1,10 @@
+using StellaOps.Cryptography.Plugin.SimRemote;
+using StellaOps.Cryptography.Plugin.SmRemote;
+
+namespace StellaOps.Cryptography.DependencyInjection;
+
+internal static class CryptoHttpClientNames
+{
+    internal const string SimRemote = nameof(SimRemoteHttpClient);
+    internal const string SmRemote = nameof(SmRemoteHttpClient);
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationOptions.cs
new file mode 100644
index 000000000..24cbd54f0
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationOptions.cs
@@ -0,0 +1,15 @@
+using System;
+using Microsoft.Extensions.Options;
+using StellaOps.Cryptography.PluginLoader;
+
+namespace StellaOps.Cryptography.DependencyInjection;
+
+internal sealed class CryptoPluginConfigurationOptions : IOptions<CryptoPluginConfiguration>
+{
+    public CryptoPluginConfigurationOptions(CryptoPluginConfiguration configuration)
+    {
+        Value = configuration ??
throw new ArgumentNullException(nameof(configuration)); + } + + public CryptoPluginConfiguration Value { get; } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationRegistry.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationRegistry.cs new file mode 100644 index 000000000..ed8be038d --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginConfigurationRegistry.cs @@ -0,0 +1,57 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Cryptography.PluginLoader; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoPluginConfigurationRegistry : ICryptoProviderRegistry +{ + private readonly CryptoProviderRegistry _registry; + + public CryptoPluginConfigurationRegistry( + IReadOnlyList providers, + IOptions configuration, + ILogger? logger = null) + { + ArgumentNullException.ThrowIfNull(providers); + ArgumentNullException.ThrowIfNull(configuration); + + var config = configuration.Value; + var preferredProviderNames = providers + .OrderByDescending(provider => GetProviderPriority(provider, config)) + .Select(provider => provider.Name) + .ToList(); + + logger?.LogInformation( + "Loaded {Count} crypto provider(s) with preferred order: {Providers}", + providers.Count, + string.Join(", ", preferredProviderNames)); + + _registry = new CryptoProviderRegistry(providers, preferredProviderNames); + } + + public IReadOnlyCollection Providers => _registry.Providers; + public bool TryResolve(string preferredProvider, out ICryptoProvider provider) + => _registry.TryResolve(preferredProvider, out provider); + public ICryptoProvider ResolveOrThrow(CryptoCapability capability, string algorithmId) + => _registry.ResolveOrThrow(capability, algorithmId); + public CryptoSignerResolution ResolveSigner( + CryptoCapability capability, + string algorithmId, + CryptoKeyReference keyReference, + string? preferredProvider = null) + => _registry.ResolveSigner(capability, algorithmId, keyReference, preferredProvider); + public CryptoHasherResolution ResolveHasher(string algorithmId, string? preferredProvider = null) + => _registry.ResolveHasher(algorithmId, preferredProvider); + + private static int GetProviderPriority(ICryptoProvider provider, CryptoPluginConfiguration config) + { + var enabledEntry = config.Enabled.FirstOrDefault(entry => + entry.Id.Equals(provider.Name, StringComparison.OrdinalIgnoreCase)); + return enabledEntry?.Priority ?? 50; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginDirectoryOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginDirectoryOptions.cs new file mode 100644 index 000000000..01482600c --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginDirectoryOptions.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoPluginDirectoryOptions +{ + public CryptoPluginDirectoryOptions(string? directory) + { + Directory = directory; + } + + public string? 
Directory { get; } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderEnumerable.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderEnumerable.cs new file mode 100644 index 000000000..e65f317f1 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderEnumerable.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoPluginProviderEnumerable : IEnumerable +{ + private readonly IReadOnlyList _providers; + + public CryptoPluginProviderEnumerable(IReadOnlyList providers) + { + _providers = providers ?? throw new ArgumentNullException(nameof(providers)); + } + + public IEnumerator GetEnumerator() => _providers.GetEnumerator(); + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderList.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderList.cs new file mode 100644 index 000000000..77b42e116 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderList.cs @@ -0,0 +1,44 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Cryptography.PluginLoader; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoPluginProviderList : IReadOnlyList +{ + private readonly IReadOnlyList _providers; + + public CryptoPluginProviderList( + IOptions configuration, + ILogger? logger = null, + CryptoPluginDirectoryOptions? directory = null) + { + ArgumentNullException.ThrowIfNull(configuration); + + var loader = new CryptoPluginLoader(configuration.Value, logger, directory?.Directory); + try + { + _providers = loader.LoadProviders(); + } + catch (CryptoPluginLoadException ex) + { + logger?.LogCritical(ex, "Failed to load crypto plugins: {Message}", ex.Message); + throw; + } + + if (_providers.Count == 0) + { + throw new InvalidOperationException( + "No crypto providers were loaded. Check plugin configuration and manifest."); + } + } + + public int Count => _providers.Count; + public ICryptoProvider this[int index] => _providers[index]; + public IEnumerator GetEnumerator() => _providers.GetEnumerator(); + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderRegistry.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderRegistry.cs new file mode 100644 index 000000000..3cced84ad --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginProviderRegistry.cs @@ -0,0 +1,34 @@ +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class CryptoPluginProviderRegistry : ICryptoProviderRegistry +{ + private readonly CryptoProviderRegistry _registry; + + public CryptoPluginProviderRegistry( + IReadOnlyList providers, + IOptionsMonitor? 
optionsMonitor = null) + { + ArgumentNullException.ThrowIfNull(providers); + var preferred = optionsMonitor?.CurrentValue?.ResolvePreferredProviders(); + _registry = new CryptoProviderRegistry(providers, preferred); + } + + public IReadOnlyCollection Providers => _registry.Providers; + public bool TryResolve(string preferredProvider, out ICryptoProvider provider) + => _registry.TryResolve(preferredProvider, out provider); + public ICryptoProvider ResolveOrThrow(CryptoCapability capability, string algorithmId) + => _registry.ResolveOrThrow(capability, algorithmId); + public CryptoSignerResolution ResolveSigner( + CryptoCapability capability, + string algorithmId, + CryptoKeyReference keyReference, + string? preferredProvider = null) + => _registry.ResolveSigner(capability, algorithmId, keyReference, preferredProvider); + public CryptoHasherResolution ResolveHasher(string algorithmId, string? preferredProvider = null) + => _registry.ResolveHasher(algorithmId, preferredProvider); +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.Compliance.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.Compliance.cs new file mode 100644 index 000000000..7c9cae756 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.Compliance.cs @@ -0,0 +1,37 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography; +using StellaOps.Cryptography.PluginLoader; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoPluginServiceCollectionExtensions +{ + /// + /// Registers crypto providers with plugin loading and compliance profile configuration. + /// + /// Service collection. + /// Application configuration. + /// Optional plugin configuration. + /// Optional compliance configuration. + /// The service collection. + public static IServiceCollection AddStellaOpsCryptoWithPluginsAndCompliance( + this IServiceCollection services, + IConfiguration configuration, + Action? configurePlugins = null, + Action? 
configureCompliance = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddStellaOpsCryptoWithPlugins(configuration, configurePlugins); + services.Configure(options => + { + configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); + configureCompliance?.Invoke(options); + }); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.cs index 3a1f23321..7207074e5 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.cs +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoPluginServiceCollectionExtensions.cs @@ -1,8 +1,8 @@ +using System; +using System.Collections.Generic; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; using StellaOps.Cryptography; using StellaOps.Cryptography.PluginLoader; @@ -11,7 +11,7 @@ namespace StellaOps.Cryptography.DependencyInjection; /// /// DI extension methods for configuration-driven crypto plugin loading. /// -public static class CryptoPluginServiceCollectionExtensions +public static partial class CryptoPluginServiceCollectionExtensions { /// /// Registers crypto providers using configuration-driven plugin loading. @@ -29,112 +29,25 @@ public static class CryptoPluginServiceCollectionExtensions ArgumentNullException.ThrowIfNull(services); ArgumentNullException.ThrowIfNull(configuration); - // Bind plugin configuration from appsettings + CryptoComplianceOptionsRegistration.Register(services, bindFromConfiguration: false); + services.Configure(options => + { + configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); + }); + services.Configure(options => { configuration.GetSection("StellaOps:Crypto:Plugins").Bind(options); configurePlugins?.Invoke(options); }); - // Register compliance options (reuse existing code) - services.TryAddSingleton>(sp => - { - var config = sp.GetService(); - var options = new CryptoComplianceOptions(); - config?.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - options.ApplyEnvironmentOverrides(); - return new StaticComplianceOptionsMonitor(options); - }); - services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); - - // Register plugin loader and load providers dynamically - services.TryAddSingleton(sp => - { - var pluginConfig = sp.GetRequiredService>().Value; - var logger = sp.GetService>(); - return new CryptoPluginLoader(pluginConfig, logger); - }); - - // Load all configured crypto providers - services.TryAddSingleton(sp => - { - var loader = sp.GetRequiredService(); - return loader.LoadProviders(); - }); - - // Register each loaded provider as ICryptoProvider - services.TryAddSingleton>(sp => - { - return sp.GetRequiredService>(); - }); - - // Register crypto provider registry with loaded providers - services.TryAddSingleton(sp => - { - var providers = sp.GetRequiredService>(); - var options = sp.GetService>(); - IEnumerable? 
preferred = options?.CurrentValue?.ResolvePreferredProviders(); - return new CryptoProviderRegistry(providers, preferred); - }); + services.TryAddSingleton, CryptoPluginProviderList>(); + services.TryAddSingleton, CryptoPluginProviderEnumerable>(); + services.TryAddSingleton(); return services; } - - /// - /// Registers crypto providers with plugin loading and compliance profile configuration. - /// - /// Service collection. - /// Application configuration. - /// Optional plugin configuration. - /// Optional compliance configuration. - /// The service collection. - public static IServiceCollection AddStellaOpsCryptoWithPluginsAndCompliance( - this IServiceCollection services, - IConfiguration configuration, - Action? configurePlugins = null, - Action? configureCompliance = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - // Bind compliance options from configuration - services.Configure(options => - { - configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - configureCompliance?.Invoke(options); - options.ApplyEnvironmentOverrides(); - }); - - // Register base crypto services with plugin loading - services.AddStellaOpsCryptoWithPlugins(configuration, configurePlugins); - - return services; - } - - /// - /// Helper class for static options monitoring. - /// - private sealed class StaticComplianceOptionsMonitor : IOptionsMonitor - { - private readonly CryptoComplianceOptions _options; - - public StaticComplianceOptionsMonitor(CryptoComplianceOptions options) - => _options = options; - - public CryptoComplianceOptions CurrentValue => _options; - - public CryptoComplianceOptions Get(string? name) => _options; - - public IDisposable OnChange(Action listener) - => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - public void Dispose() { } - } - } } diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderProfileOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderProfileOptions.cs new file mode 100644 index 000000000..bc86c7fde --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderProfileOptions.cs @@ -0,0 +1,11 @@ +using System.Collections.Generic; + +namespace StellaOps.Cryptography.DependencyInjection; + +public sealed class CryptoProviderProfileOptions +{ + /// + /// Ordered list of preferred provider names for the profile. + /// + public IList PreferredProviders { get; } = new List(); +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs index 98036b223..9722b09f9 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs @@ -74,11 +74,3 @@ public sealed class CryptoProviderRegistryOptions return Array.Empty(); } } - -public sealed class CryptoProviderProfileOptions -{ - /// - /// Ordered list of preferred provider names for the profile. 
- /// - public IList PreferredProviders { get; } = new List(); -} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.Helpers.cs new file mode 100644 index 000000000..d32cc7668 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.Helpers.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoProviderRegistryValidator +{ + private static bool GetEnvFlag(string name, bool defaultValue) + { + var raw = Environment.GetEnvironmentVariable(name); + if (string.IsNullOrWhiteSpace(raw)) + { + return defaultValue; + } + + return raw.Equals("1", StringComparison.OrdinalIgnoreCase) || + raw.Equals("true", StringComparison.OrdinalIgnoreCase) || + raw.Equals("yes", StringComparison.OrdinalIgnoreCase); + } + + private static void EnsureBaselineProfiles(CryptoProviderRegistryOptions options) + { + if (!options.PreferredProviders.Any()) + { + options.PreferredProviders.Add("default"); + } + + if (!options.Profiles.TryGetValue("ru-offline", out var ruOffline)) + { + ruOffline = new CryptoProviderProfileOptions(); + options.Profiles["ru-offline"] = ruOffline; + } + + if (!options.Profiles.ContainsKey("ru-linux-soft")) + { + options.Profiles["ru-linux-soft"] = new CryptoProviderProfileOptions(); + } + } + + private static void EnsureDefaultPreferred( + IList providers, + bool enableOpenSsl, + bool enablePkcs11, + bool enableWineCsp +#if STELLAOPS_CRYPTO_PRO + , bool enableCryptoPro +#endif + ) + { + InsertIfMissing(providers, "default"); + + if (enableOpenSsl) + { + InsertIfMissing(providers, "ru.openssl.gost"); + } + + if (enablePkcs11) + { + InsertIfMissing(providers, "ru.pkcs11"); + } + + if (enableWineCsp) + { + InsertIfMissing(providers, "ru.winecsp.http"); + } + +#if STELLAOPS_CRYPTO_PRO + if (enableCryptoPro && OperatingSystem.IsWindows()) + { + InsertIfMissing(providers, "ru.cryptopro.csp"); + } +#endif + } + + private static void InsertIfMissing(IList providers, string name) + { + for (var i = 0; i < providers.Count; i++) + { + if (string.Equals(providers[i], name, StringComparison.OrdinalIgnoreCase)) + { + return; + } + } + + providers.Insert(0, name); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.cs index 04897cae4..14659a1d6 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.cs +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryValidator.cs @@ -7,9 +7,9 @@ namespace StellaOps.Cryptography.DependencyInjection; /// /// Validates and normalises crypto provider registry options for RU/GOST baselines. 
/// -public static class CryptoProviderRegistryValidator +public static partial class CryptoProviderRegistryValidator { - private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + private static readonly StringComparer _ordinalIgnoreCase = StringComparer.OrdinalIgnoreCase; public static void EnforceRuLinuxDefaults(CryptoProviderRegistryOptions options) { @@ -50,7 +50,7 @@ public static class CryptoProviderRegistryValidator } if (OperatingSystem.IsLinux() && enableOpenSsl && - !resolved.Contains("ru.openssl.gost", OrdinalIgnoreCase)) + !resolved.Contains("ru.openssl.gost", _ordinalIgnoreCase)) { throw new InvalidOperationException("Linux RU baseline requires provider 'ru.openssl.gost' (set STELLAOPS_CRYPTO_ENABLE_RU_OPENSSL=0 to override explicitly)."); } @@ -60,84 +60,4 @@ public static class CryptoProviderRegistryValidator throw new InvalidOperationException("RU Linux baseline is misconfigured: both ru.openssl.gost and ru.pkcs11 are disabled via environment. Enable at least one provider."); } } - - private static bool GetEnvFlag(string name, bool defaultValue) - { - var raw = Environment.GetEnvironmentVariable(name); - if (string.IsNullOrWhiteSpace(raw)) - { - return defaultValue; - } - - return raw.Equals("1", StringComparison.OrdinalIgnoreCase) || - raw.Equals("true", StringComparison.OrdinalIgnoreCase) || - raw.Equals("yes", StringComparison.OrdinalIgnoreCase); - } - - private static void EnsureBaselineProfiles(CryptoProviderRegistryOptions options) - { - if (!options.PreferredProviders.Any()) - { - options.PreferredProviders.Add("default"); - } - - if (!options.Profiles.TryGetValue("ru-offline", out var ruOffline)) - { - ruOffline = new CryptoProviderProfileOptions(); - options.Profiles["ru-offline"] = ruOffline; - } - - if (!options.Profiles.ContainsKey("ru-linux-soft")) - { - options.Profiles["ru-linux-soft"] = new CryptoProviderProfileOptions(); - } - } - - private static void EnsureDefaultPreferred( - IList providers, - bool enableOpenSsl, - bool enablePkcs11, - bool enableWineCsp -#if STELLAOPS_CRYPTO_PRO - , bool enableCryptoPro -#endif - ) - { - InsertIfMissing(providers, "default"); - - if (enableOpenSsl) - { - InsertIfMissing(providers, "ru.openssl.gost"); - } - - if (enablePkcs11) - { - InsertIfMissing(providers, "ru.pkcs11"); - } - - if (enableWineCsp) - { - InsertIfMissing(providers, "ru.winecsp.http"); - } - -#if STELLAOPS_CRYPTO_PRO - if (enableCryptoPro && OperatingSystem.IsWindows()) - { - InsertIfMissing(providers, "ru.cryptopro.csp"); - } -#endif - } - - private static void InsertIfMissing(IList providers, string name) - { - for (var i = 0; i < providers.Count; i++) - { - if (string.Equals(providers[i], name, StringComparison.OrdinalIgnoreCase)) - { - return; - } - } - - providers.Insert(0, name); - } } diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Compliance.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Compliance.cs new file mode 100644 index 000000000..324295240 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Compliance.cs @@ -0,0 +1,35 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + /// + /// Registers crypto services with compliance 
profile configuration. + /// + /// Service collection. + /// Configuration root. + /// Optional compliance configuration. + /// The service collection. + public static IServiceCollection AddStellaOpsCryptoWithCompliance( + this IServiceCollection services, + IConfiguration configuration, + Action? configureCompliance = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddStellaOpsCrypto(); + + services.Configure(options => + { + configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); + configureCompliance?.Invoke(options); + }); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.DefaultProviders.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.DefaultProviders.cs new file mode 100644 index 000000000..e6132bf12 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.DefaultProviders.cs @@ -0,0 +1,37 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Plugin.PqSoft; +using StellaOps.Cryptography.Plugin.SmRemote; +using StellaOps.Cryptography.Plugin.SmSoft; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + private static void RegisterDefaultProviders( + IServiceCollection services, + Action? configureProvider) + { + var defaultProvider = new DefaultCryptoProvider(); + configureProvider?.Invoke(defaultProvider); + + services.TryAddSingleton(defaultProvider); + services.TryAddEnumerable(ServiceDescriptor.Singleton(defaultProvider)); + +#if STELLAOPS_CRYPTO_SODIUM + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); +#endif + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.PluginConfiguration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.PluginConfiguration.cs new file mode 100644 index 000000000..e19e05fa6 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.PluginConfiguration.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Cryptography.PluginLoader; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + /// + /// Registers crypto services using configuration-driven plugin loading. + /// This is the recommended method for production deployments with regional compliance requirements. + /// + /// Service collection. + /// Configuration root. 
+ /// Optional custom plugin directory path. Defaults to application base directory. + /// The service collection. + public static IServiceCollection AddStellaOpsCryptoFromConfiguration( + this IServiceCollection services, + IConfiguration configuration, + string? pluginDirectory = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + var pluginConfig = new CryptoPluginConfiguration(); + configuration.GetSection("StellaOps:Crypto:Plugins").Bind(pluginConfig); + + var complianceConfig = new CryptoComplianceConfiguration(); + configuration.GetSection("StellaOps:Crypto:Compliance").Bind(complianceConfig); + pluginConfig.Compliance = complianceConfig; + + services.AddSingleton(pluginConfig); + services.TryAddSingleton, CryptoPluginConfigurationOptions>(); + services.TryAddSingleton(new CryptoPluginDirectoryOptions(pluginDirectory)); + + CryptoComplianceOptionsRegistration.Register(services, bindFromConfiguration: false); + services.Configure(options => + { + configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); + }); + + services.TryAddSingleton(); + services.TryAddSingleton, CryptoPluginProviderList>(); + services.TryAddSingleton, CryptoPluginProviderEnumerable>(); + services.TryAddSingleton(); + + return services; + } + + /// + /// Registers crypto services using configuration-driven plugin loading with explicit compliance profile. + /// + /// Service collection. + /// Configuration root. + /// Compliance profile identifier (e.g., "gost", "fips", "eidas", "sm"). + /// Enable strict compliance validation. + /// Optional custom plugin directory path. + /// The service collection. + public static IServiceCollection AddStellaOpsCryptoFromConfiguration( + this IServiceCollection services, + IConfiguration configuration, + string complianceProfileId, + bool strictValidation = true, + string? 
pluginDirectory = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + ArgumentNullException.ThrowIfNull(complianceProfileId); + + services.AddStellaOpsCryptoFromConfiguration(configuration, pluginDirectory); + services.Configure(options => + { + options.ProfileId = complianceProfileId; + options.StrictValidation = strictValidation; + }); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Registry.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Registry.cs new file mode 100644 index 000000000..1e33cb94e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Registry.cs @@ -0,0 +1,13 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + private static void RegisterRegistry(IServiceCollection services) + { + services.TryAddSingleton(); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Ru.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Ru.cs new file mode 100644 index 000000000..dfda53051 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.Ru.cs @@ -0,0 +1,48 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography.Plugin.OpenSslGost; +using StellaOps.Cryptography.Plugin.Pkcs11Gost; +using StellaOps.Cryptography.Plugin.WineCsp; +#if STELLAOPS_CRYPTO_PRO +using StellaOps.Cryptography.Plugin.CryptoPro; +#endif + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + public static IServiceCollection AddStellaOpsCryptoRu( + this IServiceCollection services, + IConfiguration configuration, + Action? 
configureRegistry = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + var baseSection = configuration.GetSection("StellaOps:Crypto"); + services.Configure(baseSection); + services.Configure(baseSection.GetSection("Registry")); +#if STELLAOPS_CRYPTO_PRO + services.Configure(baseSection.GetSection("CryptoPro")); +#endif + services.Configure(baseSection.GetSection("Pkcs11")); + services.Configure(baseSection.GetSection("OpenSsl")); + services.Configure(baseSection.GetSection("WineCsp")); + + services.AddStellaOpsCrypto(configureRegistry); + services.AddOpenSslGostProvider(); + services.AddPkcs11GostProvider(); + services.AddWineCspProvider(); +#if STELLAOPS_CRYPTO_PRO + if (OperatingSystem.IsWindows()) + { + services.AddCryptoProGostProvider(); + } +#endif + + services.PostConfigure(CryptoProviderRegistryValidator.EnforceRuLinuxDefaults); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SimRemote.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SimRemote.cs new file mode 100644 index 000000000..bb3812b34 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SimRemote.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections.Generic; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Plugin.SimRemote; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + private static void RegisterSimRemote(IServiceCollection services) + { + services.AddOptions(); + services.AddHttpClient(); + services.TryAddEnumerable(ServiceDescriptor.Singleton, SimRemoteProviderOptionsConfiguration>()); + services.TryAddEnumerable(ServiceDescriptor.Singleton, SimRemoteProviderOptionsPostConfigure>()); + services.TryAddEnumerable(ServiceDescriptor.Singleton, SimRemoteHttpClientOptionsConfiguration>()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + } + + private static void RegisterSimRemoteRegistryProfile(IServiceCollection services) + { + services.PostConfigure(options => + { + var enableSimEnv = Environment.GetEnvironmentVariable("STELLAOPS_CRYPTO_ENABLE_SIM"); + var enableSim = string.Equals(enableSimEnv, "1", StringComparison.OrdinalIgnoreCase) || + string.Equals(enableSimEnv, "true", StringComparison.OrdinalIgnoreCase); + + if (!enableSim) + { + return; + } + + void AddIfMissing(IList list, string provider) + { + if (!list.Contains(provider, StringComparer.OrdinalIgnoreCase)) + { + list.Add(provider); + } + } + + if (!string.IsNullOrWhiteSpace(options.ActiveProfile) && + options.Profiles.TryGetValue(options.ActiveProfile, out var profile)) + { + AddIfMissing(profile.PreferredProviders, "sim.crypto.remote"); + } + else + { + AddIfMissing(options.PreferredProviders, "sim.crypto.remote"); + } + }); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SmRemote.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SmRemote.cs new file mode 100644 index 000000000..614439c51 --- /dev/null +++ 
b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.SmRemote.cs @@ -0,0 +1,17 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography.Plugin.SmRemote; + +namespace StellaOps.Cryptography.DependencyInjection; + +public static partial class CryptoServiceCollectionExtensions +{ + private static void RegisterSmRemote(IServiceCollection services) + { + services.AddOptions(); + services.AddHttpClient(); + services.TryAddEnumerable(ServiceDescriptor.Singleton, SmRemoteHttpClientOptionsConfiguration>()); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs index c230eccbd..8b3941793 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs @@ -1,29 +1,13 @@ -using StellaOps.Cryptography.Plugin.SimRemote; using System; -using System.Collections.Generic; -using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; using StellaOps.Cryptography; -using StellaOps.Cryptography.PluginLoader; -#if STELLAOPS_CRYPTO_PRO -using StellaOps.Cryptography.Plugin.CryptoPro; -#endif -using StellaOps.Cryptography.Plugin.Pkcs11Gost; -using StellaOps.Cryptography.Plugin.OpenSslGost; -using StellaOps.Cryptography.Plugin.SmRemote; -using StellaOps.Cryptography.Plugin.SmSoft; -using StellaOps.Cryptography.Plugin.PqSoft; -using StellaOps.Cryptography.Plugin.WineCsp; -using Microsoft.Extensions.Logging; namespace StellaOps.Cryptography.DependencyInjection; /// /// Dependency injection helpers for registering StellaOps cryptography services. /// -public static class CryptoServiceCollectionExtensions +public static partial class CryptoServiceCollectionExtensions { /// /// Registers the default crypto provider and registry. 
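A minimal consumption sketch of the refactored registration path (only the extension method and registry APIs visible in this diff are assumed; the console host, the bare ServiceCollection, and the closing enumeration are illustrative, and the soft/remote providers registered by default may vary by build flags):

// Sketch: with the options-pattern registration replacing the removed
// StaticComplianceOptionsMonitor/service-locator factories, a plain
// ServiceCollection plus AddStellaOpsCrypto() is enough to compose the registry.
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cryptography;
using StellaOps.Cryptography.DependencyInjection;

var services = new ServiceCollection();

// Registers the default provider plus the bundled soft/remote providers;
// CryptoComplianceOptions is bound via IConfigureOptions/IPostConfigureOptions
// (IConfiguration is optional, so no configuration root is required here).
services.AddStellaOpsCrypto();

using var provider = services.BuildServiceProvider();

// ICryptoProviderRegistry now resolves to ServiceRegisteredCryptoProviderRegistry,
// composed from every registered ICryptoProvider and CryptoProviderRegistryOptions ordering.
var registry = provider.GetRequiredService<ICryptoProviderRegistry>();
foreach (var cryptoProvider in registry.Providers)
{
    Console.WriteLine(cryptoProvider.Name);
}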
@@ -39,328 +23,19 @@ public static class CryptoServiceCollectionExtensions { ArgumentNullException.ThrowIfNull(services); + CryptoComplianceOptionsRegistration.Register(services, bindFromConfiguration: true); + if (configureRegistry is not null) { services.Configure(configureRegistry); } - // Register compliance options with default profile - services.TryAddSingleton>(sp => - { - var configuration = sp.GetService(); - var options = new CryptoComplianceOptions(); - configuration?.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - options.ApplyEnvironmentOverrides(); - return new StaticComplianceOptionsMonitor(options); - }); - - // Register compliance service - services.TryAddSingleton(); - - services.TryAddSingleton(sp => - { - var provider = new DefaultCryptoProvider(); - configureProvider?.Invoke(provider); - return provider; - }); - - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - -#if STELLAOPS_CRYPTO_SODIUM - services.TryAddSingleton(); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); -#endif - - services.TryAddSingleton(); - services.TryAddSingleton(); - services.AddOptions(); - services.AddHttpClient((sp, httpClient) => - { - var opts = sp.GetService>()?.Value; - if (opts is not null && !string.IsNullOrWhiteSpace(opts.BaseAddress)) - { - httpClient.BaseAddress = new Uri(opts.BaseAddress); - } - }); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - // Unified simulation provider (sim-crypto-service) - services.AddOptions() - .Configure((opts, config) => - { - config?.GetSection("StellaOps:Crypto:Sim").Bind(opts); - }) - .PostConfigure(opts => - { - var simUrl = Environment.GetEnvironmentVariable("STELLAOPS_CRYPTO_SIM_URL"); - if (!string.IsNullOrWhiteSpace(simUrl)) - { - opts.BaseAddress = simUrl; - } - }); - - services.AddHttpClient((sp, httpClient) => - { - var opts = sp.GetService>()?.Value; - if (opts is not null && !string.IsNullOrWhiteSpace(opts.BaseAddress)) - { - httpClient.BaseAddress = new Uri(opts.BaseAddress); - } - }); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - services.PostConfigure(opts => - { - var enableSimEnv = Environment.GetEnvironmentVariable("STELLAOPS_CRYPTO_ENABLE_SIM"); - var enableSim = string.Equals(enableSimEnv, "1", StringComparison.OrdinalIgnoreCase) || - string.Equals(enableSimEnv, "true", StringComparison.OrdinalIgnoreCase); - - if (!enableSim) - { - return; - } - - void AddIfMissing(IList list, string provider) - { - if (!list.Contains(provider, StringComparer.OrdinalIgnoreCase)) - { - list.Add(provider); - } - } - - if (!string.IsNullOrWhiteSpace(opts.ActiveProfile) && - opts.Profiles.TryGetValue(opts.ActiveProfile, out var profile)) - { - AddIfMissing(profile.PreferredProviders, "sim.crypto.remote"); - } - else - { - AddIfMissing(opts.PreferredProviders, "sim.crypto.remote"); - } - }); - - services.TryAddSingleton(sp => - { - var providers = sp.GetServices(); - var options = sp.GetService>(); - IEnumerable? 
preferred = options?.CurrentValue?.ResolvePreferredProviders(); - return new CryptoProviderRegistry(providers, preferred); - }); + RegisterDefaultProviders(services, configureProvider); + RegisterSmRemote(services); + RegisterSimRemote(services); + RegisterSimRemoteRegistryProfile(services); + RegisterRegistry(services); return services; } - - /// - /// Registers crypto services with compliance profile configuration. - /// - /// Service collection. - /// Configuration root. - /// Optional compliance configuration. - /// The service collection. - public static IServiceCollection AddStellaOpsCryptoWithCompliance( - this IServiceCollection services, - IConfiguration configuration, - Action? configureCompliance = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - // Bind compliance options from configuration - services.Configure(options => - { - configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - configureCompliance?.Invoke(options); - options.ApplyEnvironmentOverrides(); - }); - - // Register compliance service with options monitor - services.TryAddSingleton(); - - // Register base crypto services - services.AddStellaOpsCrypto(); - - return services; - } - - /// - /// Helper class for static options monitoring. - /// - private sealed class StaticComplianceOptionsMonitor : IOptionsMonitor - { - private readonly CryptoComplianceOptions _options; - - public StaticComplianceOptionsMonitor(CryptoComplianceOptions options) - => _options = options; - - public CryptoComplianceOptions CurrentValue => _options; - - public CryptoComplianceOptions Get(string? name) => _options; - - public IDisposable OnChange(Action listener) - => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - public void Dispose() { } - } - } - - public static IServiceCollection AddStellaOpsCryptoRu( - this IServiceCollection services, - IConfiguration configuration, - Action? configureRegistry = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - var baseSection = configuration.GetSection("StellaOps:Crypto"); - services.Configure(baseSection); - services.Configure(baseSection.GetSection("Registry")); -#if STELLAOPS_CRYPTO_PRO - services.Configure(baseSection.GetSection("CryptoPro")); -#endif - services.Configure(baseSection.GetSection("Pkcs11")); - services.Configure(baseSection.GetSection("OpenSsl")); - services.Configure(baseSection.GetSection("WineCsp")); - - services.AddStellaOpsCrypto(configureRegistry); - services.AddOpenSslGostProvider(); - services.AddPkcs11GostProvider(); - services.AddWineCspProvider(); -#if STELLAOPS_CRYPTO_PRO - if (OperatingSystem.IsWindows()) - { - services.AddCryptoProGostProvider(); - } -#endif - - services.PostConfigure(CryptoProviderRegistryValidator.EnforceRuLinuxDefaults); - - return services; - } - - /// - /// Registers crypto services using configuration-driven plugin loading. - /// This is the recommended method for production deployments with regional compliance requirements. - /// - /// Service collection. - /// Configuration root. - /// Optional custom plugin directory path. Defaults to application base directory. - /// The service collection. - public static IServiceCollection AddStellaOpsCryptoFromConfiguration( - this IServiceCollection services, - IConfiguration configuration, - string? 
pluginDirectory = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - // Bind plugin configuration from appsettings - var pluginConfig = new CryptoPluginConfiguration(); - configuration.GetSection("StellaOps:Crypto:Plugins").Bind(pluginConfig); - - // Bind compliance configuration - var complianceConfig = new CryptoComplianceConfiguration(); - configuration.GetSection("StellaOps:Crypto:Compliance").Bind(complianceConfig); - pluginConfig.Compliance = complianceConfig; - - // Register plugin configuration as singleton - services.AddSingleton(pluginConfig); - - // Register compliance options with configuration binding - services.Configure(options => - { - configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - options.ApplyEnvironmentOverrides(); - }); - - // Register compliance service - services.TryAddSingleton(); - - // Load crypto providers using plugin loader - services.TryAddSingleton(sp => - { - var logger = sp.GetService()?.CreateLogger(); - var loader = new CryptoPluginLoader(pluginConfig, logger, pluginDirectory); - - IReadOnlyList providers; - try - { - providers = loader.LoadProviders(); - } - catch (CryptoPluginLoadException ex) - { - logger?.LogCritical(ex, "Failed to load crypto plugins: {Message}", ex.Message); - throw; - } - - if (providers.Count == 0) - { - throw new InvalidOperationException( - "No crypto providers were loaded. Check plugin configuration and manifest."); - } - - // Extract provider names for preferred ordering (uses priority from manifest/config) - var preferredProviderNames = providers - .OrderByDescending(p => GetProviderPriority(p, pluginConfig)) - .Select(p => p.Name) - .ToList(); - - logger?.LogInformation( - "Loaded {Count} crypto provider(s) with preferred order: {Providers}", - providers.Count, - string.Join(", ", preferredProviderNames)); - - return new CryptoProviderRegistry(providers, preferredProviderNames); - }); - - return services; - } - - /// - /// Registers crypto services using configuration-driven plugin loading with explicit compliance profile. - /// - /// Service collection. - /// Configuration root. - /// Compliance profile identifier (e.g., "gost", "fips", "eidas", "sm"). - /// Enable strict compliance validation. - /// Optional custom plugin directory path. - /// The service collection. - public static IServiceCollection AddStellaOpsCryptoFromConfiguration( - this IServiceCollection services, - IConfiguration configuration, - string complianceProfileId, - bool strictValidation = true, - string? pluginDirectory = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - ArgumentNullException.ThrowIfNull(complianceProfileId); - - // Override compliance configuration with explicit profile - services.Configure(options => - { - configuration.GetSection(CryptoComplianceOptions.SectionKey).Bind(options); - options.ProfileId = complianceProfileId; - options.StrictValidation = strictValidation; - options.ApplyEnvironmentOverrides(); - }); - - return services.AddStellaOpsCryptoFromConfiguration(configuration, pluginDirectory); - } - - private static int GetProviderPriority(ICryptoProvider provider, CryptoPluginConfiguration config) - { - // Check if priority was overridden in configuration - var enabledEntry = config.Enabled.FirstOrDefault(e => - e.Id.Equals(provider.Name, StringComparison.OrdinalIgnoreCase)); - - return enabledEntry?.Priority ?? 
50; // Default priority
-    }
 }
diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/ServiceRegisteredCryptoProviderRegistry.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/ServiceRegisteredCryptoProviderRegistry.cs
new file mode 100644
index 000000000..11ee8bb5b
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/ServiceRegisteredCryptoProviderRegistry.cs
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using Microsoft.Extensions.Options;
+using StellaOps.Cryptography;
+
+namespace StellaOps.Cryptography.DependencyInjection;
+
+internal sealed class ServiceRegisteredCryptoProviderRegistry : ICryptoProviderRegistry
+{
+    private readonly CryptoProviderRegistry _registry;
+
+    public ServiceRegisteredCryptoProviderRegistry(
+        IEnumerable<ICryptoProvider> providers,
+        IOptionsMonitor<CryptoProviderRegistryOptions>? optionsMonitor = null)
+    {
+        ArgumentNullException.ThrowIfNull(providers);
+        var preferred = optionsMonitor?.CurrentValue?.ResolvePreferredProviders();
+        _registry = new CryptoProviderRegistry(providers, preferred);
+    }
+
+    public IReadOnlyCollection<ICryptoProvider> Providers => _registry.Providers;
+    public bool TryResolve(string preferredProvider, out ICryptoProvider provider)
+        => _registry.TryResolve(preferredProvider, out provider);
+    public ICryptoProvider ResolveOrThrow(CryptoCapability capability, string algorithmId)
+        => _registry.ResolveOrThrow(capability, algorithmId);
+    public CryptoSignerResolution ResolveSigner(
+        CryptoCapability capability,
+        string algorithmId,
+        CryptoKeyReference keyReference,
+        string? preferredProvider = null)
+        => _registry.ResolveSigner(capability, algorithmId, keyReference, preferredProvider);
+    public CryptoHasherResolution ResolveHasher(string algorithmId, string? preferredProvider = null)
+        => _registry.ResolveHasher(algorithmId, preferredProvider);
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteHttpClientOptionsConfiguration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteHttpClientOptionsConfiguration.cs
new file mode 100644
index 000000000..3c8dfd6c6
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteHttpClientOptionsConfiguration.cs
@@ -0,0 +1,38 @@
+using System;
+using Microsoft.Extensions.Http;
+using Microsoft.Extensions.Options;
+using StellaOps.Cryptography.Plugin.SimRemote;
+
+namespace StellaOps.Cryptography.DependencyInjection;
+
+internal sealed class SimRemoteHttpClientOptionsConfiguration : IConfigureNamedOptions<HttpClientFactoryOptions>
+{
+    private readonly IOptionsMonitor<SimRemoteProviderOptions> _options;
+
+    public SimRemoteHttpClientOptionsConfiguration(IOptionsMonitor<SimRemoteProviderOptions> options)
+    {
+        _options = options ?? throw new ArgumentNullException(nameof(options));
+    }
+
+    public void Configure(HttpClientFactoryOptions options)
+    {
+        Configure(Options.DefaultName, options);
+    }
+
+    public void Configure(string?
name, HttpClientFactoryOptions options) + { + if (!string.Equals(name, CryptoHttpClientNames.SimRemote, StringComparison.Ordinal)) + { + return; + } + + options.HttpClientActions.Add(client => + { + var current = _options.CurrentValue; + if (!string.IsNullOrWhiteSpace(current.BaseAddress)) + { + client.BaseAddress = new Uri(current.BaseAddress); + } + }); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsConfiguration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsConfiguration.cs new file mode 100644 index 000000000..429222046 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsConfiguration.cs @@ -0,0 +1,28 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography.Plugin.SimRemote; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class SimRemoteProviderOptionsConfiguration : IConfigureOptions +{ + private readonly IConfiguration? _configuration; + + public SimRemoteProviderOptionsConfiguration(IConfiguration? configuration = null) + { + _configuration = configuration; + } + + public void Configure(SimRemoteProviderOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + if (_configuration is null) + { + return; + } + + _configuration.GetSection("StellaOps:Crypto:Sim").Bind(options); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsPostConfigure.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsPostConfigure.cs new file mode 100644 index 000000000..96b99b8bd --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SimRemoteProviderOptionsPostConfigure.cs @@ -0,0 +1,19 @@ +using System; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography.Plugin.SimRemote; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class SimRemoteProviderOptionsPostConfigure : IPostConfigureOptions +{ + public void PostConfigure(string? name, SimRemoteProviderOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + var simUrl = Environment.GetEnvironmentVariable("STELLAOPS_CRYPTO_SIM_URL"); + if (!string.IsNullOrWhiteSpace(simUrl)) + { + options.BaseAddress = simUrl; + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SmRemoteHttpClientOptionsConfiguration.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SmRemoteHttpClientOptionsConfiguration.cs new file mode 100644 index 000000000..c752d231f --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/SmRemoteHttpClientOptionsConfiguration.cs @@ -0,0 +1,38 @@ +using System; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography.Plugin.SmRemote; + +namespace StellaOps.Cryptography.DependencyInjection; + +internal sealed class SmRemoteHttpClientOptionsConfiguration : IConfigureNamedOptions +{ + private readonly IOptionsMonitor _options; + + public SmRemoteHttpClientOptionsConfiguration(IOptionsMonitor options) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + } + + public void Configure(HttpClientFactoryOptions options) + { + Configure(Options.DefaultName, options); + } + + public void Configure(string? 
name, HttpClientFactoryOptions options) + { + if (!string.Equals(name, CryptoHttpClientNames.SmRemote, StringComparison.Ordinal)) + { + return; + } + + options.HttpClientActions.Add(client => + { + var current = _options.CurrentValue; + if (!string.IsNullOrWhiteSpace(current.BaseAddress)) + { + client.BaseAddress = new Uri(current.BaseAddress); + } + }); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOpsCryptoOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOpsCryptoOptions.cs index c17810c7b..61f5267bf 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOpsCryptoOptions.cs +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOpsCryptoOptions.cs @@ -1,8 +1,8 @@ -using StellaOps.Cryptography.Plugin.OpenSslGost; -using StellaOps.Cryptography.Plugin.Pkcs11Gost; #if STELLAOPS_CRYPTO_PRO using StellaOps.Cryptography.Plugin.CryptoPro; #endif +using StellaOps.Cryptography.Plugin.OpenSslGost; +using StellaOps.Cryptography.Plugin.Pkcs11Gost; namespace StellaOps.Cryptography.DependencyInjection; diff --git a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/TASKS.md b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/TASKS.md index 4b636875e..64587e576 100644 --- a/src/__Libraries/StellaOps.Cryptography.DependencyInjection/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0050-T | DONE | Revalidated 2026-01-08. | | AUDIT-0050-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Removed service locator usage, split DI files/options/validator helpers, added DI ordering/plugin-loading tests; `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (326 tests). 
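For reference, the named-client wiring used by the Sim/Sm remote configurators above can be reproduced in isolation with the sketch below. RemoteProviderOptions and the "crypto-remote" client name are illustrative placeholders standing in for the PR's SimRemoteProviderOptions / CryptoHttpClientNames members; the registration calls (AddOptions, Bind, PostConfigure, AddHttpClient, IConfigureOptions<HttpClientFactoryOptions>) are standard Microsoft.Extensions APIs.

using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Http;
using Microsoft.Extensions.Options;

// Illustrative stand-in for the PR's SimRemote/SmRemote provider options.
public sealed class RemoteProviderOptions
{
    public string? BaseAddress { get; set; }
}

// Guard-on-name pattern: only the single named client receives the BaseAddress action.
internal sealed class RemoteHttpClientConfiguration : IConfigureNamedOptions<HttpClientFactoryOptions>
{
    private readonly IOptionsMonitor<RemoteProviderOptions> _options;

    public RemoteHttpClientConfiguration(IOptionsMonitor<RemoteProviderOptions> options) => _options = options;

    public void Configure(HttpClientFactoryOptions options) => Configure(Options.DefaultName, options);

    public void Configure(string? name, HttpClientFactoryOptions options)
    {
        if (!string.Equals(name, "crypto-remote", StringComparison.Ordinal))
        {
            return; // leave every other named client untouched
        }

        options.HttpClientActions.Add(client =>
        {
            var current = _options.CurrentValue;
            if (!string.IsNullOrWhiteSpace(current.BaseAddress))
            {
                client.BaseAddress = new Uri(current.BaseAddress);
            }
        });
    }
}

// Registration: bind from configuration, allow an environment-variable override, then name the client.
public static class RemoteProviderRegistration
{
    public static IServiceCollection AddRemoteProviderClient(this IServiceCollection services, IConfiguration configuration)
    {
        services.AddOptions<RemoteProviderOptions>()
                .Bind(configuration.GetSection("StellaOps:Crypto:Sim"));
        services.PostConfigure<RemoteProviderOptions>(options =>
        {
            var url = Environment.GetEnvironmentVariable("STELLAOPS_CRYPTO_SIM_URL");
            if (!string.IsNullOrWhiteSpace(url))
            {
                options.BaseAddress = url;
            }
        });
        services.AddHttpClient("crypto-remote");
        services.AddSingleton<IConfigureOptions<HttpClientFactoryOptions>, RemoteHttpClientConfiguration>();
        return services;
    }
}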
| diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Cache.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Cache.cs new file mode 100644 index 000000000..60ec91416 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Cache.cs @@ -0,0 +1,36 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class AwsKmsClient +{ + private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) + { + return cached.Metadata; + } + + var metadata = await _facade.GetMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var entry = new CachedMetadata(metadata, now.Add(_metadataCacheDuration)); + _metadataCache[keyId] = entry; + return metadata; + } + + private async Task GetCachedPublicKeyAsync(string resource, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_publicKeyCache.TryGetValue(resource, out var cached) && cached.ExpiresAt > now) + { + return cached.Material; + } + + var material = await _facade.GetPublicKeyAsync(resource, cancellationToken).ConfigureAwait(false); + var entry = new CachedPublicKey(material, now.Add(_publicKeyCacheDuration)); + _publicKeyCache[resource] = entry; + return material; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Helpers.cs new file mode 100644 index 000000000..30696bd7c --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Helpers.cs @@ -0,0 +1,68 @@ +using Microsoft.IdentityModel.Tokens; +using System; +using System.Security.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class AwsKmsClient +{ + private static byte[] ComputeSha256(ReadOnlyMemory data) + { + var digest = new byte[32]; + if (!SHA256.TryHashData(data.Span, digest, out _)) + { + throw new InvalidOperationException("Failed to hash payload with SHA-256."); + } + + return digest; + } + + private static string ResolveResource(string keyId, string? version) + => string.IsNullOrWhiteSpace(version) ? 
keyId : version; + + private static string ResolveCurveName(string curve) + { + if (string.Equals(curve, "ECC_NIST_P256", StringComparison.OrdinalIgnoreCase) || + string.Equals(curve, "P-256", StringComparison.OrdinalIgnoreCase)) + { + return JsonWebKeyECTypes.P256; + } + + if (string.Equals(curve, "ECC_NIST_P384", StringComparison.OrdinalIgnoreCase) || + string.Equals(curve, "P-384", StringComparison.OrdinalIgnoreCase)) + { + return JsonWebKeyECTypes.P384; + } + + if (string.Equals(curve, "ECC_NIST_P521", StringComparison.OrdinalIgnoreCase) || + string.Equals(curve, "P-521", StringComparison.OrdinalIgnoreCase)) + { + return JsonWebKeyECTypes.P521; + } + + if (string.Equals(curve, "SECP256K1", StringComparison.OrdinalIgnoreCase) || + string.Equals(curve, "ECC_SECG_P256K1", StringComparison.OrdinalIgnoreCase)) + { + return "secp256k1"; + } + + return curve; + } + + private static KmsKeyState MapState(AwsKeyStatus status) + => status switch + { + AwsKeyStatus.Enabled => KmsKeyState.Active, + AwsKeyStatus.PendingImport or AwsKeyStatus.PendingUpdate => KmsKeyState.PendingRotation, + AwsKeyStatus.Disabled or AwsKeyStatus.PendingDeletion or AwsKeyStatus.Unavailable => KmsKeyState.Revoked, + _ => KmsKeyState.Active, + }; + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(AwsKmsClient)); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Metadata.cs new file mode 100644 index 000000000..9521323a9 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Metadata.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class AwsKmsClient +{ + public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var metadata = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var publicKey = await GetCachedPublicKeyAsync(metadata.KeyId, cancellationToken).ConfigureAwait(false); + + var versionState = MapState(metadata.Status); + var versionMetadata = ImmutableArray.Create( + new KmsKeyVersionMetadata( + publicKey.VersionId, + versionState, + metadata.CreatedAt, + null, + Convert.ToBase64String(publicKey.SubjectPublicKeyInfo), + ResolveCurveName(publicKey.Curve))); + + return new KmsKeyMetadata( + metadata.KeyId, + KmsAlgorithms.Es256, + versionState, + metadata.CreatedAt, + versionMetadata); + } + + public async Task ExportAsync( + string keyId, + string? keyVersion, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var metadata = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var resource = ResolveResource(metadata.KeyId, keyVersion); + var publicKey = await GetCachedPublicKeyAsync(resource, cancellationToken).ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportSubjectPublicKeyInfo(publicKey.SubjectPublicKeyInfo, out _); + var parameters = ecdsa.ExportParameters(false); + + return new KmsKeyMaterial( + metadata.KeyId, + publicKey.VersionId, + KmsAlgorithms.Es256, + ResolveCurveName(publicKey.Curve), + Array.Empty(), + parameters.Q.X ?? 
throw new InvalidOperationException("Public key missing X coordinate."), + parameters.Q.Y ?? throw new InvalidOperationException("Public key missing Y coordinate."), + metadata.CreatedAt); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Models.cs new file mode 100644 index 000000000..57a683ca9 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Models.cs @@ -0,0 +1,10 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class AwsKmsClient +{ + private sealed record CachedMetadata(AwsKeyMetadata Metadata, DateTimeOffset ExpiresAt); + + private sealed record CachedPublicKey(AwsPublicKeyMaterial Material, DateTimeOffset ExpiresAt); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Signing.cs new file mode 100644 index 000000000..d3e982fb6 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.Signing.cs @@ -0,0 +1,66 @@ +using System; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class AwsKmsClient +{ + public async Task SignAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty) + { + throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); + } + + var digest = ComputeSha256(data); + try + { + var resource = ResolveResource(keyId, keyVersion); + var result = await _facade.SignAsync(resource, digest, cancellationToken).ConfigureAwait(false); + + return new KmsSignResult( + keyId, + string.IsNullOrWhiteSpace(result.VersionId) ? resource : result.VersionId, + KmsAlgorithms.Es256, + result.Signature); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } + + public async Task VerifyAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty || signature.IsEmpty) + { + return false; + } + + var digest = ComputeSha256(data); + try + { + var resource = ResolveResource(keyId, keyVersion); + return await _facade.VerifyAsync(resource, digest, signature, cancellationToken).ConfigureAwait(false); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.cs index ad7de944c..974ebfb16 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsClient.cs @@ -1,21 +1,19 @@ - -using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Options; +using System; using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Security.Cryptography; +using System.Threading.Tasks; namespace StellaOps.Cryptography.Kms; /// /// AWS KMS implementation of . 
/// -public sealed class AwsKmsClient : IKmsClient, IDisposable +public sealed partial class AwsKmsClient : IKmsClient, IDisposable { private readonly IAwsKmsFacade _facade; private readonly TimeProvider _timeProvider; private readonly TimeSpan _metadataCacheDuration; private readonly TimeSpan _publicKeyCacheDuration; - private readonly ConcurrentDictionary _metadataCache = new(StringComparer.Ordinal); private readonly ConcurrentDictionary _publicKeyCache = new(StringComparer.Ordinal); private bool _disposed; @@ -30,114 +28,9 @@ public sealed class AwsKmsClient : IKmsClient, IDisposable _publicKeyCacheDuration = options.PublicKeyCacheDuration; } - public async Task SignAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - CancellationToken cancellationToken = default) + public AwsKmsClient(IAwsKmsFacade facade, IOptions options, TimeProvider timeProvider) + : this(facade, options?.Value ?? new AwsKmsOptions(), timeProvider) { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty) - { - throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); - } - - var digest = ComputeSha256(data); - try - { - var resource = ResolveResource(keyId, keyVersion); - var result = await _facade.SignAsync(resource, digest, cancellationToken).ConfigureAwait(false); - - return new KmsSignResult( - keyId, - string.IsNullOrWhiteSpace(result.VersionId) ? resource : result.VersionId, - KmsAlgorithms.Es256, - result.Signature); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public async Task VerifyAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - ReadOnlyMemory signature, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty || signature.IsEmpty) - { - return false; - } - - var digest = ComputeSha256(data); - try - { - var resource = ResolveResource(keyId, keyVersion); - return await _facade.VerifyAsync(resource, digest, signature, cancellationToken).ConfigureAwait(false); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var metadata = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var publicKey = await GetCachedPublicKeyAsync(metadata.KeyId, cancellationToken).ConfigureAwait(false); - - var versionState = MapState(metadata.Status); - var versionMetadata = ImmutableArray.Create( - new KmsKeyVersionMetadata( - publicKey.VersionId, - versionState, - metadata.CreatedAt, - null, - Convert.ToBase64String(publicKey.SubjectPublicKeyInfo), - ResolveCurveName(publicKey.Curve))); - - return new KmsKeyMetadata( - metadata.KeyId, - KmsAlgorithms.Es256, - versionState, - metadata.CreatedAt, - versionMetadata); - } - - public async Task ExportAsync( - string keyId, - string? 
keyVersion, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var metadata = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var resource = ResolveResource(metadata.KeyId, keyVersion); - var publicKey = await GetCachedPublicKeyAsync(resource, cancellationToken).ConfigureAwait(false); - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportSubjectPublicKeyInfo(publicKey.SubjectPublicKeyInfo, out _); - var parameters = ecdsa.ExportParameters(false); - - return new KmsKeyMaterial( - metadata.KeyId, - publicKey.VersionId, - KmsAlgorithms.Es256, - ResolveCurveName(publicKey.Curve), - Array.Empty(), - parameters.Q.X ?? throw new InvalidOperationException("Public key missing X coordinate."), - parameters.Q.Y ?? throw new InvalidOperationException("Public key missing Y coordinate."), - metadata.CreatedAt); } public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) @@ -156,96 +49,4 @@ public sealed class AwsKmsClient : IKmsClient, IDisposable _disposed = true; _facade.Dispose(); } - - private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) - { - return cached.Metadata; - } - - var metadata = await _facade.GetMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var entry = new CachedMetadata(metadata, now.Add(_metadataCacheDuration)); - _metadataCache[keyId] = entry; - return metadata; - } - - private async Task GetCachedPublicKeyAsync(string resource, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_publicKeyCache.TryGetValue(resource, out var cached) && cached.ExpiresAt > now) - { - return cached.Material; - } - - var material = await _facade.GetPublicKeyAsync(resource, cancellationToken).ConfigureAwait(false); - var entry = new CachedPublicKey(material, now.Add(_publicKeyCacheDuration)); - _publicKeyCache[resource] = entry; - return material; - } - - private static byte[] ComputeSha256(ReadOnlyMemory data) - { - var digest = new byte[32]; - if (!SHA256.TryHashData(data.Span, digest, out _)) - { - throw new InvalidOperationException("Failed to hash payload with SHA-256."); - } - - return digest; - } - - private static string ResolveResource(string keyId, string? version) - => string.IsNullOrWhiteSpace(version) ? 
keyId : version; - - private static string ResolveCurveName(string curve) - { - if (string.Equals(curve, "ECC_NIST_P256", StringComparison.OrdinalIgnoreCase) || - string.Equals(curve, "P-256", StringComparison.OrdinalIgnoreCase)) - { - return JsonWebKeyECTypes.P256; - } - - if (string.Equals(curve, "ECC_NIST_P384", StringComparison.OrdinalIgnoreCase) || - string.Equals(curve, "P-384", StringComparison.OrdinalIgnoreCase)) - { - return JsonWebKeyECTypes.P384; - } - - if (string.Equals(curve, "ECC_NIST_P521", StringComparison.OrdinalIgnoreCase) || - string.Equals(curve, "P-521", StringComparison.OrdinalIgnoreCase)) - { - return JsonWebKeyECTypes.P521; - } - - if (string.Equals(curve, "SECP256K1", StringComparison.OrdinalIgnoreCase) || - string.Equals(curve, "ECC_SECG_P256K1", StringComparison.OrdinalIgnoreCase)) - { - return "secp256k1"; - } - - return curve; - } - - private static KmsKeyState MapState(AwsKeyStatus status) - => status switch - { - AwsKeyStatus.Enabled => KmsKeyState.Active, - AwsKeyStatus.PendingImport or AwsKeyStatus.PendingUpdate => KmsKeyState.PendingRotation, - AwsKeyStatus.Disabled or AwsKeyStatus.PendingDeletion or AwsKeyStatus.Unavailable => KmsKeyState.Revoked, - _ => KmsKeyState.Active, - }; - - private void ThrowIfDisposed() - { - if (_disposed) - { - throw new ObjectDisposedException(nameof(AwsKmsClient)); - } - } - - private sealed record CachedMetadata(AwsKeyMetadata Metadata, DateTimeOffset ExpiresAt); - - private sealed record CachedPublicKey(AwsPublicKeyMaterial Material, DateTimeOffset ExpiresAt); -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Contracts.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Contracts.cs new file mode 100644 index 000000000..68118ec84 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Contracts.cs @@ -0,0 +1,16 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public interface IAwsKmsFacade : IDisposable +{ + Task SignAsync(string keyResource, ReadOnlyMemory digest, CancellationToken cancellationToken); + + Task VerifyAsync(string keyResource, ReadOnlyMemory digest, ReadOnlyMemory signature, CancellationToken cancellationToken); + + Task GetMetadataAsync(string keyId, CancellationToken cancellationToken); + + Task GetPublicKeyAsync(string keyResource, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Dispose.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Dispose.cs new file mode 100644 index 000000000..1dfbdba7e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Dispose.cs @@ -0,0 +1,14 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class AwsKmsFacade +{ + public void Dispose() + { + if (_ownsClient && _client is IDisposable disposable) + { + disposable.Dispose(); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Helpers.cs new file mode 100644 index 000000000..37694b5e6 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Helpers.cs @@ -0,0 +1,43 @@ +using Amazon.KeyManagementService; +using Amazon.KeyManagementService.Model; +using System; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class AwsKmsFacade +{ + private static AwsKeyStatus 
MapStatus(KeyState? state) + { + var name = state?.ToString(); + return name switch + { + "Enabled" => AwsKeyStatus.Enabled, + "Disabled" => AwsKeyStatus.Disabled, + "PendingDeletion" => AwsKeyStatus.PendingDeletion, + "PendingImport" => AwsKeyStatus.PendingImport, + "Unavailable" => AwsKeyStatus.Unavailable, + _ => AwsKeyStatus.Unspecified, + }; + } + + private static string ResolveCurve(GetPublicKeyResponse response) + { + if (response.KeySpec is not null) + { + var keySpecName = response.KeySpec.ToString(); + if (!string.IsNullOrWhiteSpace(keySpecName)) + { + return keySpecName switch + { + "ECC_NIST_P256" => "P-256", + "ECC_SECG_P256K1" => "secp256k1", + "ECC_NIST_P384" => "P-384", + "ECC_NIST_P521" => "P-521", + _ => keySpecName, + }; + } + } + + return "P-256"; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Metadata.cs new file mode 100644 index 000000000..6e9c16a12 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Metadata.cs @@ -0,0 +1,44 @@ +using Amazon.KeyManagementService.Model; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class AwsKmsFacade +{ + public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var response = await _client.DescribeKeyAsync(new DescribeKeyRequest + { + KeyId = keyId, + }, cancellationToken).ConfigureAwait(false); + + var metadata = response.KeyMetadata ?? throw new InvalidOperationException($"Key '{keyId}' was not found."); + var createdAt = metadata.CreationDate?.ToUniversalTime() ?? _timeProvider.GetUtcNow(); + + return new AwsKeyMetadata( + metadata.KeyId ?? keyId, + metadata.Arn ?? metadata.KeyId ?? keyId, + createdAt, + MapStatus(metadata.KeyState)); + } + + public async Task GetPublicKeyAsync(string keyResource, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); + + var response = await _client.GetPublicKeyAsync(new GetPublicKeyRequest + { + KeyId = keyResource, + }, cancellationToken).ConfigureAwait(false); + + var keyId = response.KeyId ?? keyResource; + var versionId = response.KeyId ?? 
keyResource; + var curve = ResolveCurve(response); + + return new AwsPublicKeyMaterial(keyId, versionId, curve, response.PublicKey.ToArray()); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Models.cs new file mode 100644 index 000000000..139cc0f16 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Models.cs @@ -0,0 +1,20 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +public sealed record AwsSignResult(string KeyResource, string VersionId, byte[] Signature); + +public sealed record AwsKeyMetadata(string KeyId, string Arn, DateTimeOffset CreatedAt, AwsKeyStatus Status); + +public enum AwsKeyStatus +{ + Unspecified = 0, + Enabled = 1, + Disabled = 2, + PendingDeletion = 3, + PendingImport = 4, + PendingUpdate = 5, + Unavailable = 6, +} + +public sealed record AwsPublicKeyMaterial(string KeyId, string VersionId, string Curve, byte[] SubjectPublicKeyInfo); \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Signing.cs new file mode 100644 index 000000000..101a4a6a9 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.Signing.cs @@ -0,0 +1,52 @@ +using Amazon.KeyManagementService; +using Amazon.KeyManagementService.Model; +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class AwsKmsFacade +{ + public async Task SignAsync(string keyResource, ReadOnlyMemory digest, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); + + using var messageStream = new MemoryStream(digest.ToArray(), writable: false); + var request = new SignRequest + { + KeyId = keyResource, + SigningAlgorithm = SigningAlgorithmSpec.ECDSA_SHA_256, + MessageType = MessageType.DIGEST, + Message = messageStream, + }; + + var response = await _client.SignAsync(request, cancellationToken).ConfigureAwait(false); + var keyId = response.KeyId ?? keyResource; + return new AwsSignResult(keyId, keyId, response.Signature.ToArray()); + } + + public async Task VerifyAsync(string keyResource, ReadOnlyMemory digest, ReadOnlyMemory signature, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); + if (digest.IsEmpty || signature.IsEmpty) + { + return false; + } + + using var messageStream = new MemoryStream(digest.ToArray(), writable: false); + using var signatureStream = new MemoryStream(signature.ToArray(), writable: false); + var request = new VerifyRequest + { + KeyId = keyResource, + SigningAlgorithm = SigningAlgorithmSpec.ECDSA_SHA_256, + MessageType = MessageType.DIGEST, + Message = messageStream, + Signature = signatureStream, + }; + + var response = await _client.VerifyAsync(request, cancellationToken).ConfigureAwait(false); + return response.SignatureValid ?? 
false; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.cs index 4e6837dc8..490700b88 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsFacade.cs @@ -1,40 +1,11 @@ - using Amazon; using Amazon.KeyManagementService; -using Amazon.KeyManagementService.Model; -using System.IO; +using Microsoft.Extensions.Options; +using System; namespace StellaOps.Cryptography.Kms; -public interface IAwsKmsFacade : IDisposable -{ - Task SignAsync(string keyResource, ReadOnlyMemory digest, CancellationToken cancellationToken); - - Task VerifyAsync(string keyResource, ReadOnlyMemory digest, ReadOnlyMemory signature, CancellationToken cancellationToken); - - Task GetMetadataAsync(string keyId, CancellationToken cancellationToken); - - Task GetPublicKeyAsync(string keyResource, CancellationToken cancellationToken); -} - -public sealed record AwsSignResult(string KeyResource, string VersionId, byte[] Signature); - -public sealed record AwsKeyMetadata(string KeyId, string Arn, DateTimeOffset CreatedAt, AwsKeyStatus Status); - -public enum AwsKeyStatus -{ - Unspecified = 0, - Enabled = 1, - Disabled = 2, - PendingDeletion = 3, - PendingImport = 4, - PendingUpdate = 5, - Unavailable = 6, -} - -public sealed record AwsPublicKeyMaterial(string KeyId, string VersionId, string Curve, byte[] SubjectPublicKeyInfo); - -internal sealed class AwsKmsFacade : IAwsKmsFacade +internal sealed partial class AwsKmsFacade : IAwsKmsFacade { private readonly IAmazonKeyManagementService _client; private readonly bool _ownsClient; @@ -62,129 +33,15 @@ internal sealed class AwsKmsFacade : IAwsKmsFacade _ownsClient = true; } + public AwsKmsFacade(IOptions options, TimeProvider timeProvider) + : this(options?.Value ?? new AwsKmsOptions(), timeProvider) + { + } + public AwsKmsFacade(IAmazonKeyManagementService client, TimeProvider? timeProvider = null) { _client = client ?? throw new ArgumentNullException(nameof(client)); _timeProvider = timeProvider ?? TimeProvider.System; _ownsClient = false; } - - public async Task SignAsync(string keyResource, ReadOnlyMemory digest, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); - - using var messageStream = new MemoryStream(digest.ToArray(), writable: false); - var request = new SignRequest - { - KeyId = keyResource, - SigningAlgorithm = SigningAlgorithmSpec.ECDSA_SHA_256, - MessageType = MessageType.DIGEST, - Message = messageStream, - }; - - var response = await _client.SignAsync(request, cancellationToken).ConfigureAwait(false); - var keyId = response.KeyId ?? 
keyResource; - return new AwsSignResult(keyId, keyId, response.Signature.ToArray()); - } - - public async Task VerifyAsync(string keyResource, ReadOnlyMemory digest, ReadOnlyMemory signature, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); - if (digest.IsEmpty || signature.IsEmpty) - { - return false; - } - - using var messageStream = new MemoryStream(digest.ToArray(), writable: false); - using var signatureStream = new MemoryStream(signature.ToArray(), writable: false); - var request = new VerifyRequest - { - KeyId = keyResource, - SigningAlgorithm = SigningAlgorithmSpec.ECDSA_SHA_256, - MessageType = MessageType.DIGEST, - Message = messageStream, - Signature = signatureStream, - }; - - var response = await _client.VerifyAsync(request, cancellationToken).ConfigureAwait(false); - return response.SignatureValid ?? false; - } - - public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var response = await _client.DescribeKeyAsync(new DescribeKeyRequest - { - KeyId = keyId, - }, cancellationToken).ConfigureAwait(false); - - var metadata = response.KeyMetadata ?? throw new InvalidOperationException($"Key '{keyId}' was not found."); - var createdAt = metadata.CreationDate?.ToUniversalTime() ?? _timeProvider.GetUtcNow(); - - return new AwsKeyMetadata( - metadata.KeyId ?? keyId, - metadata.Arn ?? metadata.KeyId ?? keyId, - createdAt, - MapStatus(metadata.KeyState)); - } - - public async Task GetPublicKeyAsync(string keyResource, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyResource); - - var response = await _client.GetPublicKeyAsync(new GetPublicKeyRequest - { - KeyId = keyResource, - }, cancellationToken).ConfigureAwait(false); - - var keyId = response.KeyId ?? keyResource; - var versionId = response.KeyId ?? keyResource; - var curve = ResolveCurve(response); - - return new AwsPublicKeyMaterial(keyId, versionId, curve, response.PublicKey.ToArray()); - } - - private static AwsKeyStatus MapStatus(KeyState? 
state) - { - var name = state?.ToString(); - return name switch - { - "Enabled" => AwsKeyStatus.Enabled, - "Disabled" => AwsKeyStatus.Disabled, - "PendingDeletion" => AwsKeyStatus.PendingDeletion, - "PendingImport" => AwsKeyStatus.PendingImport, - "Unavailable" => AwsKeyStatus.Unavailable, - _ => AwsKeyStatus.Unspecified, - }; - } - - private static string ResolveCurve(GetPublicKeyResponse response) - { - if (response.KeySpec is not null) - { - var keySpecName = response.KeySpec.ToString(); - if (!string.IsNullOrWhiteSpace(keySpecName)) - { - return keySpecName switch - { - "ECC_NIST_P256" => "P-256", - "ECC_SECG_P256K1" => "secp256k1", - "ECC_NIST_P384" => "P-384", - "ECC_NIST_P521" => "P-521", - _ => keySpecName, - }; - } - } - - return "P-256"; - } - - public void Dispose() - { - if (_ownsClient && _client is IDisposable disposable) - { - disposable.Dispose(); - } - } -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsOptions.cs b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsOptions.cs index d61a0435f..914c34a0d 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsOptions.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AwsKmsOptions.cs @@ -1,5 +1,3 @@ -using System.Diagnostics.CodeAnalysis; - namespace StellaOps.Cryptography.Kms; /// @@ -7,8 +5,8 @@ namespace StellaOps.Cryptography.Kms; /// public sealed class AwsKmsOptions { - private TimeSpan metadataCacheDuration = TimeSpan.FromMinutes(5); - private TimeSpan publicKeyCacheDuration = TimeSpan.FromMinutes(10); + private TimeSpan _metadataCacheDuration = TimeSpan.FromMinutes(5); + private TimeSpan _publicKeyCacheDuration = TimeSpan.FromMinutes(10); /// /// Gets or sets the AWS region identifier (e.g. us-east-1). @@ -30,8 +28,8 @@ public sealed class AwsKmsOptions /// public TimeSpan MetadataCacheDuration { - get => metadataCacheDuration; - set => metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); + get => _metadataCacheDuration; + set => _metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); } /// @@ -39,16 +37,10 @@ public sealed class AwsKmsOptions /// public TimeSpan PublicKeyCacheDuration { - get => publicKeyCacheDuration; - set => publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(10)); + get => _publicKeyCacheDuration; + set => _publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(10)); } - /// - /// Gets or sets an optional factory that can provide a custom AWS facade. Primarily used for testing. - /// - public Func? FacadeFactory { get; set; } - private static TimeSpan EnsurePositive(TimeSpan value, TimeSpan @default) => value <= TimeSpan.Zero ? @default : value; } - diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Export.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Export.cs new file mode 100644 index 000000000..570f2b852 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Export.cs @@ -0,0 +1,25 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Fido2KmsClient +{ + public async Task ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + + var metadata = await GetMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + + return new KmsKeyMaterial( + metadata.KeyId, + metadata.KeyId, + metadata.Algorithm, + _curveName, + Array.Empty(), + _publicParameters.Q.X ?? 
throw new InvalidOperationException("FIDO2 public key missing X coordinate."), + _publicParameters.Q.Y ?? throw new InvalidOperationException("FIDO2 public key missing Y coordinate."), + _options.CreatedAt ?? _timeProvider.GetUtcNow()); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Helpers.cs new file mode 100644 index 000000000..b75830851 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Helpers.cs @@ -0,0 +1,39 @@ +using Microsoft.IdentityModel.Tokens; +using System; +using System.Security.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Fido2KmsClient +{ + private static byte[] ComputeSha256(ReadOnlyMemory data) + { + var digest = new byte[32]; + if (!SHA256.TryHashData(data.Span, digest, out _)) + { + throw new InvalidOperationException("Failed to hash payload with SHA-256."); + } + + return digest; + } + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(Fido2KmsClient)); + } + } + + private static string ResolveCurveName(ECCurve curve) + { + var oid = curve.Oid?.Value; + return oid switch + { + "1.2.840.10045.3.1.7" => JsonWebKeyECTypes.P256, + "1.3.132.0.34" => JsonWebKeyECTypes.P384, + "1.3.132.0.35" => JsonWebKeyECTypes.P521, + _ => throw new InvalidOperationException($"Unsupported FIDO2 curve OID '{oid}'."), + }; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Lifecycle.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Lifecycle.cs new file mode 100644 index 000000000..37e21398e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Lifecycle.cs @@ -0,0 +1,19 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Fido2KmsClient +{ + public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) + => throw new NotSupportedException("FIDO2 credential rotation requires new enrolment."); + + public Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) + => throw new NotSupportedException("FIDO2 credential revocation must be managed in the relying party."); + + public void Dispose() + { + _disposed = true; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Metadata.cs new file mode 100644 index 000000000..77936d467 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Metadata.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Immutable; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Fido2KmsClient +{ + public Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + + var now = _timeProvider.GetUtcNow(); + if (_cachedMetadata is not null && _metadataExpiresAt > now) + { + return Task.FromResult(_cachedMetadata); + } + + var createdAt = _options.CreatedAt ?? 
_timeProvider.GetUtcNow(); + var version = new KmsKeyVersionMetadata( + _options.CredentialId, + KmsKeyState.Active, + createdAt, + null, + Convert.ToBase64String(_subjectPublicKeyInfo), + _curveName); + + _cachedMetadata = new KmsKeyMetadata( + _options.CredentialId, + KmsAlgorithms.Es256, + KmsKeyState.Active, + createdAt, + ImmutableArray.Create(version)); + + _metadataExpiresAt = now.Add(_metadataCacheDuration); + return Task.FromResult(_cachedMetadata); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Signing.cs new file mode 100644 index 000000000..54e239c67 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.Signing.cs @@ -0,0 +1,58 @@ +using System; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Fido2KmsClient +{ + public async Task SignAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + if (data.IsEmpty) + { + throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); + } + + var digest = ComputeSha256(data); + try + { + var signature = await _authenticator.SignAsync(_options.CredentialId, digest, cancellationToken).ConfigureAwait(false); + return new KmsSignResult(_options.CredentialId, _options.CredentialId, KmsAlgorithms.Es256, signature); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } + + public Task VerifyAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + if (data.IsEmpty || signature.IsEmpty) + { + return Task.FromResult(false); + } + + var digest = ComputeSha256(data); + try + { + using var ecdsa = ECDsa.Create(_publicParameters); + return Task.FromResult(ecdsa.VerifyHash(digest, signature.ToArray())); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.cs index 65760bff5..7c0d984d3 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2KmsClient.cs @@ -1,6 +1,5 @@ - -using Microsoft.IdentityModel.Tokens; -using System.Collections.Immutable; +using Microsoft.Extensions.Options; +using System; using System.Security.Cryptography; namespace StellaOps.Cryptography.Kms; @@ -8,7 +7,7 @@ namespace StellaOps.Cryptography.Kms; /// /// FIDO2-backed KMS client suitable for high-assurance interactive workflows. /// -public sealed class Fido2KmsClient : IKmsClient +public sealed partial class Fido2KmsClient : IKmsClient { private readonly IFido2Authenticator _authenticator; private readonly Fido2Options _options; @@ -49,141 +48,8 @@ public sealed class Fido2KmsClient : IKmsClient _curveName = ResolveCurveName(_publicParameters.Curve); } - public async Task SignAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - CancellationToken cancellationToken = default) + public Fido2KmsClient(IFido2Authenticator authenticator, IOptions options, TimeProvider timeProvider) + : this(authenticator, options?.Value ?? 
new Fido2Options(), timeProvider) { - ThrowIfDisposed(); - if (data.IsEmpty) - { - throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); - } - - var digest = ComputeSha256(data); - try - { - var signature = await _authenticator.SignAsync(_options.CredentialId, digest, cancellationToken).ConfigureAwait(false); - return new KmsSignResult(_options.CredentialId, _options.CredentialId, KmsAlgorithms.Es256, signature); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public Task VerifyAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - ReadOnlyMemory signature, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - if (data.IsEmpty || signature.IsEmpty) - { - return Task.FromResult(false); - } - - var digest = ComputeSha256(data); - try - { - using var ecdsa = ECDsa.Create(_publicParameters); - return Task.FromResult(ecdsa.VerifyHash(digest, signature.ToArray())); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - - var now = _timeProvider.GetUtcNow(); - if (_cachedMetadata is not null && _metadataExpiresAt > now) - { - return Task.FromResult(_cachedMetadata); - } - - var createdAt = _options.CreatedAt ?? _timeProvider.GetUtcNow(); - var version = new KmsKeyVersionMetadata( - _options.CredentialId, - KmsKeyState.Active, - createdAt, - null, - Convert.ToBase64String(_subjectPublicKeyInfo), - _curveName); - - _cachedMetadata = new KmsKeyMetadata( - _options.CredentialId, - KmsAlgorithms.Es256, - KmsKeyState.Active, - createdAt, - ImmutableArray.Create(version)); - - _metadataExpiresAt = now.Add(_metadataCacheDuration); - return Task.FromResult(_cachedMetadata); - } - - public async Task ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - - var metadata = await GetMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - - return new KmsKeyMaterial( - metadata.KeyId, - metadata.KeyId, - metadata.Algorithm, - _curveName, - Array.Empty(), - _publicParameters.Q.X ?? throw new InvalidOperationException("FIDO2 public key missing X coordinate."), - _publicParameters.Q.Y ?? throw new InvalidOperationException("FIDO2 public key missing Y coordinate."), - _options.CreatedAt ?? 
_timeProvider.GetUtcNow()); - } - - public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) - => throw new NotSupportedException("FIDO2 credential rotation requires new enrolment."); - - public Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) - => throw new NotSupportedException("FIDO2 credential revocation must be managed in the relying party."); - - public void Dispose() - { - _disposed = true; - } - - private static byte[] ComputeSha256(ReadOnlyMemory data) - { - var digest = new byte[32]; - if (!SHA256.TryHashData(data.Span, digest, out _)) - { - throw new InvalidOperationException("Failed to hash payload with SHA-256."); - } - - return digest; - } - - private void ThrowIfDisposed() - { - if (_disposed) - { - throw new ObjectDisposedException(nameof(Fido2KmsClient)); - } - } - - private static string ResolveCurveName(ECCurve curve) - { - var oid = curve.Oid?.Value; - return oid switch - { - "1.2.840.10045.3.1.7" => JsonWebKeyECTypes.P256, - "1.3.132.0.34" => JsonWebKeyECTypes.P384, - "1.3.132.0.35" => JsonWebKeyECTypes.P521, - _ => throw new InvalidOperationException($"Unsupported FIDO2 curve OID '{oid}'."), - }; } } diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2Options.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2Options.cs index dab92fa54..c1272b538 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/Fido2Options.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Fido2Options.cs @@ -5,7 +5,7 @@ namespace StellaOps.Cryptography.Kms; /// public sealed class Fido2Options { - private TimeSpan metadataCacheDuration = TimeSpan.FromMinutes(5); + private TimeSpan _metadataCacheDuration = TimeSpan.FromMinutes(5); /// /// Gets or sets the relying party identifier (rpId) used when registering the credential. @@ -33,13 +33,8 @@ public sealed class Fido2Options /// public TimeSpan MetadataCacheDuration { - get => metadataCacheDuration; - set => metadataCacheDuration = value <= TimeSpan.Zero ? TimeSpan.FromMinutes(5) : value; + get => _metadataCacheDuration; + set => _metadataCacheDuration = value <= TimeSpan.Zero ? TimeSpan.FromMinutes(5) : value; } - - /// - /// Gets or sets an optional authenticator factory hook (mainly for testing or custom integrations). - /// - public Func? 
AuthenticatorFactory { get; set; } } diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Envelope.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Envelope.cs new file mode 100644 index 000000000..c6655e750 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Envelope.cs @@ -0,0 +1,94 @@ +using System; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private KeyEnvelope EncryptPrivateKey(ReadOnlySpan privateKey) + { + var salt = RandomNumberGenerator.GetBytes(16); + var nonce = RandomNumberGenerator.GetBytes(12); + var key = DeriveKey(salt); + + try + { + var ciphertext = new byte[privateKey.Length]; + var tag = new byte[16]; + var plaintextCopy = privateKey.ToArray(); + + using var aesGcm = new AesGcm(key, tag.Length); + try + { + aesGcm.Encrypt(nonce, plaintextCopy, ciphertext, tag); + } + finally + { + CryptographicOperations.ZeroMemory(plaintextCopy); + } + + return new KeyEnvelope( + Ciphertext: Convert.ToBase64String(ciphertext), + Nonce: Convert.ToBase64String(nonce), + Tag: Convert.ToBase64String(tag), + Salt: Convert.ToBase64String(salt)); + } + finally + { + CryptographicOperations.ZeroMemory(key); + } + } + + private byte[] DecryptPrivateKey(KeyEnvelope envelope) + { + var salt = Convert.FromBase64String(envelope.Salt); + var nonce = Convert.FromBase64String(envelope.Nonce); + var tag = Convert.FromBase64String(envelope.Tag); + var ciphertext = Convert.FromBase64String(envelope.Ciphertext); + + var key = DeriveKey(salt); + try + { + var plaintext = new byte[ciphertext.Length]; + using var aesGcm = new AesGcm(key, tag.Length); + aesGcm.Decrypt(nonce, ciphertext, tag, plaintext); + + return plaintext; + } + finally + { + CryptographicOperations.ZeroMemory(key); + } + } + + private byte[] DeriveKey(byte[] salt) + { + var key = new byte[32]; + try + { + var passwordBytes = Encoding.UTF8.GetBytes(_options.Password); + try + { + var derived = Rfc2898DeriveBytes.Pbkdf2( + passwordBytes, + salt, + _options.KeyDerivationIterations, + HashAlgorithmName.SHA256, + key.Length); + derived.CopyTo(key.AsSpan()); + CryptographicOperations.ZeroMemory(derived); + return key; + } + finally + { + CryptographicOperations.ZeroMemory(passwordBytes); + } + } + catch + { + CryptographicOperations.ZeroMemory(key); + throw; + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.KeyMaterial.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.KeyMaterial.cs new file mode 100644 index 000000000..e44db2a01 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.KeyMaterial.cs @@ -0,0 +1,50 @@ +using System; +using System.Security.Cryptography; +using System.Text.Json; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private EcdsaKeyData CreateKeyMaterial(string algorithm) + { + if (!string.Equals(algorithm, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase)) + { + throw new NotSupportedException($"Algorithm '{algorithm}' is not supported by the file KMS driver."); + } + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var parameters = ecdsa.ExportParameters(true); + + var keyRecord = new EcdsaPrivateKeyRecord + { + Curve = "nistP256", + D = Convert.ToBase64String(parameters.D ?? Array.Empty()), + Qx = Convert.ToBase64String(parameters.Q.X ?? 
Array.Empty()), + Qy = Convert.ToBase64String(parameters.Q.Y ?? Array.Empty()), + }; + + var privateBlob = JsonSerializer.SerializeToUtf8Bytes(keyRecord, _jsonOptions); + + var qx = parameters.Q.X ?? Array.Empty(); + var qy = parameters.Q.Y ?? Array.Empty(); + var publicKey = new byte[qx.Length + qy.Length]; + Buffer.BlockCopy(qx, 0, publicKey, 0, qx.Length); + Buffer.BlockCopy(qy, 0, publicKey, qx.Length, qy.Length); + + return new EcdsaKeyData(privateBlob, Convert.ToBase64String(publicKey), keyRecord.Curve); + } + + private static byte[] CombinePublicCoordinates(ReadOnlySpan qx, ReadOnlySpan qy) + { + if (qx.IsEmpty || qy.IsEmpty) + { + return Array.Empty(); + } + + var publicKey = new byte[qx.Length + qy.Length]; + qx.CopyTo(publicKey); + qy.CopyTo(publicKey.AsSpan(qx.Length)); + return publicKey; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Signing.cs new file mode 100644 index 000000000..48b432fed --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Crypto.Signing.cs @@ -0,0 +1,58 @@ +using System; +using System.Security.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private byte[] SignData(EcdsaPrivateKeyRecord privateKey, ReadOnlySpan data) + { + var parameters = new ECParameters + { + Curve = ResolveCurve(privateKey.Curve), + D = Convert.FromBase64String(privateKey.D), + Q = new ECPoint + { + X = Convert.FromBase64String(privateKey.Qx), + Y = Convert.FromBase64String(privateKey.Qy), + }, + }; + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportParameters(parameters); + return ecdsa.SignData(data, HashAlgorithmName.SHA256); + } + + private bool VerifyData(string curveName, string publicKeyBase64, ReadOnlySpan data, ReadOnlySpan signature) + { + var publicKey = Convert.FromBase64String(publicKeyBase64); + if (publicKey.Length % 2 != 0) + { + return false; + } + + var half = publicKey.Length / 2; + var qx = publicKey[..half]; + var qy = publicKey[half..]; + + var parameters = new ECParameters + { + Curve = ResolveCurve(curveName), + Q = new ECPoint + { + X = qx, + Y = qy, + }, + }; + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportParameters(parameters); + return ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256); + } + + private static ECCurve ResolveCurve(string curveName) => curveName switch + { + "nistP256" or "P-256" or "ES256" => ECCurve.NamedCurves.nistP256, + _ => throw new NotSupportedException($"Curve '{curveName}' is not supported."), + }; +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Import.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Import.cs new file mode 100644 index 000000000..c340bf7aa --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Import.cs @@ -0,0 +1,105 @@ +using System; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + public async Task ImportAsync( + string keyId, + KmsKeyMaterial material, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + ArgumentNullException.ThrowIfNull(material); + + if (material.D is null || material.D.Length == 0) + { + throw new ArgumentException("Key material must include private key bytes.", nameof(material)); 
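The envelope format persisted by the file driver above (PBKDF2-SHA256 key derivation plus AES-GCM with a 16-byte salt, 12-byte nonce, and 16-byte tag) can be exercised on its own with the following sketch; the iteration count is an illustrative placeholder for the driver's KeyDerivationIterations option, and the tag-size AesGcm constructor requires .NET 8 or later.

using System;
using System.Security.Cryptography;
using System.Text;

public static class PasswordEnvelopeSketch
{
    // Illustrative iteration count; the file driver reads its value from options.
    private const int Iterations = 600_000;

    public static (byte[] Salt, byte[] Nonce, byte[] Tag, byte[] Ciphertext) Seal(string password, byte[] plaintext)
    {
        var salt = RandomNumberGenerator.GetBytes(16);
        var nonce = RandomNumberGenerator.GetBytes(12);
        var key = Rfc2898DeriveBytes.Pbkdf2(Encoding.UTF8.GetBytes(password), salt, Iterations, HashAlgorithmName.SHA256, 32);
        try
        {
            var ciphertext = new byte[plaintext.Length];
            var tag = new byte[16];
            using var aes = new AesGcm(key, tag.Length);
            aes.Encrypt(nonce, plaintext, ciphertext, tag);
            return (salt, nonce, tag, ciphertext);
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key); // derived key never outlives the operation
        }
    }

    public static byte[] Open(string password, byte[] salt, byte[] nonce, byte[] tag, byte[] ciphertext)
    {
        var key = Rfc2898DeriveBytes.Pbkdf2(Encoding.UTF8.GetBytes(password), salt, Iterations, HashAlgorithmName.SHA256, 32);
        try
        {
            var plaintext = new byte[ciphertext.Length];
            using var aes = new AesGcm(key, tag.Length);
            aes.Decrypt(nonce, ciphertext, tag, plaintext); // throws on a wrong password or tampered data
            return plaintext;
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key);
        }
    }
}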
+ } + + if (material.Qx is null || material.Qx.Length == 0 || material.Qy is null || material.Qy.Length == 0) + { + throw new ArgumentException("Key material must include public key coordinates.", nameof(material)); + } + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false) + ?? throw new InvalidOperationException("Failed to create or load key metadata."); + + if (!string.Equals(record.Algorithm, material.Algorithm, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Algorithm mismatch. Expected '{record.Algorithm}', received '{material.Algorithm}'."); + } + + var versionId = string.IsNullOrWhiteSpace(material.VersionId) + ? $"{_timeProvider.GetUtcNow():yyyyMMddTHHmmssfffZ}" + : material.VersionId; + + if (record.Versions.Any(v => string.Equals(v.VersionId, versionId, StringComparison.Ordinal))) + { + throw new InvalidOperationException($"Key version '{versionId}' already exists for key '{record.KeyId}'."); + } + + var curveName = string.IsNullOrWhiteSpace(material.Curve) ? "nistP256" : material.Curve; + ResolveCurve(curveName); // validate supported curve + + var privateKeyRecord = new EcdsaPrivateKeyRecord + { + Curve = curveName, + D = Convert.ToBase64String(material.D), + Qx = Convert.ToBase64String(material.Qx), + Qy = Convert.ToBase64String(material.Qy), + }; + + var privateBlob = JsonSerializer.SerializeToUtf8Bytes(privateKeyRecord, _jsonOptions); + try + { + var envelope = EncryptPrivateKey(privateBlob); + var fileName = $"{versionId}.key.json"; + var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName); + await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false); + + foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active)) + { + existing.State = KmsKeyState.PendingRotation; + } + + var createdAt = material.CreatedAt == default ? _timeProvider.GetUtcNow() : material.CreatedAt; + var publicKey = CombinePublicCoordinates(material.Qx, material.Qy); + + record.Versions.Add(new KeyVersionRecord + { + VersionId = versionId, + State = KmsKeyState.Active, + CreatedAt = createdAt, + PublicKey = Convert.ToBase64String(publicKey), + CurveName = curveName, + FileName = fileName, + }); + + record.CreatedAt ??= createdAt; + record.State = KmsKeyState.Active; + record.ActiveVersion = versionId; + + await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); + return ToMetadata(record); + } + finally + { + CryptographicOperations.ZeroMemory(privateBlob); + } + } + finally + { + _mutex.Release(); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.Helpers.cs new file mode 100644 index 000000000..beff65700 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.Helpers.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private static KmsKeyMetadata ToMetadata(KeyMetadataRecord record) + { + var versions = record.Versions + .Select(v => new KmsKeyVersionMetadata( + v.VersionId, + v.State, + v.CreatedAt, + v.DeactivatedAt, + v.PublicKey, + v.CurveName)) + .ToImmutableArray(); + + var createdAt = record.CreatedAt + ?? (versions.Length > 0 ? 
versions.Min(v => v.CreatedAt) : TimeProvider.System.GetUtcNow()); + return new KmsKeyMetadata(record.KeyId, record.Algorithm, record.State, createdAt, versions); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.cs new file mode 100644 index 000000000..996791d69 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Metadata.cs @@ -0,0 +1,58 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) + ?? throw new InvalidOperationException($"Key '{keyId}' does not exist."); + return ToMetadata(record); + } + finally + { + _mutex.Release(); + } + } + + public async Task ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) + ?? throw new InvalidOperationException($"Key '{keyId}' does not exist."); + + var version = ResolveVersion(record, keyVersion); + if (string.IsNullOrWhiteSpace(version.PublicKey)) + { + throw new InvalidOperationException($"Key '{keyId}' version '{version.VersionId}' does not have public key material."); + } + + var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false); + return new KmsKeyMaterial( + record.KeyId, + version.VersionId, + record.Algorithm, + version.CurveName, + Convert.FromBase64String(privateKey.D), + Convert.FromBase64String(privateKey.Qx), + Convert.FromBase64String(privateKey.Qy), + version.CreatedAt); + } + finally + { + _mutex.Release(); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Models.cs new file mode 100644 index 000000000..7b629fbe8 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Models.cs @@ -0,0 +1,44 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private sealed class KeyMetadataRecord + { + public string KeyId { get; set; } = string.Empty; + public string Algorithm { get; set; } = KmsAlgorithms.Es256; + public KmsKeyState State { get; set; } = KmsKeyState.Active; + public DateTimeOffset? CreatedAt { get; set; } + public string? ActiveVersion { get; set; } + public List Versions { get; set; } = new(); + } + + private sealed class KeyVersionRecord + { + public string VersionId { get; set; } = string.Empty; + public KmsKeyState State { get; set; } = KmsKeyState.Active; + public DateTimeOffset CreatedAt { get; set; } + public DateTimeOffset? 
DeactivatedAt { get; set; } + public string PublicKey { get; set; } = string.Empty; + public string FileName { get; set; } = string.Empty; + public string CurveName { get; set; } = string.Empty; + } + + private sealed record KeyEnvelope( + string Ciphertext, + string Nonce, + string Tag, + string Salt); + + private sealed record EcdsaKeyData(byte[] PrivateBlob, string PublicKey, string Curve); + + private sealed class EcdsaPrivateKeyRecord + { + public string Curve { get; set; } = string.Empty; + public string D { get; set; } = string.Empty; + public string Qx { get; set; } = string.Empty; + public string Qy { get; set; } = string.Empty; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Paths.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Paths.cs new file mode 100644 index 000000000..b9f6ccf07 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Paths.cs @@ -0,0 +1,47 @@ +using System; +using System.IO; +using System.Linq; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private static string GetMetadataPath(string root, string keyId) + => Path.Combine(root, keyId, "metadata.json"); + + private string GetKeyDirectory(string keyId) + { + var path = Path.Combine(_options.RootPath, keyId); + Directory.CreateDirectory(path); + return path; + } + + private static KeyVersionRecord ResolveVersion(KeyMetadataRecord record, string? keyVersion) + { + KeyVersionRecord? version = null; + if (!string.IsNullOrWhiteSpace(keyVersion)) + { + version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, keyVersion, StringComparison.Ordinal)); + if (version is null) + { + throw new InvalidOperationException($"Key version '{keyVersion}' does not exist for key '{record.KeyId}'."); + } + } + else if (!string.IsNullOrWhiteSpace(record.ActiveVersion)) + { + version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, record.ActiveVersion, StringComparison.Ordinal)); + } + + version ??= record.Versions + .Where(v => v.State == KmsKeyState.Active) + .OrderByDescending(v => v.CreatedAt) + .FirstOrDefault(); + + if (version is null) + { + throw new InvalidOperationException($"Key '{record.KeyId}' does not have an active version."); + } + + return version; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Persistence.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Persistence.cs new file mode 100644 index 000000000..3bcc43d98 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Persistence.cs @@ -0,0 +1,97 @@ +using System; +using System.IO; +using System.Security.Cryptography; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + private async Task LoadOrCreateMetadataAsync( + string keyId, + CancellationToken cancellationToken, + bool createIfMissing) + { + var metadataPath = GetMetadataPath(_options.RootPath, keyId); + if (!File.Exists(metadataPath)) + { + if (!createIfMissing) + { + return null; + } + + var record = new KeyMetadataRecord + { + KeyId = keyId, + Algorithm = _options.Algorithm, + State = KmsKeyState.Active, + CreatedAt = _timeProvider.GetUtcNow(), + }; + + await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); + return record; + } + + await using var stream = File.Open(metadataPath, FileMode.Open, FileAccess.Read, 
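// Sketch of the on-disk layout this loader assumes, inferred from GetMetadataPath and the
// "{versionId}.key.json" file names used by the import and rotation paths (paths are illustrative):
//
//   {RootPath}/
//     {keyId}/
//       metadata.json           - KeyMetadataRecord (algorithm, state, active version, versions)
//       {versionId}.key.json    - KeyEnvelope (AES-GCM ciphertext, nonce, tag, salt)
//
// metadata.json is created on demand only when createIfMissing is true; key envelopes are
// written separately when material is imported or rotated.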
FileShare.Read); + var loadedRecord = await JsonSerializer.DeserializeAsync(stream, _jsonOptions, cancellationToken).ConfigureAwait(false); + if (loadedRecord is null) + { + return null; + } + + if (string.IsNullOrWhiteSpace(loadedRecord.Algorithm)) + { + loadedRecord.Algorithm = KmsAlgorithms.Es256; + } + + foreach (var version in loadedRecord.Versions) + { + if (string.IsNullOrWhiteSpace(version.CurveName)) + { + version.CurveName = "nistP256"; + } + } + + return loadedRecord; + } + + private async Task SaveMetadataAsync(KeyMetadataRecord record, CancellationToken cancellationToken) + { + var metadataPath = GetMetadataPath(_options.RootPath, record.KeyId); + Directory.CreateDirectory(Path.GetDirectoryName(metadataPath)!); + await using var stream = File.Open(metadataPath, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, record, _jsonOptions, cancellationToken).ConfigureAwait(false); + } + + private async Task LoadPrivateKeyAsync(KeyMetadataRecord record, KeyVersionRecord version, CancellationToken cancellationToken) + { + var keyPath = Path.Combine(GetKeyDirectory(record.KeyId), version.FileName); + if (!File.Exists(keyPath)) + { + throw new InvalidOperationException($"Key material for version '{version.VersionId}' was not found."); + } + + await using var stream = File.Open(keyPath, FileMode.Open, FileAccess.Read, FileShare.Read); + var envelope = await JsonSerializer.DeserializeAsync(stream, _jsonOptions, cancellationToken).ConfigureAwait(false) + ?? throw new InvalidOperationException("Key envelope could not be deserialized."); + + var payload = DecryptPrivateKey(envelope); + try + { + return JsonSerializer.Deserialize(payload, _jsonOptions) + ?? throw new InvalidOperationException("Key payload could not be deserialized."); + } + finally + { + CryptographicOperations.ZeroMemory(payload); + } + } + + private static async Task WriteJsonAsync(string path, T value, CancellationToken cancellationToken) + { + await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, value, _jsonOptions, cancellationToken).ConfigureAwait(false); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Rotation.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Rotation.cs new file mode 100644 index 000000000..bd66cda82 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Rotation.cs @@ -0,0 +1,99 @@ +using System; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + public async Task RotateAsync(string keyId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false) + ?? 
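// Sketch of the rotation contract implemented below; the key id and variable names are
// illustrative, not taken from this patch:
//
//   IKmsClient kms = /* FileKmsClient resolved from DI */;
//   KmsKeyMetadata metadata = await kms.RotateAsync("signing-key", ct);
//
// A new version id is minted from the injected TimeProvider ("yyyyMMddTHHmmssfffZ"), any
// previously Active versions are demoted to PendingRotation, the new version becomes the
// ActiveVersion, and a revoked key refuses to rotate.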
throw new InvalidOperationException("Failed to create or load key metadata."); + + if (record.State == KmsKeyState.Revoked) + { + throw new InvalidOperationException($"Key '{keyId}' has been revoked and cannot be rotated."); + } + + var timestamp = _timeProvider.GetUtcNow(); + var versionId = $"{timestamp:yyyyMMddTHHmmssfffZ}"; + var keyData = CreateKeyMaterial(record.Algorithm); + + try + { + var envelope = EncryptPrivateKey(keyData.PrivateBlob); + var fileName = $"{versionId}.key.json"; + var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName); + await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false); + + foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active)) + { + existing.State = KmsKeyState.PendingRotation; + } + + record.Versions.Add(new KeyVersionRecord + { + VersionId = versionId, + State = KmsKeyState.Active, + CreatedAt = timestamp, + PublicKey = keyData.PublicKey, + CurveName = keyData.Curve, + FileName = fileName, + }); + + record.CreatedAt ??= timestamp; + record.State = KmsKeyState.Active; + record.ActiveVersion = versionId; + + await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); + return ToMetadata(record); + } + finally + { + CryptographicOperations.ZeroMemory(keyData.PrivateBlob); + } + } + finally + { + _mutex.Release(); + } + } + + public async Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) + ?? throw new InvalidOperationException($"Key '{keyId}' does not exist."); + + var timestamp = _timeProvider.GetUtcNow(); + record.State = KmsKeyState.Revoked; + foreach (var version in record.Versions) + { + if (version.State != KmsKeyState.Revoked) + { + version.State = KmsKeyState.Revoked; + version.DeactivatedAt = timestamp; + } + } + + await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); + } + finally + { + _mutex.Release(); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Signing.cs new file mode 100644 index 000000000..a75e4507c --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Signing.cs @@ -0,0 +1,47 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + public async Task SignAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty) + { + throw new ArgumentException("Data cannot be empty.", nameof(data)); + } + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) + ?? 
throw new InvalidOperationException($"Key '{keyId}' does not exist."); + + if (record.State == KmsKeyState.Revoked) + { + throw new InvalidOperationException($"Key '{keyId}' is revoked and cannot be used for signing."); + } + + var version = ResolveVersion(record, keyVersion); + if (version.State != KmsKeyState.Active) + { + throw new InvalidOperationException($"Key version '{version.VersionId}' is not active. Current state: {version.State}"); + } + + var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false); + var signature = SignData(privateKey, data.Span); + return new KmsSignResult(record.KeyId, version.VersionId, record.Algorithm, signature); + } + finally + { + _mutex.Release(); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Verification.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Verification.cs new file mode 100644 index 000000000..24e47cbdc --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.Verification.cs @@ -0,0 +1,44 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class FileKmsClient +{ + public async Task VerifyAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty || signature.IsEmpty) + { + return false; + } + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false); + if (record is null) + { + return false; + } + + var version = ResolveVersion(record, keyVersion); + if (string.IsNullOrWhiteSpace(version.PublicKey)) + { + return false; + } + + return VerifyData(version.CurveName, version.PublicKey, data.Span, signature.Span); + } + finally + { + _mutex.Release(); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.cs b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.cs index 0d37c827e..f7cdb8a82 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/FileKmsClient.cs @@ -1,17 +1,17 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; +using Microsoft.Extensions.Options; +using System; using System.Text.Json; using System.Text.Json.Serialization; +using System.Threading; namespace StellaOps.Cryptography.Kms; /// /// File-backed KMS implementation that stores encrypted key material on disk. /// -public sealed class FileKmsClient : IKmsClient, IDisposable +public sealed partial class FileKmsClient : IKmsClient, IDisposable { - private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + private static readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.Web) { WriteIndented = true, Converters = @@ -50,663 +50,10 @@ public sealed class FileKmsClient : IKmsClient, IDisposable Directory.CreateDirectory(_options.RootPath); } - public async Task SignAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - CancellationToken cancellationToken = default) + public FileKmsClient(IOptions options, TimeProvider timeProvider) + : this(options?.Value ?? 
throw new ArgumentNullException(nameof(options)), timeProvider) { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty) - { - throw new ArgumentException("Data cannot be empty.", nameof(data)); - } - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) - ?? throw new InvalidOperationException($"Key '{keyId}' does not exist."); - - if (record.State == KmsKeyState.Revoked) - { - throw new InvalidOperationException($"Key '{keyId}' is revoked and cannot be used for signing."); - } - - var version = ResolveVersion(record, keyVersion); - if (version.State != KmsKeyState.Active) - { - throw new InvalidOperationException($"Key version '{version.VersionId}' is not active. Current state: {version.State}"); - } - - var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false); - var signature = SignData(privateKey, data.Span); - return new KmsSignResult(record.KeyId, version.VersionId, record.Algorithm, signature); - } - finally - { - _mutex.Release(); - } } - public async Task VerifyAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - ReadOnlyMemory signature, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty || signature.IsEmpty) - { - return false; - } - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false); - if (record is null) - { - return false; - } - - var version = ResolveVersion(record, keyVersion); - if (string.IsNullOrWhiteSpace(version.PublicKey)) - { - return false; - } - - return VerifyData(version.CurveName, version.PublicKey, data.Span, signature.Span); - } - finally - { - _mutex.Release(); - } - } - - public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) - ?? throw new InvalidOperationException($"Key '{keyId}' does not exist."); - return ToMetadata(record); - } - finally - { - _mutex.Release(); - } - } - - public async Task ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) - ?? 
throw new InvalidOperationException($"Key '{keyId}' does not exist."); - - var version = ResolveVersion(record, keyVersion); - if (string.IsNullOrWhiteSpace(version.PublicKey)) - { - throw new InvalidOperationException($"Key '{keyId}' version '{version.VersionId}' does not have public key material."); - } - - var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false); - return new KmsKeyMaterial( - record.KeyId, - version.VersionId, - record.Algorithm, - version.CurveName, - Convert.FromBase64String(privateKey.D), - Convert.FromBase64String(privateKey.Qx), - Convert.FromBase64String(privateKey.Qy), - version.CreatedAt); - } - finally - { - _mutex.Release(); - } - } - - public async Task ImportAsync( - string keyId, - KmsKeyMaterial material, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - ArgumentNullException.ThrowIfNull(material); - - if (material.D is null || material.D.Length == 0) - { - throw new ArgumentException("Key material must include private key bytes.", nameof(material)); - } - - if (material.Qx is null || material.Qx.Length == 0 || material.Qy is null || material.Qy.Length == 0) - { - throw new ArgumentException("Key material must include public key coordinates.", nameof(material)); - } - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false) - ?? throw new InvalidOperationException("Failed to create or load key metadata."); - - if (!string.Equals(record.Algorithm, material.Algorithm, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Algorithm mismatch. Expected '{record.Algorithm}', received '{material.Algorithm}'."); - } - - var versionId = string.IsNullOrWhiteSpace(material.VersionId) - ? $"{_timeProvider.GetUtcNow():yyyyMMddTHHmmssfffZ}" - : material.VersionId; - - if (record.Versions.Any(v => string.Equals(v.VersionId, versionId, StringComparison.Ordinal))) - { - throw new InvalidOperationException($"Key version '{versionId}' already exists for key '{record.KeyId}'."); - } - - var curveName = string.IsNullOrWhiteSpace(material.Curve) ? "nistP256" : material.Curve; - ResolveCurve(curveName); // validate supported curve - - var privateKeyRecord = new EcdsaPrivateKeyRecord - { - Curve = curveName, - D = Convert.ToBase64String(material.D), - Qx = Convert.ToBase64String(material.Qx), - Qy = Convert.ToBase64String(material.Qy), - }; - - var privateBlob = JsonSerializer.SerializeToUtf8Bytes(privateKeyRecord, JsonOptions); - try - { - var envelope = EncryptPrivateKey(privateBlob); - var fileName = $"{versionId}.key.json"; - var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName); - await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false); - - foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active)) - { - existing.State = KmsKeyState.PendingRotation; - } - - var createdAt = material.CreatedAt == default ? 
_timeProvider.GetUtcNow() : material.CreatedAt; - var publicKey = CombinePublicCoordinates(material.Qx, material.Qy); - - record.Versions.Add(new KeyVersionRecord - { - VersionId = versionId, - State = KmsKeyState.Active, - CreatedAt = createdAt, - PublicKey = Convert.ToBase64String(publicKey), - CurveName = curveName, - FileName = fileName, - }); - - record.CreatedAt ??= createdAt; - record.State = KmsKeyState.Active; - record.ActiveVersion = versionId; - - await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); - return ToMetadata(record); - } - finally - { - CryptographicOperations.ZeroMemory(privateBlob); - } - } - finally - { - _mutex.Release(); - } - } - - public async Task RotateAsync(string keyId, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false) - ?? throw new InvalidOperationException("Failed to create or load key metadata."); - - if (record.State == KmsKeyState.Revoked) - { - throw new InvalidOperationException($"Key '{keyId}' has been revoked and cannot be rotated."); - } - - var timestamp = _timeProvider.GetUtcNow(); - var versionId = $"{timestamp:yyyyMMddTHHmmssfffZ}"; - var keyData = CreateKeyMaterial(record.Algorithm); - - try - { - var envelope = EncryptPrivateKey(keyData.PrivateBlob); - var fileName = $"{versionId}.key.json"; - var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName); - await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false); - - foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active)) - { - existing.State = KmsKeyState.PendingRotation; - } - - record.Versions.Add(new KeyVersionRecord - { - VersionId = versionId, - State = KmsKeyState.Active, - CreatedAt = timestamp, - PublicKey = keyData.PublicKey, - CurveName = keyData.Curve, - FileName = fileName, - }); - - record.CreatedAt ??= timestamp; - record.State = KmsKeyState.Active; - record.ActiveVersion = versionId; - - await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); - return ToMetadata(record); - } - finally - { - CryptographicOperations.ZeroMemory(keyData.PrivateBlob); - } - } - finally - { - _mutex.Release(); - } - } - - public async Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false) - ?? 
throw new InvalidOperationException($"Key '{keyId}' does not exist."); - - var timestamp = _timeProvider.GetUtcNow(); - record.State = KmsKeyState.Revoked; - foreach (var version in record.Versions) - { - if (version.State != KmsKeyState.Revoked) - { - version.State = KmsKeyState.Revoked; - version.DeactivatedAt = timestamp; - } - } - - await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); - } - finally - { - _mutex.Release(); - } - } - - private static string GetMetadataPath(string root, string keyId) - => Path.Combine(root, keyId, "metadata.json"); - - private string GetKeyDirectory(string keyId) - { - var path = Path.Combine(_options.RootPath, keyId); - Directory.CreateDirectory(path); - return path; - } - - private async Task LoadOrCreateMetadataAsync( - string keyId, - CancellationToken cancellationToken, - bool createIfMissing) - { - var metadataPath = GetMetadataPath(_options.RootPath, keyId); - if (!File.Exists(metadataPath)) - { - if (!createIfMissing) - { - return null; - } - - var record = new KeyMetadataRecord - { - KeyId = keyId, - Algorithm = _options.Algorithm, - State = KmsKeyState.Active, - CreatedAt = _timeProvider.GetUtcNow(), - }; - - await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false); - return record; - } - - await using var stream = File.Open(metadataPath, FileMode.Open, FileAccess.Read, FileShare.Read); - var loadedRecord = await JsonSerializer.DeserializeAsync(stream, JsonOptions, cancellationToken).ConfigureAwait(false); - if (loadedRecord is null) - { - return null; - } - - if (string.IsNullOrWhiteSpace(loadedRecord.Algorithm)) - { - loadedRecord.Algorithm = KmsAlgorithms.Es256; - } - - foreach (var version in loadedRecord.Versions) - { - if (string.IsNullOrWhiteSpace(version.CurveName)) - { - version.CurveName = "nistP256"; - } - } - - return loadedRecord; - } - - private async Task SaveMetadataAsync(KeyMetadataRecord record, CancellationToken cancellationToken) - { - var metadataPath = GetMetadataPath(_options.RootPath, record.KeyId); - Directory.CreateDirectory(Path.GetDirectoryName(metadataPath)!); - await using var stream = File.Open(metadataPath, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, record, JsonOptions, cancellationToken).ConfigureAwait(false); - } - - private async Task LoadPrivateKeyAsync(KeyMetadataRecord record, KeyVersionRecord version, CancellationToken cancellationToken) - { - var keyPath = Path.Combine(GetKeyDirectory(record.KeyId), version.FileName); - if (!File.Exists(keyPath)) - { - throw new InvalidOperationException($"Key material for version '{version.VersionId}' was not found."); - } - - await using var stream = File.Open(keyPath, FileMode.Open, FileAccess.Read, FileShare.Read); - var envelope = await JsonSerializer.DeserializeAsync(stream, JsonOptions, cancellationToken).ConfigureAwait(false) - ?? throw new InvalidOperationException("Key envelope could not be deserialized."); - - var payload = DecryptPrivateKey(envelope); - try - { - return JsonSerializer.Deserialize(payload, JsonOptions) - ?? throw new InvalidOperationException("Key payload could not be deserialized."); - } - finally - { - CryptographicOperations.ZeroMemory(payload); - } - } - - private static KeyVersionRecord ResolveVersion(KeyMetadataRecord record, string? keyVersion) - { - KeyVersionRecord? 
version = null; - if (!string.IsNullOrWhiteSpace(keyVersion)) - { - version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, keyVersion, StringComparison.Ordinal)); - if (version is null) - { - throw new InvalidOperationException($"Key version '{keyVersion}' does not exist for key '{record.KeyId}'."); - } - } - else if (!string.IsNullOrWhiteSpace(record.ActiveVersion)) - { - version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, record.ActiveVersion, StringComparison.Ordinal)); - } - - version ??= record.Versions - .Where(v => v.State == KmsKeyState.Active) - .OrderByDescending(v => v.CreatedAt) - .FirstOrDefault(); - - if (version is null) - { - throw new InvalidOperationException($"Key '{record.KeyId}' does not have an active version."); - } - - return version; - } - - private EcdsaKeyData CreateKeyMaterial(string algorithm) - { - if (!string.Equals(algorithm, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase)) - { - throw new NotSupportedException($"Algorithm '{algorithm}' is not supported by the file KMS driver."); - } - - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var parameters = ecdsa.ExportParameters(true); - - var keyRecord = new EcdsaPrivateKeyRecord - { - Curve = "nistP256", - D = Convert.ToBase64String(parameters.D ?? Array.Empty()), - Qx = Convert.ToBase64String(parameters.Q.X ?? Array.Empty()), - Qy = Convert.ToBase64String(parameters.Q.Y ?? Array.Empty()), - }; - - var privateBlob = JsonSerializer.SerializeToUtf8Bytes(keyRecord, JsonOptions); - - var qx = parameters.Q.X ?? Array.Empty(); - var qy = parameters.Q.Y ?? Array.Empty(); - var publicKey = new byte[qx.Length + qy.Length]; - Buffer.BlockCopy(qx, 0, publicKey, 0, qx.Length); - Buffer.BlockCopy(qy, 0, publicKey, qx.Length, qy.Length); - - return new EcdsaKeyData(privateBlob, Convert.ToBase64String(publicKey), keyRecord.Curve); - } - - private byte[] SignData(EcdsaPrivateKeyRecord privateKey, ReadOnlySpan data) - { - var parameters = new ECParameters - { - Curve = ResolveCurve(privateKey.Curve), - D = Convert.FromBase64String(privateKey.D), - Q = new ECPoint - { - X = Convert.FromBase64String(privateKey.Qx), - Y = Convert.FromBase64String(privateKey.Qy), - }, - }; - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportParameters(parameters); - return ecdsa.SignData(data, HashAlgorithmName.SHA256); - } - - private bool VerifyData(string curveName, string publicKeyBase64, ReadOnlySpan data, ReadOnlySpan signature) - { - var publicKey = Convert.FromBase64String(publicKeyBase64); - if (publicKey.Length % 2 != 0) - { - return false; - } - - var half = publicKey.Length / 2; - var qx = publicKey[..half]; - var qy = publicKey[half..]; - - var parameters = new ECParameters - { - Curve = ResolveCurve(curveName), - Q = new ECPoint - { - X = qx, - Y = qy, - }, - }; - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportParameters(parameters); - return ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256); - } - - private KeyEnvelope EncryptPrivateKey(ReadOnlySpan privateKey) - { - var salt = RandomNumberGenerator.GetBytes(16); - var nonce = RandomNumberGenerator.GetBytes(12); - var key = DeriveKey(salt); - - try - { - var ciphertext = new byte[privateKey.Length]; - var tag = new byte[16]; - var plaintextCopy = privateKey.ToArray(); - - using var aesGcm = new AesGcm(key, tag.Length); - try - { - aesGcm.Encrypt(nonce, plaintextCopy, ciphertext, tag); - } - finally - { - CryptographicOperations.ZeroMemory(plaintextCopy); - } - - return new KeyEnvelope( - Ciphertext: 
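// Sketch of the envelope assembled here: the private-key JSON blob is sealed with AES-256-GCM
// using a 32-byte key derived from _options.Password via PBKDF2-SHA256
// (_options.KeyDerivationIterations) over a fresh 16-byte salt, plus a fresh 12-byte nonce and
// a 16-byte tag; all four fields are persisted base64-encoded, roughly:
//
//   { "ciphertext": "...", "nonce": "...", "tag": "...", "salt": "..." }
//
// (property casing follows JsonSerializerDefaults.Web; the serialized field order is not significant)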
Convert.ToBase64String(ciphertext), - Nonce: Convert.ToBase64String(nonce), - Tag: Convert.ToBase64String(tag), - Salt: Convert.ToBase64String(salt)); - } - finally - { - CryptographicOperations.ZeroMemory(key); - } - } - - private byte[] DecryptPrivateKey(KeyEnvelope envelope) - { - var salt = Convert.FromBase64String(envelope.Salt); - var nonce = Convert.FromBase64String(envelope.Nonce); - var tag = Convert.FromBase64String(envelope.Tag); - var ciphertext = Convert.FromBase64String(envelope.Ciphertext); - - var key = DeriveKey(salt); - try - { - var plaintext = new byte[ciphertext.Length]; - using var aesGcm = new AesGcm(key, tag.Length); - aesGcm.Decrypt(nonce, ciphertext, tag, plaintext); - - return plaintext; - } - finally - { - CryptographicOperations.ZeroMemory(key); - } - } - - private byte[] DeriveKey(byte[] salt) - { - var key = new byte[32]; - try - { - var passwordBytes = Encoding.UTF8.GetBytes(_options.Password); - try - { - var derived = Rfc2898DeriveBytes.Pbkdf2(passwordBytes, salt, _options.KeyDerivationIterations, HashAlgorithmName.SHA256, key.Length); - derived.CopyTo(key.AsSpan()); - CryptographicOperations.ZeroMemory(derived); - return key; - } - finally - { - CryptographicOperations.ZeroMemory(passwordBytes); - } - } - catch - { - CryptographicOperations.ZeroMemory(key); - throw; - } - } - - private static async Task WriteJsonAsync(string path, T value, CancellationToken cancellationToken) - { - await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, value, JsonOptions, cancellationToken).ConfigureAwait(false); - } - - private static KmsKeyMetadata ToMetadata(KeyMetadataRecord record) - { - var versions = record.Versions - .Select(v => new KmsKeyVersionMetadata( - v.VersionId, - v.State, - v.CreatedAt, - v.DeactivatedAt, - v.PublicKey, - v.CurveName)) - .ToImmutableArray(); - - var createdAt = record.CreatedAt ?? (versions.Length > 0 ? versions.Min(v => v.CreatedAt) : TimeProvider.System.GetUtcNow()); - return new KmsKeyMetadata(record.KeyId, record.Algorithm, record.State, createdAt, versions); - } - - private sealed class KeyMetadataRecord - { - public string KeyId { get; set; } = string.Empty; - public string Algorithm { get; set; } = KmsAlgorithms.Es256; - public KmsKeyState State { get; set; } = KmsKeyState.Active; - public DateTimeOffset? CreatedAt { get; set; } - public string? ActiveVersion { get; set; } - public List Versions { get; set; } = new(); - } - - private sealed class KeyVersionRecord - { - public string VersionId { get; set; } = string.Empty; - public KmsKeyState State { get; set; } = KmsKeyState.Active; - public DateTimeOffset CreatedAt { get; set; } - public DateTimeOffset? 
DeactivatedAt { get; set; } - public string PublicKey { get; set; } = string.Empty; - public string FileName { get; set; } = string.Empty; - public string CurveName { get; set; } = string.Empty; - } - - private sealed record KeyEnvelope( - string Ciphertext, - string Nonce, - string Tag, - string Salt); - - private sealed record EcdsaKeyData(byte[] PrivateBlob, string PublicKey, string Curve); - - private sealed class EcdsaPrivateKeyRecord - { - public string Curve { get; set; } = string.Empty; - public string D { get; set; } = string.Empty; - public string Qx { get; set; } = string.Empty; - public string Qy { get; set; } = string.Empty; - } - - private static ECCurve ResolveCurve(string curveName) => curveName switch - { - "nistP256" or "P-256" or "ES256" => ECCurve.NamedCurves.nistP256, - _ => throw new NotSupportedException($"Curve '{curveName}' is not supported."), - }; - public void Dispose() => _mutex.Dispose(); - - private static byte[] CombinePublicCoordinates(ReadOnlySpan qx, ReadOnlySpan qy) - { - if (qx.IsEmpty || qy.IsEmpty) - { - return Array.Empty(); - } - - var publicKey = new byte[qx.Length + qy.Length]; - qx.CopyTo(publicKey); - qy.CopyTo(publicKey.AsSpan(qx.Length)); - return publicKey; - } } diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Cache.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Cache.cs new file mode 100644 index 000000000..e1400bcd3 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Cache.cs @@ -0,0 +1,39 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) + { + return cached.Snapshot; + } + + var metadata = await _facade.GetCryptoKeyMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var versions = await _facade.ListKeyVersionsAsync(keyId, cancellationToken).ConfigureAwait(false); + + var snapshot = new CryptoKeySnapshot(metadata, versions); + _metadataCache[keyId] = new CachedCryptoKey(snapshot, now.Add(_metadataCacheDuration)); + return snapshot; + } + + private async Task GetCachedPublicKeyAsync(string versionName, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_publicKeyCache.TryGetValue(versionName, out var cached) && cached.ExpiresAt > now) + { + return cached.Material; + } + + var material = await _facade.GetPublicKeyAsync(versionName, cancellationToken).ConfigureAwait(false); + var der = DecodePem(material.Pem); + var publicMaterial = new GcpPublicMaterial(material.VersionName, material.Algorithm, der); + _publicKeyCache[versionName] = new CachedPublicKey(publicMaterial, now.Add(_publicKeyCacheDuration)); + return publicMaterial; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Helpers.cs new file mode 100644 index 000000000..02c87a418 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Helpers.cs @@ -0,0 +1,70 @@ +using Microsoft.IdentityModel.Tokens; +using System; +using System.IO; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + private static KmsKeyState 
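// Sketch of the state mapping declared below: Cloud KMS exposes a richer version lifecycle than
// the internal KmsKeyState enum, so Enabled maps to Active, PendingGeneration and PendingImport
// map to PendingRotation, and every other state (Disabled, DestroyScheduled, Destroyed, the
// failure states and Unspecified) is collapsed to Revoked.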
MapState(GcpCryptoKeyVersionState state) + => state switch + { + GcpCryptoKeyVersionState.Enabled => KmsKeyState.Active, + GcpCryptoKeyVersionState.PendingGeneration or GcpCryptoKeyVersionState.PendingImport => KmsKeyState.PendingRotation, + _ => KmsKeyState.Revoked, + }; + + private static string ResolveCurve(string algorithm) + { + return algorithm switch + { + "EC_SIGN_P256_SHA256" => JsonWebKeyECTypes.P256, + "EC_SIGN_P384_SHA384" => JsonWebKeyECTypes.P384, + _ => JsonWebKeyECTypes.P256, + }; + } + + private static byte[] DecodePem(string pem) + { + if (string.IsNullOrWhiteSpace(pem)) + { + throw new InvalidOperationException("Public key PEM cannot be empty."); + } + + var builder = new StringBuilder(pem.Length); + using var reader = new StringReader(pem); + string? line; + while ((line = reader.ReadLine()) is not null) + { + if (line.StartsWith("-----", StringComparison.Ordinal)) + { + continue; + } + + builder.Append(line.Trim()); + } + + return Convert.FromBase64String(builder.ToString()); + } + + private static byte[] ComputeSha256(ReadOnlyMemory data) + { + var digest = new byte[32]; + if (!SHA256.TryHashData(data.Span, digest, out _)) + { + throw new InvalidOperationException("Failed to hash payload with SHA-256."); + } + + return digest; + } + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(GcpKmsClient)); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Metadata.cs new file mode 100644 index 000000000..79036e6f9 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Metadata.cs @@ -0,0 +1,71 @@ +using System; +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + + var versions = ImmutableArray.CreateBuilder(snapshot.Versions.Count); + foreach (var version in snapshot.Versions) + { + var publicMaterial = await GetCachedPublicKeyAsync(version.VersionName, cancellationToken).ConfigureAwait(false); + versions.Add(new KmsKeyVersionMetadata( + version.VersionName, + MapState(version.State), + version.CreateTime, + version.DestroyTime, + Convert.ToBase64String(publicMaterial.SubjectPublicKeyInfo), + ResolveCurve(publicMaterial.Algorithm))); + } + + var overallState = versions.Any(v => v.State == KmsKeyState.Active) + ? KmsKeyState.Active + : versions.Any(v => v.State == KmsKeyState.PendingRotation) + ? KmsKeyState.PendingRotation + : KmsKeyState.Revoked; + + return new KmsKeyMetadata( + snapshot.Metadata.KeyName, + KmsAlgorithms.Es256, + overallState, + snapshot.Metadata.CreateTime, + versions.MoveToImmutable()); + } + + public async Task ExportAsync( + string keyId, + string? 
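// Sketch of the export contract declared here: Cloud KMS never releases private key material,
// so the returned KmsKeyMaterial carries an empty D and only the public EC coordinates
// recovered from the version's SubjectPublicKeyInfo.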
keyVersion, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); + var publicMaterial = await GetCachedPublicKeyAsync(versionResource, cancellationToken).ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportSubjectPublicKeyInfo(publicMaterial.SubjectPublicKeyInfo, out _); + var parameters = ecdsa.ExportParameters(false); + + return new KmsKeyMaterial( + snapshot.Metadata.KeyName, + versionResource, + KmsAlgorithms.Es256, + ResolveCurve(publicMaterial.Algorithm), + Array.Empty(), + parameters.Q.X ?? throw new InvalidOperationException("Public key missing X coordinate."), + parameters.Q.Y ?? throw new InvalidOperationException("Public key missing Y coordinate."), + snapshot.Metadata.CreateTime); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Models.cs new file mode 100644 index 000000000..8c53560a8 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Models.cs @@ -0,0 +1,15 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + private sealed record CachedCryptoKey(CryptoKeySnapshot Snapshot, DateTimeOffset ExpiresAt); + + private sealed record CachedPublicKey(GcpPublicMaterial Material, DateTimeOffset ExpiresAt); + + private sealed record CryptoKeySnapshot(GcpCryptoKeyMetadata Metadata, IReadOnlyList Versions); + + private sealed record GcpPublicMaterial(string VersionName, string Algorithm, byte[] SubjectPublicKeyInfo); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Resolve.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Resolve.cs new file mode 100644 index 000000000..f5ffd76bd --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Resolve.cs @@ -0,0 +1,30 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + private async Task ResolveVersionAsync(string keyId, string? 
keyVersion, CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(keyVersion)) + { + return keyVersion!; + } + + var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(snapshot.Metadata.PrimaryVersionName)) + { + return snapshot.Metadata.PrimaryVersionName!; + } + + var firstActive = snapshot.Versions.FirstOrDefault(v => v.State == GcpCryptoKeyVersionState.Enabled); + if (firstActive is not null) + { + return firstActive.VersionName; + } + + throw new InvalidOperationException($"Crypto key '{keyId}' does not have an active primary version."); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Signing.cs new file mode 100644 index 000000000..16415cb75 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.Signing.cs @@ -0,0 +1,70 @@ +using System; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class GcpKmsClient +{ + public async Task SignAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty) + { + throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); + } + + var digest = ComputeSha256(data); + try + { + var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); + var result = await _facade.SignAsync(versionResource, digest, cancellationToken).ConfigureAwait(false); + + return new KmsSignResult( + keyId, + string.IsNullOrWhiteSpace(result.VersionName) ? versionResource : result.VersionName, + KmsAlgorithms.Es256, + result.Signature); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } + + public async Task VerifyAsync( + string keyId, + string? 
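// Sketch of the verification path declared here: unlike SignAsync, which sends a SHA-256 digest
// to Cloud KMS AsymmetricSign, VerifyAsync never asks the service to check the signature; it
// resolves the version, takes the cached SubjectPublicKeyInfo, imports it into a local ECDsa
// instance and calls VerifyHash, zeroing the digest afterwards.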
keyVersion, + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty || signature.IsEmpty) + { + return false; + } + + var digest = ComputeSha256(data); + try + { + var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); + var publicMaterial = await GetCachedPublicKeyAsync(versionResource, cancellationToken).ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportSubjectPublicKeyInfo(publicMaterial.SubjectPublicKeyInfo, out _); + return ecdsa.VerifyHash(digest, signature.ToArray()); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.cs index eb3d38ba1..c3ca07465 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsClient.cs @@ -1,17 +1,14 @@ - -using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Options; +using System; using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.IO; -using System.Security.Cryptography; -using System.Text; +using System.Threading.Tasks; namespace StellaOps.Cryptography.Kms; /// /// Google Cloud KMS implementation of . /// -public sealed class GcpKmsClient : IKmsClient, IDisposable +public sealed partial class GcpKmsClient : IKmsClient, IDisposable { private readonly IGcpKmsFacade _facade; private readonly TimeProvider _timeProvider; @@ -32,126 +29,9 @@ public sealed class GcpKmsClient : IKmsClient, IDisposable _publicKeyCacheDuration = options.PublicKeyCacheDuration; } - public async Task SignAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - CancellationToken cancellationToken = default) + public GcpKmsClient(IGcpKmsFacade facade, IOptions options, TimeProvider timeProvider) + : this(facade, options?.Value ?? new GcpKmsOptions(), timeProvider) { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty) - { - throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); - } - - var digest = ComputeSha256(data); - try - { - var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); - var result = await _facade.SignAsync(versionResource, digest, cancellationToken).ConfigureAwait(false); - - return new KmsSignResult( - keyId, - string.IsNullOrWhiteSpace(result.VersionName) ? versionResource : result.VersionName, - KmsAlgorithms.Es256, - result.Signature); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public async Task VerifyAsync( - string keyId, - string? 
keyVersion, - ReadOnlyMemory data, - ReadOnlyMemory signature, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty || signature.IsEmpty) - { - return false; - } - - var digest = ComputeSha256(data); - try - { - var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); - var publicMaterial = await GetCachedPublicKeyAsync(versionResource, cancellationToken).ConfigureAwait(false); - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportSubjectPublicKeyInfo(publicMaterial.SubjectPublicKeyInfo, out _); - return ecdsa.VerifyHash(digest, signature.ToArray()); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - - var versions = ImmutableArray.CreateBuilder(snapshot.Versions.Count); - foreach (var version in snapshot.Versions) - { - var publicMaterial = await GetCachedPublicKeyAsync(version.VersionName, cancellationToken).ConfigureAwait(false); - versions.Add(new KmsKeyVersionMetadata( - version.VersionName, - MapState(version.State), - version.CreateTime, - version.DestroyTime, - Convert.ToBase64String(publicMaterial.SubjectPublicKeyInfo), - ResolveCurve(publicMaterial.Algorithm))); - } - - var overallState = versions.Any(v => v.State == KmsKeyState.Active) - ? KmsKeyState.Active - : versions.Any(v => v.State == KmsKeyState.PendingRotation) - ? KmsKeyState.PendingRotation - : KmsKeyState.Revoked; - - return new KmsKeyMetadata( - snapshot.Metadata.KeyName, - KmsAlgorithms.Es256, - overallState, - snapshot.Metadata.CreateTime, - versions.MoveToImmutable()); - } - - public async Task ExportAsync( - string keyId, - string? keyVersion, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var versionResource = await ResolveVersionAsync(keyId, keyVersion, cancellationToken).ConfigureAwait(false); - var publicMaterial = await GetCachedPublicKeyAsync(versionResource, cancellationToken).ConfigureAwait(false); - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportSubjectPublicKeyInfo(publicMaterial.SubjectPublicKeyInfo, out _); - var parameters = ecdsa.ExportParameters(false); - - return new KmsKeyMaterial( - snapshot.Metadata.KeyName, - versionResource, - KmsAlgorithms.Es256, - ResolveCurve(publicMaterial.Algorithm), - Array.Empty(), - parameters.Q.X ?? throw new InvalidOperationException("Public key missing X coordinate."), - parameters.Q.Y ?? 
throw new InvalidOperationException("Public key missing Y coordinate."), - snapshot.Metadata.CreateTime); } public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) @@ -170,125 +50,4 @@ public sealed class GcpKmsClient : IKmsClient, IDisposable _disposed = true; _facade.Dispose(); } - - private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) - { - return cached.Snapshot; - } - - var metadata = await _facade.GetCryptoKeyMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var versions = await _facade.ListKeyVersionsAsync(keyId, cancellationToken).ConfigureAwait(false); - - var snapshot = new CryptoKeySnapshot(metadata, versions); - _metadataCache[keyId] = new CachedCryptoKey(snapshot, now.Add(_metadataCacheDuration)); - return snapshot; - } - - private async Task GetCachedPublicKeyAsync(string versionName, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_publicKeyCache.TryGetValue(versionName, out var cached) && cached.ExpiresAt > now) - { - return cached.Material; - } - - var material = await _facade.GetPublicKeyAsync(versionName, cancellationToken).ConfigureAwait(false); - var der = DecodePem(material.Pem); - var publicMaterial = new GcpPublicMaterial(material.VersionName, material.Algorithm, der); - _publicKeyCache[versionName] = new CachedPublicKey(publicMaterial, now.Add(_publicKeyCacheDuration)); - return publicMaterial; - } - - private async Task ResolveVersionAsync(string keyId, string? keyVersion, CancellationToken cancellationToken) - { - if (!string.IsNullOrWhiteSpace(keyVersion)) - { - return keyVersion!; - } - - var snapshot = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(snapshot.Metadata.PrimaryVersionName)) - { - return snapshot.Metadata.PrimaryVersionName!; - } - - var firstActive = snapshot.Versions.FirstOrDefault(v => v.State == GcpCryptoKeyVersionState.Enabled); - if (firstActive is not null) - { - return firstActive.VersionName; - } - - throw new InvalidOperationException($"Crypto key '{keyId}' does not have an active primary version."); - } - - private static KmsKeyState MapState(GcpCryptoKeyVersionState state) - => state switch - { - GcpCryptoKeyVersionState.Enabled => KmsKeyState.Active, - GcpCryptoKeyVersionState.PendingGeneration or GcpCryptoKeyVersionState.PendingImport => KmsKeyState.PendingRotation, - _ => KmsKeyState.Revoked, - }; - - private static string ResolveCurve(string algorithm) - { - return algorithm switch - { - "EC_SIGN_P256_SHA256" => JsonWebKeyECTypes.P256, - "EC_SIGN_P384_SHA384" => JsonWebKeyECTypes.P384, - _ => JsonWebKeyECTypes.P256, - }; - } - - private static byte[] DecodePem(string pem) - { - if (string.IsNullOrWhiteSpace(pem)) - { - throw new InvalidOperationException("Public key PEM cannot be empty."); - } - - var builder = new StringBuilder(pem.Length); - using var reader = new StringReader(pem); - string? 
line; - while ((line = reader.ReadLine()) is not null) - { - if (line.StartsWith("-----", StringComparison.Ordinal)) - { - continue; - } - - builder.Append(line.Trim()); - } - - return Convert.FromBase64String(builder.ToString()); - } - - private static byte[] ComputeSha256(ReadOnlyMemory data) - { - var digest = new byte[32]; - if (!SHA256.TryHashData(data.Span, digest, out _)) - { - throw new InvalidOperationException("Failed to hash payload with SHA-256."); - } - - return digest; - } - - private void ThrowIfDisposed() - { - if (_disposed) - { - throw new ObjectDisposedException(nameof(GcpKmsClient)); - } - } - - private sealed record CachedCryptoKey(CryptoKeySnapshot Snapshot, DateTimeOffset ExpiresAt); - - private sealed record CachedPublicKey(GcpPublicMaterial Material, DateTimeOffset ExpiresAt); - - private sealed record CryptoKeySnapshot(GcpCryptoKeyMetadata Metadata, IReadOnlyList Versions); - - private sealed record GcpPublicMaterial(string VersionName, string Algorithm, byte[] SubjectPublicKeyInfo); -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Contracts.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Contracts.cs new file mode 100644 index 000000000..fab7bdf9c --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Contracts.cs @@ -0,0 +1,17 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public interface IGcpKmsFacade : IDisposable +{ + Task SignAsync(string versionName, ReadOnlyMemory digest, CancellationToken cancellationToken); + + Task GetCryptoKeyMetadataAsync(string keyName, CancellationToken cancellationToken); + + Task> ListKeyVersionsAsync(string keyName, CancellationToken cancellationToken); + + Task GetPublicKeyAsync(string versionName, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Dispose.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Dispose.cs new file mode 100644 index 000000000..a14ba9e29 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Dispose.cs @@ -0,0 +1,14 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class GcpKmsFacade +{ + public void Dispose() + { + if (_ownsClient && _client is IDisposable disposable) + { + disposable.Dispose(); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Helpers.cs new file mode 100644 index 000000000..7173d3d2a --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Helpers.cs @@ -0,0 +1,37 @@ +using Google.Cloud.Kms.V1; +using Google.Protobuf.WellKnownTypes; +using System; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class GcpKmsFacade +{ + private static GcpCryptoKeyVersionState MapState(CryptoKeyVersion.Types.CryptoKeyVersionState state) + => state switch + { + CryptoKeyVersion.Types.CryptoKeyVersionState.Enabled => GcpCryptoKeyVersionState.Enabled, + CryptoKeyVersion.Types.CryptoKeyVersionState.Disabled => GcpCryptoKeyVersionState.Disabled, + CryptoKeyVersion.Types.CryptoKeyVersionState.DestroyScheduled => GcpCryptoKeyVersionState.DestroyScheduled, + CryptoKeyVersion.Types.CryptoKeyVersionState.Destroyed => GcpCryptoKeyVersionState.Destroyed, + CryptoKeyVersion.Types.CryptoKeyVersionState.PendingGeneration => 
GcpCryptoKeyVersionState.PendingGeneration, + CryptoKeyVersion.Types.CryptoKeyVersionState.PendingImport => GcpCryptoKeyVersionState.PendingImport, + CryptoKeyVersion.Types.CryptoKeyVersionState.ImportFailed => GcpCryptoKeyVersionState.ImportFailed, + CryptoKeyVersion.Types.CryptoKeyVersionState.GenerationFailed => GcpCryptoKeyVersionState.GenerationFailed, + _ => GcpCryptoKeyVersionState.Unspecified, + }; + + private DateTimeOffset ToDateTimeOffsetOrUtcNow(Timestamp? timestamp) + { + if (timestamp is null) + { + return _timeProvider.GetUtcNow(); + } + + if (timestamp.Seconds == 0 && timestamp.Nanos == 0) + { + return _timeProvider.GetUtcNow(); + } + + return timestamp.ToDateTimeOffset(); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Metadata.cs new file mode 100644 index 000000000..b1a158fc7 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Metadata.cs @@ -0,0 +1,64 @@ +using Google.Cloud.Kms.V1; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class GcpKmsFacade +{ + public async Task GetCryptoKeyMetadataAsync(string keyName, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyName); + + var response = await _client.GetCryptoKeyAsync(new GetCryptoKeyRequest + { + Name = keyName, + }, cancellationToken).ConfigureAwait(false); + + return new GcpCryptoKeyMetadata( + response.Name, + response.Primary?.Name, + ToDateTimeOffsetOrUtcNow(response.CreateTime)); + } + + public async Task> ListKeyVersionsAsync(string keyName, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(keyName); + + var results = new List(); + var request = new ListCryptoKeyVersionsRequest + { + Parent = keyName, + }; + + await foreach (var version in _client.ListCryptoKeyVersionsAsync(request) + .WithCancellation(cancellationToken) + .ConfigureAwait(false)) + { + results.Add(new GcpCryptoKeyVersionMetadata( + version.Name, + MapState(version.State), + ToDateTimeOffsetOrUtcNow(version.CreateTime), + version.DestroyTime is null ? null : ToDateTimeOffsetOrUtcNow(version.DestroyTime))); + } + + return results; + } + + public async Task GetPublicKeyAsync(string versionName, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(versionName); + + var response = await _client.GetPublicKeyAsync(new GetPublicKeyRequest + { + Name = versionName, + }, cancellationToken).ConfigureAwait(false); + + return new GcpPublicKeyMaterial( + response.Name ?? versionName, + response.Algorithm.ToString(), + response.Pem); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Models.cs new file mode 100644 index 000000000..0043a1e82 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Models.cs @@ -0,0 +1,28 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +public sealed record GcpSignResult(string VersionName, byte[] Signature); + +public sealed record GcpCryptoKeyMetadata(string KeyName, string? 
PrimaryVersionName, DateTimeOffset CreateTime); + +public enum GcpCryptoKeyVersionState +{ + Unspecified = 0, + PendingGeneration = 1, + Enabled = 2, + Disabled = 3, + DestroyScheduled = 4, + Destroyed = 5, + PendingImport = 6, + ImportFailed = 7, + GenerationFailed = 8, +} + +public sealed record GcpCryptoKeyVersionMetadata( + string VersionName, + GcpCryptoKeyVersionState State, + DateTimeOffset CreateTime, + DateTimeOffset? DestroyTime); + +public sealed record GcpPublicKeyMaterial(string VersionName, string Algorithm, string Pem); \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Signing.cs new file mode 100644 index 000000000..0e7baadec --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.Signing.cs @@ -0,0 +1,26 @@ +using Google.Cloud.Kms.V1; +using Google.Protobuf; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class GcpKmsFacade +{ + public async Task SignAsync(string versionName, ReadOnlyMemory digest, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(versionName); + + var response = await _client.AsymmetricSignAsync(new AsymmetricSignRequest + { + Name = versionName, + Digest = new Digest + { + Sha256 = ByteString.CopyFrom(digest.ToArray()), + }, + }, cancellationToken).ConfigureAwait(false); + + return new GcpSignResult(response.Name ?? versionName, response.Signature.ToByteArray()); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.cs index bb71cc792..c2afb02d8 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsFacade.cs @@ -1,46 +1,10 @@ using Google.Cloud.Kms.V1; -using Google.Protobuf; -using Google.Protobuf.WellKnownTypes; +using Microsoft.Extensions.Options; +using System; namespace StellaOps.Cryptography.Kms; -public interface IGcpKmsFacade : IDisposable -{ - Task SignAsync(string versionName, ReadOnlyMemory digest, CancellationToken cancellationToken); - - Task GetCryptoKeyMetadataAsync(string keyName, CancellationToken cancellationToken); - - Task> ListKeyVersionsAsync(string keyName, CancellationToken cancellationToken); - - Task GetPublicKeyAsync(string versionName, CancellationToken cancellationToken); -} - -public sealed record GcpSignResult(string VersionName, byte[] Signature); - -public sealed record GcpCryptoKeyMetadata(string KeyName, string? PrimaryVersionName, DateTimeOffset CreateTime); - -public enum GcpCryptoKeyVersionState -{ - Unspecified = 0, - PendingGeneration = 1, - Enabled = 2, - Disabled = 3, - DestroyScheduled = 4, - Destroyed = 5, - PendingImport = 6, - ImportFailed = 7, - GenerationFailed = 8, -} - -public sealed record GcpCryptoKeyVersionMetadata( - string VersionName, - GcpCryptoKeyVersionState State, - DateTimeOffset CreateTime, - DateTimeOffset? 
DestroyTime); - -public sealed record GcpPublicKeyMaterial(string VersionName, string Algorithm, string Pem); - -internal sealed class GcpKmsFacade : IGcpKmsFacade +internal sealed partial class GcpKmsFacade : IGcpKmsFacade { private readonly KeyManagementServiceClient _client; private readonly bool _ownsClient; @@ -61,115 +25,15 @@ internal sealed class GcpKmsFacade : IGcpKmsFacade _ownsClient = true; } + public GcpKmsFacade(IOptions options, TimeProvider timeProvider) + : this(options?.Value ?? new GcpKmsOptions(), timeProvider) + { + } + public GcpKmsFacade(KeyManagementServiceClient client, TimeProvider? timeProvider = null) { _client = client ?? throw new ArgumentNullException(nameof(client)); _timeProvider = timeProvider ?? TimeProvider.System; _ownsClient = false; } - - public async Task SignAsync(string versionName, ReadOnlyMemory digest, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(versionName); - - var response = await _client.AsymmetricSignAsync(new AsymmetricSignRequest - { - Name = versionName, - Digest = new Digest - { - Sha256 = ByteString.CopyFrom(digest.ToArray()), - }, - }, cancellationToken).ConfigureAwait(false); - - return new GcpSignResult(response.Name ?? versionName, response.Signature.ToByteArray()); - } - - public async Task GetCryptoKeyMetadataAsync(string keyName, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyName); - - var response = await _client.GetCryptoKeyAsync(new GetCryptoKeyRequest - { - Name = keyName, - }, cancellationToken).ConfigureAwait(false); - - return new GcpCryptoKeyMetadata( - response.Name, - response.Primary?.Name, - ToDateTimeOffsetOrUtcNow(response.CreateTime)); - } - - public async Task> ListKeyVersionsAsync(string keyName, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(keyName); - - var results = new List(); - var request = new ListCryptoKeyVersionsRequest - { - Parent = keyName, - }; - - await foreach (var version in _client.ListCryptoKeyVersionsAsync(request).WithCancellation(cancellationToken).ConfigureAwait(false)) - { - results.Add(new GcpCryptoKeyVersionMetadata( - version.Name, - MapState(version.State), - ToDateTimeOffsetOrUtcNow(version.CreateTime), - version.DestroyTime is null ? null : ToDateTimeOffsetOrUtcNow(version.DestroyTime))); - } - - return results; - } - - public async Task GetPublicKeyAsync(string versionName, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(versionName); - - var response = await _client.GetPublicKeyAsync(new GetPublicKeyRequest - { - Name = versionName, - }, cancellationToken).ConfigureAwait(false); - - return new GcpPublicKeyMaterial( - response.Name ?? 
versionName, - response.Algorithm.ToString(), - response.Pem); - } - - private static GcpCryptoKeyVersionState MapState(CryptoKeyVersion.Types.CryptoKeyVersionState state) - => state switch - { - CryptoKeyVersion.Types.CryptoKeyVersionState.Enabled => GcpCryptoKeyVersionState.Enabled, - CryptoKeyVersion.Types.CryptoKeyVersionState.Disabled => GcpCryptoKeyVersionState.Disabled, - CryptoKeyVersion.Types.CryptoKeyVersionState.DestroyScheduled => GcpCryptoKeyVersionState.DestroyScheduled, - CryptoKeyVersion.Types.CryptoKeyVersionState.Destroyed => GcpCryptoKeyVersionState.Destroyed, - CryptoKeyVersion.Types.CryptoKeyVersionState.PendingGeneration => GcpCryptoKeyVersionState.PendingGeneration, - CryptoKeyVersion.Types.CryptoKeyVersionState.PendingImport => GcpCryptoKeyVersionState.PendingImport, - CryptoKeyVersion.Types.CryptoKeyVersionState.ImportFailed => GcpCryptoKeyVersionState.ImportFailed, - CryptoKeyVersion.Types.CryptoKeyVersionState.GenerationFailed => GcpCryptoKeyVersionState.GenerationFailed, - _ => GcpCryptoKeyVersionState.Unspecified, - }; - - public void Dispose() - { - if (_ownsClient && _client is IDisposable disposable) - { - disposable.Dispose(); - } - } - - private DateTimeOffset ToDateTimeOffsetOrUtcNow(Timestamp? timestamp) - { - if (timestamp is null) - { - return _timeProvider.GetUtcNow(); - } - - if (timestamp.Seconds == 0 && timestamp.Nanos == 0) - { - return _timeProvider.GetUtcNow(); - } - - return timestamp.ToDateTimeOffset(); - } -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsOptions.cs b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsOptions.cs index 3d578e899..8537fddc1 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsOptions.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/GcpKmsOptions.cs @@ -5,8 +5,8 @@ namespace StellaOps.Cryptography.Kms; /// public sealed class GcpKmsOptions { - private TimeSpan metadataCacheDuration = TimeSpan.FromMinutes(5); - private TimeSpan publicKeyCacheDuration = TimeSpan.FromMinutes(10); + private TimeSpan _metadataCacheDuration = TimeSpan.FromMinutes(5); + private TimeSpan _publicKeyCacheDuration = TimeSpan.FromMinutes(10); /// /// Gets or sets the service endpoint (default: kms.googleapis.com). @@ -18,8 +18,8 @@ public sealed class GcpKmsOptions /// public TimeSpan MetadataCacheDuration { - get => metadataCacheDuration; - set => metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); + get => _metadataCacheDuration; + set => _metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); } /// @@ -27,16 +27,10 @@ public sealed class GcpKmsOptions /// public TimeSpan PublicKeyCacheDuration { - get => publicKeyCacheDuration; - set => publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(10)); + get => _publicKeyCacheDuration; + set => _publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(10)); } - /// - /// Gets or sets an optional factory that can construct a custom GCP facade (primarily used for testing). - /// - public Func? FacadeFactory { get; set; } - private static TimeSpan EnsurePositive(TimeSpan value, TimeSpan @default) => value <= TimeSpan.Zero ? 
@default : value; } - diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/InternalsVisibleTo.cs b/src/__Libraries/StellaOps.Cryptography.Kms/InternalsVisibleTo.cs index f908b041f..352505950 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/InternalsVisibleTo.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/InternalsVisibleTo.cs @@ -2,3 +2,4 @@ using System.Runtime.CompilerServices; [assembly: InternalsVisibleTo("StellaOps.Cryptography.Kms.Tests")] +namespace StellaOps.Cryptography.Kms; diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Curves.cs b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Curves.cs new file mode 100644 index 000000000..134adedc4 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Curves.cs @@ -0,0 +1,41 @@ +using Microsoft.IdentityModel.Tokens; +using System.Security.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class KmsCryptoProvider +{ + private static string ResolveCurveName(ECCurve curve) + { + if (!string.IsNullOrWhiteSpace(curve.Oid.FriendlyName)) + { + return curve.Oid.FriendlyName switch + { + "nistP256" => JsonWebKeyECTypes.P256, + "nistP384" => JsonWebKeyECTypes.P384, + "nistP521" => JsonWebKeyECTypes.P521, + _ => JsonWebKeyECTypes.P256 + }; + } + + return JsonWebKeyECTypes.P256; + } + + private static string ResolveCurveName(int coordinateLength) + => coordinateLength switch + { + 32 => JsonWebKeyECTypes.P256, + 48 => JsonWebKeyECTypes.P384, + 66 => JsonWebKeyECTypes.P521, + _ => JsonWebKeyECTypes.P256 + }; + + private static ECCurve ResolveCurve(string curve) + => curve switch + { + JsonWebKeyECTypes.P256 or "P-256" => ECCurve.NamedCurves.nistP256, + JsonWebKeyECTypes.P384 or "P-384" => ECCurve.NamedCurves.nistP384, + JsonWebKeyECTypes.P521 or "P-521" => ECCurve.NamedCurves.nistP521, + _ => ECCurve.NamedCurves.nistP256 + }; +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.PublicKey.cs b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.PublicKey.cs new file mode 100644 index 000000000..5cf9a3808 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.PublicKey.cs @@ -0,0 +1,93 @@ +using System; +using System.Security.Cryptography; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class KmsCryptoProvider +{ + internal static class KmsMetadataKeys + { + public const string Version = "kms.version"; + } + + private static bool TryCreateSigningKey(KmsSigningRegistration registration, out CryptoSigningKey signingKey) + { + signingKey = default!; + if (registration.PublicKey is null) + { + return false; + } + + var curve = ResolveCurve(registration.PublicKey.Curve); + var parameters = new ECParameters + { + Curve = curve, + Q = new ECPoint + { + X = registration.PublicKey.Qx, + Y = registration.PublicKey.Qy + } + }; + + signingKey = new CryptoSigningKey( + registration.Reference, + registration.Algorithm, + in parameters, + verificationOnly: true, + registration.CreatedAt, + metadata: registration.Metadata); + return true; + } + + private static bool TryResolvePublicKey(CryptoSigningKey signingKey, out KmsPublicKey publicKey) + { + if (TryCreatePublicKey(signingKey.PublicParameters, out publicKey)) + { + return true; + } + + if (!signingKey.PublicKey.IsEmpty && TryCreatePublicKey(signingKey.PublicKey, out publicKey)) + { + return true; + } + + publicKey = default!; + return false; + } + + private static bool 
TryCreatePublicKey(ECParameters parameters, out KmsPublicKey publicKey) + { + if (parameters.Q.X is null || parameters.Q.Y is null) + { + publicKey = default!; + return false; + } + + var curve = ResolveCurveName(parameters.Curve); + publicKey = new KmsPublicKey(curve, (byte[])parameters.Q.X.Clone(), (byte[])parameters.Q.Y.Clone()); + return true; + } + + private static bool TryCreatePublicKey(ReadOnlyMemory rawKey, out KmsPublicKey publicKey) + { + if (rawKey.IsEmpty) + { + publicKey = default!; + return false; + } + + if (rawKey.Length % 2 != 0) + { + publicKey = default!; + return false; + } + + var coordinateLength = rawKey.Length / 2; + var qx = rawKey.Slice(0, coordinateLength).ToArray(); + var qy = rawKey.Slice(coordinateLength, coordinateLength).ToArray(); + var curve = ResolveCurveName(coordinateLength); + publicKey = new KmsPublicKey(curve, qx, qy); + return true; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Registration.cs b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Registration.cs new file mode 100644 index 000000000..9da461e57 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.Registration.cs @@ -0,0 +1,15 @@ +using System; +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +internal sealed record KmsSigningRegistration( + CryptoKeyReference Reference, + string VersionId, + string Algorithm, + DateTimeOffset CreatedAt, + IReadOnlyDictionary Metadata, + KmsPublicKey? PublicKey); + +internal sealed record KmsPublicKey(string Curve, byte[] Qx, byte[] Qy); \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.cs index e7b77e650..defa7bb19 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/KmsCryptoProvider.cs @@ -1,15 +1,14 @@ - -using Microsoft.IdentityModel.Tokens; -using StellaOps.Cryptography; +using System; using System.Collections.Concurrent; -using System.Security.Cryptography; +using System.Collections.Generic; +using StellaOps.Cryptography; namespace StellaOps.Cryptography.Kms; /// /// Crypto provider that delegates signing operations to a KMS backend. /// -public sealed class KmsCryptoProvider : ICryptoProvider +public sealed partial class KmsCryptoProvider : ICryptoProvider { private readonly IKmsClient _kmsClient; private readonly ConcurrentDictionary _registrations = new(StringComparer.OrdinalIgnoreCase); @@ -68,7 +67,20 @@ public sealed class KmsCryptoProvider : ICryptoProvider throw new InvalidOperationException("KMS signing keys must include metadata entry 'kms.version'."); } - var registration = new KmsSigningRegistration(signingKey.Reference.KeyId, versionId!, signingKey.AlgorithmId); + KmsPublicKey? 
publicKey = null; + if (TryResolvePublicKey(signingKey, out var resolved)) + { + publicKey = resolved; + } + + var registration = new KmsSigningRegistration( + signingKey.Reference, + versionId!, + signingKey.AlgorithmId, + signingKey.CreatedAt, + signingKey.Metadata, + publicKey); + _registrations.AddOrUpdate(signingKey.Reference.KeyId, registration, (_, _) => registration); } @@ -85,53 +97,17 @@ public sealed class KmsCryptoProvider : ICryptoProvider public IReadOnlyCollection GetSigningKeys() { var list = new List(); + foreach (var registration in _registrations.Values) { - var material = _kmsClient.ExportAsync(registration.KeyId, registration.VersionId).GetAwaiter().GetResult(); - var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - [KmsMetadataKeys.Version] = material.VersionId - }; - - var reference = new CryptoKeyReference(material.KeyId, Name); - CryptoSigningKey signingKey; - - if (material.D.Length == 0) + if (!TryCreateSigningKey(registration, out var signingKey)) { continue; } - else - { - var parameters = new ECParameters - { - Curve = ECCurve.NamedCurves.nistP256, - D = material.D, - Q = new ECPoint - { - X = material.Qx, - Y = material.Qy, - }, - }; - - signingKey = new CryptoSigningKey( - reference, - material.Algorithm, - in parameters, - material.CreatedAt, - metadata: metadata); - } list.Add(signingKey); } return list; } - - internal static class KmsMetadataKeys - { - public const string Version = "kms.version"; - } - } - -internal sealed record KmsSigningRegistration(string KeyId, string VersionId, string Algorithm); diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/KmsSigner.cs b/src/__Libraries/StellaOps.Cryptography.Kms/KmsSigner.cs index d9b86df1c..27748421c 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/KmsSigner.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/KmsSigner.cs @@ -1,7 +1,6 @@ - using Microsoft.IdentityModel.Tokens; using StellaOps.Cryptography; -using System.Security.Cryptography; +using System; using System.Threading; using System.Threading.Tasks; @@ -10,47 +9,53 @@ namespace StellaOps.Cryptography.Kms; internal sealed class KmsSigner : ICryptoSigner { private readonly IKmsClient _client; - private readonly string _keyId; - private readonly string _versionId; - private readonly string _algorithm; + private readonly KmsSigningRegistration _registration; public KmsSigner(IKmsClient client, KmsSigningRegistration registration) { - _client = client; - _keyId = registration.KeyId; - _versionId = registration.VersionId; - _algorithm = registration.Algorithm; + _client = client ?? throw new ArgumentNullException(nameof(client)); + _registration = registration ?? 
throw new ArgumentNullException(nameof(registration)); } - public string KeyId => _keyId; + public string KeyId => _registration.Reference.KeyId; - public string AlgorithmId => _algorithm; + public string AlgorithmId => _registration.Algorithm; public async ValueTask<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default) { - var result = await _client.SignAsync(_keyId, _versionId, data, cancellationToken).ConfigureAwait(false); + var result = await _client.SignAsync( + _registration.Reference.KeyId, + _registration.VersionId, + data, + cancellationToken).ConfigureAwait(false); return result.Signature; } public ValueTask<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default) - => new(_client.VerifyAsync(_keyId, _versionId, data, signature, cancellationToken)); + => new(_client.VerifyAsync( + _registration.Reference.KeyId, + _registration.VersionId, + data, + signature, + cancellationToken)); public JsonWebKey ExportPublicJsonWebKey() { - var material = _client.ExportAsync(_keyId, _versionId).GetAwaiter().GetResult(); + var publicKey = _registration.PublicKey + ?? throw new InvalidOperationException("KMS signing key is missing public key material."); var jwk = new JsonWebKey { - Kid = material.KeyId, - Alg = material.Algorithm, + Kid = _registration.Reference.KeyId, + Alg = _registration.Algorithm, Kty = JsonWebAlgorithmsKeyTypes.EllipticCurve, Use = JsonWebKeyUseNames.Sig, - Crv = JsonWebKeyECTypes.P256, + Crv = publicKey.Curve, }; jwk.KeyOps.Add("sign"); jwk.KeyOps.Add("verify"); - jwk.X = Base64UrlEncoder.Encode(material.Qx); - jwk.Y = Base64UrlEncoder.Encode(material.Qy); + jwk.X = Base64UrlEncoder.Encode(publicKey.Qx); + jwk.Y = Base64UrlEncoder.Encode(publicKey.Qy); return jwk; } -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/MissingFido2Authenticator.cs b/src/__Libraries/StellaOps.Cryptography.Kms/MissingFido2Authenticator.cs new file mode 100644 index 000000000..7f0602b05 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/MissingFido2Authenticator.cs @@ -0,0 +1,11 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed class MissingFido2Authenticator : IFido2Authenticator +{ + public Task SignAsync(string credentialId, ReadOnlyMemory<byte> digest, CancellationToken cancellationToken = default) + => throw new InvalidOperationException("IFido2Authenticator must be registered to use FIDO2 KMS."); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Contracts.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Contracts.cs new file mode 100644 index 000000000..0f8620049 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Contracts.cs @@ -0,0 +1,14 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public interface IPkcs11Facade : IDisposable +{ + Task<Pkcs11KeyDescriptor> GetKeyAsync(CancellationToken cancellationToken); + + Task<Pkcs11PublicKeyMaterial> GetPublicKeyAsync(CancellationToken cancellationToken); + + Task<byte[]> SignDigestAsync(ReadOnlyMemory<byte> digest, CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Helpers.cs new file mode 100644 index 000000000..99b5a71d8 --- /dev/null +++ 
b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Helpers.cs @@ -0,0 +1,106 @@ +using Microsoft.IdentityModel.Tokens; +using Net.Pkcs11Interop.Common; +using Net.Pkcs11Interop.HighLevelAPI; +using System; +using System.Collections.Generic; +using System.Formats.Asn1; +using System.Linq; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class Pkcs11InteropFacade +{ + private IObjectHandle? FindKey(ISession session, CKO objectClass, string? label) + { + var template = new List + { + _factories.ObjectAttributeFactory.Create(CKA.CKA_CLASS, (uint)objectClass) + }; + + if (!string.IsNullOrWhiteSpace(label)) + { + template.Add(_factories.ObjectAttributeFactory.Create(CKA.CKA_LABEL, label)); + } + + var handles = session.FindAllObjects(template); + return handles.FirstOrDefault(); + } + + private IObjectAttribute? GetAttribute(ISession session, IObjectHandle handle, CKA type) + { + var cacheKey = $"{handle.ObjectId}:{(uint)type}"; + if (_attributeCache.TryGetValue(cacheKey, out var cached)) + { + return cached.FirstOrDefault(); + } + + var attributes = session.GetAttributeValue(handle, new List { type }) + ?.ToArray() ?? Array.Empty(); + + if (attributes.Length > 0) + { + _attributeCache[cacheKey] = attributes; + return attributes[0]; + } + + return null; + } + + private static ISlot? ResolveSlot(IPkcs11Library pkcs11, Pkcs11Options options) + { + var slots = pkcs11.GetSlotList(SlotsType.WithTokenPresent); + if (slots.Count == 0) + { + return null; + } + + if (!string.IsNullOrWhiteSpace(options.SlotId)) + { + return slots.FirstOrDefault(slot => string.Equals(slot.SlotId.ToString(), options.SlotId, StringComparison.OrdinalIgnoreCase)); + } + + if (!string.IsNullOrWhiteSpace(options.TokenLabel)) + { + return slots.FirstOrDefault(slot => + { + var info = slot.GetTokenInfo(); + return string.Equals(info.Label?.Trim(), options.TokenLabel.Trim(), StringComparison.Ordinal); + }); + } + + return slots[0]; + } + + private static byte[] ExtractEcPoint(byte[] derEncoded) + { + var reader = new AsnReader(derEncoded, AsnEncodingRules.DER); + var point = reader.ReadOctetString(); + reader.ThrowIfNotEmpty(); + return point; + } + + private static (string CurveName, int CoordinateSize) DecodeCurve(byte[] ecParamsDer) + { + var reader = new AsnReader(ecParamsDer, AsnEncodingRules.DER); + var oid = reader.ReadObjectIdentifier(); + reader.ThrowIfNotEmpty(); + + var curve = oid switch + { + "1.2.840.10045.3.1.7" => JsonWebKeyECTypes.P256, + "1.3.132.0.34" => JsonWebKeyECTypes.P384, + "1.3.132.0.35" => JsonWebKeyECTypes.P521, + _ => throw new InvalidOperationException($"Unsupported EC curve OID '{oid}'."), + }; + + var coordinateSize = curve switch + { + JsonWebKeyECTypes.P256 => 32, + JsonWebKeyECTypes.P384 => 48, + JsonWebKeyECTypes.P521 => 66, + _ => throw new InvalidOperationException($"Unsupported EC curve '{curve}'."), + }; + + return (curve, coordinateSize); + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Models.cs new file mode 100644 index 000000000..5252121d7 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Models.cs @@ -0,0 +1,14 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +public sealed record Pkcs11KeyDescriptor( + string KeyId, + string? 
Label, + DateTimeOffset CreatedAt); + +public sealed record Pkcs11PublicKeyMaterial( + string KeyId, + string Curve, + byte[] Qx, + byte[] Qy); \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Session.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Session.cs new file mode 100644 index 000000000..2997ae6ba --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.Session.cs @@ -0,0 +1,77 @@ +using Net.Pkcs11Interop.Common; +using Net.Pkcs11Interop.HighLevelAPI; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +internal sealed partial class Pkcs11InteropFacade +{ + private async Task OpenSessionAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + var session = _slot.OpenSession(SessionType.ReadOnly); + + var loggedIn = false; + + try + { + if (!string.IsNullOrWhiteSpace(_options.UserPin)) + { + session.Login(CKU.CKU_USER, _options.UserPin); + loggedIn = true; + } + + return new SessionContext(session, loggedIn); + } + catch + { + if (loggedIn) + { + try { session.Logout(); } catch { } + } + + session.Dispose(); + throw; + } + } + + private sealed class SessionContext : IDisposable + { + private readonly ISession _session; + private readonly bool _logoutOnDispose; + private bool _disposed; + + public SessionContext(ISession session, bool logoutOnDispose) + { + _session = session ?? throw new ArgumentNullException(nameof(session)); + _logoutOnDispose = logoutOnDispose; + } + + public ISession Session => _session; + + public void Dispose() + { + if (_disposed) + { + return; + } + + if (_logoutOnDispose) + { + try + { + _session.Logout(); + } + catch + { + // ignore logout failures + } + } + + _session.Dispose(); + _disposed = true; + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.cs index 14bad6959..8c97d6d28 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Facade.cs @@ -1,35 +1,15 @@ - -using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Options; using Net.Pkcs11Interop.Common; using Net.Pkcs11Interop.HighLevelAPI; using Net.Pkcs11Interop.HighLevelAPI.Factories; +using System; using System.Collections.Concurrent; -using System.Formats.Asn1; -using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; namespace StellaOps.Cryptography.Kms; -public interface IPkcs11Facade : IDisposable -{ - Task GetKeyAsync(CancellationToken cancellationToken); - - Task GetPublicKeyAsync(CancellationToken cancellationToken); - - Task SignDigestAsync(ReadOnlyMemory digest, CancellationToken cancellationToken); -} - -public sealed record Pkcs11KeyDescriptor( - string KeyId, - string? Label, - DateTimeOffset CreatedAt); - -public sealed record Pkcs11PublicKeyMaterial( - string KeyId, - string Curve, - byte[] Qx, - byte[] Qy); - -internal sealed class Pkcs11InteropFacade : IPkcs11Facade +internal sealed partial class Pkcs11InteropFacade : IPkcs11Facade { private readonly Pkcs11Options _options; private readonly Pkcs11InteropFactories _factories; @@ -53,6 +33,11 @@ internal sealed class Pkcs11InteropFacade : IPkcs11Facade ?? 
throw new InvalidOperationException("Could not resolve PKCS#11 slot."); } + public Pkcs11InteropFacade(IOptions options, TimeProvider timeProvider) + : this(options?.Value ?? new Pkcs11Options(), timeProvider) + { + } + public async Task GetKeyAsync(CancellationToken cancellationToken) { using var context = await OpenSessionAsync(cancellationToken).ConfigureAwait(false); @@ -119,169 +104,8 @@ internal sealed class Pkcs11InteropFacade : IPkcs11Facade return session.Sign(mechanism, privateHandle, digest.ToArray()); } - private async Task OpenSessionAsync(CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - var session = _slot.OpenSession(SessionType.ReadOnly); - - var loggedIn = false; - - try - { - if (!string.IsNullOrWhiteSpace(_options.UserPin)) - { - session.Login(CKU.CKU_USER, _options.UserPin); - loggedIn = true; - } - - return new SessionContext(session, loggedIn); - } - catch - { - if (loggedIn) - { - try { session.Logout(); } catch { /* ignore */ } - } - - session.Dispose(); - throw; - } - } - - private IObjectHandle? FindKey(ISession session, CKO objectClass, string? label) - { - var template = new List - { - _factories.ObjectAttributeFactory.Create(CKA.CKA_CLASS, (uint)objectClass) - }; - - if (!string.IsNullOrWhiteSpace(label)) - { - template.Add(_factories.ObjectAttributeFactory.Create(CKA.CKA_LABEL, label)); - } - - var handles = session.FindAllObjects(template); - return handles.FirstOrDefault(); - } - - private IObjectAttribute? GetAttribute(ISession session, IObjectHandle handle, CKA type) - { - var cacheKey = $"{handle.ObjectId}:{(uint)type}"; - if (_attributeCache.TryGetValue(cacheKey, out var cached)) - { - return cached.FirstOrDefault(); - } - - var attributes = session.GetAttributeValue(handle, new List { type }) - ?.ToArray() ?? Array.Empty(); - - if (attributes.Length > 0) - { - _attributeCache[cacheKey] = attributes; - return attributes[0]; - } - - return null; - } - - private static ISlot? 
ResolveSlot(IPkcs11Library pkcs11, Pkcs11Options options) - { - var slots = pkcs11.GetSlotList(SlotsType.WithTokenPresent); - if (slots.Count == 0) - { - return null; - } - - if (!string.IsNullOrWhiteSpace(options.SlotId)) - { - return slots.FirstOrDefault(slot => string.Equals(slot.SlotId.ToString(), options.SlotId, StringComparison.OrdinalIgnoreCase)); - } - - if (!string.IsNullOrWhiteSpace(options.TokenLabel)) - { - return slots.FirstOrDefault(slot => - { - var info = slot.GetTokenInfo(); - return string.Equals(info.Label?.Trim(), options.TokenLabel.Trim(), StringComparison.Ordinal); - }); - } - - return slots[0]; - } - - private static byte[] ExtractEcPoint(byte[] derEncoded) - { - var reader = new AsnReader(derEncoded, AsnEncodingRules.DER); - var point = reader.ReadOctetString(); - reader.ThrowIfNotEmpty(); - return point; - } - - private static (string CurveName, int CoordinateSize) DecodeCurve(byte[] ecParamsDer) - { - var reader = new AsnReader(ecParamsDer, AsnEncodingRules.DER); - var oid = reader.ReadObjectIdentifier(); - reader.ThrowIfNotEmpty(); - - var curve = oid switch - { - "1.2.840.10045.3.1.7" => JsonWebKeyECTypes.P256, - "1.3.132.0.34" => JsonWebKeyECTypes.P384, - "1.3.132.0.35" => JsonWebKeyECTypes.P521, - _ => throw new InvalidOperationException($"Unsupported EC curve OID '{oid}'."), - }; - - var coordinateSize = curve switch - { - JsonWebKeyECTypes.P256 => 32, - JsonWebKeyECTypes.P384 => 48, - JsonWebKeyECTypes.P521 => 66, - _ => throw new InvalidOperationException($"Unsupported EC curve '{curve}'."), - }; - - return (curve, coordinateSize); - } - public void Dispose() { _library.Dispose(); } - - private sealed class SessionContext : System.IDisposable - { - private readonly ISession _session; - private readonly bool _logoutOnDispose; - private bool _disposed; - - public SessionContext(ISession session, bool logoutOnDispose) - { - _session = session ?? 
throw new System.ArgumentNullException(nameof(session)); - _logoutOnDispose = logoutOnDispose; - } - - public ISession Session => _session; - - public void Dispose() - { - if (_disposed) - { - return; - } - - if (_logoutOnDispose) - { - try - { - _session.Logout(); - } - catch - { - // ignore logout failures - } - } - - _session.Dispose(); - _disposed = true; - } - } -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Cache.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Cache.cs new file mode 100644 index 000000000..3a57f68b9 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Cache.cs @@ -0,0 +1,36 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Pkcs11KmsClient +{ + private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) + { + return cached; + } + + var descriptor = await _facade.GetKeyAsync(cancellationToken).ConfigureAwait(false); + var entry = new CachedMetadata(descriptor, now.Add(_metadataCacheDuration)); + _metadataCache[keyId] = entry; + return entry; + } + + private async Task GetCachedPublicKeyAsync(string keyId, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + if (_publicKeyCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) + { + return cached; + } + + var material = await _facade.GetPublicKeyAsync(cancellationToken).ConfigureAwait(false); + var entry = new CachedPublicKey(material, now.Add(_publicKeyCacheDuration)); + _publicKeyCache[keyId] = entry; + return entry; + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Helpers.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Helpers.cs new file mode 100644 index 000000000..eb7252ce7 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Helpers.cs @@ -0,0 +1,36 @@ +using Microsoft.IdentityModel.Tokens; +using System; +using System.Security.Cryptography; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Pkcs11KmsClient +{ + private static byte[] ComputeSha256(ReadOnlyMemory data) + { + var digest = new byte[32]; + if (!SHA256.TryHashData(data.Span, digest, out _)) + { + throw new InvalidOperationException("Failed to hash payload with SHA-256."); + } + + return digest; + } + + private static ECCurve ResolveCurve(string curve) + => curve switch + { + JsonWebKeyECTypes.P256 => ECCurve.NamedCurves.nistP256, + JsonWebKeyECTypes.P384 => ECCurve.NamedCurves.nistP384, + JsonWebKeyECTypes.P521 => ECCurve.NamedCurves.nistP521, + _ => throw new InvalidOperationException($"Unsupported EC curve '{curve}'."), + }; + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(Pkcs11KmsClient)); + } + } +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Lifecycle.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Lifecycle.cs new file mode 100644 index 000000000..5af7cf478 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Lifecycle.cs @@ -0,0 +1,25 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class 
Pkcs11KmsClient +{ + public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) + => throw new NotSupportedException("PKCS#11 rotation requires HSM administrative tooling."); + + public Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) + => throw new NotSupportedException("PKCS#11 revocation must be handled by HSM policies."); + + public void Dispose() + { + if (_disposed) + { + return; + } + + _disposed = true; + _facade.Dispose(); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Metadata.cs new file mode 100644 index 000000000..f6a071fee --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Metadata.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Pkcs11KmsClient +{ + public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(new ECParameters + { + Curve = ResolveCurve(publicMaterial.Material.Curve), + Q = + { + X = publicMaterial.Material.Qx, + Y = publicMaterial.Material.Qy, + }, + }); + + var subjectInfo = Convert.ToBase64String(ecdsa.ExportSubjectPublicKeyInfo()); + + var version = new KmsKeyVersionMetadata( + descriptor.Descriptor.KeyId, + KmsKeyState.Active, + descriptor.Descriptor.CreatedAt, + null, + subjectInfo, + publicMaterial.Material.Curve); + + return new KmsKeyMetadata( + descriptor.Descriptor.KeyId, + KmsAlgorithms.Es256, + KmsKeyState.Active, + descriptor.Descriptor.CreatedAt, + ImmutableArray.Create(version)); + } + + public async Task ExportAsync(string keyId, string? 
keyVersion, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + + var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); + + return new KmsKeyMaterial( + descriptor.Descriptor.KeyId, + descriptor.Descriptor.KeyId, + KmsAlgorithms.Es256, + publicMaterial.Material.Curve, + Array.Empty(), + publicMaterial.Material.Qx, + publicMaterial.Material.Qy, + descriptor.Descriptor.CreatedAt); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Models.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Models.cs new file mode 100644 index 000000000..4e8f0b032 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Models.cs @@ -0,0 +1,10 @@ +using System; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Pkcs11KmsClient +{ + private sealed record CachedMetadata(Pkcs11KeyDescriptor Descriptor, DateTimeOffset ExpiresAt); + + private sealed record CachedPublicKey(Pkcs11PublicKeyMaterial Material, DateTimeOffset ExpiresAt); +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Signing.cs new file mode 100644 index 000000000..77b3b7f5a --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.Signing.cs @@ -0,0 +1,77 @@ +using System; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Kms; + +public sealed partial class Pkcs11KmsClient +{ + public async Task SignAsync( + string keyId, + string? keyVersion, + ReadOnlyMemory data, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty) + { + throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); + } + + var digest = ComputeSha256(data); + try + { + var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); + var signature = await _facade.SignDigestAsync(digest, cancellationToken).ConfigureAwait(false); + + return new KmsSignResult( + descriptor.Descriptor.KeyId, + descriptor.Descriptor.KeyId, + KmsAlgorithms.Es256, + signature); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } + + public async Task VerifyAsync( + string keyId, + string? 
keyVersion, + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + ArgumentException.ThrowIfNullOrWhiteSpace(keyId); + if (data.IsEmpty || signature.IsEmpty) + { + return false; + } + + var digest = ComputeSha256(data); + try + { + var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); + + using var ecdsa = ECDsa.Create(new ECParameters + { + Curve = ResolveCurve(publicMaterial.Material.Curve), + Q = + { + X = publicMaterial.Material.Qx, + Y = publicMaterial.Material.Qy, + }, + }); + + return ecdsa.VerifyHash(digest, signature.ToArray()); + } + finally + { + CryptographicOperations.ZeroMemory(digest.AsSpan()); + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.cs index bdfb09617..1eeff4eab 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11KmsClient.cs @@ -1,15 +1,13 @@ - -using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Options; +using System; using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Security.Cryptography; namespace StellaOps.Cryptography.Kms; /// /// PKCS#11-backed implementation of . /// -public sealed class Pkcs11KmsClient : IKmsClient +public sealed partial class Pkcs11KmsClient : IKmsClient { private readonly IPkcs11Facade _facade; private readonly TimeSpan _metadataCacheDuration; @@ -30,203 +28,8 @@ public sealed class Pkcs11KmsClient : IKmsClient _publicKeyCacheDuration = options.PublicKeyCacheDuration; } - public async Task SignAsync( - string keyId, - string? keyVersion, - ReadOnlyMemory data, - CancellationToken cancellationToken = default) + public Pkcs11KmsClient(IPkcs11Facade facade, IOptions options, TimeProvider timeProvider) + : this(facade, options?.Value ?? new Pkcs11Options(), timeProvider) { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty) - { - throw new ArgumentException("Signing payload cannot be empty.", nameof(data)); - } - - var digest = ComputeSha256(data); - try - { - var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var signature = await _facade.SignDigestAsync(digest, cancellationToken).ConfigureAwait(false); - - return new KmsSignResult( - descriptor.Descriptor.KeyId, - descriptor.Descriptor.KeyId, - KmsAlgorithms.Es256, - signature); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } } - - public async Task VerifyAsync( - string keyId, - string? 
keyVersion, - ReadOnlyMemory data, - ReadOnlyMemory signature, - CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - if (data.IsEmpty || signature.IsEmpty) - { - return false; - } - - var digest = ComputeSha256(data); - try - { - var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); - - using var ecdsa = ECDsa.Create(new ECParameters - { - Curve = ResolveCurve(publicMaterial.Material.Curve), - Q = - { - X = publicMaterial.Material.Qx, - Y = publicMaterial.Material.Qy, - }, - }); - - return ecdsa.VerifyHash(digest, signature.ToArray()); - } - finally - { - CryptographicOperations.ZeroMemory(digest.AsSpan()); - } - } - - public async Task GetMetadataAsync(string keyId, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); - - using var ecdsa = ECDsa.Create(new ECParameters - { - Curve = ResolveCurve(publicMaterial.Material.Curve), - Q = - { - X = publicMaterial.Material.Qx, - Y = publicMaterial.Material.Qy, - }, - }); - - var subjectInfo = Convert.ToBase64String(ecdsa.ExportSubjectPublicKeyInfo()); - - var version = new KmsKeyVersionMetadata( - descriptor.Descriptor.KeyId, - KmsKeyState.Active, - descriptor.Descriptor.CreatedAt, - null, - subjectInfo, - publicMaterial.Material.Curve); - - return new KmsKeyMetadata( - descriptor.Descriptor.KeyId, - KmsAlgorithms.Es256, - KmsKeyState.Active, - descriptor.Descriptor.CreatedAt, - ImmutableArray.Create(version)); - } - - public async Task ExportAsync(string keyId, string? 
keyVersion, CancellationToken cancellationToken = default) - { - ThrowIfDisposed(); - ArgumentException.ThrowIfNullOrWhiteSpace(keyId); - - var descriptor = await GetCachedMetadataAsync(keyId, cancellationToken).ConfigureAwait(false); - var publicMaterial = await GetCachedPublicKeyAsync(keyId, cancellationToken).ConfigureAwait(false); - - return new KmsKeyMaterial( - descriptor.Descriptor.KeyId, - descriptor.Descriptor.KeyId, - KmsAlgorithms.Es256, - publicMaterial.Material.Curve, - Array.Empty(), - publicMaterial.Material.Qx, - publicMaterial.Material.Qy, - descriptor.Descriptor.CreatedAt); - } - - public Task RotateAsync(string keyId, CancellationToken cancellationToken = default) - => throw new NotSupportedException("PKCS#11 rotation requires HSM administrative tooling."); - - public Task RevokeAsync(string keyId, CancellationToken cancellationToken = default) - => throw new NotSupportedException("PKCS#11 revocation must be handled by HSM policies."); - - public void Dispose() - { - if (_disposed) - { - return; - } - - _disposed = true; - _facade.Dispose(); - } - - private async Task GetCachedMetadataAsync(string keyId, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_metadataCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) - { - return cached; - } - - var descriptor = await _facade.GetKeyAsync(cancellationToken).ConfigureAwait(false); - var entry = new CachedMetadata(descriptor, now.Add(_metadataCacheDuration)); - _metadataCache[keyId] = entry; - return entry; - } - - private async Task GetCachedPublicKeyAsync(string keyId, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - if (_publicKeyCache.TryGetValue(keyId, out var cached) && cached.ExpiresAt > now) - { - return cached; - } - - var material = await _facade.GetPublicKeyAsync(cancellationToken).ConfigureAwait(false); - var entry = new CachedPublicKey(material, now.Add(_publicKeyCacheDuration)); - _publicKeyCache[keyId] = entry; - return entry; - } - - private static byte[] ComputeSha256(ReadOnlyMemory data) - { - var digest = new byte[32]; - if (!SHA256.TryHashData(data.Span, digest, out _)) - { - throw new InvalidOperationException("Failed to hash payload with SHA-256."); - } - - return digest; - } - - private static ECCurve ResolveCurve(string curve) - => curve switch - { - JsonWebKeyECTypes.P256 => ECCurve.NamedCurves.nistP256, - JsonWebKeyECTypes.P384 => ECCurve.NamedCurves.nistP384, - JsonWebKeyECTypes.P521 => ECCurve.NamedCurves.nistP521, - _ => throw new InvalidOperationException($"Unsupported EC curve '{curve}'."), - }; - - private void ThrowIfDisposed() - { - if (_disposed) - { - throw new ObjectDisposedException(nameof(Pkcs11KmsClient)); - } - } - - private sealed record CachedMetadata(Pkcs11KeyDescriptor Descriptor, DateTimeOffset ExpiresAt); - - private sealed record CachedPublicKey(Pkcs11PublicKeyMaterial Material, DateTimeOffset ExpiresAt); } diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Options.cs b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Options.cs index 9a2a0a40b..0df5ee5f2 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Options.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/Pkcs11Options.cs @@ -5,8 +5,8 @@ namespace StellaOps.Cryptography.Kms; /// public sealed class Pkcs11Options { - private TimeSpan metadataCacheDuration = TimeSpan.FromMinutes(5); - private TimeSpan publicKeyCacheDuration = TimeSpan.FromMinutes(5); + private TimeSpan _metadataCacheDuration = TimeSpan.FromMinutes(5); 
+ private TimeSpan _publicKeyCacheDuration = TimeSpan.FromMinutes(5); /// /// Gets or sets the native PKCS#11 library path. @@ -48,8 +48,8 @@ public sealed class Pkcs11Options /// public TimeSpan MetadataCacheDuration { - get => metadataCacheDuration; - set => metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); + get => _metadataCacheDuration; + set => _metadataCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); } /// @@ -57,15 +57,10 @@ public sealed class Pkcs11Options /// public TimeSpan PublicKeyCacheDuration { - get => publicKeyCacheDuration; - set => publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); + get => _publicKeyCacheDuration; + set => _publicKeyCacheDuration = EnsurePositive(value, TimeSpan.FromMinutes(5)); } - /// - /// Gets or sets an optional factory for advanced facade injection (testing, custom providers). - /// - public Func? FacadeFactory { get; set; } - private static TimeSpan EnsurePositive(TimeSpan value, TimeSpan fallback) => value <= TimeSpan.Zero ? fallback : value; } diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.AwsKms.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.AwsKms.cs new file mode 100644 index 000000000..bb1ff0d0a --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.AwsKms.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System; + +namespace StellaOps.Cryptography.Kms; + +public static partial class ServiceCollectionExtensions +{ + public static IServiceCollection AddAwsKms( + this IServiceCollection services, + Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + RemoveKmsServices(services); + services.Configure(configure); + RegisterKmsProvider(services); + services.TryAddSingleton(); + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Fido2Kms.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Fido2Kms.cs new file mode 100644 index 000000000..49a5375cd --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Fido2Kms.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System; + +namespace StellaOps.Cryptography.Kms; + +public static partial class ServiceCollectionExtensions +{ + public static IServiceCollection AddFido2Kms( + this IServiceCollection services, + Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + RemoveKmsServices(services); + services.Configure(configure); + RegisterKmsProvider(services); + services.TryAddSingleton(); + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.FileKms.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.FileKms.cs new file mode 100644 index 000000000..9ac2ea481 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.FileKms.cs @@ -0,0 +1,23 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System; + +namespace StellaOps.Cryptography.Kms; + +public static partial class 
ServiceCollectionExtensions +{ + public static IServiceCollection AddFileKms( + this IServiceCollection services, + Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + RemoveKmsServices(services); + services.Configure(configure); + RegisterKmsProvider(services); + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.GcpKms.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.GcpKms.cs new file mode 100644 index 000000000..b1ea860b1 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.GcpKms.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System; + +namespace StellaOps.Cryptography.Kms; + +public static partial class ServiceCollectionExtensions +{ + public static IServiceCollection AddGcpKms( + this IServiceCollection services, + Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + RemoveKmsServices(services); + services.Configure(configure); + RegisterKmsProvider(services); + services.TryAddSingleton(); + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Pkcs11Kms.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Pkcs11Kms.cs new file mode 100644 index 000000000..d7db899b7 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.Pkcs11Kms.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using System; + +namespace StellaOps.Cryptography.Kms; + +public static partial class ServiceCollectionExtensions +{ + public static IServiceCollection AddPkcs11Kms( + this IServiceCollection services, + Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + RemoveKmsServices(services); + services.Configure(configure); + RegisterKmsProvider(services); + services.TryAddSingleton(); + services.TryAddSingleton(); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.cs index 490578cdb..464540ac8 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.cs +++ b/src/__Libraries/StellaOps.Cryptography.Kms/ServiceCollectionExtensions.cs @@ -1,167 +1,22 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; -using StellaOps.Cryptography; +using System; namespace StellaOps.Cryptography.Kms; -/// -/// Dependency injection helpers for the KMS client and crypto provider. 
-/// -public static class ServiceCollectionExtensions +public static partial class ServiceCollectionExtensions { - public static IServiceCollection AddFileKms( - this IServiceCollection services, - Action configure) + private static void RemoveKmsServices(IServiceCollection services) { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configure); - services.RemoveAll(); services.RemoveAll(); services.RemoveAll(); services.RemoveAll(); - - services.Configure(configure); - - services.TryAddSingleton(sp => - { - var options = sp.GetRequiredService>().Value; - return new FileKmsClient(options); - }); - - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - return services; } - public static IServiceCollection AddAwsKms( - this IServiceCollection services, - Action configure) + private static void RegisterKmsProvider(IServiceCollection services) { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configure); - - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - - services.Configure(configure); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new AwsKmsOptions(); - return options.FacadeFactory?.Invoke(sp) ?? new AwsKmsFacade(options); - }); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new AwsKmsOptions(); - var facade = sp.GetRequiredService(); - return new AwsKmsClient(facade, options); - }); - + services.TryAddSingleton(TimeProvider.System); services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - return services; } - - public static IServiceCollection AddGcpKms( - this IServiceCollection services, - Action configure) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configure); - - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - - services.Configure(configure); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new GcpKmsOptions(); - return options.FacadeFactory?.Invoke(sp) ?? new GcpKmsFacade(options); - }); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new GcpKmsOptions(); - var facade = sp.GetRequiredService(); - return new GcpKmsClient(facade, options); - }); - - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - return services; - } - - public static IServiceCollection AddPkcs11Kms( - this IServiceCollection services, - Action configure) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configure); - - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - services.RemoveAll(); - - services.Configure(configure); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new Pkcs11Options(); - return options.FacadeFactory?.Invoke(sp) ?? new Pkcs11InteropFacade(options); - }); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? 
new Pkcs11Options(); - var facade = sp.GetRequiredService(); - return new Pkcs11KmsClient(facade, options); - }); - - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - return services; - } - - public static IServiceCollection AddFido2Kms( - this IServiceCollection services, - Action configure) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configure); - - services.RemoveAll(); - - services.Configure(configure); - - services.TryAddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new Fido2Options(); - if (options.AuthenticatorFactory is null) - { - throw new InvalidOperationException("Fido2Options.AuthenticatorFactory must be provided or IFido2Authenticator registered separately."); - } - - return options.AuthenticatorFactory(sp); - }); - - services.AddSingleton(sp => - { - var options = sp.GetRequiredService>().Value ?? new Fido2Options(); - var authenticator = sp.GetRequiredService(); - return new Fido2KmsClient(authenticator, options); - }); - - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - - return services; - } -} +} \ No newline at end of file diff --git a/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md b/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md index 788b25fe8..6a28facbd 100644 --- a/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0051-T | DONE | Revalidated 2026-01-08. | | AUDIT-0051-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Async naming + file splits <= 100 lines; service locator removal; KMS public key handling updated; `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/StellaOps.Cryptography.Kms.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` (9 tests, MTP0001 warning) and `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` (326 tests) passed 2026-02-04. | diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.KeyNormalization.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.KeyNormalization.cs new file mode 100644 index 000000000..7d6c74797 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.KeyNormalization.cs @@ -0,0 +1,48 @@ +using Org.BouncyCastle.Crypto.Parameters; +using StellaOps.Cryptography; +using System; + +namespace StellaOps.Cryptography.Plugin.BouncyCastle; + +public sealed partial class BouncyCastleEd25519CryptoProvider +{ + private static void EnsureAlgorithmSupported(string algorithmId) + { + if (!_supportedAlgorithms.Contains(algorithmId)) + { + throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider 'bouncycastle.ed25519'."); + } + } + + private static string NormalizeAlgorithm(string algorithmId) + => string.Equals(algorithmId, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase) + ? 
SignatureAlgorithms.Ed25519 + : SignatureAlgorithms.Ed25519; + + private static byte[] NormalizePrivateKey(ReadOnlyMemory privateKey) + { + var span = privateKey.Span; + return span.Length switch + { + 32 => span.ToArray(), + 64 => span[..32].ToArray(), + _ => throw new InvalidOperationException("Ed25519 private key must be 32 or 64 bytes.") + }; + } + + private static byte[] NormalizePublicKey(ReadOnlyMemory publicKey, byte[] privateKey) + { + if (publicKey.IsEmpty) + { + var privateParams = new Ed25519PrivateKeyParameters(privateKey, 0); + return privateParams.GeneratePublicKey().GetEncoded(); + } + + if (publicKey.Span.Length != 32) + { + throw new InvalidOperationException("Ed25519 public key must be 32 bytes."); + } + + return publicKey.ToArray(); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.Signer.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.Signer.cs new file mode 100644 index 000000000..357e3e24f --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.Signer.cs @@ -0,0 +1,76 @@ +using Microsoft.IdentityModel.Tokens; +using Org.BouncyCastle.Crypto.Parameters; +using Org.BouncyCastle.Crypto.Signers; +using StellaOps.Cryptography; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cryptography.Plugin.BouncyCastle; + +public sealed partial class BouncyCastleEd25519CryptoProvider +{ + private sealed record KeyEntry( + CryptoSigningKey Descriptor, + Ed25519PrivateKeyParameters PrivateKey, + Ed25519PublicKeyParameters PublicKey); + + private sealed class Ed25519SignerWrapper : ICryptoSigner + { + private readonly KeyEntry _entry; + + public Ed25519SignerWrapper(KeyEntry entry) + { + _entry = entry ?? 
throw new ArgumentNullException(nameof(entry)); + } + + public string KeyId => _entry.Descriptor.Reference.KeyId; + + public string AlgorithmId => _entry.Descriptor.AlgorithmId; + + public ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var signer = new Ed25519Signer(); + var buffer = data.ToArray(); + signer.Init(true, _entry.PrivateKey); + signer.BlockUpdate(buffer, 0, buffer.Length); + var signature = signer.GenerateSignature(); + return ValueTask.FromResult(signature); + } + + public ValueTask VerifyAsync(ReadOnlyMemory data, ReadOnlyMemory signature, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var verifier = new Ed25519Signer(); + var buffer = data.ToArray(); + verifier.Init(false, _entry.PublicKey); + verifier.BlockUpdate(buffer, 0, buffer.Length); + var verified = verifier.VerifySignature(signature.ToArray()); + return ValueTask.FromResult(verified); + } + + public JsonWebKey ExportPublicJsonWebKey() + { + var jwk = new JsonWebKey + { + Kid = _entry.Descriptor.Reference.KeyId, + Alg = SignatureAlgorithms.EdDsa, + Kty = "OKP", + Use = JsonWebKeyUseNames.Sig, + Crv = "Ed25519" + }; + + foreach (var op in _defaultKeyOps) + { + jwk.KeyOps.Add(op); + } + + jwk.X = Base64UrlEncoder.Encode(_entry.PublicKey.GetEncoded()); + + return jwk; + } + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs index d5a82947c..5cd9b3ef5 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs @@ -1,26 +1,25 @@ - -using Microsoft.IdentityModel.Tokens; using Org.BouncyCastle.Crypto.Parameters; -using Org.BouncyCastle.Crypto.Signers; using StellaOps.Cryptography; +using System; using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; namespace StellaOps.Cryptography.Plugin.BouncyCastle; /// /// Ed25519 signing provider backed by BouncyCastle primitives. 
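Editorial note: the signer wrapper and key-normalization partials above are easiest to read end to end as a round trip. The sketch below assumes the CryptoSigningKey constructor shape used by the eIDAS tests later in this patch (reference, algorithm, key bytes, created-at) and generates a throwaway seed; only the provider, GetSigner, SignAsync/VerifyAsync, and the 32/64-byte normalization rule come from the change itself.

// Editorial sketch, not part of the patch: sign and verify with the refactored provider.
using System;
using System.Security.Cryptography;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Plugin.BouncyCastle;

var provider = new BouncyCastleEd25519CryptoProvider();

// NormalizePrivateKey accepts a 32-byte seed or a 64-byte expanded key; with an
// empty public key the provider derives it from the private parameters.
var seed = new byte[32];
RandomNumberGenerator.Fill(seed);

var reference = new CryptoKeyReference("ed25519-demo");
provider.UpsertSigningKey(new CryptoSigningKey(
    reference,
    SignatureAlgorithms.Ed25519,
    seed,
    DateTimeOffset.UtcNow));

var signer = provider.GetSigner(SignatureAlgorithms.Ed25519, reference);
var payload = "payload"u8.ToArray();
var signature = await signer.SignAsync(payload);
var verified = await signer.VerifyAsync(payload, signature);   // expected: true

Console.WriteLine($"verified={verified}, crv={signer.ExportPublicJsonWebKey().Crv}");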
/// -public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider +public sealed partial class BouncyCastleEd25519CryptoProvider : ICryptoProvider { - private static readonly HashSet SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + private static readonly HashSet _supportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) { SignatureAlgorithms.Ed25519, SignatureAlgorithms.EdDsa }; - private static readonly string[] DefaultKeyOps = { "sign", "verify" }; - - private readonly ConcurrentDictionary signingKeys = new(StringComparer.Ordinal); + private static readonly string[] _defaultKeyOps = { "sign", "verify" }; + private readonly ConcurrentDictionary _signingKeys = new(StringComparer.Ordinal); public string Name => "bouncycastle.ed25519"; @@ -33,7 +32,7 @@ public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider return capability switch { - CryptoCapability.Signing or CryptoCapability.Verification => SupportedAlgorithms.Contains(algorithmId), + CryptoCapability.Signing or CryptoCapability.Verification => _supportedAlgorithms.Contains(algorithmId), _ => false }; } @@ -49,7 +48,7 @@ public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider ArgumentException.ThrowIfNullOrWhiteSpace(algorithmId); ArgumentNullException.ThrowIfNull(keyReference); - if (!signingKeys.TryGetValue(keyReference.KeyId, out var entry)) + if (!_signingKeys.TryGetValue(keyReference.KeyId, out var entry)) { throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'."); } @@ -90,7 +89,7 @@ public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider publicKey, signingKey.Metadata); - signingKeys.AddOrUpdate( + _signingKeys.AddOrUpdate( signingKey.Reference.KeyId, _ => new KeyEntry(descriptor, privateKeyParameters, publicKeyParameters), (_, _) => new KeyEntry(descriptor, privateKeyParameters, publicKeyParameters)); @@ -103,113 +102,9 @@ public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider return false; } - return signingKeys.TryRemove(keyId, out _); + return _signingKeys.TryRemove(keyId, out _); } public IReadOnlyCollection GetSigningKeys() - => signingKeys.Values.Select(static entry => entry.Descriptor).ToArray(); - - private static void EnsureAlgorithmSupported(string algorithmId) - { - if (!SupportedAlgorithms.Contains(algorithmId)) - { - throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider 'bouncycastle.ed25519'."); - } - } - - private static string NormalizeAlgorithm(string algorithmId) - => string.Equals(algorithmId, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase) - ? 
SignatureAlgorithms.Ed25519 - : SignatureAlgorithms.Ed25519; - - private static byte[] NormalizePrivateKey(ReadOnlyMemory privateKey) - { - var span = privateKey.Span; - return span.Length switch - { - 32 => span.ToArray(), - 64 => span[..32].ToArray(), - _ => throw new InvalidOperationException("Ed25519 private key must be 32 or 64 bytes.") - }; - } - - private static byte[] NormalizePublicKey(ReadOnlyMemory publicKey, byte[] privateKey) - { - if (publicKey.IsEmpty) - { - var privateParams = new Ed25519PrivateKeyParameters(privateKey, 0); - return privateParams.GeneratePublicKey().GetEncoded(); - } - - if (publicKey.Span.Length != 32) - { - throw new InvalidOperationException("Ed25519 public key must be 32 bytes."); - } - - return publicKey.ToArray(); - } - - private sealed record KeyEntry( - CryptoSigningKey Descriptor, - Ed25519PrivateKeyParameters PrivateKey, - Ed25519PublicKeyParameters PublicKey); - - private sealed class Ed25519SignerWrapper : ICryptoSigner - { - private readonly KeyEntry entry; - - public Ed25519SignerWrapper(KeyEntry entry) - { - this.entry = entry ?? throw new ArgumentNullException(nameof(entry)); - } - - public string KeyId => entry.Descriptor.Reference.KeyId; - - public string AlgorithmId => entry.Descriptor.AlgorithmId; - - public ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - var signer = new Ed25519Signer(); - var buffer = data.ToArray(); - signer.Init(true, entry.PrivateKey); - signer.BlockUpdate(buffer, 0, buffer.Length); - var signature = signer.GenerateSignature(); - return ValueTask.FromResult(signature); - } - - public ValueTask VerifyAsync(ReadOnlyMemory data, ReadOnlyMemory signature, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - var verifier = new Ed25519Signer(); - var buffer = data.ToArray(); - verifier.Init(false, entry.PublicKey); - verifier.BlockUpdate(buffer, 0, buffer.Length); - var verified = verifier.VerifySignature(signature.ToArray()); - return ValueTask.FromResult(verified); - } - - public JsonWebKey ExportPublicJsonWebKey() - { - var jwk = new JsonWebKey - { - Kid = entry.Descriptor.Reference.KeyId, - Alg = SignatureAlgorithms.EdDsa, - Kty = "OKP", - Use = JsonWebKeyUseNames.Sig, - Crv = "Ed25519" - }; - - foreach (var op in DefaultKeyOps) - { - jwk.KeyOps.Add(op); - } - - jwk.X = Base64UrlEncoder.Encode(entry.PublicKey.GetEncoded()); - - return jwk; - } - } + => _signingKeys.Values.Select(static entry => entry.Descriptor).ToArray(); } diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/TASKS.md b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/TASKS.md index 5f4506c7a..a94d08456 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0052-T | DONE | Revalidated 2026-01-08. | | AUDIT-0052-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Private field naming fixed; file split into partials <= 100 lines; key normalization tests added; `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (330 tests) 2026-02-04. 
| diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Export.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Export.cs new file mode 100644 index 000000000..a5acfac69 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Export.cs @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using StellaOps.Cryptography; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public partial class EidasCryptoProviderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ExportPublicJsonWebKey_ReturnsStubJwk() + { + var keyRef = new CryptoKeyReference("test-key-local"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + var jwk = signer.ExportPublicJsonWebKey(); + + Assert.NotNull(jwk); + Assert.Equal("EC", jwk.Kty); + Assert.Equal("P-256", jwk.Crv); + Assert.Equal("sig", jwk.Use); + Assert.Equal("test-key-local", jwk.Kid); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Keys.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Keys.cs new file mode 100644 index 000000000..810d52d42 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Keys.cs @@ -0,0 +1,68 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using StellaOps.Cryptography; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public partial class EidasCryptoProviderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GetSigner_ReturnsEidasSigner() + { + var keyRef = new CryptoKeyReference("test-key-local"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + Assert.NotNull(signer); + Assert.Equal("test-key-local", signer.KeyId); + Assert.Equal("ECDSA-P256", signer.AlgorithmId); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void UpsertSigningKey_AddsKey() + { + var keyRef = new CryptoKeyReference("test-upsert"); + var signingKey = new CryptoSigningKey( + keyRef, + "ECDSA-P256", + new byte[] { 1, 2, 3, 4 }, + FixedUtcNow); + + _provider.UpsertSigningKey(signingKey); + + var keys = _provider.GetSigningKeys(); + Assert.Contains(keys, k => k.Reference.KeyId == "test-upsert"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void RemoveSigningKey_RemovesKey() + { + var keyRef = new CryptoKeyReference("test-remove"); + var signingKey = new CryptoSigningKey( + keyRef, + "ECDSA-P256", + new byte[] { 1, 2, 3, 4 }, + FixedUtcNow); + + _provider.UpsertSigningKey(signingKey); + Assert.Contains(_provider.GetSigningKeys(), k => k.Reference.KeyId == "test-remove"); + + var removed = _provider.RemoveSigningKey("test-remove"); + + Assert.True(removed); + Assert.DoesNotContain(_provider.GetSigningKeys(), k => k.Reference.KeyId == "test-remove"); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void RemoveSigningKey_ReturnsFalseForNonExistentKey() + { + var removed = _provider.RemoveSigningKey("non-existent-key"); + + Assert.False(removed); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Metadata.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Metadata.cs new file mode 100644 
index 000000000..489e4d02f --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Metadata.cs @@ -0,0 +1,40 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using StellaOps.Cryptography; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public partial class EidasCryptoProviderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Provider_Name_IsEidas() + { + Assert.Equal("eidas", _provider.Name); + } + + [Trait("Category", TestCategories.Unit)] + [Theory] + [InlineData(CryptoCapability.Signing, "ECDSA-P256", true)] + [InlineData(CryptoCapability.Signing, "ECDSA-P384", true)] + [InlineData(CryptoCapability.Signing, "ECDSA-P521", true)] + [InlineData(CryptoCapability.Signing, "RSA-PSS-2048", true)] + [InlineData(CryptoCapability.Signing, "RSA-PSS-4096", true)] + [InlineData(CryptoCapability.Signing, "EdDSA-Ed25519", true)] + [InlineData(CryptoCapability.Signing, "EdDSA-Ed448", true)] + [InlineData(CryptoCapability.Verification, "ECDSA-P256", true)] + [InlineData(CryptoCapability.Signing, "UNKNOWN-ALGO", false)] + [InlineData(CryptoCapability.ContentHashing, "ECDSA-P256", false)] + [InlineData(CryptoCapability.PasswordHashing, "ECDSA-P256", false)] + public void Supports_ReturnsExpectedResults( + CryptoCapability capability, + string algorithmId, + bool expected) + { + var result = _provider.Supports(capability, algorithmId); + + Assert.Equal(expected, result); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Signing.cs new file mode 100644 index 000000000..237a7201e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Signing.cs @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using System.Collections.Generic; +using StellaOps.Cryptography; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public partial class EidasCryptoProviderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SignWithLocalKey_ReturnsSignatureAsync() + { + var keyRef = new CryptoKeyReference("test-key-local"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + var data = "Test data for signing"u8.ToArray(); + var signature = await signer.SignAsync(data); + + Assert.NotNull(signature); + Assert.NotEmpty(signature); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task VerifyWithLocalKey_ReturnsTrueAsync() + { + var keyRef = new CryptoKeyReference("test-key-local"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + var data = "Test data for verification"u8.ToArray(); + var signature = await signer.SignAsync(data); + var isValid = await signer.VerifyAsync(data, signature); + + Assert.True(isValid); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SignWithTspKey_ReturnsSignatureAsync() + { + var keyRef = new CryptoKeyReference("test-key-tsp"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + var data = "Test data for TSP signing"u8.ToArray(); + var signature = await signer.SignAsync(data); + + Assert.NotNull(signature); + Assert.NotEmpty(signature); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task 
SignWithUnknownKey_ThrowsKeyNotFoundExceptionAsync() + { + var keyRef = new CryptoKeyReference("test-key-missing"); + var signer = _provider.GetSigner("ECDSA-P256", keyRef); + + var data = "Test data for missing key"u8.ToArray(); + + await Assert.ThrowsAsync(() => signer.SignAsync(data).AsTask()); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Unsupported.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Unsupported.cs new file mode 100644 index 000000000..07b95e623 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.Unsupported.cs @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public partial class EidasCryptoProviderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GetPasswordHasher_ThrowsNotSupported() + { + Assert.Throws(() => _provider.GetPasswordHasher("PBKDF2")); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void GetHasher_ThrowsNotSupported() + { + Assert.Throws(() => _provider.GetHasher("SHA256")); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.cs index 28672adf6..9a08c18b5 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasCryptoProviderTests.cs @@ -1,85 +1,51 @@ // SPDX-License-Identifier: BUSL-1.1 // Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests - -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; +using System; using System.IO; +using System.Net.Http; using System.Security.Cryptography; using System.Security.Cryptography.X509Certificates; -using StellaOps.Cryptography; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; using StellaOps.Cryptography.Plugin.EIDAS; using StellaOps.Cryptography.Plugin.EIDAS.Configuration; -using StellaOps.Cryptography.Plugin.EIDAS.DependencyInjection; using StellaOps.Cryptography.Plugin.EIDAS.Models; -using Xunit; -using StellaOps.TestKit; namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; -public class EidasCryptoProviderTests : IDisposable +public partial class EidasCryptoProviderTests : IDisposable { - private readonly ServiceProvider _serviceProvider; + private static readonly DateTimeOffset FixedUtcNow = new(2026, 1, 1, 0, 0, 0, TimeSpan.Zero); + private const string KeystorePassword = "test-password"; + + private readonly HttpClient _httpClient; private readonly EidasCryptoProvider _provider; private readonly string _keystorePath; - private const string KeystorePassword = "test-password"; public EidasCryptoProviderTests() { _keystorePath = CreateTestKeystore(); - var services = new ServiceCollection(); + var options = CreateOptions(_keystorePath); - // Configure eIDAS options - services.Configure(options => - { - options.SignatureLevel = SignatureLevel.AdES; - options.SignatureFormat = SignatureFormat.CAdES; - options.DefaultAlgorithm = "ECDSA-P256"; - options.DigestAlgorithm = "SHA256"; + _httpClient = new HttpClient(); + var tspClient = new TrustServiceProviderClient( + 
NullLogger.Instance, + _httpClient, + options); + var localProvider = new LocalEidasProvider( + NullLogger.Instance, + options); - // Add test key configuration - options.Keys.Add(new EidasKeyConfig - { - KeyId = "test-key-local", - Source = "local" - }); - - options.Keys.Add(new EidasKeyConfig - { - KeyId = "test-key-tsp", - Source = "tsp" - }); - - // Configure local signing (stub) - options.Local = new LocalSigningOptions - { - Type = "PKCS12", - Path = _keystorePath, - Password = KeystorePassword - }; - - // Configure TSP (stub) - options.Tsp = new TspOptions - { - Endpoint = "https://tsp.example.com", - ApiKey = "test-api-key" - }; - }); - - services.AddLogging(builder => builder.AddConsole().SetMinimumLevel(LogLevel.Debug)); - services.AddHttpClient(); - services.AddSingleton(); - services.AddSingleton(); - - _serviceProvider = services.BuildServiceProvider(); - _provider = _serviceProvider.GetRequiredService() as EidasCryptoProvider - ?? throw new InvalidOperationException("Failed to resolve EidasCryptoProvider"); + _provider = new EidasCryptoProvider( + NullLogger.Instance, + options, + tspClient, + localProvider); } public void Dispose() { - _serviceProvider.Dispose(); + _httpClient.Dispose(); if (File.Exists(_keystorePath)) { @@ -87,6 +53,43 @@ public class EidasCryptoProviderTests : IDisposable } } + private static IOptions CreateOptions(string keystorePath) + { + var options = new EidasOptions + { + SignatureLevel = SignatureLevel.AdES, + SignatureFormat = SignatureFormat.CAdES, + DefaultAlgorithm = "ECDSA-P256", + DigestAlgorithm = "SHA256", + Local = new LocalSigningOptions + { + Type = "PKCS12", + Path = keystorePath, + Password = KeystorePassword + }, + Tsp = new TspOptions + { + Endpoint = "https://tsp.example.com", + ApiKey = "test-api-key", + TimeoutSeconds = 30 + } + }; + + options.Keys.Add(new EidasKeyConfig + { + KeyId = "test-key-local", + Source = "local" + }); + + options.Keys.Add(new EidasKeyConfig + { + KeyId = "test-key-tsp", + Source = "tsp" + }); + + return Options.Create(options); + } + private static string CreateTestKeystore() { var path = Path.Combine(Path.GetTempPath(), $"eidas-test-{Guid.NewGuid():N}.p12"); @@ -106,229 +109,4 @@ public class EidasCryptoProviderTests : IDisposable return path; } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void Provider_Name_IsEidas() - { - Assert.Equal("eidas", _provider.Name); - } - - [Trait("Category", TestCategories.Unit)] - [Theory] - [InlineData(CryptoCapability.Signing, "ECDSA-P256", true)] - [InlineData(CryptoCapability.Signing, "ECDSA-P384", true)] - [InlineData(CryptoCapability.Signing, "ECDSA-P521", true)] - [InlineData(CryptoCapability.Signing, "RSA-PSS-2048", true)] - [InlineData(CryptoCapability.Signing, "RSA-PSS-4096", true)] - [InlineData(CryptoCapability.Signing, "EdDSA-Ed25519", true)] - [InlineData(CryptoCapability.Signing, "EdDSA-Ed448", true)] - [InlineData(CryptoCapability.Verification, "ECDSA-P256", true)] - [InlineData(CryptoCapability.Signing, "UNKNOWN-ALGO", false)] - [InlineData(CryptoCapability.ContentHashing, "ECDSA-P256", false)] - [InlineData(CryptoCapability.PasswordHashing, "ECDSA-P256", false)] - public void Supports_ReturnsExpectedResults(CryptoCapability capability, string algorithmId, bool expected) - { - var result = _provider.Supports(capability, algorithmId); - Assert.Equal(expected, result); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GetPasswordHasher_ThrowsNotSupported() - { - Assert.Throws(() => 
_provider.GetPasswordHasher("PBKDF2")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GetHasher_ThrowsNotSupported() - { - Assert.Throws(() => _provider.GetHasher("SHA256")); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void GetSigner_ReturnsEidasSigner() - { - var keyRef = new CryptoKeyReference("test-key-local"); - var signer = _provider.GetSigner("ECDSA-P256", keyRef); - - Assert.NotNull(signer); - Assert.Equal("test-key-local", signer.KeyId); - Assert.Equal("ECDSA-P256", signer.AlgorithmId); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void UpsertSigningKey_AddsKey() - { - var keyRef = new CryptoKeyReference("test-upsert"); - var signingKey = new CryptoSigningKey( - keyRef, - "ECDSA-P256", - new byte[] { 1, 2, 3, 4 }, - DateTimeOffset.UtcNow - ); - - _provider.UpsertSigningKey(signingKey); - - var keys = _provider.GetSigningKeys(); - Assert.Contains(keys, k => k.Reference.KeyId == "test-upsert"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void RemoveSigningKey_RemovesKey() - { - var keyRef = new CryptoKeyReference("test-remove"); - var signingKey = new CryptoSigningKey( - keyRef, - "ECDSA-P256", - new byte[] { 1, 2, 3, 4 }, - DateTimeOffset.UtcNow - ); - - _provider.UpsertSigningKey(signingKey); - Assert.Contains(_provider.GetSigningKeys(), k => k.Reference.KeyId == "test-remove"); - - var removed = _provider.RemoveSigningKey("test-remove"); - Assert.True(removed); - Assert.DoesNotContain(_provider.GetSigningKeys(), k => k.Reference.KeyId == "test-remove"); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void RemoveSigningKey_ReturnsFalseForNonExistentKey() - { - var removed = _provider.RemoveSigningKey("non-existent-key"); - Assert.False(removed); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SignAsync_WithLocalKey_ReturnsSignature() - { - // Note: This test will use the stub implementation - // In production, would require actual PKCS#12 keystore - - var keyRef = new CryptoKeyReference("test-key-local"); - var signer = _provider.GetSigner("ECDSA-P256", keyRef); - - var data = "Test data for signing"u8.ToArray(); - var signature = await signer.SignAsync(data); - - Assert.NotNull(signature); - Assert.NotEmpty(signature); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task VerifyAsync_WithLocalKey_ReturnsTrue() - { - // Note: This test will use the stub implementation - // In production, would require actual PKCS#12 keystore - - var keyRef = new CryptoKeyReference("test-key-local"); - var signer = _provider.GetSigner("ECDSA-P256", keyRef); - - var data = "Test data for verification"u8.ToArray(); - var signature = await signer.SignAsync(data); - var isValid = await signer.VerifyAsync(data, signature); - - Assert.True(isValid); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task SignAsync_WithTspKey_ReturnsSignature() - { - // Note: This test will use the stub TSP implementation - // In production, would call actual TSP API - - var keyRef = new CryptoKeyReference("test-key-tsp"); - var signer = _provider.GetSigner("ECDSA-P256", keyRef); - - var data = "Test data for TSP signing"u8.ToArray(); - var signature = await signer.SignAsync(data); - - Assert.NotNull(signature); - Assert.NotEmpty(signature); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ExportPublicJsonWebKey_ReturnsStubJwk() - { - var keyRef = new CryptoKeyReference("test-key-local"); - var signer = 
_provider.GetSigner("ECDSA-P256", keyRef); - - var jwk = signer.ExportPublicJsonWebKey(); - - Assert.NotNull(jwk); - Assert.Equal("EC", jwk.Kty); - Assert.Equal("P-256", jwk.Crv); - Assert.Equal("sig", jwk.Use); - Assert.Equal("test-key-local", jwk.Kid); - } -} - -public class EidasDependencyInjectionTests -{ - [Trait("Category", TestCategories.Unit)] - [Fact] - public void AddEidasCryptoProviders_RegistersServices() - { - var services = new ServiceCollection(); - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["StellaOps:Crypto:Profiles:eidas:SignatureLevel"] = "AdES", - ["StellaOps:Crypto:Profiles:eidas:SignatureFormat"] = "CAdES", - ["StellaOps:Crypto:Profiles:eidas:DefaultAlgorithm"] = "ECDSA-P256", - ["StellaOps:Crypto:Profiles:eidas:Tsp:Endpoint"] = "https://tsp.example.com", - ["StellaOps:Crypto:Profiles:eidas:Tsp:ApiKey"] = "test-api-key" - }) - .Build(); - - services.AddLogging(); - services.AddEidasCryptoProviders(configuration); - - var serviceProvider = services.BuildServiceProvider(); - - var provider = serviceProvider.GetService(); - Assert.NotNull(provider); - Assert.IsType(provider); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void AddEidasCryptoProviders_WithAction_RegistersServices() - { - var services = new ServiceCollection(); - - services.AddLogging(); - services.AddEidasCryptoProviders(options => - { - options.SignatureLevel = SignatureLevel.QES; - options.SignatureFormat = SignatureFormat.XAdES; - options.DefaultAlgorithm = "RSA-PSS-4096"; - options.Tsp = new TspOptions - { - Endpoint = "https://tsp.example.com", - ApiKey = "test-api-key" - }; - }); - - var serviceProvider = services.BuildServiceProvider(); - - var provider = serviceProvider.GetService(); - Assert.NotNull(provider); - Assert.IsType(provider); - - var eidasOptions = serviceProvider.GetRequiredService>().Value; - Assert.Equal(SignatureLevel.QES, eidasOptions.SignatureLevel); - Assert.Equal(SignatureFormat.XAdES, eidasOptions.SignatureFormat); - Assert.Equal("RSA-PSS-4096", eidasOptions.DefaultAlgorithm); - } } diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasDependencyInjectionTests.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasDependencyInjectionTests.cs new file mode 100644 index 000000000..d5f36c173 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/EidasDependencyInjectionTests.cs @@ -0,0 +1,85 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin Tests +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Plugin.EIDAS; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; +using StellaOps.Cryptography.Plugin.EIDAS.DependencyInjection; +using StellaOps.Cryptography.Plugin.EIDAS.Models; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Plugin.EIDAS.Tests; + +public class EidasDependencyInjectionTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddEidasCryptoProviders_RegistersServices() + { + var services = new ServiceCollection(); + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["StellaOps:Crypto:Profiles:eidas:SignatureLevel"] = "AdES", + ["StellaOps:Crypto:Profiles:eidas:SignatureFormat"] = "CAdES", + 
["StellaOps:Crypto:Profiles:eidas:DefaultAlgorithm"] = "ECDSA-P256", + ["StellaOps:Crypto:Profiles:eidas:Tsp:Endpoint"] = "https://tsp.example.com", + ["StellaOps:Crypto:Profiles:eidas:Tsp:ApiKey"] = "test-api-key" + }) + .Build(); + + services.AddEidasCryptoProviders(configuration); + + AssertServiceRegistered(services, ServiceLifetime.Singleton); + AssertServiceRegistered(services, ServiceLifetime.Singleton); + AssertServiceRegistered(services); + AssertServiceRegistered>(services); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddEidasCryptoProviders_WithAction_RegistersServices() + { + var services = new ServiceCollection(); + + services.AddEidasCryptoProviders(options => + { + options.SignatureLevel = SignatureLevel.QES; + options.SignatureFormat = SignatureFormat.XAdES; + options.DefaultAlgorithm = "RSA-PSS-4096"; + options.Tsp = new TspOptions + { + Endpoint = "https://tsp.example.com", + ApiKey = "test-api-key" + }; + }); + + AssertServiceRegistered(services, ServiceLifetime.Singleton); + AssertServiceRegistered(services, ServiceLifetime.Singleton); + AssertServiceRegistered(services); + AssertServiceRegistered>(services); + } + + private static void AssertServiceRegistered( + IServiceCollection services, + ServiceLifetime lifetime) + { + var descriptor = services.LastOrDefault(service => service.ServiceType == typeof(TService)); + + Assert.NotNull(descriptor); + Assert.Equal(typeof(TImplementation), descriptor.ImplementationType); + Assert.Equal(lifetime, descriptor.Lifetime); + } + + private static void AssertServiceRegistered(IServiceCollection services) + { + var hasService = services.Any(service => service.ServiceType == typeof(TService)); + + Assert.True(hasService, $"Missing service registration for {typeof(TService).Name}."); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/TASKS.md b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/TASKS.md index 8c71bff71..c976fd9c3 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0056-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0056-A | DONE | Waived (test project; revalidated 2026-01-08). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Split tests into <=100-line partials, removed service locator, added missing key test; `dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/StellaOps.Cryptography.Plugin.EIDAS.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (25 tests). | diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasKeyConfig.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasKeyConfig.cs new file mode 100644 index 000000000..0c1824443 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasKeyConfig.cs @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +namespace StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +/// +/// eIDAS key configuration. +/// +public class EidasKeyConfig +{ + /// + /// Unique key identifier. + /// + public required string KeyId { get; set; } + + /// + /// Key source: "tsp" (remote) or "local" (PKCS#12). 
+ /// + public required string Source { get; set; } + + /// + /// Certificate in PEM format (optional for validation). + /// + public string? Certificate { get; set; } + + /// + /// Certificate subject DN. + /// + public string? SubjectDn { get; set; } + + /// + /// Certificate serial number. + /// + public string? SerialNumber { get; set; } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasOptions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasOptions.cs index 130408ef6..da8468cdc 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasOptions.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/EidasOptions.cs @@ -1,6 +1,5 @@ // SPDX-License-Identifier: BUSL-1.1 // Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin - using StellaOps.Cryptography.Plugin.EIDAS.Models; namespace StellaOps.Cryptography.Plugin.EIDAS.Configuration; @@ -60,113 +59,3 @@ public class EidasOptions /// public List Keys { get; set; } = new(); } - -/// -/// Trust Service Provider configuration for remote QES signing. -/// -public class TspOptions -{ - /// - /// TSP API endpoint URL. - /// - public required string Endpoint { get; set; } - - /// - /// TSP API key for authentication. - /// - public required string ApiKey { get; set; } - - /// - /// TSP certificate for mutual TLS (optional). - /// - public string? Certificate { get; set; } - - /// - /// Request timeout in seconds. - /// - public int TimeoutSeconds { get; set; } = 30; -} - -/// -/// Local signing configuration (PKCS#12 keystore). -/// -public class LocalSigningOptions -{ - /// - /// Keystore type (PKCS12, PEM). - /// - public string Type { get; set; } = "PKCS12"; - - /// - /// Path to keystore file. - /// - public required string Path { get; set; } - - /// - /// Keystore password. - /// - public required string Password { get; set; } - - /// - /// Path to certificate chain file (PEM format). - /// - public string? CertificateChainPath { get; set; } -} - -/// -/// EU Trusted List configuration. -/// -public class TrustedListOptions -{ - /// - /// EU Trusted List (EUTL) URL. - /// Default: https://ec.europa.eu/tools/lotl/eu-lotl.xml - /// - public string Url { get; set; } = "https://ec.europa.eu/tools/lotl/eu-lotl.xml"; - - /// - /// Local cache directory for trusted list. - /// - public string CachePath { get; set; } = "./crypto/eutl-cache"; - - /// - /// Refresh interval in hours. - /// - public int RefreshIntervalHours { get; set; } = 24; - - /// - /// Enable strict validation (fail on any validation error). - /// - public bool StrictValidation { get; set; } = true; -} - -/// -/// eIDAS key configuration. -/// -public class EidasKeyConfig -{ - /// - /// Unique key identifier. - /// - public required string KeyId { get; set; } - - /// - /// Key source: "tsp" (remote) or "local" (PKCS#12). - /// - public required string Source { get; set; } - - /// - /// Certificate in PEM format (optional for validation). - /// - public string? Certificate { get; set; } - - /// - /// Certificate subject DN. - /// - public string? SubjectDn { get; set; } - - /// - /// Certificate serial number. - /// - public string? 
SerialNumber { get; set; } -} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/LocalSigningOptions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/LocalSigningOptions.cs new file mode 100644 index 000000000..e0dd7ab2e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/LocalSigningOptions.cs @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +namespace StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +/// +/// Local signing configuration (PKCS#12 keystore). +/// +public class LocalSigningOptions +{ + /// + /// Keystore type (PKCS12, PEM). + /// + public string Type { get; set; } = "PKCS12"; + + /// + /// Path to keystore file. + /// + public required string Path { get; set; } + + /// + /// Keystore password. + /// + public required string Password { get; set; } + + /// + /// Path to certificate chain file (PEM format). + /// + public string? CertificateChainPath { get; set; } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TrustedListOptions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TrustedListOptions.cs new file mode 100644 index 000000000..d1862d844 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TrustedListOptions.cs @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +namespace StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +/// +/// EU Trusted List configuration. +/// +public class TrustedListOptions +{ + /// + /// EU Trusted List (EUTL) URL. + /// Default: https://ec.europa.eu/tools/lotl/eu-lotl.xml + /// + public string Url { get; set; } = "https://ec.europa.eu/tools/lotl/eu-lotl.xml"; + + /// + /// Local cache directory for trusted list. + /// + public string CachePath { get; set; } = "./crypto/eutl-cache"; + + /// + /// Refresh interval in hours. + /// + public int RefreshIntervalHours { get; set; } = 24; + + /// + /// Enable strict validation (fail on any validation error). + /// + public bool StrictValidation { get; set; } = true; +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TspOptions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TspOptions.cs new file mode 100644 index 000000000..970259e5e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/Configuration/TspOptions.cs @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +namespace StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +/// +/// Trust Service Provider configuration for remote QES signing. +/// +public class TspOptions +{ + /// + /// TSP API endpoint URL. + /// + public required string Endpoint { get; set; } + + /// + /// TSP API key for authentication. + /// + public required string ApiKey { get; set; } + + /// + /// TSP certificate for mutual TLS (optional). + /// + public string? Certificate { get; set; } + + /// + /// Request timeout in seconds. 
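Editorial note: with the option classes now split into their own files, the action-based registration exercised by EidasDependencyInjectionTests is the quickest way to see how they fit together. A minimal sketch follows; the key ids, paths, endpoint, and API key are placeholders.

// Editorial sketch, not part of the patch: wiring EidasOptions through the
// action-based overload of AddEidasCryptoProviders.
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cryptography.Plugin.EIDAS.Configuration;
using StellaOps.Cryptography.Plugin.EIDAS.DependencyInjection;
using StellaOps.Cryptography.Plugin.EIDAS.Models;

var services = new ServiceCollection();
services.AddLogging();

services.AddEidasCryptoProviders(options =>
{
    options.SignatureLevel = SignatureLevel.AdES;
    options.SignatureFormat = SignatureFormat.CAdES;
    options.DefaultAlgorithm = "ECDSA-P256";
    options.DigestAlgorithm = "SHA256";

    // Local PKCS#12 keystore backs keys whose Source is "local".
    options.Local = new LocalSigningOptions
    {
        Type = "PKCS12",
        Path = "/etc/stellaops/eidas/keystore.p12",
        Password = "changeit"
    };

    // Remote TSP backs keys whose Source is "tsp".
    options.Tsp = new TspOptions
    {
        Endpoint = "https://tsp.example.com",
        ApiKey = "placeholder",
        TimeoutSeconds = 30
    };

    options.Keys.Add(new EidasKeyConfig { KeyId = "release-signing", Source = "local" });
    options.Keys.Add(new EidasKeyConfig { KeyId = "qes-signing", Source = "tsp" });
});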
+ /// + public int TimeoutSeconds { get; set; } = 30; +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasCryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasCryptoProvider.cs index a2160b383..e6174ca7f 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasCryptoProvider.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasCryptoProvider.cs @@ -1,6 +1,5 @@ // SPDX-License-Identifier: BUSL-1.1 // Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Cryptography; @@ -82,6 +81,7 @@ public class EidasCryptoProvider : ICryptoProvider { _logger.LogInformation("eIDAS signing key removed: keyId={KeyId}", keyId); } + return removed; } @@ -90,112 +90,3 @@ public class EidasCryptoProvider : ICryptoProvider return _signingKeys.Values.ToList().AsReadOnly(); } } - -/// -/// eIDAS signer implementation that routes to TSP or local provider. -/// -internal class EidasSigner : ICryptoSigner -{ - private readonly ILogger _logger; - private readonly EidasOptions _options; - private readonly TrustServiceProviderClient _tspClient; - private readonly LocalEidasProvider _localProvider; - private readonly string _algorithmId; - private readonly CryptoKeyReference _keyReference; - - public EidasSigner( - ILogger logger, - EidasOptions options, - TrustServiceProviderClient tspClient, - LocalEidasProvider localProvider, - string algorithmId, - CryptoKeyReference keyReference) - { - _logger = logger; - _options = options; - _tspClient = tspClient; - _localProvider = localProvider; - _algorithmId = algorithmId; - _keyReference = keyReference; - } - - public string KeyId => _keyReference.KeyId; - public string AlgorithmId => _algorithmId; - - public async ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) - { - _logger.LogDebug("eIDAS signing request: keyId={KeyId}, algorithm={Algorithm}", - _keyReference.KeyId, _algorithmId); - - // Resolve key configuration - var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); - if (keyConfig == null) - { - throw new KeyNotFoundException($"eIDAS key '{_keyReference.KeyId}' not configured"); - } - - // Route to appropriate signer based on key source - byte[] signature = keyConfig.Source.ToLowerInvariant() switch - { - "tsp" => await _tspClient.RemoteSignAsync(data.ToArray(), _algorithmId, keyConfig, cancellationToken), - "local" => await _localProvider.LocalSignAsync(data.ToArray(), _algorithmId, keyConfig, cancellationToken), - _ => throw new InvalidOperationException($"Unsupported eIDAS key source: {keyConfig.Source}") - }; - - _logger.LogInformation("eIDAS signature created: keyId={KeyId}, signatureLength={Length}, level={Level}", - _keyReference.KeyId, signature.Length, _options.SignatureLevel); - - return signature; - } - - public async ValueTask VerifyAsync(ReadOnlyMemory data, ReadOnlyMemory signature, CancellationToken cancellationToken = default) - { - _logger.LogDebug("eIDAS verification request: keyId={KeyId}, algorithm={Algorithm}", - _keyReference.KeyId, _algorithmId); - - // Resolve key configuration - var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); - if (keyConfig == null) - { - throw new KeyNotFoundException($"eIDAS key '{_keyReference.KeyId}' not configured"); - } - - // Route to appropriate verifier - bool isValid = keyConfig.Source.ToLowerInvariant() switch - { - "tsp" => await 
_tspClient.RemoteVerifyAsync(data.ToArray(), signature.ToArray(), _algorithmId, keyConfig, cancellationToken), - "local" => await _localProvider.LocalVerifyAsync(data.ToArray(), signature.ToArray(), _algorithmId, keyConfig, cancellationToken), - _ => throw new InvalidOperationException($"Unsupported eIDAS key source: {keyConfig.Source}") - }; - - _logger.LogInformation("eIDAS verification result: keyId={KeyId}, valid={Valid}", - _keyReference.KeyId, isValid); - - return isValid; - } - - public Microsoft.IdentityModel.Tokens.JsonWebKey ExportPublicJsonWebKey() - { - // For eIDAS, public key export requires certificate parsing - // Stub implementation - in production, extract from certificate - _logger.LogWarning("eIDAS ExportPublicJsonWebKey is not fully implemented - returning stub JWK"); - - var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); - if (keyConfig?.Certificate != null) - { - // Production: Parse certificate and extract public key - // var cert = X509Certificate2.CreateFromPem(keyConfig.Certificate); - // var ecdsa = cert.GetECDsaPublicKey(); - // return JsonWebKeyConverter.ConvertFromECDsaSecurityKey(new ECDsaSecurityKey(ecdsa)); - } - - return new Microsoft.IdentityModel.Tokens.JsonWebKey - { - Kty = "EC", - Crv = "P-256", - Use = "sig", - Kid = _keyReference.KeyId, - Alg = _algorithmId - }; - } -} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.Export.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.Export.cs new file mode 100644 index 000000000..c9e807fc1 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.Export.cs @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +using Microsoft.Extensions.Logging; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +internal partial class EidasSigner +{ + public Microsoft.IdentityModel.Tokens.JsonWebKey ExportPublicJsonWebKey() + { + // For eIDAS, public key export requires certificate parsing + // Stub implementation - in production, extract from certificate + _logger.LogWarning("eIDAS ExportPublicJsonWebKey is not fully implemented - returning stub JWK"); + + var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); + if (keyConfig?.Certificate != null) + { + // Production: Parse certificate and extract public key + // var cert = X509Certificate2.CreateFromPem(keyConfig.Certificate); + // var ecdsa = cert.GetECDsaPublicKey(); + // return JsonWebKeyConverter.ConvertFromECDsaSecurityKey(new ECDsaSecurityKey(ecdsa)); + } + + return new Microsoft.IdentityModel.Tokens.JsonWebKey + { + Kty = "EC", + Crv = "P-256", + Use = "sig", + Kid = _keyReference.KeyId, + Alg = _algorithmId + }; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.cs new file mode 100644 index 000000000..d0b1d8903 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/EidasSigner.cs @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +using Microsoft.Extensions.Logging; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +/// +/// eIDAS signer implementation that routes to TSP or local provider. 
+/// +internal partial class EidasSigner : ICryptoSigner +{ + private readonly ILogger _logger; + private readonly EidasOptions _options; + private readonly TrustServiceProviderClient _tspClient; + private readonly LocalEidasProvider _localProvider; + private readonly string _algorithmId; + private readonly CryptoKeyReference _keyReference; + + public EidasSigner( + ILogger logger, + EidasOptions options, + TrustServiceProviderClient tspClient, + LocalEidasProvider localProvider, + string algorithmId, + CryptoKeyReference keyReference) + { + _logger = logger; + _options = options; + _tspClient = tspClient; + _localProvider = localProvider; + _algorithmId = algorithmId; + _keyReference = keyReference; + } + + public string KeyId => _keyReference.KeyId; + public string AlgorithmId => _algorithmId; + + public async ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) + { + _logger.LogDebug("eIDAS signing request: keyId={KeyId}, algorithm={Algorithm}", + _keyReference.KeyId, _algorithmId); + + // Resolve key configuration + var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); + if (keyConfig == null) + { + throw new KeyNotFoundException($"eIDAS key '{_keyReference.KeyId}' not configured"); + } + + // Route to appropriate signer based on key source + byte[] signature = keyConfig.Source.ToLowerInvariant() switch + { + "tsp" => await _tspClient.RemoteSignAsync(data.ToArray(), _algorithmId, keyConfig, cancellationToken) + .ConfigureAwait(false), + "local" => await _localProvider.LocalSignAsync(data.ToArray(), _algorithmId, keyConfig, cancellationToken) + .ConfigureAwait(false), + _ => throw new InvalidOperationException($"Unsupported eIDAS key source: {keyConfig.Source}") + }; + + _logger.LogInformation("eIDAS signature created: keyId={KeyId}, signatureLength={Length}, level={Level}", + _keyReference.KeyId, signature.Length, _options.SignatureLevel); + + return signature; + } + + public async ValueTask VerifyAsync( + ReadOnlyMemory data, + ReadOnlyMemory signature, + CancellationToken cancellationToken = default) + { + _logger.LogDebug("eIDAS verification request: keyId={KeyId}, algorithm={Algorithm}", + _keyReference.KeyId, _algorithmId); + + // Resolve key configuration + var keyConfig = _options.Keys.FirstOrDefault(k => k.KeyId == _keyReference.KeyId); + if (keyConfig == null) + { + throw new KeyNotFoundException($"eIDAS key '{_keyReference.KeyId}' not configured"); + } + + // Route to appropriate verifier + bool isValid = keyConfig.Source.ToLowerInvariant() switch + { + "tsp" => await _tspClient.RemoteVerifyAsync(data.ToArray(), signature.ToArray(), _algorithmId, keyConfig, cancellationToken) + .ConfigureAwait(false), + "local" => await _localProvider.LocalVerifyAsync(data.ToArray(), signature.ToArray(), _algorithmId, keyConfig, cancellationToken) + .ConfigureAwait(false), + _ => throw new InvalidOperationException($"Unsupported eIDAS key source: {keyConfig.Source}") + }; + + _logger.LogInformation("eIDAS verification result: keyId={KeyId}, valid={Valid}", + _keyReference.KeyId, isValid); + + return isValid; + } + +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Signing.cs new file mode 100644 index 000000000..0a9caeb0d --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Signing.cs @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - 
eIDAS Crypto Plugin +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +public partial class LocalEidasProvider +{ + /// + /// Local signing with PKCS#12 certificate (stub implementation). + /// + public async Task LocalSignAsync( + byte[] data, + string algorithmId, + EidasKeyConfig keyConfig, + CancellationToken cancellationToken) + { + _logger.LogDebug("Local eIDAS signing: keyId={KeyId}, algorithm={Algorithm}, dataLength={Length}", + keyConfig.KeyId, algorithmId, data.Length); + + if (_options == null) + { + throw new InvalidOperationException("Local signing options not configured"); + } + + // Load certificate from PKCS#12 keystore (cached) + _certificate ??= LoadCertificate(_options); + + // Stub implementation - in production, use actual certificate signing + _logger.LogWarning("Using stub local signing - replace with actual PKCS#12 signing in production"); + + // Compute hash + var hash = algorithmId.Contains("SHA256") ? SHA256.HashData(data) : SHA512.HashData(data); + + // Stub: Create mock signature + var stubSignature = new byte[64]; // ECDSA-P256 signature + RandomNumberGenerator.Fill(stubSignature); + + _logger.LogInformation("Local eIDAS signature created (stub): keyId={KeyId}, signatureLength={Length}", + keyConfig.KeyId, stubSignature.Length); + + await Task.CompletedTask.ConfigureAwait(false); + return stubSignature; + + // Production implementation: + // using var rsa = _certificate.GetRSAPrivateKey(); + // using var ecdsa = _certificate.GetECDsaPrivateKey(); + // + // return algorithmId switch + // { + // "RSA-PSS-2048" or "RSA-PSS-4096" => rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pss), + // "ECDSA-P256" or "ECDSA-P384" or "ECDSA-P521" => ecdsa.SignData(data, HashAlgorithmName.SHA256), + // _ => throw new NotSupportedException($"Algorithm {algorithmId} not supported for local signing") + // }; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Verification.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Verification.cs new file mode 100644 index 000000000..16c0b5d33 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.Verification.cs @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +using Microsoft.Extensions.Logging; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +public partial class LocalEidasProvider +{ + /// + /// Local verification with PKCS#12 certificate (stub implementation). 
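Editorial note: LocalSignAsync above is still a stub that returns random bytes. Following the "Production implementation" comments embedded in that method, a non-stub version could take roughly the shape below; this is a sketch under the assumption that the certificate loaded from the PKCS#12 keystore carries the matching private key, not a confirmed implementation.

// Editorial sketch, not part of the patch: certificate-backed signing for the
// algorithm families listed in the stub's production comments.
using System;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;

static byte[] SignWithCertificate(X509Certificate2 certificate, byte[] data, string algorithmId)
{
    if (algorithmId.StartsWith("ECDSA-", StringComparison.Ordinal))
    {
        using var ecdsa = certificate.GetECDsaPrivateKey()
            ?? throw new InvalidOperationException("Certificate has no ECDSA private key.");
        return ecdsa.SignData(data, HashAlgorithmName.SHA256);
    }

    if (algorithmId.StartsWith("RSA-PSS-", StringComparison.Ordinal))
    {
        using var rsa = certificate.GetRSAPrivateKey()
            ?? throw new InvalidOperationException("Certificate has no RSA private key.");
        return rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
    }

    throw new NotSupportedException($"Algorithm {algorithmId} not supported for local signing");
}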
+ /// + public async Task LocalVerifyAsync( + byte[] data, + byte[] signature, + string algorithmId, + EidasKeyConfig keyConfig, + CancellationToken cancellationToken) + { + _logger.LogDebug("Local eIDAS verification: keyId={KeyId}, algorithm={Algorithm}", + keyConfig.KeyId, algorithmId); + + if (_options == null) + { + throw new InvalidOperationException("Local signing options not configured"); + } + + // Load certificate from PKCS#12 keystore + _certificate ??= LoadCertificate(_options); + + // Stub: Always return true + _logger.LogWarning("Using stub local verification - replace with actual PKCS#12 verification in production"); + await Task.Delay(10, cancellationToken).ConfigureAwait(false); // Simulate crypto operation + + _logger.LogInformation("Local eIDAS verification complete (stub): keyId={KeyId}, valid=true", + keyConfig.KeyId); + + return true; + + // Production implementation: + // using var rsa = _certificate.GetRSAPublicKey(); + // using var ecdsa = _certificate.GetECDsaPublicKey(); + // + // return algorithmId switch + // { + // "RSA-PSS-2048" or "RSA-PSS-4096" => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pss), + // "ECDSA-P256" or "ECDSA-P384" or "ECDSA-P521" => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256), + // _ => throw new NotSupportedException($"Algorithm {algorithmId} not supported for local verification") + // }; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.cs index e61c72a86..ad5a2760e 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/LocalEidasProvider.cs @@ -1,10 +1,8 @@ // SPDX-License-Identifier: BUSL-1.1 // Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Cryptography.Plugin.EIDAS.Configuration; -using System.Security.Cryptography; using System.Security.Cryptography.X509Certificates; namespace StellaOps.Cryptography.Plugin.EIDAS; @@ -13,7 +11,7 @@ namespace StellaOps.Cryptography.Plugin.EIDAS; /// Local eIDAS signing provider using PKCS#12 keystores. /// Suitable for development and AdES-level signatures. /// -public class LocalEidasProvider +public partial class LocalEidasProvider { private readonly ILogger _logger; private readonly LocalSigningOptions? _options; @@ -27,96 +25,6 @@ public class LocalEidasProvider _options = options.Value.Local; } - /// - /// Local signing with PKCS#12 certificate (stub implementation). - /// - public async Task LocalSignAsync( - byte[] data, - string algorithmId, - EidasKeyConfig keyConfig, - CancellationToken cancellationToken) - { - _logger.LogDebug("Local eIDAS signing: keyId={KeyId}, algorithm={Algorithm}, dataLength={Length}", - keyConfig.KeyId, algorithmId, data.Length); - - if (_options == null) - { - throw new InvalidOperationException("Local signing options not configured"); - } - - // Load certificate from PKCS#12 keystore (cached) - _certificate ??= LoadCertificate(_options); - - // Stub implementation - in production, use actual certificate signing - _logger.LogWarning("Using stub local signing - replace with actual PKCS#12 signing in production"); - - // Compute hash - var hash = algorithmId.Contains("SHA256") ? 
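The LocalSignAsync/LocalVerifyAsync stubs above log a warning and return random bytes or `true`; the commented "Production implementation" blocks describe the certificate-backed path. Below is a minimal, self-contained sketch of that path under the same assumptions those comments make (RSA-PSS or ECDSA keys in the PKCS#12 keystore, SHA-256 for every algorithm family). The `LocalSigningSketch` helper and its method name are illustrative only; they are not part of the plugin.

// Illustrative sketch only: mirrors the commented-out "Production implementation"
// in LocalEidasProvider.Signing.cs. A real implementation would keep this logic
// inside LocalEidasProvider rather than in a standalone helper.
using System;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;

internal static class LocalSigningSketch
{
    public static byte[] SignWithCertificate(X509Certificate2 certificate, byte[] data, string algorithmId)
    {
        // RSA-PSS algorithm identifiers sign with the certificate's RSA private key.
        if (algorithmId.StartsWith("RSA-PSS", StringComparison.OrdinalIgnoreCase))
        {
            using var rsa = certificate.GetRSAPrivateKey()
                ?? throw new InvalidOperationException("Certificate has no RSA private key");
            return rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
        }

        // ECDSA algorithm identifiers sign with the certificate's EC private key.
        if (algorithmId.StartsWith("ECDSA", StringComparison.OrdinalIgnoreCase))
        {
            using var ecdsa = certificate.GetECDsaPrivateKey()
                ?? throw new InvalidOperationException("Certificate has no ECDSA private key");
            return ecdsa.SignData(data, HashAlgorithmName.SHA256);
        }

        throw new NotSupportedException($"Algorithm {algorithmId} not supported for local signing");
    }
}

Verification mirrors this with GetRSAPublicKey/GetECDsaPublicKey and VerifyData. Note that the diff's commented production path pins SHA-256 for all curves; a production implementation would more likely derive the hash from the algorithm identifier (SHA-384 for P-384, SHA-512 for P-521).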
SHA256.HashData(data) : SHA512.HashData(data); - - // Stub: Create mock signature - var stubSignature = new byte[64]; // ECDSA-P256 signature - RandomNumberGenerator.Fill(stubSignature); - - _logger.LogInformation("Local eIDAS signature created (stub): keyId={KeyId}, signatureLength={Length}", - keyConfig.KeyId, stubSignature.Length); - - await Task.CompletedTask; // For async signature - return stubSignature; - - // Production implementation: - // using var rsa = _certificate.GetRSAPrivateKey(); - // using var ecdsa = _certificate.GetECDsaPrivateKey(); - // - // return algorithmId switch - // { - // "RSA-PSS-2048" or "RSA-PSS-4096" => rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pss), - // "ECDSA-P256" or "ECDSA-P384" or "ECDSA-P521" => ecdsa.SignData(data, HashAlgorithmName.SHA256), - // _ => throw new NotSupportedException($"Algorithm {algorithmId} not supported for local signing") - // }; - } - - /// - /// Local verification with PKCS#12 certificate (stub implementation). - /// - public async Task LocalVerifyAsync( - byte[] data, - byte[] signature, - string algorithmId, - EidasKeyConfig keyConfig, - CancellationToken cancellationToken) - { - _logger.LogDebug("Local eIDAS verification: keyId={KeyId}, algorithm={Algorithm}", - keyConfig.KeyId, algorithmId); - - if (_options == null) - { - throw new InvalidOperationException("Local signing options not configured"); - } - - // Load certificate from PKCS#12 keystore - _certificate ??= LoadCertificate(_options); - - // Stub: Always return true - _logger.LogWarning("Using stub local verification - replace with actual PKCS#12 verification in production"); - await Task.Delay(10, cancellationToken); // Simulate crypto operation - - _logger.LogInformation("Local eIDAS verification complete (stub): keyId={KeyId}, valid=true", - keyConfig.KeyId); - - return true; - - // Production implementation: - // using var rsa = _certificate.GetRSAPublicKey(); - // using var ecdsa = _certificate.GetECDsaPublicKey(); - // - // return algorithmId switch - // { - // "RSA-PSS-2048" or "RSA-PSS-4096" => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pss), - // "ECDSA-P256" or "ECDSA-P384" or "ECDSA-P521" => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256), - // _ => throw new NotSupportedException($"Algorithm {algorithmId} not supported for local verification") - // }; - } - private X509Certificate2 LoadCertificate(LocalSigningOptions options) { _logger.LogDebug("Loading eIDAS certificate from keystore: path={Path}, type={Type}", @@ -141,7 +49,8 @@ public class LocalEidasProvider return cert; } - else if (options.Type.Equals("PEM", StringComparison.OrdinalIgnoreCase)) + + if (options.Type.Equals("PEM", StringComparison.OrdinalIgnoreCase)) { // Load PEM certificate (requires separate key file) var certPem = File.ReadAllText(options.Path); @@ -152,10 +61,8 @@ public class LocalEidasProvider return cert; } - else - { - throw new NotSupportedException($"Keystore type '{options.Type}' not supported"); - } + + throw new NotSupportedException($"Keystore type '{options.Type}' not supported"); } catch (Exception ex) { diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TASKS.md b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TASKS.md index 281565414..f27d35225 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TASKS.md @@ -10,3 +10,4 @@ Source of truth: 
`docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0057-A | TODO | Revalidated 2026-01-08 (open findings). | | TASK-033-004 | DONE | Fixed keystore/TSP test config; EIDAS tests pass (SPRINT_20260120_033). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-05 | DONE | Split provider/options/client into <=100-line partials, added ConfigureAwait(false) in library awaits; `dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS.Tests/StellaOps.Cryptography.Plugin.EIDAS.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (25 tests). | diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Signing.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Signing.cs new file mode 100644 index 000000000..5685a5e5e --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Signing.cs @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +public partial class TrustServiceProviderClient +{ + /// + /// Remote signing via TSP (stub implementation). + /// + public async Task RemoteSignAsync( + byte[] data, + string algorithmId, + EidasKeyConfig keyConfig, + CancellationToken cancellationToken) + { + _logger.LogDebug("TSP remote signing request: keyId={KeyId}, algorithm={Algorithm}, dataLength={Length}", + keyConfig.KeyId, algorithmId, data.Length); + + // Stub implementation - in production, this would call actual TSP API + // Example TSP request format (vendor-specific): + // POST /api/v1/sign + // { + // "keyId": "...", + // "algorithm": "ECDSA-P256", + // "digestAlgorithm": "SHA256", + // "dataHash": "base64-encoded-hash", + // "signatureLevel": "QES" + // } + + _logger.LogWarning("Using stub TSP implementation - replace with actual TSP API call in production"); + + // Compute hash for signing + var hash = algorithmId.Contains("SHA256") ? 
SHA256.HashData(data) : SHA512.HashData(data); + + // Stub: Return mock signature + var stubSignature = new byte[64]; // ECDSA-P256 signature is 64 bytes + RandomNumberGenerator.Fill(stubSignature); + + _logger.LogInformation("TSP remote signature created (stub): keyId={KeyId}, signatureLength={Length}", + keyConfig.KeyId, stubSignature.Length); + + return stubSignature; + + // Production implementation would be: + // var request = new + // { + // keyId = keyConfig.KeyId, + // algorithm = algorithmId, + // digestAlgorithm = "SHA256", + // dataHash = Convert.ToBase64String(hash), + // signatureLevel = "QES" + // }; + // + // var response = await _httpClient.PostAsJsonAsync("/api/v1/sign", request, cancellationToken); + // response.EnsureSuccessStatusCode(); + // + // var result = await response.Content.ReadFromJsonAsync(cancellationToken); + // return Convert.FromBase64String(result.Signature); + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Verification.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Verification.cs new file mode 100644 index 000000000..e067d95f6 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.Verification.cs @@ -0,0 +1,49 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +using Microsoft.Extensions.Logging; +using StellaOps.Cryptography.Plugin.EIDAS.Configuration; + +namespace StellaOps.Cryptography.Plugin.EIDAS; + +public partial class TrustServiceProviderClient +{ + /// + /// Remote verification via TSP (stub implementation). + /// + public async Task RemoteVerifyAsync( + byte[] data, + byte[] signature, + string algorithmId, + EidasKeyConfig keyConfig, + CancellationToken cancellationToken) + { + _logger.LogDebug("TSP remote verification request: keyId={KeyId}, algorithm={Algorithm}", + keyConfig.KeyId, algorithmId); + + _logger.LogWarning("Using stub TSP verification - replace with actual TSP API call in production"); + + // Stub: Always return true + await Task.Delay(50, cancellationToken).ConfigureAwait(false); // Simulate network latency + + _logger.LogInformation("TSP remote verification complete: keyId={KeyId}, valid=true", + keyConfig.KeyId); + + return true; + + // Production implementation would be: + // var hash = SHA256.HashData(data); + // var request = new + // { + // keyId = keyConfig.KeyId, + // algorithm = algorithmId, + // dataHash = Convert.ToBase64String(hash), + // signature = Convert.ToBase64String(signature) + // }; + // + // var response = await _httpClient.PostAsJsonAsync("/api/v1/verify", request, cancellationToken); + // response.EnsureSuccessStatusCode(); + // + // var result = await response.Content.ReadFromJsonAsync(cancellationToken); + // return result.Valid; + } +} diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.cs index 97f603c29..0ff68a450 100644 --- a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.cs +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TrustServiceProviderClient.cs @@ -1,12 +1,8 @@ // SPDX-License-Identifier: BUSL-1.1 // Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Cryptography.Plugin.EIDAS.Configuration; -using System.Net.Http.Json; -using System.Security.Cryptography; -using 
System.Text.Json; namespace StellaOps.Cryptography.Plugin.EIDAS; @@ -14,7 +10,7 @@ namespace StellaOps.Cryptography.Plugin.EIDAS; /// Client for Trust Service Provider (TSP) remote signing API. /// Implements QES (Qualified Electronic Signature) with remote QSCD. /// -public class TrustServiceProviderClient +public partial class TrustServiceProviderClient { private readonly ILogger _logger; private readonly HttpClient _httpClient; @@ -34,102 +30,4 @@ public class TrustServiceProviderClient _httpClient.Timeout = TimeSpan.FromSeconds(_options.TimeoutSeconds); _httpClient.DefaultRequestHeaders.Add("X-API-Key", _options.ApiKey); } - - /// - /// Remote signing via TSP (stub implementation). - /// - public async Task RemoteSignAsync( - byte[] data, - string algorithmId, - EidasKeyConfig keyConfig, - CancellationToken cancellationToken) - { - _logger.LogDebug("TSP remote signing request: keyId={KeyId}, algorithm={Algorithm}, dataLength={Length}", - keyConfig.KeyId, algorithmId, data.Length); - - // Stub implementation - in production, this would call actual TSP API - // Example TSP request format (vendor-specific): - // POST /api/v1/sign - // { - // "keyId": "...", - // "algorithm": "ECDSA-P256", - // "digestAlgorithm": "SHA256", - // "dataHash": "base64-encoded-hash", - // "signatureLevel": "QES" - // } - - _logger.LogWarning("Using stub TSP implementation - replace with actual TSP API call in production"); - - // Compute hash for signing - var hash = algorithmId.Contains("SHA256") ? SHA256.HashData(data) : SHA512.HashData(data); - - // Stub: Return mock signature - var stubSignature = new byte[64]; // ECDSA-P256 signature is 64 bytes - RandomNumberGenerator.Fill(stubSignature); - - _logger.LogInformation("TSP remote signature created (stub): keyId={KeyId}, signatureLength={Length}", - keyConfig.KeyId, stubSignature.Length); - - return stubSignature; - - // Production implementation would be: - // var request = new - // { - // keyId = keyConfig.KeyId, - // algorithm = algorithmId, - // digestAlgorithm = "SHA256", - // dataHash = Convert.ToBase64String(hash), - // signatureLevel = "QES" - // }; - // - // var response = await _httpClient.PostAsJsonAsync("/api/v1/sign", request, cancellationToken); - // response.EnsureSuccessStatusCode(); - // - // var result = await response.Content.ReadFromJsonAsync(cancellationToken); - // return Convert.FromBase64String(result.Signature); - } - - /// - /// Remote verification via TSP (stub implementation). 
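The TSP stub above likewise returns random bytes. The sketch below follows the commented "Production implementation" path, reusing the vendor-specific `/api/v1/sign` endpoint and the `TspSignResponse` DTO shown in this diff; the `TspSignRequest` record, its field names, and the assumption that the client's BaseAddress and API key are already configured (as TrustServiceProviderClient's constructor does) are illustrative, since real TSP APIs differ per provider.

// Illustrative sketch only: one plausible shape of the remote-sign call described
// in TrustServiceProviderClient.Signing.cs. The endpoint and request contract are
// vendor-specific examples, not a real TSP API.
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;

internal sealed record TspSignRequest(string KeyId, string Algorithm, string DigestAlgorithm, string DataHash, string SignatureLevel);
internal sealed record TspSignResponse(string Signature, string Certificate, string Timestamp);

internal static class TspSigningSketch
{
    public static async Task<byte[]> RemoteSignAsync(
        HttpClient httpClient, string keyId, string algorithmId, byte[] data, CancellationToken cancellationToken)
    {
        // The TSP signs a digest rather than the raw payload, so hash locally first.
        var hash = SHA256.HashData(data);
        var request = new TspSignRequest(keyId, algorithmId, "SHA256", Convert.ToBase64String(hash), "QES");

        using var response = await httpClient.PostAsJsonAsync("/api/v1/sign", request, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        var result = await response.Content.ReadFromJsonAsync<TspSignResponse>(cancellationToken).ConfigureAwait(false)
            ?? throw new InvalidOperationException("TSP returned an empty sign response");

        // The TSP returns the signature base64-encoded.
        return Convert.FromBase64String(result.Signature);
    }
}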
- /// - public async Task RemoteVerifyAsync( - byte[] data, - byte[] signature, - string algorithmId, - EidasKeyConfig keyConfig, - CancellationToken cancellationToken) - { - _logger.LogDebug("TSP remote verification request: keyId={KeyId}, algorithm={Algorithm}", - keyConfig.KeyId, algorithmId); - - _logger.LogWarning("Using stub TSP verification - replace with actual TSP API call in production"); - - // Stub: Always return true - await Task.Delay(50, cancellationToken); // Simulate network latency - - _logger.LogInformation("TSP remote verification complete (stub): keyId={KeyId}, valid=true", - keyConfig.KeyId); - - return true; - - // Production implementation would be: - // var hash = SHA256.HashData(data); - // var request = new - // { - // keyId = keyConfig.KeyId, - // algorithm = algorithmId, - // dataHash = Convert.ToBase64String(hash), - // signature = Convert.ToBase64String(signature) - // }; - // - // var response = await _httpClient.PostAsJsonAsync("/api/v1/verify", request, cancellationToken); - // response.EnsureSuccessStatusCode(); - // - // var result = await response.Content.ReadFromJsonAsync(cancellationToken); - // return result.Valid; - } } - -// DTOs for TSP API (vendor-specific, examples only) -internal record TspSignResponse(string Signature, string Certificate, string Timestamp); -internal record TspVerifyResponse(bool Valid, string? Error); diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TspResponses.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TspResponses.cs new file mode 100644 index 000000000..2cf0692e2 --- /dev/null +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.EIDAS/TspResponses.cs @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: BUSL-1.1 +// Sprint: SPRINT_4100_0006_0002 - eIDAS Crypto Plugin +namespace StellaOps.Cryptography.Plugin.EIDAS; + +// DTOs for TSP API (vendor-specific, examples only) +internal record TspSignResponse(string Signature, string Certificate, string Timestamp); +internal record TspVerifyResponse(bool Valid, string? 
Error); diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Bundles/VerdictAnchorResult.cs b/src/__Libraries/StellaOps.DeltaVerdict/Bundles/VerdictAnchorResult.cs index 23c7bd273..75c27f377 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Bundles/VerdictAnchorResult.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Bundles/VerdictAnchorResult.cs @@ -1,3 +1,5 @@ +using StellaOps.DeltaVerdict.Manifest; + namespace StellaOps.DeltaVerdict.Bundles; /// diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Engine/IDeltaComputationEngine.cs b/src/__Libraries/StellaOps.DeltaVerdict/Engine/IDeltaComputationEngine.cs index cb96ffb82..d8a1f9919 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Engine/IDeltaComputationEngine.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Engine/IDeltaComputationEngine.cs @@ -1,6 +1,8 @@ +using StellaOps.DeltaVerdict.Models; + namespace StellaOps.DeltaVerdict.Engine; public interface IDeltaComputationEngine { - DeltaVerdict.Models.DeltaVerdict ComputeDelta(Verdict baseVerdict, Verdict headVerdict); + Models.DeltaVerdict ComputeDelta(Verdict baseVerdict, Verdict headVerdict); } diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.IssuerTrust.cs b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.IssuerTrust.cs index 0c61a1a9d..2564e146c 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.IssuerTrust.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.IssuerTrust.cs @@ -1,3 +1,4 @@ +using Microsoft.Extensions.Logging; using StellaOps.DeltaVerdict.Inputs; namespace StellaOps.DeltaVerdict.Trust; diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Keys.cs b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Keys.cs index a9e9422e7..be045fb48 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Keys.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Keys.cs @@ -1,3 +1,5 @@ +using Microsoft.Extensions.Logging; + namespace StellaOps.DeltaVerdict.Trust; public sealed partial class ScoringTrustProvider diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.Helpers.cs b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.Helpers.cs index 7d84acf05..e9d749042 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.Helpers.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.Helpers.cs @@ -1,3 +1,6 @@ +using Microsoft.Extensions.Logging; +using StellaOps.DeltaVerdict.Inputs; + namespace StellaOps.DeltaVerdict.Trust; public sealed partial class ScoringTrustProvider diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.cs b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.cs index 25216133b..de28dac32 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Trust/ScoringTrustProvider.Vex.cs @@ -1,3 +1,4 @@ +using Microsoft.Extensions.Logging; using StellaOps.DeltaVerdict.Inputs; namespace StellaOps.DeltaVerdict.Trust; diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Helpers.cs b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Helpers.cs index 9dcc82c9b..ab27b1cb1 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Helpers.cs +++ 
b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Helpers.cs @@ -1,3 +1,4 @@ +using Microsoft.Extensions.Logging; using StellaOps.DeltaVerdict.Inputs; namespace StellaOps.DeltaVerdict.Validation; diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Status.cs b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Status.cs index f6cecb2c5..f2008b192 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Status.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Status.cs @@ -1,3 +1,4 @@ +using StellaOps.DeltaVerdict.Inputs; using StellaOps.DeltaVerdict.Trust; namespace StellaOps.DeltaVerdict.Validation; diff --git a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Validation.cs b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Validation.cs index f427ee019..2030bd33d 100644 --- a/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Validation.cs +++ b/src/__Libraries/StellaOps.DeltaVerdict/Validation/AdversarialInputValidator.Validation.cs @@ -1,3 +1,4 @@ +using Microsoft.Extensions.Logging; using StellaOps.DeltaVerdict.Inputs; using System.Linq; diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffEvidence.cs index 24acdaafe..64d036242 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffEvidence.cs @@ -4,7 +4,6 @@ // using System.Collections.Immutable; -using System.Text.Json.Serialization; namespace StellaOps.Evidence.Bundle; @@ -98,270 +97,3 @@ public sealed class BinaryDiffEvidence /// public DateTimeOffset? ComputedAt { get; init; } } - -/// -/// Type of binary diff analysis. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum BinaryDiffType -{ - /// Structural diff (sections, symbols). - Structural, - - /// Semantic diff (IR-based). - Semantic, - - /// Combined structural and semantic. - Combined, - - /// Fast hash-only comparison. - HashOnly -} - -/// -/// Function-level diff entry. -/// -public sealed class BinaryFunctionDiff -{ - /// - /// Diff operation type. - /// - public required BinaryDiffOperation Operation { get; init; } - - /// - /// Function name or symbol. - /// - public required string FunctionName { get; init; } - - /// - /// Function address in previous binary. - /// - public ulong? PreviousAddress { get; init; } - - /// - /// Function address in current binary. - /// - public ulong? CurrentAddress { get; init; } - - /// - /// Previous size in bytes. - /// - public int? PreviousSize { get; init; } - - /// - /// Current size in bytes. - /// - public int? CurrentSize { get; init; } - - /// - /// Semantic similarity score (0.0-1.0) for modified functions. - /// - public double? Similarity { get; init; } - - /// - /// Node hash for the function (for reachability correlation). - /// - public string? NodeHash { get; init; } - - /// - /// Whether this function is security-sensitive. - /// - public bool SecuritySensitive { get; init; } - - /// - /// Brief description of the change. - /// - public string? ChangeDescription { get; init; } -} - -/// -/// Symbol-level diff entry. -/// -public sealed class BinarySymbolDiff -{ - /// - /// Diff operation type. - /// - public required BinaryDiffOperation Operation { get; init; } - - /// - /// Symbol name. 
- /// - public required string SymbolName { get; init; } - - /// - /// Symbol type (function, object, etc.). - /// - public string? SymbolType { get; init; } - - /// - /// Section containing the symbol. - /// - public string? Section { get; init; } - - /// - /// Symbol visibility. - /// - public string? Visibility { get; init; } -} - -/// -/// Section-level diff entry. -/// -public sealed class BinarySectionDiff -{ - /// - /// Diff operation type. - /// - public required BinaryDiffOperation Operation { get; init; } - - /// - /// Section name. - /// - public required string SectionName { get; init; } - - /// - /// Previous section size. - /// - public long? PreviousSize { get; init; } - - /// - /// Current section size. - /// - public long? CurrentSize { get; init; } - - /// - /// Size delta. - /// - public long? SizeDelta { get; init; } - - /// - /// Section permissions/flags. - /// - public string? Permissions { get; init; } -} - -/// -/// Semantic diff summary. -/// -public sealed class BinarySemanticDiff -{ - /// - /// Previous semantic fingerprint hash. - /// - public string? PreviousFingerprint { get; init; } - - /// - /// Current semantic fingerprint hash. - /// - public string? CurrentFingerprint { get; init; } - - /// - /// Overall semantic similarity (0.0-1.0). - /// - public double Similarity { get; init; } - - /// - /// Number of semantically identical functions. - /// - public int IdenticalFunctions { get; init; } - - /// - /// Number of semantically similar functions. - /// - public int SimilarFunctions { get; init; } - - /// - /// Number of semantically different functions. - /// - public int DifferentFunctions { get; init; } - - /// - /// IR normalization recipe version used. - /// - public string? NormalizationRecipe { get; init; } -} - -/// -/// Security-relevant change in binary. -/// -public sealed class BinarySecurityChange -{ - /// - /// Type of security change. - /// - public required BinarySecurityChangeType ChangeType { get; init; } - - /// - /// Severity of the change (low, medium, high, critical). - /// - public required string Severity { get; init; } - - /// - /// Description of the change. - /// - public required string Description { get; init; } - - /// - /// Affected function or symbol. - /// - public string? AffectedSymbol { get; init; } - - /// - /// CVE IDs potentially related to this change. - /// - public ImmutableArray RelatedCves { get; init; } = []; -} - -/// -/// Type of security-relevant change. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum BinarySecurityChangeType -{ - /// New security-sensitive function added. - SecurityFunctionAdded, - - /// Security-sensitive function removed. - SecurityFunctionRemoved, - - /// Security-sensitive function modified. - SecurityFunctionModified, - - /// Crypto function changed. - CryptoChange, - - /// Memory safety function changed. - MemorySafetyChange, - - /// Authentication/authorization function changed. - AuthChange, - - /// Input validation function changed. - InputValidationChange, - - /// Hardening feature added or removed. - HardeningChange -} - -/// -/// Binary diff operation types. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum BinaryDiffOperation -{ - /// Element was added. - Added, - - /// Element was removed. - Removed, - - /// Element was modified. - Modified, - - /// Element was renamed. - Renamed, - - /// Element was moved to different location. 
- Moved -} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffOperation.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffOperation.cs new file mode 100644 index 000000000..36b600337 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffOperation.cs @@ -0,0 +1,25 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Evidence.Bundle; + +/// +/// Binary diff operation types. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum BinaryDiffOperation +{ + /// Element was added. + Added, + + /// Element was removed. + Removed, + + /// Element was modified. + Modified, + + /// Element was renamed. + Renamed, + + /// Element was moved to different location. + Moved +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffType.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffType.cs new file mode 100644 index 000000000..706d362c2 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryDiffType.cs @@ -0,0 +1,22 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Evidence.Bundle; + +/// +/// Type of binary diff analysis. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum BinaryDiffType +{ + /// Structural diff (sections, symbols). + Structural, + + /// Semantic diff (IR-based). + Semantic, + + /// Combined structural and semantic. + Combined, + + /// Fast hash-only comparison. + HashOnly +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinaryFunctionDiff.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryFunctionDiff.cs new file mode 100644 index 000000000..263837ef8 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinaryFunctionDiff.cs @@ -0,0 +1,57 @@ +namespace StellaOps.Evidence.Bundle; + +/// +/// Function-level diff entry. +/// +public sealed class BinaryFunctionDiff +{ + /// + /// Diff operation type. + /// + public required BinaryDiffOperation Operation { get; init; } + + /// + /// Function name or symbol. + /// + public required string FunctionName { get; init; } + + /// + /// Function address in previous binary. + /// + public ulong? PreviousAddress { get; init; } + + /// + /// Function address in current binary. + /// + public ulong? CurrentAddress { get; init; } + + /// + /// Previous size in bytes. + /// + public int? PreviousSize { get; init; } + + /// + /// Current size in bytes. + /// + public int? CurrentSize { get; init; } + + /// + /// Semantic similarity score (0.0-1.0) for modified functions. + /// + public double? Similarity { get; init; } + + /// + /// Node hash for the function (for reachability correlation). + /// + public string? NodeHash { get; init; } + + /// + /// Whether this function is security-sensitive. + /// + public bool SecuritySensitive { get; init; } + + /// + /// Brief description of the change. + /// + public string? ChangeDescription { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinarySectionDiff.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySectionDiff.cs new file mode 100644 index 000000000..778e8d30e --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySectionDiff.cs @@ -0,0 +1,37 @@ +namespace StellaOps.Evidence.Bundle; + +/// +/// Section-level diff entry. +/// +public sealed class BinarySectionDiff +{ + /// + /// Diff operation type. + /// + public required BinaryDiffOperation Operation { get; init; } + + /// + /// Section name. + /// + public required string SectionName { get; init; } + + /// + /// Previous section size. + /// + public long? 
PreviousSize { get; init; } + + /// + /// Current section size. + /// + public long? CurrentSize { get; init; } + + /// + /// Size delta. + /// + public long? SizeDelta { get; init; } + + /// + /// Section permissions/flags. + /// + public string? Permissions { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChange.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChange.cs new file mode 100644 index 000000000..8f4ab76ca --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChange.cs @@ -0,0 +1,34 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Bundle; + +/// +/// Security-relevant change in binary. +/// +public sealed class BinarySecurityChange +{ + /// + /// Type of security change. + /// + public required BinarySecurityChangeType ChangeType { get; init; } + + /// + /// Severity of the change (low, medium, high, critical). + /// + public required string Severity { get; init; } + + /// + /// Description of the change. + /// + public required string Description { get; init; } + + /// + /// Affected function or symbol. + /// + public string? AffectedSymbol { get; init; } + + /// + /// CVE IDs potentially related to this change. + /// + public ImmutableArray RelatedCves { get; init; } = []; +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChangeType.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChangeType.cs new file mode 100644 index 000000000..292db29e1 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySecurityChangeType.cs @@ -0,0 +1,34 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Evidence.Bundle; + +/// +/// Type of security-relevant change. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum BinarySecurityChangeType +{ + /// New security-sensitive function added. + SecurityFunctionAdded, + + /// Security-sensitive function removed. + SecurityFunctionRemoved, + + /// Security-sensitive function modified. + SecurityFunctionModified, + + /// Crypto function changed. + CryptoChange, + + /// Memory safety function changed. + MemorySafetyChange, + + /// Authentication/authorization function changed. + AuthChange, + + /// Input validation function changed. + InputValidationChange, + + /// Hardening feature added or removed. + HardeningChange +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinarySemanticDiff.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySemanticDiff.cs new file mode 100644 index 000000000..daf6ce073 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySemanticDiff.cs @@ -0,0 +1,42 @@ +namespace StellaOps.Evidence.Bundle; + +/// +/// Semantic diff summary. +/// +public sealed class BinarySemanticDiff +{ + /// + /// Previous semantic fingerprint hash. + /// + public string? PreviousFingerprint { get; init; } + + /// + /// Current semantic fingerprint hash. + /// + public string? CurrentFingerprint { get; init; } + + /// + /// Overall semantic similarity (0.0-1.0). + /// + public double Similarity { get; init; } + + /// + /// Number of semantically identical functions. + /// + public int IdenticalFunctions { get; init; } + + /// + /// Number of semantically similar functions. + /// + public int SimilarFunctions { get; init; } + + /// + /// Number of semantically different functions. + /// + public int DifferentFunctions { get; init; } + + /// + /// IR normalization recipe version used. + /// + public string? 
NormalizationRecipe { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BinarySymbolDiff.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySymbolDiff.cs new file mode 100644 index 000000000..44ea51d3f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BinarySymbolDiff.cs @@ -0,0 +1,32 @@ +namespace StellaOps.Evidence.Bundle; + +/// +/// Symbol-level diff entry. +/// +public sealed class BinarySymbolDiff +{ + /// + /// Diff operation type. + /// + public required BinaryDiffOperation Operation { get; init; } + + /// + /// Symbol name. + /// + public required string SymbolName { get; init; } + + /// + /// Symbol type (function, object, etc.). + /// + public string? SymbolType { get; init; } + + /// + /// Section containing the symbol. + /// + public string? Section { get; init; } + + /// + /// Symbol visibility. + /// + public string? Visibility { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/BuildAncestry.cs b/src/__Libraries/StellaOps.Evidence.Bundle/BuildAncestry.cs new file mode 100644 index 000000000..12bf05b51 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/BuildAncestry.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class BuildAncestry +{ + public string? ImageDigest { get; init; } + public string? LayerDigest { get; init; } + public string? ArtifactDigest { get; init; } + public string? CommitHash { get; init; } + public string? BuildId { get; init; } + public DateTimeOffset? BuildTime { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/CallStackEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/CallStackEvidence.cs index a8524c9a5..f826996a7 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/CallStackEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/CallStackEvidence.cs @@ -10,14 +10,3 @@ public sealed class CallStackEvidence public int? SourceFrameIndex { get; init; } public string? UnavailableReason { get; init; } } - -public sealed class StackFrame -{ - public required string FunctionName { get; init; } - public required string FilePath { get; init; } - public required int Line { get; init; } - public int? Column { get; init; } - public string? SourceSnippet { get; init; } - public bool IsSink { get; init; } - public bool IsSource { get; init; } -} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DiffEntry.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DiffEntry.cs new file mode 100644 index 000000000..bbda92ad5 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DiffEntry.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class DiffEntry +{ + public required DiffOperation Operation { get; init; } + public required string Path { get; init; } + public string? OldValue { get; init; } + public string? NewValue { get; init; } + public string? ComponentPurl { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DiffEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DiffEvidence.cs index f4020829b..6852797e8 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/DiffEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DiffEvidence.cs @@ -11,16 +11,3 @@ public sealed class DiffEvidence public DateTimeOffset? PreviousScanTime { get; init; } public string? 
UnavailableReason { get; init; } } - -public enum DiffType { Sbom, Vex, Combined } - -public sealed class DiffEntry -{ - public required DiffOperation Operation { get; init; } - public required string Path { get; init; } - public string? OldValue { get; init; } - public string? NewValue { get; init; } - public string? ComponentPurl { get; init; } -} - -public enum DiffOperation { Added, Removed, Modified } diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DiffOperation.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DiffOperation.cs new file mode 100644 index 000000000..126b7c06f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DiffOperation.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Evidence.Bundle; + +public enum DiffOperation +{ + Added, + Removed, + Modified +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DiffType.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DiffType.cs new file mode 100644 index 000000000..707f573be --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DiffType.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Evidence.Bundle; + +public enum DiffType +{ + Sbom, + Vex, + Combined +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DsseEnvelope.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DsseEnvelope.cs new file mode 100644 index 000000000..956db30d3 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DsseEnvelope.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class DsseEnvelope +{ + public required string PayloadType { get; init; } + public required string Payload { get; init; } + public required IReadOnlyList Signatures { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/DsseSignature.cs b/src/__Libraries/StellaOps.Evidence.Bundle/DsseSignature.cs new file mode 100644 index 000000000..8284250cc --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/DsseSignature.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class DsseSignature +{ + public required string KeyId { get; init; } + public required string Sig { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/FunctionPathNode.cs b/src/__Libraries/StellaOps.Evidence.Bundle/FunctionPathNode.cs new file mode 100644 index 000000000..e8405cd70 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/FunctionPathNode.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class FunctionPathNode +{ + public required string FunctionName { get; init; } + public required string FilePath { get; init; } + public required int Line { get; init; } + public int? Column { get; init; } + public string? ModuleName { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/PackageImportNode.cs b/src/__Libraries/StellaOps.Evidence.Bundle/PackageImportNode.cs new file mode 100644 index 000000000..5cc50c1d2 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/PackageImportNode.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class PackageImportNode +{ + public required string PackageName { get; init; } + public string? Version { get; init; } + public string? ImportedBy { get; init; } + public string? 
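With `DiffEntry`, `DiffOperation`, and `DiffType` now in their own files, a rough illustration of how a single SBOM change might be expressed with these types follows; the JSON-pointer-style path convention and the example package are assumptions, not something this diff prescribes.

// Illustrative sketch only: one way a dependency version bump could be recorded
// as a DiffEntry. The Path convention shown here is assumed, not defined by the diff.
using StellaOps.Evidence.Bundle;

internal static class DiffEntrySketch
{
    public static DiffEntry VersionBump() => new()
    {
        Operation = DiffOperation.Modified,
        Path = "/components/lodash/version",
        OldValue = "4.17.20",
        NewValue = "4.17.21",
        ComponentPurl = "pkg:npm/lodash@4.17.21"
    };
}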
ImportPath { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/ProvenanceEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/ProvenanceEvidence.cs index 3156ab1a1..a92cb52a9 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/ProvenanceEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/ProvenanceEvidence.cs @@ -11,34 +11,3 @@ public sealed class ProvenanceEvidence public string? VerificationStatus { get; init; } public string? UnavailableReason { get; init; } } - -public sealed class DsseEnvelope -{ - public required string PayloadType { get; init; } - public required string Payload { get; init; } - public required IReadOnlyList Signatures { get; init; } -} - -public sealed class DsseSignature -{ - public required string KeyId { get; init; } - public required string Sig { get; init; } -} - -public sealed class BuildAncestry -{ - public string? ImageDigest { get; init; } - public string? LayerDigest { get; init; } - public string? ArtifactDigest { get; init; } - public string? CommitHash { get; init; } - public string? BuildId { get; init; } - public DateTimeOffset? BuildTime { get; init; } -} - -public sealed class RekorReference -{ - public required string LogId { get; init; } - public required long LogIndex { get; init; } - public string? Uuid { get; init; } - public DateTimeOffset? IntegratedTime { get; init; } -} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityEvidence.cs index b0f94b8ae..897e2014a 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityEvidence.cs @@ -12,22 +12,3 @@ public sealed class ReachabilityEvidence public int? ConfidenceTier { get; init; } public string? UnavailableReason { get; init; } } - -public enum ReachabilityProofType { FunctionLevel, PackageLevel, ImportChain, Heuristic, Unknown } - -public sealed class FunctionPathNode -{ - public required string FunctionName { get; init; } - public required string FilePath { get; init; } - public required int Line { get; init; } - public int? Column { get; init; } - public string? ModuleName { get; init; } -} - -public sealed class PackageImportNode -{ - public required string PackageName { get; init; } - public string? Version { get; init; } - public string? ImportedBy { get; init; } - public string? ImportPath { get; init; } -} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityProofType.cs b/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityProofType.cs new file mode 100644 index 000000000..ca81df7e9 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/ReachabilityProofType.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Bundle; + +public enum ReachabilityProofType +{ + FunctionLevel, + PackageLevel, + ImportChain, + Heuristic, + Unknown +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/RekorReference.cs b/src/__Libraries/StellaOps.Evidence.Bundle/RekorReference.cs new file mode 100644 index 000000000..00e493669 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/RekorReference.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class RekorReference +{ + public required string LogId { get; init; } + public required long LogIndex { get; init; } + public string? Uuid { get; init; } + public DateTimeOffset? 
IntegratedTime { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/StackFrame.cs b/src/__Libraries/StellaOps.Evidence.Bundle/StackFrame.cs new file mode 100644 index 000000000..c5e133a1f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/StackFrame.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class StackFrame +{ + public required string FunctionName { get; init; } + public required string FilePath { get; init; } + public required int Line { get; init; } + public int? Column { get; init; } + public string? SourceSnippet { get; init; } + public bool IsSink { get; init; } + public bool IsSource { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/TASKS.md b/src/__Libraries/StellaOps.Evidence.Bundle/TASKS.md index 815adca82..69c522c58 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/TASKS.md +++ b/src/__Libraries/StellaOps.Evidence.Bundle/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0078-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0078-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Split evidence model types into single-purpose files; dotnet test 2026-02-04 (29 tests). | diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/VexStatement.cs b/src/__Libraries/StellaOps.Evidence.Bundle/VexStatement.cs new file mode 100644 index 000000000..c10150329 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Bundle/VexStatement.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Evidence.Bundle; + +public sealed class VexStatement +{ + public required string VexStatus { get; init; } + public string? Justification { get; init; } + public string? ImpactStatement { get; init; } + public string? ActionStatement { get; init; } + public DateTimeOffset? Timestamp { get; init; } + public string? Source { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Bundle/VexStatusEvidence.cs b/src/__Libraries/StellaOps.Evidence.Bundle/VexStatusEvidence.cs index c20934cc2..8257047d6 100644 --- a/src/__Libraries/StellaOps.Evidence.Bundle/VexStatusEvidence.cs +++ b/src/__Libraries/StellaOps.Evidence.Bundle/VexStatusEvidence.cs @@ -9,13 +9,3 @@ public sealed class VexStatusEvidence public IReadOnlyList? History { get; init; } public string? UnavailableReason { get; init; } } - -public sealed class VexStatement -{ - public required string VexStatus { get; init; } - public string? Justification { get; init; } - public string? ImpactStatement { get; init; } - public string? ActionStatement { get; init; } - public DateTimeOffset? Timestamp { get; init; } - public string? 
Source { get; init; } -} diff --git a/src/__Libraries/StellaOps.Evidence.Core.Tests/EvidenceBundleAdapterTests.cs b/src/__Libraries/StellaOps.Evidence.Core.Tests/EvidenceBundleAdapterTests.cs new file mode 100644 index 000000000..1c982c7e1 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core.Tests/EvidenceBundleAdapterTests.cs @@ -0,0 +1,49 @@ +using StellaOps.Evidence.Bundle; +using StellaOps.Evidence.Core.Adapters; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Evidence.Core.Tests; + +public sealed class EvidenceBundleAdapterTests +{ + private readonly EvidenceBundleAdapter _adapter = new(); + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CanConvert_WithNull_ReturnsFalse() + { + Assert.False(_adapter.CanConvert(null!)); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Convert_WithBinaryDiff_ReturnsArtifactEvidence() + { + var bundle = new EvidenceBundle + { + AlertId = "ALERT-100", + ArtifactId = "sha256:artifact", + Hashes = EvidenceHashSet.Empty(), + CreatedAt = new DateTimeOffset(2026, 2, 4, 0, 0, 0, TimeSpan.Zero), + BinaryDiff = new BinaryDiffEvidence + { + Status = EvidenceStatus.Available, + DiffType = BinaryDiffType.Structural + } + }; + + var provenance = new EvidenceProvenance + { + GeneratorId = "stellaops/scanner/test", + GeneratorVersion = "1.0.0", + GeneratedAt = new DateTimeOffset(2026, 2, 4, 0, 0, 0, TimeSpan.Zero) + }; + + var results = _adapter.Convert(bundle, "sha256:subject", provenance); + + Assert.Single(results); + Assert.Equal(EvidenceType.Artifact, results[0].EvidenceType); + Assert.False(results[0].Payload.IsEmpty); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core.Tests/TASKS.md b/src/__Libraries/StellaOps.Evidence.Core.Tests/TASKS.md index abd31e3c7..49adcf5e9 100644 --- a/src/__Libraries/StellaOps.Evidence.Core.Tests/TASKS.md +++ b/src/__Libraries/StellaOps.Evidence.Core.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0079-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0079-A | DONE | Waived (test project; revalidated 2026-01-08). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Added EvidenceBundleAdapter coverage; dotnet test 2026-02-04 (113 tests). 
| diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.BinaryDiff.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.BinaryDiff.cs new file mode 100644 index 000000000..88f4ec06f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.BinaryDiff.cs @@ -0,0 +1,49 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + // Sprint: SPRINT_20260112_008_LB_binary_diff_evidence_models (BINDIFF-LB-003) + private static IEvidence ConvertBinaryDiff( + BinaryDiffEvidence binaryDiff, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new BinaryDiffPayload + { + Hash = binaryDiff.Hash, + DiffType = binaryDiff.DiffType.ToString(), + PreviousBinaryDigest = binaryDiff.PreviousBinaryDigest, + CurrentBinaryDigest = binaryDiff.CurrentBinaryDigest, + BinaryFormat = binaryDiff.BinaryFormat, + ToolVersion = binaryDiff.ToolVersion, + SimilarityScore = binaryDiff.SimilarityScore, + FunctionChangeCount = binaryDiff.FunctionChanges.Length, + SymbolChangeCount = binaryDiff.SymbolChanges.Length, + SectionChangeCount = binaryDiff.SectionChanges.Length, + SecurityChangeCount = binaryDiff.SecurityChanges.Length, + HasSemanticDiff = binaryDiff.SemanticDiff is not null, + SemanticSimilarity = binaryDiff.SemanticDiff?.Similarity + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Artifact, payload, provenance, SchemaVersions.BinaryDiff); + } + + internal sealed record BinaryDiffPayload + { + public string? Hash { get; init; } + public string? DiffType { get; init; } + public string? PreviousBinaryDigest { get; init; } + public string? CurrentBinaryDigest { get; init; } + public string? BinaryFormat { get; init; } + public string? ToolVersion { get; init; } + public double? SimilarityScore { get; init; } + public int FunctionChangeCount { get; init; } + public int SymbolChangeCount { get; init; } + public int SectionChangeCount { get; init; } + public int SecurityChangeCount { get; init; } + public bool HasSemanticDiff { get; init; } + public double? SemanticSimilarity { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.CallStack.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.CallStack.cs new file mode 100644 index 000000000..680113a57 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.CallStack.cs @@ -0,0 +1,48 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertCallStack( + CallStackEvidence callStack, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new CallStackPayload + { + Hash = callStack.Hash, + SinkFrameIndex = callStack.SinkFrameIndex, + SourceFrameIndex = callStack.SourceFrameIndex, + Frames = callStack.Frames?.Select(f => new StackFramePayload + { + FunctionName = f.FunctionName, + FilePath = f.FilePath, + Line = f.Line, + Column = f.Column, + IsSink = f.IsSink, + IsSource = f.IsSource + }).ToList() + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Runtime, payload, provenance, SchemaVersions.CallStack); + } + + internal sealed record CallStackPayload + { + public string? Hash { get; init; } + public int? SinkFrameIndex { get; init; } + public int? SourceFrameIndex { get; init; } + public IReadOnlyList? 
Frames { get; init; } + } + + internal sealed record StackFramePayload + { + public required string FunctionName { get; init; } + public required string FilePath { get; init; } + public required int Line { get; init; } + public int? Column { get; init; } + public bool IsSink { get; init; } + public bool IsSource { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Diff.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Diff.cs new file mode 100644 index 000000000..ed0e99649 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Diff.cs @@ -0,0 +1,48 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertDiff( + DiffEvidence diff, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new DiffPayload + { + Hash = diff.Hash, + DiffType = diff.DiffType.ToString(), + PreviousScanId = diff.PreviousScanId, + PreviousScanTime = diff.PreviousScanTime, + Entries = diff.Entries?.Select(e => new DiffEntryPayload + { + Operation = e.Operation.ToString(), + Path = e.Path, + OldValue = e.OldValue, + NewValue = e.NewValue, + ComponentPurl = e.ComponentPurl + }).ToList() + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Artifact, payload, provenance, SchemaVersions.Diff); + } + + internal sealed record DiffPayload + { + public string? Hash { get; init; } + public string? DiffType { get; init; } + public string? PreviousScanId { get; init; } + public DateTimeOffset? PreviousScanTime { get; init; } + public IReadOnlyList? Entries { get; init; } + } + + internal sealed record DiffEntryPayload + { + public required string Operation { get; init; } + public required string Path { get; init; } + public string? OldValue { get; init; } + public string? NewValue { get; init; } + public string? ComponentPurl { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.GraphRevision.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.GraphRevision.cs new file mode 100644 index 000000000..d100ed90f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.GraphRevision.cs @@ -0,0 +1,34 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertGraphRevision( + GraphRevisionEvidence graphRevision, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new GraphRevisionPayload + { + Hash = graphRevision.Hash, + RevisionId = graphRevision.GraphRevisionId, + VerdictReceipt = graphRevision.VerdictReceipt, + GraphComputedAt = graphRevision.GraphComputedAt, + NodeCount = graphRevision.TotalNodes, + EdgeCount = graphRevision.TotalEdges + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Dependency, payload, provenance, SchemaVersions.GraphRevision); + } + + internal sealed record GraphRevisionPayload + { + public string? Hash { get; init; } + public string? RevisionId { get; init; } + public string? VerdictReceipt { get; init; } + public DateTimeOffset? GraphComputedAt { get; init; } + public int? NodeCount { get; init; } + public int? 
EdgeCount { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Provenance.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Provenance.cs new file mode 100644 index 000000000..82079b9d8 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Provenance.cs @@ -0,0 +1,38 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertProvenance( + ProvenanceEvidence provenanceEvidence, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new ProvenancePayload + { + Hash = provenanceEvidence.Hash, + BuilderId = provenanceEvidence.Ancestry?.BuildId, + BuildTime = provenanceEvidence.Ancestry?.BuildTime, + ImageDigest = provenanceEvidence.Ancestry?.ImageDigest, + LayerDigest = provenanceEvidence.Ancestry?.LayerDigest, + CommitHash = provenanceEvidence.Ancestry?.CommitHash, + VerificationStatus = provenanceEvidence.VerificationStatus, + RekorLogIndex = provenanceEvidence.RekorEntry?.LogIndex + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Provenance, payload, provenance, SchemaVersions.Provenance); + } + + internal sealed record ProvenancePayload + { + public string? Hash { get; init; } + public string? BuilderId { get; init; } + public DateTimeOffset? BuildTime { get; init; } + public string? ImageDigest { get; init; } + public string? LayerDigest { get; init; } + public string? CommitHash { get; init; } + public string? VerificationStatus { get; init; } + public long? RekorLogIndex { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Reachability.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Reachability.cs new file mode 100644 index 000000000..7338da327 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.Reachability.cs @@ -0,0 +1,64 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertReachability( + ReachabilityEvidence reachability, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new ReachabilityPayload + { + Hash = reachability.Hash, + ProofType = reachability.ProofType.ToString(), + FunctionPath = reachability.FunctionPath?.Select(f => new FunctionPathPayload + { + FunctionName = f.FunctionName, + FilePath = f.FilePath, + Line = f.Line, + Column = f.Column, + ModuleName = f.ModuleName + }).ToList(), + ImportChain = reachability.ImportChain?.Select(i => new ImportChainPayload + { + PackageName = i.PackageName, + Version = i.Version, + ImportedBy = i.ImportedBy, + ImportPath = i.ImportPath + }).ToList(), + LatticeState = reachability.LatticeState, + ConfidenceTier = reachability.ConfidenceTier + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Reachability, payload, provenance, SchemaVersions.Reachability); + } + + internal sealed record ReachabilityPayload + { + public string? Hash { get; init; } + public string? ProofType { get; init; } + public IReadOnlyList? FunctionPath { get; init; } + public IReadOnlyList? ImportChain { get; init; } + public string? LatticeState { get; init; } + public int? 
ConfidenceTier { get; init; } + } + + internal sealed record FunctionPathPayload + { + public required string FunctionName { get; init; } + public required string FilePath { get; init; } + public required int Line { get; init; } + public int? Column { get; init; } + public string? ModuleName { get; init; } + } + + internal sealed record ImportChainPayload + { + public required string PackageName { get; init; } + public string? Version { get; init; } + public string? ImportedBy { get; init; } + public string? ImportPath { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.VexStatus.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.VexStatus.cs new file mode 100644 index 000000000..398239d32 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.VexStatus.cs @@ -0,0 +1,36 @@ +using StellaOps.Evidence.Bundle; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceBundleAdapter +{ + private static IEvidence ConvertVexStatus( + VexStatusEvidence vexStatus, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new VexStatusPayload + { + Hash = vexStatus.Hash, + VexStatus = vexStatus.Current?.VexStatus, + Justification = vexStatus.Current?.Justification, + ImpactStatement = vexStatus.Current?.ImpactStatement, + ActionStatement = vexStatus.Current?.ActionStatement, + StatementSource = vexStatus.Current?.Source, + StatementTimestamp = vexStatus.Current?.Timestamp + }; + + return CreateEvidence(subjectNodeId, EvidenceType.Vex, payload, provenance, SchemaVersions.Vex); + } + + internal sealed record VexStatusPayload + { + public string? Hash { get; init; } + public string? VexStatus { get; init; } + public string? Justification { get; init; } + public string? ImpactStatement { get; init; } + public string? ActionStatement { get; init; } + public string? StatementSource { get; init; } + public DateTimeOffset? StatementTimestamp { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.cs index 5e4077152..fab746e6e 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceBundleAdapter.cs @@ -7,7 +7,7 @@ namespace StellaOps.Evidence.Core.Adapters; /// An EvidenceBundle may contain multiple evidence types (reachability, VEX, provenance, etc.), /// each converted to a separate IEvidence record. /// -public sealed class EvidenceBundleAdapter : EvidenceAdapterBase, IEvidenceAdapter +public sealed partial class EvidenceBundleAdapter : EvidenceAdapterBase, IEvidenceAdapter { /// /// Schema version constants for evidence payloads. 
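// --- Illustrative sketch (not part of the patch): how the Diff payload introduced
// above is shaped when populated, e.g. from within the StellaOps.Evidence.Core
// assembly (the payload records are internal). The Entries element type
// (DiffEntryPayload) is inferred from the Select(...) projection in ConvertDiff;
// the sample values, including the DiffType string, are hypothetical placeholders.
var sampleDiff = new EvidenceBundleAdapter.DiffPayload
{
    Hash = "sha256:0f3a9c",                                  // hypothetical digest
    DiffType = "ComponentChange",                            // hypothetical DiffType rendered as string
    PreviousScanId = "scan-001",
    PreviousScanTime = DateTimeOffset.UtcNow.AddDays(-1),
    Entries = new List<EvidenceBundleAdapter.DiffEntryPayload>
    {
        new()
        {
            Operation = "Modified",
            Path = "lib/openssl",
            OldValue = "3.0.1",
            NewValue = "3.0.13",
            ComponentPurl = "pkg:generic/openssl@3.0.13"
        }
    }
};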
@@ -87,284 +87,4 @@ public sealed class EvidenceBundleAdapter : EvidenceAdapterBase, IEvidenceAdapte return results; } - - private static IEvidence ConvertReachability( - ReachabilityEvidence reachability, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new ReachabilityPayload - { - Hash = reachability.Hash, - ProofType = reachability.ProofType.ToString(), - FunctionPath = reachability.FunctionPath?.Select(f => new FunctionPathPayload - { - FunctionName = f.FunctionName, - FilePath = f.FilePath, - Line = f.Line, - Column = f.Column, - ModuleName = f.ModuleName - }).ToList(), - ImportChain = reachability.ImportChain?.Select(i => new ImportChainPayload - { - PackageName = i.PackageName, - Version = i.Version, - ImportedBy = i.ImportedBy, - ImportPath = i.ImportPath - }).ToList(), - LatticeState = reachability.LatticeState, - ConfidenceTier = reachability.ConfidenceTier - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Reachability, payload, provenance, SchemaVersions.Reachability); - } - - private static IEvidence ConvertVexStatus( - VexStatusEvidence vexStatus, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new VexStatusPayload - { - Hash = vexStatus.Hash, - VexStatus = vexStatus.Current?.VexStatus, - Justification = vexStatus.Current?.Justification, - ImpactStatement = vexStatus.Current?.ImpactStatement, - ActionStatement = vexStatus.Current?.ActionStatement, - StatementSource = vexStatus.Current?.Source, - StatementTimestamp = vexStatus.Current?.Timestamp - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Vex, payload, provenance, SchemaVersions.Vex); - } - - private static IEvidence ConvertProvenance( - ProvenanceEvidence provenanceEvidence, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new ProvenancePayload - { - Hash = provenanceEvidence.Hash, - BuilderId = provenanceEvidence.Ancestry?.BuildId, - BuildTime = provenanceEvidence.Ancestry?.BuildTime, - ImageDigest = provenanceEvidence.Ancestry?.ImageDigest, - LayerDigest = provenanceEvidence.Ancestry?.LayerDigest, - CommitHash = provenanceEvidence.Ancestry?.CommitHash, - VerificationStatus = provenanceEvidence.VerificationStatus, - RekorLogIndex = provenanceEvidence.RekorEntry?.LogIndex - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Provenance, payload, provenance, SchemaVersions.Provenance); - } - - private static IEvidence ConvertCallStack( - CallStackEvidence callStack, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new CallStackPayload - { - Hash = callStack.Hash, - SinkFrameIndex = callStack.SinkFrameIndex, - SourceFrameIndex = callStack.SourceFrameIndex, - Frames = callStack.Frames?.Select(f => new StackFramePayload - { - FunctionName = f.FunctionName, - FilePath = f.FilePath, - Line = f.Line, - Column = f.Column, - IsSink = f.IsSink, - IsSource = f.IsSource - }).ToList() - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Runtime, payload, provenance, SchemaVersions.CallStack); - } - - private static IEvidence ConvertDiff( - DiffEvidence diff, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new DiffPayload - { - Hash = diff.Hash, - DiffType = diff.DiffType.ToString(), - PreviousScanId = diff.PreviousScanId, - PreviousScanTime = diff.PreviousScanTime, - Entries = diff.Entries?.Select(e => new DiffEntryPayload - { - Operation = e.Operation.ToString(), - Path = e.Path, - OldValue = e.OldValue, - NewValue = e.NewValue, - ComponentPurl = e.ComponentPurl - 
}).ToList() - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Artifact, payload, provenance, SchemaVersions.Diff); - } - - private static IEvidence ConvertGraphRevision( - GraphRevisionEvidence graphRevision, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new GraphRevisionPayload - { - Hash = graphRevision.Hash, - RevisionId = graphRevision.GraphRevisionId, - VerdictReceipt = graphRevision.VerdictReceipt, - GraphComputedAt = graphRevision.GraphComputedAt, - NodeCount = graphRevision.TotalNodes, - EdgeCount = graphRevision.TotalEdges - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Dependency, payload, provenance, SchemaVersions.GraphRevision); - } - - // Sprint: SPRINT_20260112_008_LB_binary_diff_evidence_models (BINDIFF-LB-003) - private static IEvidence ConvertBinaryDiff( - BinaryDiffEvidence binaryDiff, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new BinaryDiffPayload - { - Hash = binaryDiff.Hash, - DiffType = binaryDiff.DiffType.ToString(), - PreviousBinaryDigest = binaryDiff.PreviousBinaryDigest, - CurrentBinaryDigest = binaryDiff.CurrentBinaryDigest, - BinaryFormat = binaryDiff.BinaryFormat, - ToolVersion = binaryDiff.ToolVersion, - SimilarityScore = binaryDiff.SimilarityScore, - FunctionChangeCount = binaryDiff.FunctionChanges.Length, - SymbolChangeCount = binaryDiff.SymbolChanges.Length, - SectionChangeCount = binaryDiff.SectionChanges.Length, - SecurityChangeCount = binaryDiff.SecurityChanges.Length, - HasSemanticDiff = binaryDiff.SemanticDiff is not null, - SemanticSimilarity = binaryDiff.SemanticDiff?.Similarity - }; - - return CreateEvidence(subjectNodeId, EvidenceType.Artifact, payload, provenance, SchemaVersions.BinaryDiff); - } - - #region Payload Records - - internal sealed record ReachabilityPayload - { - public string? Hash { get; init; } - public string? ProofType { get; init; } - public IReadOnlyList? FunctionPath { get; init; } - public IReadOnlyList? ImportChain { get; init; } - public string? LatticeState { get; init; } - public int? ConfidenceTier { get; init; } - } - - internal sealed record FunctionPathPayload - { - public required string FunctionName { get; init; } - public required string FilePath { get; init; } - public required int Line { get; init; } - public int? Column { get; init; } - public string? ModuleName { get; init; } - } - - internal sealed record ImportChainPayload - { - public required string PackageName { get; init; } - public string? Version { get; init; } - public string? ImportedBy { get; init; } - public string? ImportPath { get; init; } - } - - internal sealed record VexStatusPayload - { - public string? Hash { get; init; } - public string? VexStatus { get; init; } - public string? Justification { get; init; } - public string? ImpactStatement { get; init; } - public string? ActionStatement { get; init; } - public string? StatementSource { get; init; } - public DateTimeOffset? StatementTimestamp { get; init; } - } - - internal sealed record ProvenancePayload - { - public string? Hash { get; init; } - public string? BuilderId { get; init; } - public DateTimeOffset? BuildTime { get; init; } - public string? ImageDigest { get; init; } - public string? LayerDigest { get; init; } - public string? CommitHash { get; init; } - public string? VerificationStatus { get; init; } - public long? RekorLogIndex { get; init; } - } - - internal sealed record CallStackPayload - { - public string? Hash { get; init; } - public int? SinkFrameIndex { get; init; } - public int? 
SourceFrameIndex { get; init; } - public IReadOnlyList? Frames { get; init; } - } - - internal sealed record StackFramePayload - { - public required string FunctionName { get; init; } - public required string FilePath { get; init; } - public required int Line { get; init; } - public int? Column { get; init; } - public bool IsSink { get; init; } - public bool IsSource { get; init; } - } - - internal sealed record DiffPayload - { - public string? Hash { get; init; } - public string? DiffType { get; init; } - public string? PreviousScanId { get; init; } - public DateTimeOffset? PreviousScanTime { get; init; } - public IReadOnlyList? Entries { get; init; } - } - - internal sealed record DiffEntryPayload - { - public required string Operation { get; init; } - public required string Path { get; init; } - public string? OldValue { get; init; } - public string? NewValue { get; init; } - public string? ComponentPurl { get; init; } - } - - internal sealed record GraphRevisionPayload - { - public string? Hash { get; init; } - public string? RevisionId { get; init; } - public string? VerdictReceipt { get; init; } - public DateTimeOffset? GraphComputedAt { get; init; } - public int? NodeCount { get; init; } - public int? EdgeCount { get; init; } - } - - // Sprint: SPRINT_20260112_008_LB_binary_diff_evidence_models (BINDIFF-LB-003) - internal sealed record BinaryDiffPayload - { - public string? Hash { get; init; } - public string? DiffType { get; init; } - public string? PreviousBinaryDigest { get; init; } - public string? CurrentBinaryDigest { get; init; } - public string? BinaryFormat { get; init; } - public string? ToolVersion { get; init; } - public double? SimilarityScore { get; init; } - public int FunctionChangeCount { get; init; } - public int SymbolChangeCount { get; init; } - public int SectionChangeCount { get; init; } - public int SecurityChangeCount { get; init; } - public bool HasSemanticDiff { get; init; } - public double? SemanticSimilarity { get; init; } - } - - #endregion } diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.Payload.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.Payload.cs new file mode 100644 index 000000000..57c85d81e --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.Payload.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class EvidenceStatementAdapter +{ + internal sealed record EvidenceStatementPayload + { + public required string Source { get; init; } + public required string SourceVersion { get; init; } + public required DateTimeOffset CollectionTime { get; init; } + public required string SbomEntryId { get; init; } + public string? VulnerabilityId { get; init; } + public string? RawFindingHash { get; init; } + public string? 
OriginalEvidenceId { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.cs index dee8ea4f5..692cc535c 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementAdapter.cs @@ -14,7 +14,7 @@ namespace StellaOps.Evidence.Core.Adapters; /// - Payload from the predicate /// - Provenance from source/sourceVersion/collectionTime /// -public sealed class EvidenceStatementAdapter : EvidenceAdapterBase, IEvidenceAdapter +public sealed partial class EvidenceStatementAdapter : EvidenceAdapterBase, IEvidenceAdapter { private const string SchemaVersion = "evidence-statement/v1"; @@ -84,65 +84,4 @@ public sealed class EvidenceStatementAdapter : EvidenceAdapterBase, IEvidenceAda }; } - #region Payload Records - - internal sealed record EvidenceStatementPayload - { - public required string Source { get; init; } - public required string SourceVersion { get; init; } - public required DateTimeOffset CollectionTime { get; init; } - public required string SbomEntryId { get; init; } - public string? VulnerabilityId { get; init; } - public string? RawFindingHash { get; init; } - public string? OriginalEvidenceId { get; init; } - } - - #endregion -} - -/// -/// Input DTO for EvidenceStatementAdapter. -/// Decouples the adapter from direct dependency on StellaOps.Attestor.ProofChain. -/// -public sealed record EvidenceStatementInput -{ - /// - /// Subject artifact digest from the in-toto statement. - /// - public required string SubjectDigest { get; init; } - - /// - /// Scanner or feed name that produced this evidence. - /// - public required string Source { get; init; } - - /// - /// Version of the source tool. - /// - public required string SourceVersion { get; init; } - - /// - /// UTC timestamp when evidence was collected. - /// - public required DateTimeOffset CollectionTime { get; init; } - - /// - /// Reference to the SBOM entry this evidence relates to. - /// - public required string SbomEntryId { get; init; } - - /// - /// CVE or vulnerability identifier if applicable. - /// - public string? VulnerabilityId { get; init; } - - /// - /// Hash of the raw finding data (to avoid storing large payloads). - /// - public string? RawFindingHash { get; init; } - - /// - /// Original content-addressed evidence ID from the statement. - /// - public string? EvidenceId { get; init; } } diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementInput.cs new file mode 100644 index 000000000..cf967e0fa --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/EvidenceStatementInput.cs @@ -0,0 +1,48 @@ +namespace StellaOps.Evidence.Core.Adapters; + +/// +/// Input DTO for EvidenceStatementAdapter. +/// Decouples the adapter from direct dependency on StellaOps.Attestor.ProofChain. +/// +public sealed record EvidenceStatementInput +{ + /// + /// Subject artifact digest from the in-toto statement. + /// + public required string SubjectDigest { get; init; } + + /// + /// Scanner or feed name that produced this evidence. + /// + public required string Source { get; init; } + + /// + /// Version of the source tool. + /// + public required string SourceVersion { get; init; } + + /// + /// UTC timestamp when evidence was collected. 
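// --- Illustrative sketch (not part of the patch): constructing the input DTO that
// EvidenceStatementAdapter consumes, using only the properties declared in this
// file; every value below is a hypothetical placeholder.
var statementInput = new EvidenceStatementInput
{
    SubjectDigest = "sha256:4be1d2",
    Source = "scanner-x",
    SourceVersion = "0.58.0",
    CollectionTime = DateTimeOffset.UtcNow,
    SbomEntryId = "sbom-entry-42",
    VulnerabilityId = "CVE-2024-12345",
    RawFindingHash = "sha256:9ac002",
    EvidenceId = "evid-7f21"
};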
+ /// + public required DateTimeOffset CollectionTime { get; init; } + + /// + /// Reference to the SBOM entry this evidence relates to. + /// + public required string SbomEntryId { get; init; } + + /// + /// CVE or vulnerability identifier if applicable. + /// + public string? VulnerabilityId { get; init; } + + /// + /// Hash of the raw finding data (to avoid storing large payloads). + /// + public string? RawFindingHash { get; init; } + + /// + /// Original content-addressed evidence ID from the statement. + /// + public string? EvidenceId { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.Payload.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.Payload.cs new file mode 100644 index 000000000..05c1a76b7 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.Payload.cs @@ -0,0 +1,18 @@ +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class ProofSegmentAdapter +{ + internal sealed record ProofSegmentPayload + { + public required string SegmentId { get; init; } + public required string SegmentType { get; init; } + public required int Index { get; init; } + public required string InputHash { get; init; } + public required string ResultHash { get; init; } + public string? PrevSegmentHash { get; init; } + public required string ToolId { get; init; } + public required string ToolVersion { get; init; } + public required string Status { get; init; } + public string? SpineId { get; init; } + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.cs index 4d6089153..5bed4f350 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentAdapter.cs @@ -4,7 +4,7 @@ namespace StellaOps.Evidence.Core.Adapters; /// Converts Scanner's ProofSegment to unified records. /// Each segment represents a step in the proof chain from SBOM to VEX verdict. /// -public sealed class ProofSegmentAdapter : EvidenceAdapterBase, IEvidenceAdapter +public sealed partial class ProofSegmentAdapter : EvidenceAdapterBase, IEvidenceAdapter { private const string SchemaVersion = "proof-segment/v1"; @@ -67,78 +67,4 @@ public sealed class ProofSegmentAdapter : EvidenceAdapterBase, IEvidenceAdapter< _ => EvidenceType.Custom }; - #region Payload Records - - internal sealed record ProofSegmentPayload - { - public required string SegmentId { get; init; } - public required string SegmentType { get; init; } - public required int Index { get; init; } - public required string InputHash { get; init; } - public required string ResultHash { get; init; } - public string? PrevSegmentHash { get; init; } - public required string ToolId { get; init; } - public required string ToolVersion { get; init; } - public required string Status { get; init; } - public string? SpineId { get; init; } - } - - #endregion -} - -/// -/// Input DTO for ProofSegmentAdapter. -/// Decouples the adapter from direct dependency on StellaOps.Scanner.ProofSpine. -/// -public sealed record ProofSegmentInput -{ - /// - /// Unique segment identifier. - /// - public required string SegmentId { get; init; } - - /// - /// Segment type (e.g., "SbomSlice", "Match", "Reachability", "GuardAnalysis", "RuntimeObservation", "PolicyEval"). - /// - public required string SegmentType { get; init; } - - /// - /// Position in the proof chain (0-based). 
- /// - public required int Index { get; init; } - - /// - /// Hash of input data to this segment. - /// - public required string InputHash { get; init; } - - /// - /// Hash of output/result from this segment. - /// - public required string ResultHash { get; init; } - - /// - /// Hash of the previous segment (for chaining verification). - /// - public string? PrevSegmentHash { get; init; } - - /// - /// Tool that produced this segment. - /// - public required string ToolId { get; init; } - - /// - /// Version of the tool. - /// - public required string ToolVersion { get; init; } - - /// - /// Verification status (e.g., "Pending", "Verified", "Invalid", "Untrusted"). - /// - public required string Status { get; init; } - - /// - /// Parent spine ID for correlation. - /// - public string? SpineId { get; init; } } diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentInput.cs new file mode 100644 index 000000000..ed3c09558 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/ProofSegmentInput.cs @@ -0,0 +1,58 @@ +namespace StellaOps.Evidence.Core.Adapters; + +/// +/// Input DTO for ProofSegmentAdapter. +/// Decouples the adapter from direct dependency on StellaOps.Scanner.ProofSpine. +/// +public sealed record ProofSegmentInput +{ + /// + /// Unique segment identifier. + /// + public required string SegmentId { get; init; } + + /// + /// Segment type (e.g., "SbomSlice", "Match", "Reachability", "GuardAnalysis", "RuntimeObservation", "PolicyEval"). + /// + public required string SegmentType { get; init; } + + /// + /// Position in the proof chain (0-based). + /// + public required int Index { get; init; } + + /// + /// Hash of input data to this segment. + /// + public required string InputHash { get; init; } + + /// + /// Hash of output/result from this segment. + /// + public required string ResultHash { get; init; } + + /// + /// Hash of the previous segment (for chaining verification). + /// + public string? PrevSegmentHash { get; init; } + + /// + /// Tool that produced this segment. + /// + public required string ToolId { get; init; } + + /// + /// Version of the tool. + /// + public required string ToolVersion { get; init; } + + /// + /// Verification status (e.g., "Pending", "Verified", "Invalid", "Untrusted"). + /// + public required string Status { get; init; } + + /// + /// Parent spine ID for correlation. + /// + public string? SpineId { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Payloads.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Payloads.cs new file mode 100644 index 000000000..eed4344b3 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Payloads.cs @@ -0,0 +1,45 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class VexObservationAdapter +{ + /// + /// Payload for observation-level (provenance) evidence record. + /// + private sealed record VexObservationPayload( + string ObservationId, + string Tenant, + string ProviderId, + string StreamId, + string UpstreamId, + string? DocumentVersion, + string ContentHash, + string Format, + string? SpecVersion, + int StatementCount, + ImmutableArray Supersedes, + DateTimeOffset FetchedAt, + DateTimeOffset ReceivedAt, + DateTimeOffset CreatedAt); + + /// + /// Payload for statement-level VEX evidence record. 
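// --- Illustrative sketch (not part of the patch): a ProofSegmentInput describing
// one step of a proof chain, using the fields defined in ProofSegmentInput.cs
// above. "Reachability" and "Verified" come from the documented example values;
// the identifiers, hashes, and tool name are hypothetical placeholders.
var segmentInput = new ProofSegmentInput
{
    SegmentId = "seg-0003",
    SegmentType = "Reachability",
    Index = 3,
    InputHash = "sha256:1b0c77",
    ResultHash = "sha256:aa41f0",
    PrevSegmentHash = "sha256:77d9e2",
    ToolId = "stellaops-scanner",        // hypothetical tool id
    ToolVersion = "1.4.0",
    Status = "Verified",
    SpineId = "spine-2026-02-01"
};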
+ /// + private sealed record VexStatementPayload( + string ObservationId, + int StatementIndex, + string VulnerabilityId, + string ProductKey, + string Status, + string? Justification, + DateTimeOffset? LastObserved, + string? Locator, + string? IntroducedVersion, + string? FixedVersion, + string? Purl, + string? Cpe, + int EvidenceCount, + string ProviderId, + string StreamId); +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Records.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Records.cs new file mode 100644 index 000000000..3d59b2e60 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Records.cs @@ -0,0 +1,71 @@ +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class VexObservationAdapter +{ + private EvidenceRecord CreateObservationRecord( + VexObservationInput observation, + string subjectNodeId, + EvidenceProvenance provenance) + { + var payload = new VexObservationPayload( + ObservationId: observation.ObservationId, + Tenant: observation.Tenant, + ProviderId: observation.ProviderId, + StreamId: observation.StreamId, + UpstreamId: observation.Upstream.UpstreamId, + DocumentVersion: observation.Upstream.DocumentVersion, + ContentHash: observation.Upstream.ContentHash, + Format: observation.Content.Format, + SpecVersion: observation.Content.SpecVersion, + StatementCount: observation.Statements.Length, + Supersedes: observation.Supersedes, + FetchedAt: observation.Upstream.FetchedAt, + ReceivedAt: observation.Upstream.ReceivedAt, + CreatedAt: observation.CreatedAt); + + var signatures = BuildObservationSignatures(observation.Upstream.Signature); + + return CreateEvidence( + subjectNodeId: subjectNodeId, + evidenceType: EvidenceType.Provenance, + payload: payload, + provenance: provenance, + payloadSchemaVersion: PayloadSchemaVersion, + signatures: signatures); + } + + private EvidenceRecord CreateStatementRecord( + VexObservationInput observation, + VexObservationStatementInput statement, + string subjectNodeId, + EvidenceProvenance provenance, + int statementIndex) + { + var payload = new VexStatementPayload( + ObservationId: observation.ObservationId, + StatementIndex: statementIndex, + VulnerabilityId: statement.VulnerabilityId, + ProductKey: statement.ProductKey, + Status: statement.Status, + Justification: statement.Justification, + LastObserved: statement.LastObserved, + Locator: statement.Locator, + IntroducedVersion: statement.IntroducedVersion, + FixedVersion: statement.FixedVersion, + Purl: statement.Purl, + Cpe: statement.Cpe, + EvidenceCount: statement.Evidence.Length, + ProviderId: observation.ProviderId, + StreamId: observation.StreamId); + + var signatures = BuildObservationSignatures(observation.Upstream.Signature); + + return CreateEvidence( + subjectNodeId: subjectNodeId, + evidenceType: EvidenceType.Vex, + payload: payload, + provenance: provenance, + payloadSchemaVersion: PayloadSchemaVersion, + signatures: signatures); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Signatures.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Signatures.cs new file mode 100644 index 000000000..f975d1eef --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.Signatures.cs @@ -0,0 +1,26 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed partial class VexObservationAdapter +{ + private static 
ImmutableArray BuildObservationSignatures( + VexObservationSignatureInput signature) + { + if (!signature.Present || string.IsNullOrWhiteSpace(signature.Signature)) + { + return []; + } + + var sig = new EvidenceSignature + { + SignerId = signature.KeyId ?? "unknown", + Algorithm = signature.Format ?? "unknown", + SignatureBase64 = signature.Signature, + SignedAt = DateTimeOffset.UtcNow, + SignerType = SignerType.Vendor + }; + + return [sig]; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.cs index ea0f82eda..36c3210c2 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationAdapter.cs @@ -1,72 +1,8 @@ // // SPDX-License-Identifier: BUSL-1.1 // - -using System.Collections.Immutable; -using System.Text.Json.Nodes; - namespace StellaOps.Evidence.Core.Adapters; -/// -/// Input DTO for VexObservation data, decoupling from Excititor.Core dependency. -/// -public sealed record VexObservationInput -{ - public required string ObservationId { get; init; } - public required string Tenant { get; init; } - public required string ProviderId { get; init; } - public required string StreamId { get; init; } - public required VexObservationUpstreamInput Upstream { get; init; } - public required ImmutableArray Statements { get; init; } - public required VexObservationContentInput Content { get; init; } - public required DateTimeOffset CreatedAt { get; init; } - public ImmutableArray Supersedes { get; init; } = []; - public ImmutableDictionary Attributes { get; init; } = ImmutableDictionary.Empty; -} - -public sealed record VexObservationUpstreamInput -{ - public required string UpstreamId { get; init; } - public string? DocumentVersion { get; init; } - public required DateTimeOffset FetchedAt { get; init; } - public required DateTimeOffset ReceivedAt { get; init; } - public required string ContentHash { get; init; } - public required VexObservationSignatureInput Signature { get; init; } - public ImmutableDictionary Metadata { get; init; } = ImmutableDictionary.Empty; -} - -public sealed record VexObservationSignatureInput -{ - public bool Present { get; init; } - public string? Format { get; init; } - public string? KeyId { get; init; } - public string? Signature { get; init; } -} - -public sealed record VexObservationContentInput -{ - public required string Format { get; init; } - public string? SpecVersion { get; init; } - public JsonNode? Raw { get; init; } - public ImmutableDictionary Metadata { get; init; } = ImmutableDictionary.Empty; -} - -public sealed record VexObservationStatementInput -{ - public required string VulnerabilityId { get; init; } - public required string ProductKey { get; init; } - public required string Status { get; init; } - public DateTimeOffset? LastObserved { get; init; } - public string? Locator { get; init; } - public string? Justification { get; init; } - public string? IntroducedVersion { get; init; } - public string? FixedVersion { get; init; } - public string? Purl { get; init; } - public string? Cpe { get; init; } - public ImmutableArray Evidence { get; init; } = []; - public ImmutableDictionary Metadata { get; init; } = ImmutableDictionary.Empty; -} - /// /// Adapter that converts Excititor's VexObservation into unified IEvidence records. /// Uses DTO to avoid circular dependencies. 
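// --- Illustrative sketch (not part of the patch): a minimal VexObservationInput
// with one statement, matching the DTO shapes being moved out of this file. The
// ImmutableArray element type for Statements is implied by how the adapter
// enumerates it; all sample values are hypothetical placeholders.
var observation = new VexObservationInput
{
    ObservationId = "obs-001",
    Tenant = "default",
    ProviderId = "vendor-x",
    StreamId = "csaf",
    CreatedAt = DateTimeOffset.UtcNow,
    Content = new VexObservationContentInput
    {
        Format = "csaf",
        SpecVersion = "2.0"
    },
    Upstream = new VexObservationUpstreamInput
    {
        UpstreamId = "VEX-2026-0001",
        FetchedAt = DateTimeOffset.UtcNow,
        ReceivedAt = DateTimeOffset.UtcNow,
        ContentHash = "sha256:c0ffee",
        Signature = new VexObservationSignatureInput
        {
            Present = true,
            Format = "pgp",
            KeyId = "key-123",
            Signature = "bXktc2lnbmF0dXJl"   // base64 placeholder
        }
    },
    Statements =
    [
        new VexObservationStatementInput
        {
            VulnerabilityId = "CVE-2024-12345",
            ProductKey = "pkg:npm/lodash@4.17.21",
            Status = "not_affected",
            Justification = "vulnerable_code_not_in_execute_path"
        }
    ]
};
// Per the adapter summary above, this input yields one Provenance-typed record for
// the observation plus one Vex-typed record per statement.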
@@ -75,7 +11,7 @@ public sealed record VexObservationStatementInput /// VexObservations contain multiple statements; each statement becomes a separate evidence record. /// An additional observation-level evidence record captures the overall document provenance. /// -public sealed class VexObservationAdapter : EvidenceAdapterBase, IEvidenceAdapter +public sealed partial class VexObservationAdapter : EvidenceAdapterBase, IEvidenceAdapter { private const string PayloadSchemaVersion = "1.0.0"; private const string AdapterSource = "VexObservationAdapter"; @@ -119,130 +55,4 @@ public sealed class VexObservationAdapter : EvidenceAdapterBase, IEvidenceAdapte return records; } - - private EvidenceRecord CreateObservationRecord( - VexObservationInput observation, - string subjectNodeId, - EvidenceProvenance provenance) - { - var payload = new VexObservationPayload( - ObservationId: observation.ObservationId, - Tenant: observation.Tenant, - ProviderId: observation.ProviderId, - StreamId: observation.StreamId, - UpstreamId: observation.Upstream.UpstreamId, - DocumentVersion: observation.Upstream.DocumentVersion, - ContentHash: observation.Upstream.ContentHash, - Format: observation.Content.Format, - SpecVersion: observation.Content.SpecVersion, - StatementCount: observation.Statements.Length, - Supersedes: observation.Supersedes, - FetchedAt: observation.Upstream.FetchedAt, - ReceivedAt: observation.Upstream.ReceivedAt, - CreatedAt: observation.CreatedAt); - - var signatures = BuildObservationSignatures(observation.Upstream.Signature); - - return CreateEvidence( - subjectNodeId: subjectNodeId, - evidenceType: EvidenceType.Provenance, - payload: payload, - provenance: provenance, - payloadSchemaVersion: PayloadSchemaVersion, - signatures: signatures); - } - - private EvidenceRecord CreateStatementRecord( - VexObservationInput observation, - VexObservationStatementInput statement, - string subjectNodeId, - EvidenceProvenance provenance, - int statementIndex) - { - var payload = new VexStatementPayload( - ObservationId: observation.ObservationId, - StatementIndex: statementIndex, - VulnerabilityId: statement.VulnerabilityId, - ProductKey: statement.ProductKey, - Status: statement.Status, - Justification: statement.Justification, - LastObserved: statement.LastObserved, - Locator: statement.Locator, - IntroducedVersion: statement.IntroducedVersion, - FixedVersion: statement.FixedVersion, - Purl: statement.Purl, - Cpe: statement.Cpe, - EvidenceCount: statement.Evidence.Length, - ProviderId: observation.ProviderId, - StreamId: observation.StreamId); - - var signatures = BuildObservationSignatures(observation.Upstream.Signature); - - return CreateEvidence( - subjectNodeId: subjectNodeId, - evidenceType: EvidenceType.Vex, - payload: payload, - provenance: provenance, - payloadSchemaVersion: PayloadSchemaVersion, - signatures: signatures); - } - - private static ImmutableArray BuildObservationSignatures( - VexObservationSignatureInput signature) - { - if (!signature.Present || string.IsNullOrWhiteSpace(signature.Signature)) - { - return []; - } - - var sig = new EvidenceSignature - { - SignerId = signature.KeyId ?? "unknown", - Algorithm = signature.Format ?? "unknown", - SignatureBase64 = signature.Signature, - SignedAt = DateTimeOffset.UtcNow, - SignerType = SignerType.Vendor - }; - - return [sig]; - } - - /// - /// Payload for observation-level (provenance) evidence record. 
- /// - private sealed record VexObservationPayload( - string ObservationId, - string Tenant, - string ProviderId, - string StreamId, - string UpstreamId, - string? DocumentVersion, - string ContentHash, - string Format, - string? SpecVersion, - int StatementCount, - ImmutableArray Supersedes, - DateTimeOffset FetchedAt, - DateTimeOffset ReceivedAt, - DateTimeOffset CreatedAt); - - /// - /// Payload for statement-level VEX evidence record. - /// - private sealed record VexStatementPayload( - string ObservationId, - int StatementIndex, - string VulnerabilityId, - string ProductKey, - string Status, - string? Justification, - DateTimeOffset? LastObserved, - string? Locator, - string? IntroducedVersion, - string? FixedVersion, - string? Purl, - string? Cpe, - int EvidenceCount, - string ProviderId, - string StreamId); } diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationContentInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationContentInput.cs new file mode 100644 index 000000000..03c7b706a --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationContentInput.cs @@ -0,0 +1,13 @@ +using System.Collections.Immutable; +using System.Text.Json.Nodes; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed record VexObservationContentInput +{ + public required string Format { get; init; } + public string? SpecVersion { get; init; } + public JsonNode? Raw { get; init; } + public ImmutableDictionary Metadata { get; init; } = + ImmutableDictionary.Empty; +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationInput.cs new file mode 100644 index 000000000..530282eac --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationInput.cs @@ -0,0 +1,21 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Core.Adapters; + +/// +/// Input DTO for VexObservation data, decoupling from Excititor.Core dependency. +/// +public sealed record VexObservationInput +{ + public required string ObservationId { get; init; } + public required string Tenant { get; init; } + public required string ProviderId { get; init; } + public required string StreamId { get; init; } + public required VexObservationUpstreamInput Upstream { get; init; } + public required ImmutableArray Statements { get; init; } + public required VexObservationContentInput Content { get; init; } + public required DateTimeOffset CreatedAt { get; init; } + public ImmutableArray Supersedes { get; init; } = []; + public ImmutableDictionary Attributes { get; init; } = + ImmutableDictionary.Empty; +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationSignatureInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationSignatureInput.cs new file mode 100644 index 000000000..f9fd323db --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationSignatureInput.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Evidence.Core.Adapters; + +public sealed record VexObservationSignatureInput +{ + public bool Present { get; init; } + public string? Format { get; init; } + public string? KeyId { get; init; } + public string? 
Signature { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationStatementInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationStatementInput.cs new file mode 100644 index 000000000..895a5e9bb --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationStatementInput.cs @@ -0,0 +1,21 @@ +using System.Collections.Immutable; +using System.Text.Json.Nodes; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed record VexObservationStatementInput +{ + public required string VulnerabilityId { get; init; } + public required string ProductKey { get; init; } + public required string Status { get; init; } + public DateTimeOffset? LastObserved { get; init; } + public string? Locator { get; init; } + public string? Justification { get; init; } + public string? IntroducedVersion { get; init; } + public string? FixedVersion { get; init; } + public string? Purl { get; init; } + public string? Cpe { get; init; } + public ImmutableArray Evidence { get; init; } = []; + public ImmutableDictionary Metadata { get; init; } = + ImmutableDictionary.Empty; +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationUpstreamInput.cs b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationUpstreamInput.cs new file mode 100644 index 000000000..91d08f3e6 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/Adapters/VexObservationUpstreamInput.cs @@ -0,0 +1,15 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Core.Adapters; + +public sealed record VexObservationUpstreamInput +{ + public required string UpstreamId { get; init; } + public string? DocumentVersion { get; init; } + public required DateTimeOffset FetchedAt { get; init; } + public required DateTimeOffset ReceivedAt { get; init; } + public required string ContentHash { get; init; } + public required VexObservationSignatureInput Signature { get; init; } + public ImmutableDictionary Metadata { get; init; } = + ImmutableDictionary.Empty; +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/EvidenceHashInput.cs b/src/__Libraries/StellaOps.Evidence.Core/EvidenceHashInput.cs new file mode 100644 index 000000000..227cdce85 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/EvidenceHashInput.cs @@ -0,0 +1,13 @@ +namespace StellaOps.Evidence.Core; + +/// +/// Internal record for evidence ID hash computation. +/// Fields are sorted alphabetically for deterministic canonicalization. +/// +internal sealed record EvidenceHashInput( + string GeneratedAt, + string GeneratorId, + string GeneratorVersion, + string EvidenceType, + string PayloadBase64, + string SubjectNodeId); diff --git a/src/__Libraries/StellaOps.Evidence.Core/EvidenceRecord.cs b/src/__Libraries/StellaOps.Evidence.Core/EvidenceRecord.cs index c0addc2d8..046362d3d 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/EvidenceRecord.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/EvidenceRecord.cs @@ -111,14 +111,3 @@ public sealed record EvidenceRecord : IEvidence } } -/// -/// Internal record for evidence ID hash computation. -/// Fields are sorted alphabetically for deterministic canonicalization. 
-/// -internal sealed record EvidenceHashInput( - string GeneratedAt, - string GeneratorId, - string GeneratorVersion, - string EvidenceType, - string PayloadBase64, - string SubjectNodeId); diff --git a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Delete.cs b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Delete.cs new file mode 100644 index 000000000..cc3b0738e --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Delete.cs @@ -0,0 +1,20 @@ +namespace StellaOps.Evidence.Core; + +public sealed partial class InMemoryEvidenceStore +{ + /// + public Task DeleteAsync(string evidenceId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); + ct.ThrowIfCancellationRequested(); + + if (!_byId.TryRemove(evidenceId, out var evidence)) + { + return Task.FromResult(false); + } + + // Note: We don't remove from _bySubject index (ConcurrentBag doesn't support removal). + // The GetBySubject method filters out null entries. + return Task.FromResult(true); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Lookup.cs b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Lookup.cs new file mode 100644 index 000000000..b8fe48343 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Lookup.cs @@ -0,0 +1,66 @@ +namespace StellaOps.Evidence.Core; + +public sealed partial class InMemoryEvidenceStore +{ + /// + public Task GetByIdAsync(string evidenceId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); + ct.ThrowIfCancellationRequested(); + + _byId.TryGetValue(evidenceId, out var evidence); + return Task.FromResult(evidence); + } + + /// + public Task> GetByTypeAsync( + EvidenceType evidenceType, + int limit = 100, + CancellationToken ct = default) + { + ct.ThrowIfCancellationRequested(); + + var results = _byId.Values + .Where(e => e.EvidenceType == evidenceType) + .Take(limit) + .ToList(); + + return Task.FromResult>(results); + } + + /// + public Task ExistsAsync(string subjectNodeId, EvidenceType type, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + ct.ThrowIfCancellationRequested(); + + if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) + { + return Task.FromResult(false); + } + + var exists = evidenceIds + .Distinct() + .Any(id => _byId.TryGetValue(id, out var e) && e.EvidenceType == type); + + return Task.FromResult(exists); + } + + /// + public Task CountBySubjectAsync(string subjectNodeId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + ct.ThrowIfCancellationRequested(); + + if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) + { + return Task.FromResult(0); + } + + var count = evidenceIds + .Distinct() + .Count(id => _byId.ContainsKey(id)); + + return Task.FromResult(count); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Store.cs b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Store.cs new file mode 100644 index 000000000..ed4d9e2ec --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Store.cs @@ -0,0 +1,40 @@ +namespace StellaOps.Evidence.Core; + +public sealed partial class InMemoryEvidenceStore +{ + /// + public Task StoreAsync(IEvidence evidence, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(evidence); + ct.ThrowIfCancellationRequested(); + + 
_byId.TryAdd(evidence.EvidenceId, evidence); + var subjectBag = _bySubject.GetOrAdd(evidence.SubjectNodeId, _ => []); + if (!subjectBag.Contains(evidence.EvidenceId)) + { + subjectBag.Add(evidence.EvidenceId); + } + + return Task.FromResult(evidence.EvidenceId); + } + + /// + public Task StoreBatchAsync(IEnumerable evidenceRecords, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(evidenceRecords); + ct.ThrowIfCancellationRequested(); + + var count = 0; + foreach (var evidence in evidenceRecords) + { + if (_byId.TryAdd(evidence.EvidenceId, evidence)) + { + var subjectBag = _bySubject.GetOrAdd(evidence.SubjectNodeId, _ => []); + subjectBag.Add(evidence.EvidenceId); + count++; + } + } + + return Task.FromResult(count); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Subject.cs b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Subject.cs new file mode 100644 index 000000000..023e7bf44 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.Subject.cs @@ -0,0 +1,29 @@ +namespace StellaOps.Evidence.Core; + +public sealed partial class InMemoryEvidenceStore +{ + /// + public Task> GetBySubjectAsync( + string subjectNodeId, + EvidenceType? typeFilter = null, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + ct.ThrowIfCancellationRequested(); + + if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) + { + return Task.FromResult>([]); + } + + var results = evidenceIds + .Distinct() + .Select(id => _byId.TryGetValue(id, out var e) ? e : null) + .Where(e => e is not null) + .Where(e => typeFilter is null || e!.EvidenceType == typeFilter) + .Cast() + .ToList(); + + return Task.FromResult>(results); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.cs b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.cs index 39d77bef4..1fe3cbe8c 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.cs +++ b/src/__Libraries/StellaOps.Evidence.Core/InMemoryEvidenceStore.cs @@ -6,151 +6,11 @@ namespace StellaOps.Evidence.Core; /// Thread-safe in-memory implementation of . /// Intended for testing, development, and ephemeral processing. 
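// --- Illustrative usage sketch (not part of the patch): exercising the
// IEvidenceStore surface implemented by the partial files above. The helper name
// is hypothetical, and it takes an already-built IEvidence (e.g. produced by one
// of the adapters) so it does not assume EvidenceRecord's construction shape;
// StoreAsync is assumed to return the stored evidence id, as
// Task.FromResult(evidence.EvidenceId) suggests.
static async Task ExerciseStoreAsync(IEvidenceStore store, IEvidence evidence, CancellationToken ct)
{
    var id = await store.StoreAsync(evidence, ct);

    var forSubject = await store.GetBySubjectAsync(evidence.SubjectNodeId, null, ct);
    var hasVex = await store.ExistsAsync(evidence.SubjectNodeId, EvidenceType.Vex, ct);
    var total = await store.CountBySubjectAsync(evidence.SubjectNodeId, ct);

    var deleted = await store.DeleteAsync(id, ct);   // true only if the id was present
}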
/// -public sealed class InMemoryEvidenceStore : IEvidenceStore +public sealed partial class InMemoryEvidenceStore : IEvidenceStore { private readonly ConcurrentDictionary _byId = new(StringComparer.Ordinal); private readonly ConcurrentDictionary> _bySubject = new(StringComparer.Ordinal); - /// - public Task StoreAsync(IEvidence evidence, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(evidence); - ct.ThrowIfCancellationRequested(); - - _byId.TryAdd(evidence.EvidenceId, evidence); - - var subjectBag = _bySubject.GetOrAdd(evidence.SubjectNodeId, _ => []); - if (!subjectBag.Contains(evidence.EvidenceId)) - { - subjectBag.Add(evidence.EvidenceId); - } - - return Task.FromResult(evidence.EvidenceId); - } - - /// - public Task StoreBatchAsync(IEnumerable evidenceRecords, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(evidenceRecords); - ct.ThrowIfCancellationRequested(); - - var count = 0; - foreach (var evidence in evidenceRecords) - { - if (_byId.TryAdd(evidence.EvidenceId, evidence)) - { - var subjectBag = _bySubject.GetOrAdd(evidence.SubjectNodeId, _ => []); - subjectBag.Add(evidence.EvidenceId); - count++; - } - } - - return Task.FromResult(count); - } - - /// - public Task GetByIdAsync(string evidenceId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); - ct.ThrowIfCancellationRequested(); - - _byId.TryGetValue(evidenceId, out var evidence); - return Task.FromResult(evidence); - } - - /// - public Task> GetBySubjectAsync( - string subjectNodeId, - EvidenceType? typeFilter = null, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - ct.ThrowIfCancellationRequested(); - - if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) - { - return Task.FromResult>([]); - } - - var results = evidenceIds - .Distinct() - .Select(id => _byId.TryGetValue(id, out var e) ? e : null) - .Where(e => e is not null) - .Where(e => typeFilter is null || e!.EvidenceType == typeFilter) - .Cast() - .ToList(); - - return Task.FromResult>(results); - } - - /// - public Task> GetByTypeAsync( - EvidenceType evidenceType, - int limit = 100, - CancellationToken ct = default) - { - ct.ThrowIfCancellationRequested(); - - var results = _byId.Values - .Where(e => e.EvidenceType == evidenceType) - .Take(limit) - .ToList(); - - return Task.FromResult>(results); - } - - /// - public Task ExistsAsync(string subjectNodeId, EvidenceType type, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - ct.ThrowIfCancellationRequested(); - - if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) - { - return Task.FromResult(false); - } - - var exists = evidenceIds - .Distinct() - .Any(id => _byId.TryGetValue(id, out var e) && e.EvidenceType == type); - - return Task.FromResult(exists); - } - - /// - public Task DeleteAsync(string evidenceId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); - ct.ThrowIfCancellationRequested(); - - if (!_byId.TryRemove(evidenceId, out var evidence)) - { - return Task.FromResult(false); - } - - // Note: We don't remove from _bySubject index (ConcurrentBag doesn't support removal). - // The GetBySubject method filters out null entries. 
- return Task.FromResult(true); - } - - /// - public Task CountBySubjectAsync(string subjectNodeId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - ct.ThrowIfCancellationRequested(); - - if (!_bySubject.TryGetValue(subjectNodeId, out var evidenceIds)) - { - return Task.FromResult(0); - } - - var count = evidenceIds - .Distinct() - .Count(id => _byId.ContainsKey(id)); - - return Task.FromResult(count); - } - /// /// Clears all stored evidence. For testing only. /// diff --git a/src/__Libraries/StellaOps.Evidence.Core/TASKS.md b/src/__Libraries/StellaOps.Evidence.Core/TASKS.md index fe2bfbbd2..0975125fc 100644 --- a/src/__Libraries/StellaOps.Evidence.Core/TASKS.md +++ b/src/__Libraries/StellaOps.Evidence.Core/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0080-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0080-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Split adapters/store into <=100-line partials; added EvidenceBundleAdapter test; dotnet test 2026-02-04 (113 tests). | diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Count.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Count.cs new file mode 100644 index 000000000..5ec0e6489 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Count.cs @@ -0,0 +1,29 @@ +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task CountBySubjectAsync(string subjectNodeId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + + const string sql = """ + SELECT COUNT(*) + FROM evidence.records + WHERE subject_node_id = @subjectNodeId + AND tenant_id = @tenantId + """; + + var result = await ExecuteScalarAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@subjectNodeId", subjectNodeId); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + }, + ct).ConfigureAwait(false); + + return (int)result; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Delete.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Delete.cs new file mode 100644 index 000000000..3aa26d52d --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Delete.cs @@ -0,0 +1,28 @@ +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task DeleteAsync(string evidenceId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); + + const string sql = """ + DELETE FROM evidence.records + WHERE evidence_id = @evidenceId + AND tenant_id = @tenantId + """; + + var affected = await ExecuteAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@evidenceId", evidenceId); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + }, + ct).ConfigureAwait(false); + + return affected > 0; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Exists.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Exists.cs new file mode 100644 index 000000000..d8c383c2d --- /dev/null +++ 
b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Exists.cs @@ -0,0 +1,34 @@ +using StellaOps.Evidence.Core; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task ExistsAsync(string subjectNodeId, EvidenceType type, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + + const string sql = """ + SELECT EXISTS( + SELECT 1 FROM evidence.records + WHERE subject_node_id = @subjectNodeId + AND evidence_type = @evidenceType + AND tenant_id = @tenantId + ) + """; + + var result = await ExecuteScalarAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@subjectNodeId", subjectNodeId); + AddParameter(cmd, "@evidenceType", (short)type); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + }, + ct).ConfigureAwait(false); + + return result; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetById.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetById.cs new file mode 100644 index 000000000..b6b0f0489 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetById.cs @@ -0,0 +1,31 @@ +using StellaOps.Evidence.Core; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task GetByIdAsync(string evidenceId, CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); + + const string sql = """ + SELECT evidence_id, subject_node_id, evidence_type, payload, + payload_schema_ver, external_cid, provenance, signatures + FROM evidence.records + WHERE evidence_id = @evidenceId + AND tenant_id = @tenantId + """; + + return await QuerySingleOrDefaultAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@evidenceId", evidenceId); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + }, + MapEvidence, + ct).ConfigureAwait(false); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetBySubject.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetBySubject.cs new file mode 100644 index 000000000..cb898c16a --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetBySubject.cs @@ -0,0 +1,45 @@ +using StellaOps.Evidence.Core; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task> GetBySubjectAsync( + string subjectNodeId, + EvidenceType? 
typeFilter = null, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); + + var sql = """ + SELECT evidence_id, subject_node_id, evidence_type, payload, + payload_schema_ver, external_cid, provenance, signatures + FROM evidence.records + WHERE subject_node_id = @subjectNodeId + AND tenant_id = @tenantId + """; + + if (typeFilter.HasValue) + { + sql += " AND evidence_type = @evidenceType"; + } + + sql += " ORDER BY created_at DESC"; + + return await QueryAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@subjectNodeId", subjectNodeId); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + if (typeFilter.HasValue) + { + AddParameter(cmd, "@evidenceType", (short)typeFilter.Value); + } + }, + MapEvidence, + ct).ConfigureAwait(false); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetByType.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetByType.cs new file mode 100644 index 000000000..d065dfff5 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.GetByType.cs @@ -0,0 +1,35 @@ +using StellaOps.Evidence.Core; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task> GetByTypeAsync( + EvidenceType evidenceType, + int limit = 100, + CancellationToken ct = default) + { + const string sql = """ + SELECT evidence_id, subject_node_id, evidence_type, payload, + payload_schema_ver, external_cid, provenance, signatures + FROM evidence.records + WHERE evidence_type = @evidenceType + AND tenant_id = @tenantId + ORDER BY created_at DESC + LIMIT @limit + """; + + return await QueryAsync( + _tenantId, + sql, + cmd => + { + AddParameter(cmd, "@evidenceType", (short)evidenceType); + AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); + AddParameter(cmd, "@limit", limit); + }, + MapEvidence, + ct).ConfigureAwait(false); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Map.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Map.cs new file mode 100644 index 000000000..549064765 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Map.cs @@ -0,0 +1,38 @@ +using Npgsql; +using StellaOps.Evidence.Core; +using System.Text.Json; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + private static IEvidence MapEvidence(NpgsqlDataReader reader) + { + var evidenceId = reader.GetString(0); + var subjectNodeId = reader.GetString(1); + var evidenceType = (EvidenceType)reader.GetInt16(2); + var payload = reader.GetFieldValue(3); + var payloadSchemaVer = reader.GetString(4); + var externalCid = GetNullableString(reader, 5); + var provenanceJson = reader.GetString(6); + var signaturesJson = reader.GetString(7); + + var provenance = JsonSerializer.Deserialize(provenanceJson, _jsonOptions) + ?? throw new InvalidOperationException($"Failed to deserialize provenance for evidence {evidenceId}"); + + var signatures = JsonSerializer.Deserialize>(signaturesJson, _jsonOptions) + ?? 
[]; + + return new EvidenceRecord + { + EvidenceId = evidenceId, + SubjectNodeId = subjectNodeId, + EvidenceType = evidenceType, + Payload = payload, + PayloadSchemaVersion = payloadSchemaVer, + ExternalPayloadCid = externalCid, + Provenance = provenance, + Signatures = signatures + }; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Store.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Store.cs new file mode 100644 index 000000000..45994f202 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.Store.cs @@ -0,0 +1,54 @@ +using Npgsql; +using NpgsqlTypes; +using StellaOps.Evidence.Core; +using System.Text.Json; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task StoreAsync(IEvidence evidence, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(evidence); + + const string sql = """ + INSERT INTO evidence.records ( + evidence_id, subject_node_id, evidence_type, payload, + payload_schema_ver, external_cid, provenance, signatures, tenant_id + ) VALUES ( + @evidenceId, @subjectNodeId, @evidenceType, @payload, + @payloadSchemaVer, @externalCid, @provenance, @signatures, @tenantId + ) + ON CONFLICT (evidence_id) DO NOTHING + RETURNING evidence_id + """; + + await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct) + .ConfigureAwait(false); + await using var command = CreateCommand(sql, connection); + + AddEvidenceParameters(command, evidence); + + var result = await command.ExecuteScalarAsync(ct).ConfigureAwait(false); + + // If result is null, row already existed (idempotent) + return evidence.EvidenceId; + } + + private void AddEvidenceParameters(NpgsqlCommand command, IEvidence evidence) + { + AddParameter(command, "@evidenceId", evidence.EvidenceId); + AddParameter(command, "@subjectNodeId", evidence.SubjectNodeId); + AddParameter(command, "@evidenceType", (short)evidence.EvidenceType); + command.Parameters.Add(new NpgsqlParameter("@payload", NpgsqlDbType.Bytea) + { + TypedValue = evidence.Payload.ToArray() + }); + AddParameter(command, "@payloadSchemaVer", evidence.PayloadSchemaVersion); + AddParameter(command, "@externalCid", evidence.ExternalPayloadCid); + AddJsonbParameter(command, "@provenance", JsonSerializer.Serialize(evidence.Provenance, _jsonOptions)); + AddJsonbParameter(command, "@signatures", JsonSerializer.Serialize(evidence.Signatures, _jsonOptions)); + AddParameter(command, "@tenantId", Guid.Parse(_tenantId)); + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.StoreBatch.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.StoreBatch.cs new file mode 100644 index 000000000..2561b8d43 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.StoreBatch.cs @@ -0,0 +1,55 @@ +using Npgsql; +using StellaOps.Evidence.Core; + +namespace StellaOps.Evidence.Persistence.Postgres; + +public sealed partial class PostgresEvidenceStore +{ + /// + public async Task StoreBatchAsync(IEnumerable evidenceRecords, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(evidenceRecords); + + var records = evidenceRecords.ToList(); + if (records.Count == 0) + { + return 0; + } + + await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct) + .ConfigureAwait(false); + await using 
var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false); + + var storedCount = 0; + + foreach (var evidence in records) + { + const string sql = """ + INSERT INTO evidence.records ( + evidence_id, subject_node_id, evidence_type, payload, + payload_schema_ver, external_cid, provenance, signatures, tenant_id + ) VALUES ( + @evidenceId, @subjectNodeId, @evidenceType, @payload, + @payloadSchemaVer, @externalCid, @provenance, @signatures, @tenantId + ) + ON CONFLICT (evidence_id) DO NOTHING + """; + + await using var command = new NpgsqlCommand(sql, connection, transaction) + { + CommandTimeout = CommandTimeoutSeconds + }; + + AddEvidenceParameters(command, evidence); + + var affected = await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + if (affected > 0) + { + storedCount++; + } + } + + await transaction.CommitAsync(ct).ConfigureAwait(false); + return storedCount; + } +} diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.cs b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.cs index 8e913b19c..19c8102d1 100644 --- a/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.cs +++ b/src/__Libraries/StellaOps.Evidence.Persistence/Postgres/PostgresEvidenceStore.cs @@ -1,7 +1,4 @@ - using Microsoft.Extensions.Logging; -using Npgsql; -using NpgsqlTypes; using StellaOps.Evidence.Core; using StellaOps.Infrastructure.Postgres.Repositories; using System.Text.Json; @@ -12,10 +9,10 @@ namespace StellaOps.Evidence.Persistence.Postgres; /// PostgreSQL implementation of . /// Stores evidence records with content-addressed IDs and tenant isolation via RLS. /// -public sealed class PostgresEvidenceStore : RepositoryBase, IEvidenceStore +public sealed partial class PostgresEvidenceStore : RepositoryBase, IEvidenceStore { private readonly string _tenantId; - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; @@ -35,297 +32,4 @@ public sealed class PostgresEvidenceStore : RepositoryBase, ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); _tenantId = tenantId; } - - /// - public async Task StoreAsync(IEvidence evidence, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(evidence); - - const string sql = """ - INSERT INTO evidence.records ( - evidence_id, subject_node_id, evidence_type, payload, - payload_schema_ver, external_cid, provenance, signatures, tenant_id - ) VALUES ( - @evidenceId, @subjectNodeId, @evidenceType, @payload, - @payloadSchemaVer, @externalCid, @provenance, @signatures, @tenantId - ) - ON CONFLICT (evidence_id) DO NOTHING - RETURNING evidence_id - """; - - await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct) - .ConfigureAwait(false); - await using var command = CreateCommand(sql, connection); - - AddEvidenceParameters(command, evidence); - - var result = await command.ExecuteScalarAsync(ct).ConfigureAwait(false); - - // If result is null, row already existed (idempotent) - return evidence.EvidenceId; - } - - /// - public async Task StoreBatchAsync(IEnumerable evidenceRecords, CancellationToken ct = default) - { - ArgumentNullException.ThrowIfNull(evidenceRecords); - - var records = evidenceRecords.ToList(); - if (records.Count == 0) - { - return 0; - } - - await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct) - .ConfigureAwait(false); 
- await using var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false); - - var storedCount = 0; - - foreach (var evidence in records) - { - const string sql = """ - INSERT INTO evidence.records ( - evidence_id, subject_node_id, evidence_type, payload, - payload_schema_ver, external_cid, provenance, signatures, tenant_id - ) VALUES ( - @evidenceId, @subjectNodeId, @evidenceType, @payload, - @payloadSchemaVer, @externalCid, @provenance, @signatures, @tenantId - ) - ON CONFLICT (evidence_id) DO NOTHING - """; - - await using var command = new NpgsqlCommand(sql, connection, transaction) - { - CommandTimeout = CommandTimeoutSeconds - }; - - AddEvidenceParameters(command, evidence); - - var affected = await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false); - if (affected > 0) - { - storedCount++; - } - } - - await transaction.CommitAsync(ct).ConfigureAwait(false); - return storedCount; - } - - /// - public async Task GetByIdAsync(string evidenceId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); - - const string sql = """ - SELECT evidence_id, subject_node_id, evidence_type, payload, - payload_schema_ver, external_cid, provenance, signatures - FROM evidence.records - WHERE evidence_id = @evidenceId - AND tenant_id = @tenantId - """; - - return await QuerySingleOrDefaultAsync( - _tenantId, - sql, - cmd => - { - AddParameter(cmd, "@evidenceId", evidenceId); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - }, - MapEvidence, - ct).ConfigureAwait(false); - } - - /// - public async Task> GetBySubjectAsync( - string subjectNodeId, - EvidenceType? typeFilter = null, - CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - - var sql = """ - SELECT evidence_id, subject_node_id, evidence_type, payload, - payload_schema_ver, external_cid, provenance, signatures - FROM evidence.records - WHERE subject_node_id = @subjectNodeId - AND tenant_id = @tenantId - """; - - if (typeFilter.HasValue) - { - sql += " AND evidence_type = @evidenceType"; - } - - sql += " ORDER BY created_at DESC"; - - return await QueryAsync( - _tenantId, - sql, - cmd => - { - AddParameter(cmd, "@subjectNodeId", subjectNodeId); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - if (typeFilter.HasValue) - { - AddParameter(cmd, "@evidenceType", (short)typeFilter.Value); - } - }, - MapEvidence, - ct).ConfigureAwait(false); - } - - /// - public async Task> GetByTypeAsync( - EvidenceType evidenceType, - int limit = 100, - CancellationToken ct = default) - { - const string sql = """ - SELECT evidence_id, subject_node_id, evidence_type, payload, - payload_schema_ver, external_cid, provenance, signatures - FROM evidence.records - WHERE evidence_type = @evidenceType - AND tenant_id = @tenantId - ORDER BY created_at DESC - LIMIT @limit - """; - - return await QueryAsync( - _tenantId, - sql, - cmd => - { - AddParameter(cmd, "@evidenceType", (short)evidenceType); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - AddParameter(cmd, "@limit", limit); - }, - MapEvidence, - ct).ConfigureAwait(false); - } - - /// - public async Task ExistsAsync(string subjectNodeId, EvidenceType type, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - - const string sql = """ - SELECT EXISTS( - SELECT 1 FROM evidence.records - WHERE subject_node_id = @subjectNodeId - AND evidence_type = @evidenceType - AND tenant_id = @tenantId - ) - """; - - var result = await ExecuteScalarAsync( - 
_tenantId, - sql, - cmd => - { - AddParameter(cmd, "@subjectNodeId", subjectNodeId); - AddParameter(cmd, "@evidenceType", (short)type); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - }, - ct).ConfigureAwait(false); - - return result; - } - - /// - public async Task DeleteAsync(string evidenceId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(evidenceId); - - const string sql = """ - DELETE FROM evidence.records - WHERE evidence_id = @evidenceId - AND tenant_id = @tenantId - """; - - var affected = await ExecuteAsync( - _tenantId, - sql, - cmd => - { - AddParameter(cmd, "@evidenceId", evidenceId); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - }, - ct).ConfigureAwait(false); - - return affected > 0; - } - - /// - public async Task CountBySubjectAsync(string subjectNodeId, CancellationToken ct = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(subjectNodeId); - - const string sql = """ - SELECT COUNT(*) - FROM evidence.records - WHERE subject_node_id = @subjectNodeId - AND tenant_id = @tenantId - """; - - var result = await ExecuteScalarAsync( - _tenantId, - sql, - cmd => - { - AddParameter(cmd, "@subjectNodeId", subjectNodeId); - AddParameter(cmd, "@tenantId", Guid.Parse(_tenantId)); - }, - ct).ConfigureAwait(false); - - return (int)result; - } - - private void AddEvidenceParameters(NpgsqlCommand command, IEvidence evidence) - { - AddParameter(command, "@evidenceId", evidence.EvidenceId); - AddParameter(command, "@subjectNodeId", evidence.SubjectNodeId); - AddParameter(command, "@evidenceType", (short)evidence.EvidenceType); - command.Parameters.Add(new NpgsqlParameter("@payload", NpgsqlDbType.Bytea) - { - TypedValue = evidence.Payload.ToArray() - }); - AddParameter(command, "@payloadSchemaVer", evidence.PayloadSchemaVersion); - AddParameter(command, "@externalCid", evidence.ExternalPayloadCid); - AddJsonbParameter(command, "@provenance", JsonSerializer.Serialize(evidence.Provenance, JsonOptions)); - AddJsonbParameter(command, "@signatures", JsonSerializer.Serialize(evidence.Signatures, JsonOptions)); - AddParameter(command, "@tenantId", Guid.Parse(_tenantId)); - } - - private static IEvidence MapEvidence(NpgsqlDataReader reader) - { - var evidenceId = reader.GetString(0); - var subjectNodeId = reader.GetString(1); - var evidenceType = (EvidenceType)reader.GetInt16(2); - var payload = reader.GetFieldValue(3); - var payloadSchemaVer = reader.GetString(4); - var externalCid = GetNullableString(reader, 5); - var provenanceJson = reader.GetString(6); - var signaturesJson = reader.GetString(7); - - var provenance = JsonSerializer.Deserialize(provenanceJson, JsonOptions) - ?? throw new InvalidOperationException($"Failed to deserialize provenance for evidence {evidenceId}"); - - var signatures = JsonSerializer.Deserialize>(signaturesJson, JsonOptions) - ?? 
[]; - - return new EvidenceRecord - { - EvidenceId = evidenceId, - SubjectNodeId = subjectNodeId, - EvidenceType = evidenceType, - Payload = payload, - PayloadSchemaVersion = payloadSchemaVer, - ExternalPayloadCid = externalCid, - Provenance = provenance, - Signatures = signatures - }; - } } diff --git a/src/__Libraries/StellaOps.Evidence.Persistence/TASKS.md b/src/__Libraries/StellaOps.Evidence.Persistence/TASKS.md index 71bb7caa9..72af25b3f 100644 --- a/src/__Libraries/StellaOps.Evidence.Persistence/TASKS.md +++ b/src/__Libraries/StellaOps.Evidence.Persistence/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0081-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0081-A | TODO | Revalidated 2026-01-08 (open findings). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Split PostgresEvidenceStore into partials; dotnet test 2026-02-04 (35 tests). | diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/BudgetCheckResult.cs b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetCheckResult.cs new file mode 100644 index 000000000..36e3a0703 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetCheckResult.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record BudgetCheckResult +{ + public required bool IsWithinBudget { get; init; } + public IReadOnlyList Issues { get; init; } = []; + public BudgetExceededAction RecommendedAction { get; init; } + public bool CanAutoPrune { get; init; } + public long BytesToFree { get; init; } + + public static BudgetCheckResult WithinBudget() => new() { IsWithinBudget = true }; +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/BudgetExceededAction.cs b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetExceededAction.cs new file mode 100644 index 000000000..ac608ba15 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetExceededAction.cs @@ -0,0 +1,13 @@ +namespace StellaOps.Evidence.Budgets; + +public enum BudgetExceededAction +{ + /// Log warning but continue. + Warn, + + /// Block the operation. + Block, + + /// Automatically prune lowest priority evidence. + AutoPrune +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/BudgetStatus.cs b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetStatus.cs new file mode 100644 index 000000000..bef6bcc13 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/BudgetStatus.cs @@ -0,0 +1,19 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record BudgetStatus +{ + public required Guid ScanId { get; init; } + public required long TotalBudgetBytes { get; init; } + public required long UsedBytes { get; init; } + public required long RemainingBytes { get; init; } + public required decimal UtilizationPercent { get; init; } + public required IReadOnlyDictionary ByType { get; init; } +} + +public sealed record TypeBudgetStatus +{ + public required EvidenceType Type { get; init; } + public required long UsedBytes { get; init; } + public long? 
LimitBytes { get; init; } + public decimal UtilizationPercent { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/CompressionLevel.cs b/src/__Libraries/StellaOps.Evidence/Budgets/CompressionLevel.cs new file mode 100644 index 000000000..93347b50d --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/CompressionLevel.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Evidence.Budgets; + +public enum CompressionLevel +{ + None, + Fast, + Optimal, + Maximum +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudget.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudget.cs index 92208fa39..2a0cdc83b 100644 --- a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudget.cs +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudget.cs @@ -51,69 +51,3 @@ public sealed record EvidenceBudget } }; } - -public enum EvidenceType -{ - Verdict, - PolicyTrace, - CallGraph, - RuntimeCapture, - Sbom, - Vex, - Attestation, - PathWitness, - Advisory -} - -public enum RetentionTier -{ - /// Immediately accessible, highest cost. - Hot, - - /// Quick retrieval, moderate cost. - Warm, - - /// Delayed retrieval, lower cost. - Cold, - - /// Long-term storage, lowest cost. - Archive -} - -public sealed record RetentionPolicy -{ - /// - /// How long evidence stays in this tier. - /// - public required TimeSpan Duration { get; init; } - - /// - /// Compression algorithm for this tier. - /// - public CompressionLevel Compression { get; init; } = CompressionLevel.None; - - /// - /// Whether to deduplicate within this tier. - /// - public bool Deduplicate { get; init; } = true; -} - -public enum CompressionLevel -{ - None, - Fast, - Optimal, - Maximum -} - -public enum BudgetExceededAction -{ - /// Log warning but continue. - Warn, - - /// Block the operation. - Block, - - /// Automatically prune lowest priority evidence. 
- AutoPrune -} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Prune.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Prune.cs new file mode 100644 index 000000000..7b0d0858a --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Prune.cs @@ -0,0 +1,71 @@ +using Microsoft.Extensions.Logging; + +namespace StellaOps.Evidence.Budgets; + +public sealed partial class EvidenceBudgetService +{ + public async Task PruneToFitAsync(Guid scanId, long targetBytes, CancellationToken ct) + { + var budget = _options.CurrentValue; + var usage = await GetCurrentUsageAsync(scanId, ct).ConfigureAwait(false); + + if (usage.TotalBytes <= targetBytes) + { + return PruneResult.NoPruningNeeded(); + } + + var bytesToPrune = usage.TotalBytes - targetBytes; + var pruned = new List(); + + // Get all evidence items, sorted by pruning priority + var items = await _repository.GetByScanIdAsync(scanId, ct).ConfigureAwait(false); + var candidates = items + .Where(i => !budget.AlwaysPreserve.Contains(i.Type)) + .OrderBy(i => GetPrunePriority(i)) + .ThenBy(i => i.CreatedAt.UtcDateTime.Ticks) + .ThenBy(i => i.Id) + .ToList(); + + long prunedBytes = 0; + foreach (var item in candidates) + { + if (prunedBytes >= bytesToPrune) + break; + + // Move to archive tier or delete + await _repository.MoveToTierAsync(item.Id, RetentionTier.Archive, ct).ConfigureAwait(false); + pruned.Add(new PrunedItem(item.Id, item.Type, item.SizeBytes)); + prunedBytes += item.SizeBytes; + } + + _logger.LogInformation( + "Pruned {Count} items ({Bytes:N0} bytes) for scan {ScanId}", + pruned.Count, prunedBytes, scanId); + + return new PruneResult + { + Success = prunedBytes >= bytesToPrune, + BytesPruned = prunedBytes, + ItemsPruned = pruned, + BytesRemaining = usage.TotalBytes - prunedBytes + }; + } + + private static int GetPrunePriority(EvidenceItem item) + { + // Lower = prune first + return item.Type switch + { + EvidenceType.RuntimeCapture => 1, + EvidenceType.CallGraph => 2, + EvidenceType.Advisory => 3, + EvidenceType.PathWitness => 4, + EvidenceType.PolicyTrace => 5, + EvidenceType.Sbom => 6, + EvidenceType.Vex => 7, + EvidenceType.Attestation => 8, + EvidenceType.Verdict => 9, // Never prune + _ => 5 + }; + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Status.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Status.cs new file mode 100644 index 000000000..fb46e7f98 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Status.cs @@ -0,0 +1,30 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed partial class EvidenceBudgetService +{ + public async Task GetBudgetStatusAsync(Guid scanId, CancellationToken ct) + { + var budget = _options.CurrentValue; + var usage = await GetCurrentUsageAsync(scanId, ct).ConfigureAwait(false); + + return new BudgetStatus + { + ScanId = scanId, + TotalBudgetBytes = budget.MaxScanSizeBytes, + UsedBytes = usage.TotalBytes, + RemainingBytes = Math.Max(0, budget.MaxScanSizeBytes - usage.TotalBytes), + UtilizationPercent = (decimal)usage.TotalBytes / budget.MaxScanSizeBytes * 100, + ByType = usage.ByType.ToDictionary( + kvp => kvp.Key, + kvp => new TypeBudgetStatus + { + Type = kvp.Key, + UsedBytes = kvp.Value, + LimitBytes = budget.MaxPerType.GetValueOrDefault(kvp.Key), + UtilizationPercent = budget.MaxPerType.TryGetValue(kvp.Key, out var limit) + ? 
(decimal)kvp.Value / limit * 100 + : 0 + }) + }; + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Usage.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Usage.cs new file mode 100644 index 000000000..00729fa4c --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.Usage.cs @@ -0,0 +1,22 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed partial class EvidenceBudgetService +{ + private async Task GetCurrentUsageAsync(Guid scanId, CancellationToken ct) + { + // Implementation to calculate current usage from repository + var items = await _repository.GetByScanIdAsync(scanId, ct).ConfigureAwait(false); + + var totalBytes = items.Sum(i => i.SizeBytes); + var byType = items + .GroupBy(i => i.Type) + .OrderBy(g => g.Key) + .ToDictionary(g => g.Key, g => g.Sum(i => i.SizeBytes)); + + return new UsageStats + { + TotalBytes = totalBytes, + ByType = byType + }; + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.cs index bc19517c0..675f586fd 100644 --- a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.cs +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceBudgetService.cs @@ -1,18 +1,10 @@ - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using System.Globalization; namespace StellaOps.Evidence.Budgets; -public interface IEvidenceBudgetService -{ - Task CheckBudgetAsync(Guid scanId, EvidenceItem item, CancellationToken ct); - Task GetBudgetStatusAsync(Guid scanId, CancellationToken ct); - Task PruneToFitAsync(Guid scanId, long targetBytes, CancellationToken ct); -} - -public sealed class EvidenceBudgetService : IEvidenceBudgetService +public sealed partial class EvidenceBudgetService : IEvidenceBudgetService { private readonly IEvidenceRepository _repository; private readonly IOptionsMonitor _options; @@ -32,7 +24,7 @@ public sealed class EvidenceBudgetService : IEvidenceBudgetService { ArgumentNullException.ThrowIfNull(item); var budget = _options.CurrentValue; - var currentUsage = await GetCurrentUsageAsync(scanId, ct); + var currentUsage = await GetCurrentUsageAsync(scanId, ct).ConfigureAwait(false); var issues = new List(); @@ -72,182 +64,4 @@ public sealed class EvidenceBudgetService : IEvidenceBudgetService BytesToFree = Math.Max(0, projectedTotal - budget.MaxScanSizeBytes) }; } - - public async Task GetBudgetStatusAsync(Guid scanId, CancellationToken ct) - { - var budget = _options.CurrentValue; - var usage = await GetCurrentUsageAsync(scanId, ct); - - return new BudgetStatus - { - ScanId = scanId, - TotalBudgetBytes = budget.MaxScanSizeBytes, - UsedBytes = usage.TotalBytes, - RemainingBytes = Math.Max(0, budget.MaxScanSizeBytes - usage.TotalBytes), - UtilizationPercent = (decimal)usage.TotalBytes / budget.MaxScanSizeBytes * 100, - ByType = usage.ByType.ToDictionary( - kvp => kvp.Key, - kvp => new TypeBudgetStatus - { - Type = kvp.Key, - UsedBytes = kvp.Value, - LimitBytes = budget.MaxPerType.GetValueOrDefault(kvp.Key), - UtilizationPercent = budget.MaxPerType.TryGetValue(kvp.Key, out var limit) - ? 
(decimal)kvp.Value / limit * 100 - : 0 - }) - }; - } - - public async Task PruneToFitAsync(Guid scanId, long targetBytes, CancellationToken ct) - { - var budget = _options.CurrentValue; - var usage = await GetCurrentUsageAsync(scanId, ct); - - if (usage.TotalBytes <= targetBytes) - { - return PruneResult.NoPruningNeeded(); - } - - var bytesToPrune = usage.TotalBytes - targetBytes; - var pruned = new List(); - - // Get all evidence items, sorted by pruning priority - var items = await _repository.GetByScanIdAsync(scanId, ct); - var candidates = items - .Where(i => !budget.AlwaysPreserve.Contains(i.Type)) - .OrderBy(i => GetPrunePriority(i)) - .ThenBy(i => i.CreatedAt.UtcDateTime.Ticks) - .ThenBy(i => i.Id) - .ToList(); - - long prunedBytes = 0; - foreach (var item in candidates) - { - if (prunedBytes >= bytesToPrune) - break; - - // Move to archive tier or delete - await _repository.MoveToTierAsync(item.Id, RetentionTier.Archive, ct); - pruned.Add(new PrunedItem(item.Id, item.Type, item.SizeBytes)); - prunedBytes += item.SizeBytes; - } - - _logger.LogInformation( - "Pruned {Count} items ({Bytes:N0} bytes) for scan {ScanId}", - pruned.Count, prunedBytes, scanId); - - return new PruneResult - { - Success = prunedBytes >= bytesToPrune, - BytesPruned = prunedBytes, - ItemsPruned = pruned, - BytesRemaining = usage.TotalBytes - prunedBytes - }; - } - - private static int GetPrunePriority(EvidenceItem item) - { - // Lower = prune first - return item.Type switch - { - EvidenceType.RuntimeCapture => 1, - EvidenceType.CallGraph => 2, - EvidenceType.Advisory => 3, - EvidenceType.PathWitness => 4, - EvidenceType.PolicyTrace => 5, - EvidenceType.Sbom => 6, - EvidenceType.Vex => 7, - EvidenceType.Attestation => 8, - EvidenceType.Verdict => 9, // Never prune - _ => 5 - }; - } - - private async Task GetCurrentUsageAsync(Guid scanId, CancellationToken ct) - { - // Implementation to calculate current usage from repository - var items = await _repository.GetByScanIdAsync(scanId, ct); - - var totalBytes = items.Sum(i => i.SizeBytes); - var byType = items - .GroupBy(i => i.Type) - .OrderBy(g => g.Key) - .ToDictionary(g => g.Key, g => g.Sum(i => i.SizeBytes)); - - return new UsageStats - { - TotalBytes = totalBytes, - ByType = byType - }; - } -} - -public sealed record BudgetCheckResult -{ - public required bool IsWithinBudget { get; init; } - public IReadOnlyList Issues { get; init; } = []; - public BudgetExceededAction RecommendedAction { get; init; } - public bool CanAutoPrune { get; init; } - public long BytesToFree { get; init; } - - public static BudgetCheckResult WithinBudget() => new() { IsWithinBudget = true }; -} - -public sealed record BudgetStatus -{ - public required Guid ScanId { get; init; } - public required long TotalBudgetBytes { get; init; } - public required long UsedBytes { get; init; } - public required long RemainingBytes { get; init; } - public required decimal UtilizationPercent { get; init; } - public required IReadOnlyDictionary ByType { get; init; } -} - -public sealed record TypeBudgetStatus -{ - public required EvidenceType Type { get; init; } - public required long UsedBytes { get; init; } - public long? 
LimitBytes { get; init; } - public decimal UtilizationPercent { get; init; } -} - -public sealed record PruneResult -{ - public required bool Success { get; init; } - public long BytesPruned { get; init; } - public IReadOnlyList ItemsPruned { get; init; } = []; - public long BytesRemaining { get; init; } - - public static PruneResult NoPruningNeeded() => new() { Success = true }; -} - -public sealed record PrunedItem(Guid ItemId, EvidenceType Type, long SizeBytes); - -public sealed record UsageStats -{ - public long TotalBytes { get; init; } - public IReadOnlyDictionary ByType { get; init; } = new Dictionary(); -} - -// Supporting interfaces and types - -public interface IEvidenceRepository -{ - Task> GetByScanIdAsync(Guid scanId, CancellationToken ct); - Task> GetByScanIdAndTypeAsync(Guid scanId, EvidenceType type, CancellationToken ct); - Task> GetOlderThanAsync(RetentionTier tier, DateTimeOffset cutoff, CancellationToken ct); - Task MoveToTierAsync(Guid itemId, RetentionTier tier, CancellationToken ct); - Task UpdateContentAsync(Guid itemId, byte[] content, CancellationToken ct); -} - -public sealed record EvidenceItem -{ - public required Guid Id { get; init; } - public required Guid ScanId { get; init; } - public required EvidenceType Type { get; init; } - public required long SizeBytes { get; init; } - public required RetentionTier Tier { get; init; } - public required DateTimeOffset CreatedAt { get; init; } - public string? ArchiveKey { get; init; } } diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceItem.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceItem.cs new file mode 100644 index 000000000..db73c1ad1 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceItem.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record EvidenceItem +{ + public required Guid Id { get; init; } + public required Guid ScanId { get; init; } + public required EvidenceType Type { get; init; } + public required long SizeBytes { get; init; } + public required RetentionTier Tier { get; init; } + public required DateTimeOffset CreatedAt { get; init; } + public string? 
ArchiveKey { get; init; } +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceType.cs b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceType.cs new file mode 100644 index 000000000..4572d0729 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/EvidenceType.cs @@ -0,0 +1,14 @@ +namespace StellaOps.Evidence.Budgets; + +public enum EvidenceType +{ + Verdict, + PolicyTrace, + CallGraph, + RuntimeCapture, + Sbom, + Vex, + Attestation, + PathWitness, + Advisory +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceBudgetService.cs b/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceBudgetService.cs new file mode 100644 index 000000000..83aa9282d --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceBudgetService.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Evidence.Budgets; + +public interface IEvidenceBudgetService +{ + Task CheckBudgetAsync(Guid scanId, EvidenceItem item, CancellationToken ct); + Task GetBudgetStatusAsync(Guid scanId, CancellationToken ct); + Task PruneToFitAsync(Guid scanId, long targetBytes, CancellationToken ct); +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceRepository.cs b/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceRepository.cs new file mode 100644 index 000000000..d6785f046 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/IEvidenceRepository.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Budgets; + +public interface IEvidenceRepository +{ + Task> GetByScanIdAsync(Guid scanId, CancellationToken ct); + Task> GetByScanIdAndTypeAsync(Guid scanId, EvidenceType type, CancellationToken ct); + Task> GetOlderThanAsync(RetentionTier tier, DateTimeOffset cutoff, CancellationToken ct); + Task MoveToTierAsync(Guid itemId, RetentionTier tier, CancellationToken ct); + Task UpdateContentAsync(Guid itemId, byte[] content, CancellationToken ct); +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/PruneResult.cs b/src/__Libraries/StellaOps.Evidence/Budgets/PruneResult.cs new file mode 100644 index 000000000..22b32aea7 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/PruneResult.cs @@ -0,0 +1,13 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record PruneResult +{ + public required bool Success { get; init; } + public long BytesPruned { get; init; } + public IReadOnlyList ItemsPruned { get; init; } = []; + public long BytesRemaining { get; init; } + + public static PruneResult NoPruningNeeded() => new() { Success = true }; +} + +public sealed record PrunedItem(Guid ItemId, EvidenceType Type, long SizeBytes); diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/RetentionPolicy.cs b/src/__Libraries/StellaOps.Evidence/Budgets/RetentionPolicy.cs new file mode 100644 index 000000000..84bbd7736 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/RetentionPolicy.cs @@ -0,0 +1,19 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record RetentionPolicy +{ + /// + /// How long evidence stays in this tier. + /// + public required TimeSpan Duration { get; init; } + + /// + /// Compression algorithm for this tier. + /// + public CompressionLevel Compression { get; init; } = CompressionLevel.None; + + /// + /// Whether to deduplicate within this tier. 
+ /// + public bool Deduplicate { get; init; } = true; +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/RetentionTier.cs b/src/__Libraries/StellaOps.Evidence/Budgets/RetentionTier.cs new file mode 100644 index 000000000..b2fc26917 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/RetentionTier.cs @@ -0,0 +1,16 @@ +namespace StellaOps.Evidence.Budgets; + +public enum RetentionTier +{ + /// Immediately accessible, highest cost. + Hot, + + /// Quick retrieval, moderate cost. + Warm, + + /// Delayed retrieval, lower cost. + Cold, + + /// Long-term storage, lowest cost. + Archive +} diff --git a/src/__Libraries/StellaOps.Evidence/Budgets/UsageStats.cs b/src/__Libraries/StellaOps.Evidence/Budgets/UsageStats.cs new file mode 100644 index 000000000..42cb94b9f --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Budgets/UsageStats.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Evidence.Budgets; + +public sealed record UsageStats +{ + public long TotalBytes { get; init; } + public IReadOnlyDictionary ByType { get; init; } = new Dictionary(); +} diff --git a/src/__Libraries/StellaOps.Evidence/Models/AttestationEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/AttestationEvidence.cs new file mode 100644 index 000000000..a31f680ff --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/AttestationEvidence.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Models; + +public sealed record AttestationEvidence( + string AttestationId, + string Type, + string Digest, + string SignerKeyId, + bool SignatureValid, + DateTimeOffset SignedAt, + string? RekorLogIndex); diff --git a/src/__Libraries/StellaOps.Evidence/Models/EvidenceIndex.cs b/src/__Libraries/StellaOps.Evidence/Models/EvidenceIndex.cs index b436d784f..295cc75df 100644 --- a/src/__Libraries/StellaOps.Evidence/Models/EvidenceIndex.cs +++ b/src/__Libraries/StellaOps.Evidence/Models/EvidenceIndex.cs @@ -1,4 +1,4 @@ -using System.Collections.Immutable; +using System.Collections.Immutable; namespace StellaOps.Evidence.Models; @@ -20,83 +20,3 @@ public sealed record EvidenceIndex public required DateTimeOffset CreatedAt { get; init; } public string? IndexDigest { get; init; } } - -public sealed record VerdictReference( - string VerdictId, - string Digest, - VerdictOutcome Outcome, - string? PolicyVersion); - -public enum VerdictOutcome -{ - Pass, - Fail, - Warn, - Unknown -} - -public sealed record SbomEvidence( - string SbomId, - string Format, - string Digest, - string? Uri, - int ComponentCount, - DateTimeOffset GeneratedAt); - -public sealed record AttestationEvidence( - string AttestationId, - string Type, - string Digest, - string SignerKeyId, - bool SignatureValid, - DateTimeOffset SignedAt, - string? RekorLogIndex); - -public sealed record VexEvidence( - string VexId, - string Format, - string Digest, - string Source, - int StatementCount, - ImmutableArray AffectedVulnerabilities); - -public sealed record ReachabilityEvidence( - string ProofId, - string VulnerabilityId, - string ComponentPurl, - ReachabilityStatus Status, - string? EntryPoint, - ImmutableArray CallPath, - string Digest); - -public enum ReachabilityStatus -{ - Reachable, - NotReachable, - Inconclusive, - NotAnalyzed -} - -public sealed record UnknownEvidence( - string UnknownId, - string ReasonCode, - string Description, - string? ComponentPurl, - string? 
VulnerabilityId, - UnknownSeverity Severity); - -public enum UnknownSeverity -{ - Low, - Medium, - High, - Critical -} - -public sealed record ToolChainEvidence( - string ScannerVersion, - string SbomGeneratorVersion, - string ReachabilityEngineVersion, - string AttestorVersion, - string PolicyEngineVersion, - ImmutableDictionary AdditionalTools); diff --git a/src/__Libraries/StellaOps.Evidence/Models/ProofConclusion.cs b/src/__Libraries/StellaOps.Evidence/Models/ProofConclusion.cs new file mode 100644 index 000000000..8123b3fee --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/ProofConclusion.cs @@ -0,0 +1,32 @@ +namespace StellaOps.Evidence.Models; + +/// +/// The conclusion drawn from a proof record. +/// +public enum ProofConclusion +{ + /// + /// The component is affected by the vulnerability. + /// + Affected, + + /// + /// The component is not affected by the vulnerability. + /// + NotAffected, + + /// + /// The vulnerability has been fixed in this component. + /// + Fixed, + + /// + /// The effect is still under investigation. + /// + UnderInvestigation, + + /// + /// The proof is inconclusive. + /// + Inconclusive +} diff --git a/src/__Libraries/StellaOps.Evidence/Models/ProofRecord.cs b/src/__Libraries/StellaOps.Evidence/Models/ProofRecord.cs index e9eb63c75..0e02471d7 100644 --- a/src/__Libraries/StellaOps.Evidence/Models/ProofRecord.cs +++ b/src/__Libraries/StellaOps.Evidence/Models/ProofRecord.cs @@ -1,5 +1,4 @@ // Licensed to StellaOps under the BUSL-1.1 license. - using System.Collections.Immutable; namespace StellaOps.Evidence.Models; @@ -72,42 +71,11 @@ public sealed record ProofRecord /// /// Gets additional attributes for extensibility. /// - public ImmutableDictionary Attributes { get; init; } = - ImmutableDictionary.Empty; + public ImmutableDictionary Attributes { get; init; } + = ImmutableDictionary.Empty; /// /// Gets the content-addressed digest of this proof for deduplication. /// public string? Digest { get; init; } } - -/// -/// The conclusion drawn from a proof record. -/// -public enum ProofConclusion -{ - /// - /// The component is affected by the vulnerability. - /// - Affected, - - /// - /// The component is not affected by the vulnerability. - /// - NotAffected, - - /// - /// The vulnerability has been fixed in this component. - /// - Fixed, - - /// - /// The effect is still under investigation. - /// - UnderInvestigation, - - /// - /// The proof is inconclusive. - /// - Inconclusive -} diff --git a/src/__Libraries/StellaOps.Evidence/Models/ReachabilityEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/ReachabilityEvidence.cs new file mode 100644 index 000000000..f80a43a4a --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/ReachabilityEvidence.cs @@ -0,0 +1,20 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Models; + +public sealed record ReachabilityEvidence( + string ProofId, + string VulnerabilityId, + string ComponentPurl, + ReachabilityStatus Status, + string? 
EntryPoint, + ImmutableArray CallPath, + string Digest); + +public enum ReachabilityStatus +{ + Reachable, + NotReachable, + Inconclusive, + NotAnalyzed +} diff --git a/src/__Libraries/StellaOps.Evidence/Models/SbomEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/SbomEvidence.cs new file mode 100644 index 000000000..62d4ef728 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/SbomEvidence.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Evidence.Models; + +public sealed record SbomEvidence( + string SbomId, + string Format, + string Digest, + string? Uri, + int ComponentCount, + DateTimeOffset GeneratedAt); diff --git a/src/__Libraries/StellaOps.Evidence/Models/ToolChainEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/ToolChainEvidence.cs new file mode 100644 index 000000000..2aec1fd18 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/ToolChainEvidence.cs @@ -0,0 +1,11 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Models; + +public sealed record ToolChainEvidence( + string ScannerVersion, + string SbomGeneratorVersion, + string ReachabilityEngineVersion, + string AttestorVersion, + string PolicyEngineVersion, + ImmutableDictionary AdditionalTools); diff --git a/src/__Libraries/StellaOps.Evidence/Models/UnknownEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/UnknownEvidence.cs new file mode 100644 index 000000000..258366042 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/UnknownEvidence.cs @@ -0,0 +1,17 @@ +namespace StellaOps.Evidence.Models; + +public sealed record UnknownEvidence( + string UnknownId, + string ReasonCode, + string Description, + string? ComponentPurl, + string? VulnerabilityId, + UnknownSeverity Severity); + +public enum UnknownSeverity +{ + Low, + Medium, + High, + Critical +} diff --git a/src/__Libraries/StellaOps.Evidence/Models/VerdictReference.cs b/src/__Libraries/StellaOps.Evidence/Models/VerdictReference.cs new file mode 100644 index 000000000..0497928c9 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/VerdictReference.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Evidence.Models; + +public sealed record VerdictReference( + string VerdictId, + string Digest, + VerdictOutcome Outcome, + string? PolicyVersion); + +public enum VerdictOutcome +{ + Pass, + Fail, + Warn, + Unknown +} diff --git a/src/__Libraries/StellaOps.Evidence/Models/VexEvidence.cs b/src/__Libraries/StellaOps.Evidence/Models/VexEvidence.cs new file mode 100644 index 000000000..6ffd5e79c --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Models/VexEvidence.cs @@ -0,0 +1,11 @@ +using System.Collections.Immutable; + +namespace StellaOps.Evidence.Models; + +public sealed record VexEvidence( + string VexId, + string Format, + string Digest, + string Source, + int StatementCount, + ImmutableArray AffectedVulnerabilities); diff --git a/src/__Libraries/StellaOps.Evidence/Retention/IArchiveStorage.cs b/src/__Libraries/StellaOps.Evidence/Retention/IArchiveStorage.cs new file mode 100644 index 000000000..cd605d0e5 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/IArchiveStorage.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Evidence.Retention; + +/// +/// Archive storage interface for long-term retention. 
+/// +public interface IArchiveStorage +{ + Task RetrieveAsync(string archiveKey, CancellationToken ct); + Task StoreAsync(byte[] content, CancellationToken ct); +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/IRetentionTierManager.cs b/src/__Libraries/StellaOps.Evidence/Retention/IRetentionTierManager.cs new file mode 100644 index 000000000..ecfe88c7b --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/IRetentionTierManager.cs @@ -0,0 +1,10 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public interface IRetentionTierManager +{ + Task RunMigrationAsync(CancellationToken ct); + RetentionTier GetCurrentTier(EvidenceItem item); + Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct); +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/MigratedItem.cs b/src/__Libraries/StellaOps.Evidence/Retention/MigratedItem.cs new file mode 100644 index 000000000..c92561db8 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/MigratedItem.cs @@ -0,0 +1,5 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public sealed record MigratedItem(Guid ItemId, RetentionTier FromTier, RetentionTier ToTier); diff --git a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Audit.cs b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Audit.cs new file mode 100644 index 000000000..cf115bff0 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Audit.cs @@ -0,0 +1,33 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public sealed partial class RetentionTierManager +{ + public async Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct) + { + var budget = _options.CurrentValue; + + // Ensure all AlwaysPreserve types are in Hot tier for audit export + foreach (var type in budget.AlwaysPreserve) + { + var items = await _repository.GetByScanIdAndTypeAsync(scanId, type, ct).ConfigureAwait(false); + foreach (var item in items.Where(i => i.Tier != RetentionTier.Hot)) + { + await RestoreToHotAsync(item, ct).ConfigureAwait(false); + } + } + } + + private async Task RestoreToHotAsync(EvidenceItem item, CancellationToken ct) + { + if (item.Tier == RetentionTier.Archive) + { + // Retrieve from archive storage + var content = await _archiveStorage.RetrieveAsync(item.ArchiveKey!, ct).ConfigureAwait(false); + await _repository.UpdateContentAsync(item.Id, content, ct).ConfigureAwait(false); + } + + await _repository.MoveToTierAsync(item.Id, RetentionTier.Hot, ct).ConfigureAwait(false); + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Compress.cs b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Compress.cs new file mode 100644 index 000000000..b86b419cc --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Compress.cs @@ -0,0 +1,15 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public sealed partial class RetentionTierManager +{ + private Task CompressAsync( + EvidenceItem item, + CompressionLevel level, + CancellationToken ct) + { + return Task.FromException(new NotSupportedException( + "Compression requires repository content retrieval, which is not implemented.")); + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.CurrentTier.cs b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.CurrentTier.cs new file mode 100644 index 
000000000..19de0fd5d --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.CurrentTier.cs @@ -0,0 +1,21 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public sealed partial class RetentionTierManager +{ + public RetentionTier GetCurrentTier(EvidenceItem item) + { + var budget = _options.CurrentValue; + var age = _timeProvider.GetUtcNow() - item.CreatedAt; + + if (age < budget.RetentionPolicies[RetentionTier.Hot].Duration) + return RetentionTier.Hot; + if (age < budget.RetentionPolicies[RetentionTier.Warm].Duration) + return RetentionTier.Warm; + if (age < budget.RetentionPolicies[RetentionTier.Cold].Duration) + return RetentionTier.Cold; + + return RetentionTier.Archive; + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Migrate.cs b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Migrate.cs new file mode 100644 index 000000000..0773dc662 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.Migrate.cs @@ -0,0 +1,20 @@ +using StellaOps.Evidence.Budgets; + +namespace StellaOps.Evidence.Retention; + +public sealed partial class RetentionTierManager +{ + private async Task MigrateAsync(EvidenceItem item, RetentionTier targetTier, CancellationToken ct) + { + var policy = _options.CurrentValue.RetentionPolicies[targetTier]; + + if (policy.Compression != CompressionLevel.None) + { + // Compress before migration + var compressed = await CompressAsync(item, policy.Compression, ct).ConfigureAwait(false); + await _repository.UpdateContentAsync(item.Id, compressed, ct).ConfigureAwait(false); + } + + await _repository.MoveToTierAsync(item.Id, targetTier, ct).ConfigureAwait(false); + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.cs b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.cs index f4cef0647..7e12e5988 100644 --- a/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.cs +++ b/src/__Libraries/StellaOps.Evidence/Retention/RetentionTierManager.cs @@ -3,14 +3,7 @@ using StellaOps.Evidence.Budgets; namespace StellaOps.Evidence.Retention; -public interface IRetentionTierManager -{ - Task RunMigrationAsync(CancellationToken ct); - RetentionTier GetCurrentTier(EvidenceItem item); - Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct); -} - -public sealed class RetentionTierManager : IRetentionTierManager +public sealed partial class RetentionTierManager : IRetentionTierManager { private readonly IEvidenceRepository _repository; private readonly IArchiveStorage _archiveStorage; @@ -37,28 +30,28 @@ public sealed class RetentionTierManager : IRetentionTierManager // Hot -> Warm var hotExpiry = now - budget.RetentionPolicies[RetentionTier.Hot].Duration; - var toWarm = await _repository.GetOlderThanAsync(RetentionTier.Hot, hotExpiry, ct); + var toWarm = await _repository.GetOlderThanAsync(RetentionTier.Hot, hotExpiry, ct).ConfigureAwait(false); foreach (var item in toWarm) { - await MigrateAsync(item, RetentionTier.Warm, ct); + await MigrateAsync(item, RetentionTier.Warm, ct).ConfigureAwait(false); migrated.Add(new MigratedItem(item.Id, RetentionTier.Hot, RetentionTier.Warm)); } // Warm -> Cold var warmExpiry = now - budget.RetentionPolicies[RetentionTier.Warm].Duration; - var toCold = await _repository.GetOlderThanAsync(RetentionTier.Warm, warmExpiry, ct); + var toCold = await _repository.GetOlderThanAsync(RetentionTier.Warm, warmExpiry, ct).ConfigureAwait(false); foreach 
(var item in toCold) { - await MigrateAsync(item, RetentionTier.Cold, ct); + await MigrateAsync(item, RetentionTier.Cold, ct).ConfigureAwait(false); migrated.Add(new MigratedItem(item.Id, RetentionTier.Warm, RetentionTier.Cold)); } // Cold -> Archive var coldExpiry = now - budget.RetentionPolicies[RetentionTier.Cold].Duration; - var toArchive = await _repository.GetOlderThanAsync(RetentionTier.Cold, coldExpiry, ct); + var toArchive = await _repository.GetOlderThanAsync(RetentionTier.Cold, coldExpiry, ct).ConfigureAwait(false); foreach (var item in toArchive) { - await MigrateAsync(item, RetentionTier.Archive, ct); + await MigrateAsync(item, RetentionTier.Archive, ct).ConfigureAwait(false); migrated.Add(new MigratedItem(item.Id, RetentionTier.Cold, RetentionTier.Archive)); } @@ -68,86 +61,4 @@ public sealed class RetentionTierManager : IRetentionTierManager Items = migrated }; } - - public RetentionTier GetCurrentTier(EvidenceItem item) - { - var budget = _options.CurrentValue; - var age = _timeProvider.GetUtcNow() - item.CreatedAt; - - if (age < budget.RetentionPolicies[RetentionTier.Hot].Duration) - return RetentionTier.Hot; - if (age < budget.RetentionPolicies[RetentionTier.Warm].Duration) - return RetentionTier.Warm; - if (age < budget.RetentionPolicies[RetentionTier.Cold].Duration) - return RetentionTier.Cold; - - return RetentionTier.Archive; - } - - public async Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct) - { - var budget = _options.CurrentValue; - - // Ensure all AlwaysPreserve types are in Hot tier for audit export - foreach (var type in budget.AlwaysPreserve) - { - var items = await _repository.GetByScanIdAndTypeAsync(scanId, type, ct); - foreach (var item in items.Where(i => i.Tier != RetentionTier.Hot)) - { - await RestoreToHotAsync(item, ct); - } - } - } - - private async Task MigrateAsync(EvidenceItem item, RetentionTier targetTier, CancellationToken ct) - { - var policy = _options.CurrentValue.RetentionPolicies[targetTier]; - - if (policy.Compression != CompressionLevel.None) - { - // Compress before migration - var compressed = await CompressAsync(item, policy.Compression, ct); - await _repository.UpdateContentAsync(item.Id, compressed, ct); - } - - await _repository.MoveToTierAsync(item.Id, targetTier, ct); - } - - private async Task RestoreToHotAsync(EvidenceItem item, CancellationToken ct) - { - if (item.Tier == RetentionTier.Archive) - { - // Retrieve from archive storage - var content = await _archiveStorage.RetrieveAsync(item.ArchiveKey!, ct); - await _repository.UpdateContentAsync(item.Id, content, ct); - } - - await _repository.MoveToTierAsync(item.Id, RetentionTier.Hot, ct); - } - - private Task CompressAsync( - EvidenceItem item, - CompressionLevel level, - CancellationToken ct) - { - return Task.FromException(new NotSupportedException( - "Compression requires repository content retrieval, which is not implemented.")); - } -} - -public sealed record TierMigrationResult -{ - public required int MigratedCount { get; init; } - public IReadOnlyList Items { get; init; } = []; -} - -public sealed record MigratedItem(Guid ItemId, RetentionTier FromTier, RetentionTier ToTier); - -/// -/// Archive storage interface for long-term retention. 
-/// -public interface IArchiveStorage -{ - Task RetrieveAsync(string archiveKey, CancellationToken ct); - Task StoreAsync(byte[] content, CancellationToken ct); } diff --git a/src/__Libraries/StellaOps.Evidence/Retention/TierMigrationResult.cs b/src/__Libraries/StellaOps.Evidence/Retention/TierMigrationResult.cs new file mode 100644 index 000000000..d8b29cc4c --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Retention/TierMigrationResult.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Evidence.Retention; + +public sealed record TierMigrationResult +{ + public required int MigratedCount { get; init; } + public IReadOnlyList Items { get; init; } = []; +} diff --git a/src/__Libraries/StellaOps.Evidence/Serialization/EvidenceIndexSerializer.cs b/src/__Libraries/StellaOps.Evidence/Serialization/EvidenceIndexSerializer.cs index 00a8c3c7c..0a7feb0b6 100644 --- a/src/__Libraries/StellaOps.Evidence/Serialization/EvidenceIndexSerializer.cs +++ b/src/__Libraries/StellaOps.Evidence/Serialization/EvidenceIndexSerializer.cs @@ -1,4 +1,3 @@ - using StellaOps.Canonical.Json; using StellaOps.Evidence.Models; using System.Security.Cryptography; @@ -14,7 +13,7 @@ namespace StellaOps.Evidence.Serialization; /// public static class EvidenceIndexSerializer { - private static readonly JsonSerializerOptions JsonOptions = new() + private static readonly JsonSerializerOptions _jsonOptions = new() { WriteIndented = false, PropertyNamingPolicy = JsonNamingPolicy.CamelCase, @@ -29,20 +28,20 @@ public static class EvidenceIndexSerializer public static string Serialize(EvidenceIndex index) { - var canonicalBytes = CanonJson.Canonicalize(index, JsonOptions); + var canonicalBytes = CanonJson.Canonicalize(index, _jsonOptions); return Encoding.UTF8.GetString(canonicalBytes); } public static EvidenceIndex Deserialize(string json) { - return JsonSerializer.Deserialize(json, JsonOptions) + return JsonSerializer.Deserialize(json, _jsonOptions) ?? throw new InvalidOperationException("Failed to deserialize evidence index"); } public static string ComputeDigest(EvidenceIndex index) { var withoutDigest = index with { IndexDigest = null }; - var canonicalBytes = CanonJson.Canonicalize(withoutDigest, JsonOptions); + var canonicalBytes = CanonJson.Canonicalize(withoutDigest, _jsonOptions); var hash = SHA256.HashData(canonicalBytes); return Convert.ToHexString(hash).ToLowerInvariant(); } diff --git a/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.Build.cs b/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.Build.cs new file mode 100644 index 000000000..ed5127811 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.Build.cs @@ -0,0 +1,97 @@ +using StellaOps.Evidence.Models; +using StellaOps.Evidence.Serialization; +using System.Collections.Immutable; +using System.Globalization; + +namespace StellaOps.Evidence.Services; + +public sealed partial class EvidenceLinker +{ + public EvidenceIndex Build(VerdictReference verdict, string runManifestDigest) + { + ToolChainEvidence toolChain; + ImmutableArray sboms; + ImmutableArray attestations; + ImmutableArray vexDocuments; + ImmutableArray reachabilityProofs; + ImmutableArray unknowns; + + lock (_lock) + { + toolChain = _toolChain ?? 
throw new InvalidOperationException("ToolChain must be set before building index"); + sboms = _sboms.ToImmutableArray(); + attestations = _attestations.ToImmutableArray(); + vexDocuments = _vexDocuments.ToImmutableArray(); + reachabilityProofs = _reachabilityProofs.ToImmutableArray(); + unknowns = _unknowns.ToImmutableArray(); + } + + var orderedSboms = sboms + .OrderBy(s => s.Digest, StringComparer.Ordinal) + .ThenBy(s => s.SbomId, StringComparer.Ordinal) + .ThenBy(s => s.Format, StringComparer.Ordinal) + .ThenBy(s => NormalizeSortKey(s.Uri), StringComparer.Ordinal) + .ThenBy(s => s.ComponentCount) + .ThenBy(s => s.GeneratedAt.UtcDateTime.Ticks) + .ToImmutableArray(); + + var orderedAttestations = attestations + .OrderBy(a => a.Type, StringComparer.Ordinal) + .ThenBy(a => a.Digest, StringComparer.Ordinal) + .ThenBy(a => a.SignerKeyId, StringComparer.Ordinal) + .ThenBy(a => a.AttestationId, StringComparer.Ordinal) + .ThenBy(a => a.SignedAt.UtcDateTime.Ticks) + .ThenBy(a => a.SignatureValid) + .ThenBy(a => NormalizeSortKey(a.RekorLogIndex), StringComparer.Ordinal) + .ToImmutableArray(); + + var orderedVex = vexDocuments + .Select(v => v with + { + AffectedVulnerabilities = v.AffectedVulnerabilities + .OrderBy(id => id, StringComparer.Ordinal) + .ToImmutableArray() + }) + .OrderBy(v => v.Digest, StringComparer.Ordinal) + .ThenBy(v => v.VexId, StringComparer.Ordinal) + .ThenBy(v => v.Format, StringComparer.Ordinal) + .ThenBy(v => v.Source, StringComparer.Ordinal) + .ThenBy(v => v.StatementCount) + .ToImmutableArray(); + + var orderedReachability = reachabilityProofs + .OrderBy(r => r.VulnerabilityId, StringComparer.Ordinal) + .ThenBy(r => r.ComponentPurl, StringComparer.Ordinal) + .ThenBy(r => r.Status) + .ThenBy(r => NormalizeSortKey(r.EntryPoint), StringComparer.Ordinal) + .ThenBy(r => r.Digest, StringComparer.Ordinal) + .ThenBy(r => r.ProofId, StringComparer.Ordinal) + .ToImmutableArray(); + + var orderedUnknowns = unknowns + .OrderBy(u => u.ReasonCode, StringComparer.Ordinal) + .ThenBy(u => NormalizeSortKey(u.VulnerabilityId), StringComparer.Ordinal) + .ThenBy(u => NormalizeSortKey(u.ComponentPurl), StringComparer.Ordinal) + .ThenBy(u => u.Severity) + .ThenBy(u => u.UnknownId, StringComparer.Ordinal) + .ThenBy(u => u.Description, StringComparer.Ordinal) + .ToImmutableArray(); + + var index = new EvidenceIndex + { + IndexId = _guidProvider.NewGuid().ToString("D", CultureInfo.InvariantCulture), + SchemaVersion = "1.0.0", + Verdict = verdict, + Sboms = orderedSboms, + Attestations = orderedAttestations, + VexDocuments = orderedVex, + ReachabilityProofs = orderedReachability, + Unknowns = orderedUnknowns, + ToolChain = toolChain, + RunManifestDigest = runManifestDigest, + CreatedAt = _timeProvider.GetUtcNow() + }; + + return EvidenceIndexSerializer.WithDigest(index); + } +} diff --git a/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.cs b/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.cs index f2ea995a7..6ee3f3950 100644 --- a/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.cs +++ b/src/__Libraries/StellaOps.Evidence/Services/EvidenceLinker.cs @@ -1,16 +1,12 @@ - using StellaOps.Determinism; using StellaOps.Evidence.Models; -using StellaOps.Evidence.Serialization; -using System.Collections.Immutable; -using System.Globalization; namespace StellaOps.Evidence.Services; /// /// Collects evidence entries and builds a deterministic EvidenceIndex. 
/// -public sealed class EvidenceLinker : IEvidenceLinker +public sealed partial class EvidenceLinker : IEvidenceLinker { private readonly TimeProvider _timeProvider; private readonly IGuidProvider _guidProvider; @@ -81,104 +77,5 @@ public sealed class EvidenceLinker : IEvidenceLinker } } - public EvidenceIndex Build(VerdictReference verdict, string runManifestDigest) - { - ToolChainEvidence toolChain; - ImmutableArray sboms; - ImmutableArray attestations; - ImmutableArray vexDocuments; - ImmutableArray reachabilityProofs; - ImmutableArray unknowns; - - lock (_lock) - { - toolChain = _toolChain ?? throw new InvalidOperationException("ToolChain must be set before building index"); - sboms = _sboms.ToImmutableArray(); - attestations = _attestations.ToImmutableArray(); - vexDocuments = _vexDocuments.ToImmutableArray(); - reachabilityProofs = _reachabilityProofs.ToImmutableArray(); - unknowns = _unknowns.ToImmutableArray(); - } - - var orderedSboms = sboms - .OrderBy(s => s.Digest, StringComparer.Ordinal) - .ThenBy(s => s.SbomId, StringComparer.Ordinal) - .ThenBy(s => s.Format, StringComparer.Ordinal) - .ThenBy(s => NormalizeSortKey(s.Uri), StringComparer.Ordinal) - .ThenBy(s => s.ComponentCount) - .ThenBy(s => s.GeneratedAt.UtcDateTime.Ticks) - .ToImmutableArray(); - - var orderedAttestations = attestations - .OrderBy(a => a.Type, StringComparer.Ordinal) - .ThenBy(a => a.Digest, StringComparer.Ordinal) - .ThenBy(a => a.SignerKeyId, StringComparer.Ordinal) - .ThenBy(a => a.AttestationId, StringComparer.Ordinal) - .ThenBy(a => a.SignedAt.UtcDateTime.Ticks) - .ThenBy(a => a.SignatureValid) - .ThenBy(a => NormalizeSortKey(a.RekorLogIndex), StringComparer.Ordinal) - .ToImmutableArray(); - - var orderedVex = vexDocuments - .Select(v => v with - { - AffectedVulnerabilities = v.AffectedVulnerabilities - .OrderBy(id => id, StringComparer.Ordinal) - .ToImmutableArray() - }) - .OrderBy(v => v.Digest, StringComparer.Ordinal) - .ThenBy(v => v.VexId, StringComparer.Ordinal) - .ThenBy(v => v.Format, StringComparer.Ordinal) - .ThenBy(v => v.Source, StringComparer.Ordinal) - .ThenBy(v => v.StatementCount) - .ToImmutableArray(); - - var orderedReachability = reachabilityProofs - .OrderBy(r => r.VulnerabilityId, StringComparer.Ordinal) - .ThenBy(r => r.ComponentPurl, StringComparer.Ordinal) - .ThenBy(r => r.Status) - .ThenBy(r => NormalizeSortKey(r.EntryPoint), StringComparer.Ordinal) - .ThenBy(r => r.Digest, StringComparer.Ordinal) - .ThenBy(r => r.ProofId, StringComparer.Ordinal) - .ToImmutableArray(); - - var orderedUnknowns = unknowns - .OrderBy(u => u.ReasonCode, StringComparer.Ordinal) - .ThenBy(u => NormalizeSortKey(u.VulnerabilityId), StringComparer.Ordinal) - .ThenBy(u => NormalizeSortKey(u.ComponentPurl), StringComparer.Ordinal) - .ThenBy(u => u.Severity) - .ThenBy(u => u.UnknownId, StringComparer.Ordinal) - .ThenBy(u => u.Description, StringComparer.Ordinal) - .ToImmutableArray(); - - var index = new EvidenceIndex - { - IndexId = _guidProvider.NewGuid().ToString("D", CultureInfo.InvariantCulture), - SchemaVersion = "1.0.0", - Verdict = verdict, - Sboms = orderedSboms, - Attestations = orderedAttestations, - VexDocuments = orderedVex, - ReachabilityProofs = orderedReachability, - Unknowns = orderedUnknowns, - ToolChain = toolChain, - RunManifestDigest = runManifestDigest, - CreatedAt = _timeProvider.GetUtcNow() - }; - - return EvidenceIndexSerializer.WithDigest(index); - } - private static string NormalizeSortKey(string? value) => value ?? 
string.Empty; } - -public interface IEvidenceLinker -{ - void AddSbom(SbomEvidence sbom); - void AddAttestation(AttestationEvidence attestation); - void AddVex(VexEvidence vex); - void AddReachabilityProof(ReachabilityEvidence proof); - void AddUnknown(UnknownEvidence unknown); - void SetToolChain(ToolChainEvidence toolChain); - EvidenceIndex Build(VerdictReference verdict, string runManifestDigest); -} diff --git a/src/__Libraries/StellaOps.Evidence/Services/IEvidenceLinker.cs b/src/__Libraries/StellaOps.Evidence/Services/IEvidenceLinker.cs new file mode 100644 index 000000000..dc4a8c5ac --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Services/IEvidenceLinker.cs @@ -0,0 +1,14 @@ +using StellaOps.Evidence.Models; + +namespace StellaOps.Evidence.Services; + +public interface IEvidenceLinker +{ + void AddSbom(SbomEvidence sbom); + void AddAttestation(AttestationEvidence attestation); + void AddVex(VexEvidence vex); + void AddReachabilityProof(ReachabilityEvidence proof); + void AddUnknown(UnknownEvidence unknown); + void SetToolChain(ToolChainEvidence toolChain); + EvidenceIndex Build(VerdictReference verdict, string runManifestDigest); +} diff --git a/src/__Libraries/StellaOps.Evidence/TASKS.md b/src/__Libraries/StellaOps.Evidence/TASKS.md index 6b2be60fe..c82192839 100644 --- a/src/__Libraries/StellaOps.Evidence/TASKS.md +++ b/src/__Libraries/StellaOps.Evidence/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0082-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0082-A | DONE | Applied 2026-01-13; determinism, schema validation, budget async, retention safeguards, tests. | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Remediated csproj findings (split budget/retention/models/services/validation, ConfigureAwait(false), private field naming); tests passed 2026-02-04 (24 tests). 
| diff --git a/src/__Libraries/StellaOps.Evidence/Validation/EvidenceIndexValidator.cs b/src/__Libraries/StellaOps.Evidence/Validation/EvidenceIndexValidator.cs index 0604c94b2..06aba1d2c 100644 --- a/src/__Libraries/StellaOps.Evidence/Validation/EvidenceIndexValidator.cs +++ b/src/__Libraries/StellaOps.Evidence/Validation/EvidenceIndexValidator.cs @@ -1,4 +1,3 @@ - using Json.Schema; using StellaOps.Evidence.Models; using StellaOps.Evidence.Serialization; @@ -86,11 +85,3 @@ public sealed class EvidenceIndexValidator : IEvidenceIndexValidator return new ValidationResult(errors.Count == 0, errors); } } - -public interface IEvidenceIndexValidator -{ - ValidationResult Validate(EvidenceIndex index); -} - -public sealed record ValidationResult(bool IsValid, IReadOnlyList Errors); -public sealed record ValidationError(string Field, string Message); diff --git a/src/__Libraries/StellaOps.Evidence/Validation/IEvidenceIndexValidator.cs b/src/__Libraries/StellaOps.Evidence/Validation/IEvidenceIndexValidator.cs new file mode 100644 index 000000000..ebf2e3b92 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Validation/IEvidenceIndexValidator.cs @@ -0,0 +1,8 @@ +using StellaOps.Evidence.Models; + +namespace StellaOps.Evidence.Validation; + +public interface IEvidenceIndexValidator +{ + ValidationResult Validate(EvidenceIndex index); +} diff --git a/src/__Libraries/StellaOps.Evidence/Validation/ValidationError.cs b/src/__Libraries/StellaOps.Evidence/Validation/ValidationError.cs new file mode 100644 index 000000000..c2165258e --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Validation/ValidationError.cs @@ -0,0 +1,3 @@ +namespace StellaOps.Evidence.Validation; + +public sealed record ValidationError(string Field, string Message); diff --git a/src/__Libraries/StellaOps.Evidence/Validation/ValidationResult.cs b/src/__Libraries/StellaOps.Evidence/Validation/ValidationResult.cs new file mode 100644 index 000000000..0b5bdc6a0 --- /dev/null +++ b/src/__Libraries/StellaOps.Evidence/Validation/ValidationResult.cs @@ -0,0 +1,3 @@ +namespace StellaOps.Evidence.Validation; + +public sealed record ValidationResult(bool IsValid, IReadOnlyList Errors); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryCacheOptions.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryCacheOptions.cs new file mode 100644 index 000000000..42637b83a --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryCacheOptions.cs @@ -0,0 +1,8 @@ +namespace StellaOps.IssuerDirectory.Client; + +public sealed class IssuerDirectoryCacheOptions +{ + public TimeSpan Keys { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan Trust { get; set; } = TimeSpan.FromMinutes(5); +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Cache.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Cache.cs new file mode 100644 index 000000000..92c37e537 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Cache.cs @@ -0,0 +1,25 @@ +using System; + +namespace StellaOps.IssuerDirectory.Client; + +internal sealed partial class IssuerDirectoryClient +{ + private static string CacheKey(string prefix, params string[] parts) + { + if (parts is null || parts.Length == 0) + { + return prefix; + } + + var segments = new string[1 + parts.Length]; + segments[0] = prefix; + Array.Copy(parts, 0, segments, 1, parts.Length); + return string.Join('|', segments); + } + + private void 
InvalidateTrustCache(string tenantId, string issuerId)
+    {
+        _cache.Remove(CacheKey("trust", tenantId, issuerId, bool.FalseString));
+        _cache.Remove(CacheKey("trust", tenantId, issuerId, bool.TrueString));
+    }
+}
diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Keys.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Keys.cs
new file mode 100644
index 000000000..127fbbac4
--- /dev/null
+++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Keys.cs
@@ -0,0 +1,55 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Net.Http;
+using System.Net.Http.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.Logging;
+
+namespace StellaOps.IssuerDirectory.Client;
+
+internal sealed partial class IssuerDirectoryClient
+{
+    public async ValueTask<IReadOnlyList<IssuerKeyModel>> GetIssuerKeysAsync(
+        string tenantId,
+        string issuerId,
+        bool includeGlobal,
+        CancellationToken cancellationToken)
+    {
+        var normalizedTenant = NormalizeRequired(tenantId, nameof(tenantId));
+        var normalizedIssuer = NormalizeRequired(issuerId, nameof(issuerId));
+        var includeGlobalValue = includeGlobal.ToString(CultureInfo.InvariantCulture);
+
+        var cacheKey = CacheKey("keys", normalizedTenant, normalizedIssuer, includeGlobalValue);
+        if (_cache.TryGetValue(cacheKey, out IReadOnlyList<IssuerKeyModel>? cached) && cached is not null)
+        {
+            return cached;
+        }
+
+        var requestUri =
+            $"issuer-directory/issuers/{Uri.EscapeDataString(normalizedIssuer)}/keys?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}";
+        using var request = new HttpRequestMessage(HttpMethod.Get, requestUri);
+        request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant);
+
+        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
+        if (!response.IsSuccessStatusCode)
+        {
+            _logger.LogWarning(
+                "Issuer Directory key lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}",
+                normalizedIssuer,
+                normalizedTenant,
+                response.StatusCode);
+            response.EnsureSuccessStatusCode();
+        }
+
+        var payload = await response.Content.ReadFromJsonAsync<IReadOnlyList<IssuerKeyModel>>(cancellationToken: cancellationToken)
+            .ConfigureAwait(false);
+
+        IReadOnlyList<IssuerKeyModel> result = payload?.ToArray() ?? Array.Empty<IssuerKeyModel>();
+        _cache.Set(cacheKey, result, _options.Cache.Keys);
+        return result;
+    }
+}
diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Delete.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Delete.cs
new file mode 100644
index 000000000..1aa5dba00
--- /dev/null
+++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Delete.cs
@@ -0,0 +1,42 @@
+using System;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+
+namespace StellaOps.IssuerDirectory.Client;
+
+internal sealed partial class IssuerDirectoryClient
+{
+    public async ValueTask DeleteIssuerTrustAsync(
+        string tenantId,
+        string issuerId,
+        string?
reason, + CancellationToken cancellationToken) + { + var normalizedTenant = NormalizeRequired(tenantId, nameof(tenantId)); + var normalizedIssuer = NormalizeRequired(issuerId, nameof(issuerId)); + var normalizedReason = NormalizeOptional(reason); + var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(normalizedIssuer)}/trust"; + + using var request = new HttpRequestMessage(HttpMethod.Delete, requestUri); + request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant); + if (!string.IsNullOrWhiteSpace(normalizedReason)) + { + request.Headers.TryAddWithoutValidation(_options.AuditReasonHeader, normalizedReason); + } + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + _logger.LogWarning( + "Issuer Directory trust delete failed for {IssuerId} (tenant={TenantId}) {StatusCode}", + normalizedIssuer, + normalizedTenant, + response.StatusCode); + response.EnsureSuccessStatusCode(); + } + + InvalidateTrustCache(normalizedTenant, normalizedIssuer); + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Get.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Get.cs new file mode 100644 index 000000000..370711764 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Get.cs @@ -0,0 +1,52 @@ +using System; +using System.Globalization; +using System.Net.Http; +using System.Net.Http.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging; + +namespace StellaOps.IssuerDirectory.Client; + +internal sealed partial class IssuerDirectoryClient +{ + public async ValueTask GetIssuerTrustAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + var normalizedTenant = NormalizeRequired(tenantId, nameof(tenantId)); + var normalizedIssuer = NormalizeRequired(issuerId, nameof(issuerId)); + var includeGlobalValue = includeGlobal.ToString(CultureInfo.InvariantCulture); + + var cacheKey = CacheKey("trust", normalizedTenant, normalizedIssuer, includeGlobalValue); + if (_cache.TryGetValue(cacheKey, out IssuerTrustResponseModel? cached) && cached is not null) + { + return cached; + } + + var requestUri = + $"issuer-directory/issuers/{Uri.EscapeDataString(normalizedIssuer)}/trust?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}"; + using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + _logger.LogWarning( + "Issuer Directory trust lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}", + normalizedIssuer, + normalizedTenant, + response.StatusCode); + response.EnsureSuccessStatusCode(); + } + + var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false) ?? 
new IssuerTrustResponseModel(null, null, 0m); + + _cache.Set(cacheKey, payload, _options.Cache.Trust); + return payload; + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Set.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Set.cs new file mode 100644 index 000000000..333bd5b31 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.Trust.Set.cs @@ -0,0 +1,53 @@ +using System; +using System.Net.Http; +using System.Net.Http.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace StellaOps.IssuerDirectory.Client; + +internal sealed partial class IssuerDirectoryClient +{ + public async ValueTask SetIssuerTrustAsync( + string tenantId, + string issuerId, + decimal weight, + string? reason, + CancellationToken cancellationToken) + { + var normalizedTenant = NormalizeRequired(tenantId, nameof(tenantId)); + var normalizedIssuer = NormalizeRequired(issuerId, nameof(issuerId)); + var normalizedReason = NormalizeOptional(reason); + var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(normalizedIssuer)}/trust"; + + using var request = new HttpRequestMessage(HttpMethod.Put, requestUri) + { + Content = JsonContent.Create(new IssuerTrustSetRequestModel(weight, normalizedReason)) + }; + + request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant); + if (!string.IsNullOrWhiteSpace(normalizedReason)) + { + request.Headers.TryAddWithoutValidation(_options.AuditReasonHeader, normalizedReason); + } + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + _logger.LogWarning( + "Issuer Directory trust update failed for {IssuerId} (tenant={TenantId}) {StatusCode}", + normalizedIssuer, + normalizedTenant, + response.StatusCode); + response.EnsureSuccessStatusCode(); + } + + InvalidateTrustCache(normalizedTenant, normalizedIssuer); + + var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false) ?? 
new IssuerTrustResponseModel(null, null, 0m); + + return payload; + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs index b3730e3b3..ac65bbf65 100644 --- a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs @@ -1,15 +1,13 @@ +using System; +using System.Net.Http; using Microsoft.Extensions.Caching.Memory; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using System; -using System.Globalization; -using System.Net.Http; -using System.Net.Http.Json; namespace StellaOps.IssuerDirectory.Client; -internal sealed class IssuerDirectoryClient : IIssuerDirectoryClient +internal sealed partial class IssuerDirectoryClient : IIssuerDirectoryClient { private readonly HttpClient _httpClient; private readonly IMemoryCache _cache; @@ -29,184 +27,19 @@ internal sealed class IssuerDirectoryClient : IIssuerDirectoryClient _options = options.Value; _options.Validate(); + + _httpClient.BaseAddress = _options.BaseAddress; + _httpClient.Timeout = _options.HttpTimeout; } - public async ValueTask> GetIssuerKeysAsync( - string tenantId, - string issuerId, - bool includeGlobal, - CancellationToken cancellationToken) + private static string NormalizeRequired(string value, string paramName) { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - tenantId = tenantId.Trim(); - issuerId = issuerId.Trim(); - - var cacheKey = CacheKey("keys", tenantId, issuerId, includeGlobal.ToString(CultureInfo.InvariantCulture)); - if (_cache.TryGetValue(cacheKey, out IReadOnlyList? cached) && cached is not null) - { - return cached; - } - - var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/keys?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}"; - using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); - request.Headers.TryAddWithoutValidation(_options.TenantHeader, tenantId); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - _logger.LogWarning( - "Issuer Directory key lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}", - issuerId, - tenantId, - response.StatusCode); - response.EnsureSuccessStatusCode(); - } - - var payload = await response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken) - .ConfigureAwait(false); - - IReadOnlyList result = payload?.ToArray() ?? Array.Empty(); - _cache.Set(cacheKey, result, _options.Cache.Keys); - return result; + ArgumentException.ThrowIfNullOrWhiteSpace(value, paramName); + return value.Trim(); } - public async ValueTask GetIssuerTrustAsync( - string tenantId, - string issuerId, - bool includeGlobal, - CancellationToken cancellationToken) + private static string? NormalizeOptional(string? value) { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - tenantId = tenantId.Trim(); - issuerId = issuerId.Trim(); - - var cacheKey = CacheKey("trust", tenantId, issuerId, includeGlobal.ToString(CultureInfo.InvariantCulture)); - if (_cache.TryGetValue(cacheKey, out IssuerTrustResponseModel? 
cached) && cached is not null) - { - return cached; - } - - var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/trust?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}"; - using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); - request.Headers.TryAddWithoutValidation(_options.TenantHeader, tenantId); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - _logger.LogWarning( - "Issuer Directory trust lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}", - issuerId, - tenantId, - response.StatusCode); - response.EnsureSuccessStatusCode(); - } - - var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) - .ConfigureAwait(false) ?? new IssuerTrustResponseModel(null, null, 0m); - - _cache.Set(cacheKey, payload, _options.Cache.Trust); - return payload; + return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); } - - public async ValueTask SetIssuerTrustAsync( - string tenantId, - string issuerId, - decimal weight, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var normalizedTenant = tenantId.Trim(); - var normalizedReason = string.IsNullOrWhiteSpace(reason) ? null : reason.Trim(); - var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/trust"; - - using var request = new HttpRequestMessage(HttpMethod.Put, requestUri) - { - Content = JsonContent.Create(new IssuerTrustSetRequestModel(weight, normalizedReason)) - }; - - request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant); - if (!string.IsNullOrWhiteSpace(normalizedReason)) - { - request.Headers.TryAddWithoutValidation(_options.AuditReasonHeader, normalizedReason); - } - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - _logger.LogWarning( - "Issuer Directory trust update failed for {IssuerId} (tenant={TenantId}) {StatusCode}", - issuerId, - normalizedTenant, - response.StatusCode); - response.EnsureSuccessStatusCode(); - } - - InvalidateTrustCache(normalizedTenant, issuerId); - - var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) - .ConfigureAwait(false) ?? new IssuerTrustResponseModel(null, null, 0m); - - return payload; - } - - public async ValueTask DeleteIssuerTrustAsync( - string tenantId, - string issuerId, - string? reason, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); - - var normalizedTenant = tenantId.Trim(); - var normalizedReason = string.IsNullOrWhiteSpace(reason) ? 
null : reason.Trim(); - var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/trust"; - - using var request = new HttpRequestMessage(HttpMethod.Delete, requestUri); - request.Headers.TryAddWithoutValidation(_options.TenantHeader, normalizedTenant); - if (!string.IsNullOrWhiteSpace(normalizedReason)) - { - request.Headers.TryAddWithoutValidation(_options.AuditReasonHeader, normalizedReason); - } - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - _logger.LogWarning( - "Issuer Directory trust delete failed for {IssuerId} (tenant={TenantId}) {StatusCode}", - issuerId, - normalizedTenant, - response.StatusCode); - response.EnsureSuccessStatusCode(); - } - - InvalidateTrustCache(normalizedTenant, issuerId); - } - - private static string CacheKey(string prefix, params string[] parts) - { - if (parts is null || parts.Length == 0) - { - return prefix; - } - - var segments = new string[1 + parts.Length]; - segments[0] = prefix; - Array.Copy(parts, 0, segments, 1, parts.Length); - return string.Join('|', segments); - } - - private void InvalidateTrustCache(string tenantId, string issuerId) - { - _cache.Remove(CacheKey("trust", tenantId, issuerId, bool.FalseString)); - _cache.Remove(CacheKey("trust", tenantId, issuerId, bool.TrueString)); - } - } diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs index 94348445b..cfec4251b 100644 --- a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs @@ -42,10 +42,3 @@ public sealed class IssuerDirectoryClientOptions } } } - -public sealed class IssuerDirectoryCacheOptions -{ - public TimeSpan Keys { get; set; } = TimeSpan.FromMinutes(5); - - public TimeSpan Trust { get; set; } = TimeSpan.FromMinutes(5); -} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerKeyModel.cs similarity index 50% rename from src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs rename to src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerKeyModel.cs index a3bcc0671..c977bf160 100644 --- a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerKeyModel.cs @@ -15,20 +15,3 @@ public sealed record IssuerKeyModel( [property: JsonPropertyName("retiredAtUtc")] DateTimeOffset? RetiredAtUtc, [property: JsonPropertyName("revokedAtUtc")] DateTimeOffset? RevokedAtUtc, [property: JsonPropertyName("replacesKeyId")] string? ReplacesKeyId); - -public sealed record IssuerTrustOverrideModel( - [property: JsonPropertyName("weight")] decimal Weight, - [property: JsonPropertyName("reason")] string? Reason, - [property: JsonPropertyName("updatedAtUtc")] DateTimeOffset UpdatedAtUtc, - [property: JsonPropertyName("updatedBy")] string UpdatedBy, - [property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc, - [property: JsonPropertyName("createdBy")] string CreatedBy); - -public sealed record IssuerTrustResponseModel( - [property: JsonPropertyName("tenantOverride")] IssuerTrustOverrideModel? TenantOverride, - [property: JsonPropertyName("globalOverride")] IssuerTrustOverrideModel? 
GlobalOverride, - [property: JsonPropertyName("effectiveWeight")] decimal EffectiveWeight); - -public sealed record IssuerTrustSetRequestModel( - [property: JsonPropertyName("weight")] decimal Weight, - [property: JsonPropertyName("reason")] string? Reason); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustOverrideModel.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustOverrideModel.cs new file mode 100644 index 000000000..acd11027d --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustOverrideModel.cs @@ -0,0 +1,11 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.IssuerDirectory.Client; + +public sealed record IssuerTrustOverrideModel( + [property: JsonPropertyName("weight")] decimal Weight, + [property: JsonPropertyName("reason")] string? Reason, + [property: JsonPropertyName("updatedAtUtc")] DateTimeOffset UpdatedAtUtc, + [property: JsonPropertyName("updatedBy")] string UpdatedBy, + [property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc, + [property: JsonPropertyName("createdBy")] string CreatedBy); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustResponseModel.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustResponseModel.cs new file mode 100644 index 000000000..2d039ec66 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustResponseModel.cs @@ -0,0 +1,8 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.IssuerDirectory.Client; + +public sealed record IssuerTrustResponseModel( + [property: JsonPropertyName("tenantOverride")] IssuerTrustOverrideModel? TenantOverride, + [property: JsonPropertyName("globalOverride")] IssuerTrustOverrideModel? GlobalOverride, + [property: JsonPropertyName("effectiveWeight")] decimal EffectiveWeight); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustSetRequestModel.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustSetRequestModel.cs new file mode 100644 index 000000000..0fdbc3b6b --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerTrustSetRequestModel.cs @@ -0,0 +1,7 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.IssuerDirectory.Client; + +public sealed record IssuerTrustSetRequestModel( + [property: JsonPropertyName("weight")] decimal Weight, + [property: JsonPropertyName("reason")] string? 
Reason); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs index d3a763d83..a7fa39002 100644 --- a/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs @@ -1,9 +1,8 @@ -using Microsoft.Extensions.Caching.Memory; +using System; + using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Options; -using System; namespace StellaOps.IssuerDirectory.Client; @@ -45,13 +44,7 @@ public static class IssuerDirectoryClientServiceCollectionExtensions }) .ValidateOnStart(); - services.AddHttpClient((provider, client) => - { - var opts = provider.GetRequiredService>().Value; - opts.Validate(); - client.BaseAddress = opts.BaseAddress; - client.Timeout = opts.HttpTimeout; - }); + services.AddHttpClient(); return services; } diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/TASKS.md b/src/__Libraries/StellaOps.IssuerDirectory.Client/TASKS.md index ddcab0a3d..ede420f2d 100644 --- a/src/__Libraries/StellaOps.IssuerDirectory.Client/TASKS.md +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0092-T | DONE | Revalidated 2026-01-08; test coverage audit for IssuerDirectory.Client. | | AUDIT-0092-A | TODO | Pending approval (revalidated 2026-01-08). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-04: Split client/model/options files, removed service locator, added cache normalization tests (SPRINT_20260130_002). 
| diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Determinism.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Determinism.cs new file mode 100644 index 000000000..21828ad4f --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Determinism.cs @@ -0,0 +1,85 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Threading.Tasks; +using FluentAssertions; +using StellaOps.Replay.Core.Export; +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Export; + +public sealed partial class ReplayManifestExporterTests +{ + [Fact] + public async Task ExportAsync_IsDeterministic_SameInputsProduceSameDigestAsync() + { + var timestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + var manifest1 = CreateTestManifest("fixed-scan-id", timestamp); + var manifest2 = CreateTestManifest("fixed-scan-id", timestamp); + + var outputPath1 = Path.Combine(_tempDir, "replay1.json"); + var outputPath2 = Path.Combine(_tempDir, "replay2.json"); + + var options1 = new ReplayExportOptions + { + OutputPath = outputPath1, + IncludeCiEnvironment = false, + GenerateVerificationCommand = false + }; + var options2 = new ReplayExportOptions + { + OutputPath = outputPath2, + IncludeCiEnvironment = false, + GenerateVerificationCommand = false + }; + + var result1 = await _exporter.ExportAsync(manifest1, options1); + var result2 = await _exporter.ExportAsync(manifest2, options2); + + result1.Success.Should().BeTrue(); + result2.Success.Should().BeTrue(); + result1.ManifestDigest.Should().Be(result2.ManifestDigest); + } + + [Fact] + public async Task ExportAsync_CompactJson_OmitsIndentationAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay-compact.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + PrettyPrint = false + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + var json = await File.ReadAllTextAsync(outputPath); + json.Should().NotContain("\n "); + } + + [Fact] + public async Task ExportAsync_RoundTrip_PreservesAllFieldsAsync() + { + var manifest = CreateTestManifest(); + manifest.Scan.PolicyDigest = "sha256:policy123"; + manifest.Scan.ScorePolicyDigest = "sha256:score456"; + manifest.Scan.AnalyzerSetDigest = "sha256:analyzer789"; + + var outputPath = Path.Combine(_tempDir, "replay-roundtrip.json"); + var options = new ReplayExportOptions { OutputPath = outputPath }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + + var json = await File.ReadAllTextAsync(outputPath); + var reloaded = JsonSerializer.Deserialize(json); + + reloaded.Should().NotBeNull(); + reloaded!.Outputs.VerdictDigest.Should().Be("sha256:analyzer789"); + reloaded.Verification.ExpectedVerdictHash.Should().Be("sha256:analyzer789"); + reloaded.Verification.ExpectedSbomHash.Should().Be("sha256:score456"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Export.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Export.cs new file mode 100644 index 000000000..bd91cf6cb --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Export.cs @@ -0,0 +1,116 @@ +using System.IO; +using System.Text.Json; +using System.Threading.Tasks; +using FluentAssertions; +using StellaOps.Replay.Core.Export; +using 
Xunit; + +namespace StellaOps.Replay.Core.Tests.Export; + +public sealed partial class ReplayManifestExporterTests +{ + [Fact] + public async Task ExportAsync_WithValidManifest_CreatesExportFileAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + PrettyPrint = true + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.ManifestPath.Should().Be(outputPath); + result.ManifestDigest.Should().StartWith("sha256:"); + File.Exists(outputPath).Should().BeTrue(); + } + + [Fact] + public async Task ExportAsync_ProducesValidJsonSchemaAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay-schema.json"); + var options = new ReplayExportOptions { OutputPath = outputPath }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + + var json = await File.ReadAllTextAsync(outputPath); + var exportManifest = JsonSerializer.Deserialize(json); + + exportManifest.Should().NotBeNull(); + exportManifest!.Version.Should().Be("1.0.0"); + exportManifest.Snapshot.Should().NotBeNull(); + exportManifest.Toolchain.Should().NotBeNull(); + exportManifest.Inputs.Should().NotBeNull(); + exportManifest.Outputs.Should().NotBeNull(); + exportManifest.Verification.Should().NotBeNull(); + } + + [Fact] + public async Task ExportAsync_IncludesToolchainVersions_WhenEnabledAsync() + { + var manifest = CreateTestManifest(); + manifest.Reachability.Graphs.Add(new ReplayReachabilityGraphReference + { + Analyzer = "java-callgraph", + Version = "1.2.3", + Hash = "sha256:abc123", + CasUri = "cas://graphs/java" + }); + + var outputPath = Path.Combine(_tempDir, "replay-toolchain.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + IncludeToolchainVersions = true + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Toolchain.AnalyzerVersions.Should().ContainKey("java-callgraph"); + result.Manifest.Toolchain.AnalyzerVersions!["java-callgraph"].Should().Be("1.2.3"); + } + + [Fact] + public async Task ExportAsync_IncludesFeedSnapshots_WhenEnabledAsync() + { + var manifest = CreateTestManifest(); + manifest.Scan.FeedSnapshot = "sha256:feedhash123456"; + + var outputPath = Path.Combine(_tempDir, "replay-feeds.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + IncludeFeedSnapshots = true + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Inputs.Feeds.Should().NotBeEmpty(); + result.Manifest.Inputs.Feeds[0].Digest.Should().Contain("feedhash123456"); + } + + [Fact] + public async Task ExportAsync_ExcludesFeedSnapshots_WhenDisabledAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay-no-feeds.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + IncludeFeedSnapshots = false + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Inputs.Feeds.Should().BeEmpty(); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.ExportReachability.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.ExportReachability.cs new file mode 100644 index 
000000000..51ad67759 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.ExportReachability.cs @@ -0,0 +1,73 @@ +using System.IO; +using System.Threading.Tasks; +using FluentAssertions; +using StellaOps.Replay.Core.Export; +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Export; + +public sealed partial class ReplayManifestExporterTests +{ + [Fact] + public async Task ExportAsync_IncludesReachability_WhenEnabledAsync() + { + var manifest = CreateTestManifest(); + manifest.Reachability.Graphs.Add(new ReplayReachabilityGraphReference + { + Kind = "static", + CasUri = "cas://graphs/main", + Hash = "sha256:graphhash", + CallgraphId = "main-entry", + Analyzer = "dotnet-callgraph", + Version = "2.0.0" + }); + + var outputPath = Path.Combine(_tempDir, "replay-reach.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + IncludeReachability = true + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Inputs.Reachability.Should().NotBeNullOrEmpty(); + result.Manifest.Inputs.Reachability![0].EntryPoint.Should().Be("main-entry"); + } + + [Fact] + public async Task ExportAsync_GeneratesVerificationCommand_WhenEnabledAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay-verify.json"); + var options = new ReplayExportOptions + { + OutputPath = outputPath, + GenerateVerificationCommand = true + }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Verification.Command.Should().Contain("stella replay verify"); + result.Manifest.Verification.Command.Should().Contain("--manifest"); + result.Manifest.Verification.Command.Should().Contain("--fail-on-drift"); + } + + [Fact] + public async Task ExportAsync_SetsCorrectExitCodesAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, "replay-exit.json"); + var options = new ReplayExportOptions { OutputPath = outputPath }; + + var result = await _exporter.ExportAsync(manifest, options); + + result.Success.Should().BeTrue(); + result.Manifest!.Verification.ExitCodes.Should().NotBeNull(); + result.Manifest.Verification.ExitCodes!.Success.Should().Be(0); + result.Manifest.Verification.ExitCodes.Drift.Should().Be(1); + result.Manifest.Verification.ExitCodes.Error.Should().Be(2); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Verify.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Verify.cs new file mode 100644 index 000000000..b0ef1de05 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.Verify.cs @@ -0,0 +1,43 @@ +using System.IO; +using System.Threading.Tasks; +using FluentAssertions; +using StellaOps.Replay.Core.Export; +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Export; + +public sealed partial class ReplayManifestExporterTests +{ + [Fact] + public async Task VerifyAsync_WithNonExistentFile_ReturnsErrorAsync() + { + var options = new ReplayVerifyOptions(); + + var result = await _exporter.VerifyAsync("/nonexistent/path.json", options); + + result.Success.Should().BeFalse(); + result.ExitCode.Should().Be(2); + result.Error.Should().Contain("not found"); + } + + [Fact] + public async Task VerifyAsync_WithValidManifest_ReturnsSuccessAsync() + { + var manifest = CreateTestManifest(); + var outputPath = Path.Combine(_tempDir, 
"replay-to-verify.json"); + var exportOptions = new ReplayExportOptions { OutputPath = outputPath }; + + await _exporter.ExportAsync(manifest, exportOptions); + + var verifyOptions = new ReplayVerifyOptions + { + FailOnSbomDrift = true, + FailOnVerdictDrift = true + }; + + var result = await _exporter.VerifyAsync(outputPath, verifyOptions); + + result.Success.Should().BeTrue(); + result.ExitCode.Should().Be(0); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.cs index 36f854ac0..9564e73b2 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Export/ReplayManifestExporterTests.cs @@ -1,19 +1,14 @@ -// ----------------------------------------------------------------------------- -// ReplayManifestExporterTests.cs -// Sprint: SPRINT_20251228_001_BE_replay_manifest_ci (T7) -// Description: Integration tests for replay manifest export and verification -// ----------------------------------------------------------------------------- - -using FluentAssertions; +using System; +using System.IO; +using StellaOps.Replay.Core; using StellaOps.Replay.Core.Export; -using System.Text.Json; namespace StellaOps.Replay.Core.Tests.Export; /// /// Tests for . /// -public sealed class ReplayManifestExporterTests : IDisposable +public sealed partial class ReplayManifestExporterTests : IDisposable { private readonly string _tempDir; private readonly ReplayManifestExporter _exporter; @@ -33,318 +28,6 @@ public sealed class ReplayManifestExporterTests : IDisposable } } - [Fact] - public async Task ExportAsync_WithValidManifest_CreatesExportFile() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - PrettyPrint = true - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.ManifestPath.Should().Be(outputPath); - result.ManifestDigest.Should().StartWith("sha256:"); - File.Exists(outputPath).Should().BeTrue(); - } - - [Fact] - public async Task ExportAsync_ProducesValidJsonSchema() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-schema.json"); - var options = new ReplayExportOptions { OutputPath = outputPath }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - - var json = await File.ReadAllTextAsync(outputPath); - var exportManifest = JsonSerializer.Deserialize(json); - - exportManifest.Should().NotBeNull(); - exportManifest!.Version.Should().Be("1.0.0"); - exportManifest.Snapshot.Should().NotBeNull(); - exportManifest.Toolchain.Should().NotBeNull(); - exportManifest.Inputs.Should().NotBeNull(); - exportManifest.Outputs.Should().NotBeNull(); - exportManifest.Verification.Should().NotBeNull(); - } - - [Fact] - public async Task ExportAsync_IncludesToolchainVersions_WhenEnabled() - { - // Arrange - var manifest = CreateTestManifest(); - manifest.Reachability.Graphs.Add(new ReplayReachabilityGraphReference - { - Analyzer = "java-callgraph", - Version = "1.2.3", - Hash = "sha256:abc123", - CasUri = "cas://graphs/java" - }); - - var outputPath = Path.Combine(_tempDir, "replay-toolchain.json"); - var options = new ReplayExportOptions - { - OutputPath = 
outputPath, - IncludeToolchainVersions = true - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Toolchain.AnalyzerVersions.Should().ContainKey("java-callgraph"); - result.Manifest.Toolchain.AnalyzerVersions!["java-callgraph"].Should().Be("1.2.3"); - } - - [Fact] - public async Task ExportAsync_IncludesFeedSnapshots_WhenEnabled() - { - // Arrange - var manifest = CreateTestManifest(); - manifest.Scan.FeedSnapshot = "sha256:feedhash123456"; - - var outputPath = Path.Combine(_tempDir, "replay-feeds.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - IncludeFeedSnapshots = true - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Inputs.Feeds.Should().NotBeEmpty(); - result.Manifest.Inputs.Feeds[0].Digest.Should().Contain("feedhash123456"); - } - - [Fact] - public async Task ExportAsync_ExcludesFeedSnapshots_WhenDisabled() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-no-feeds.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - IncludeFeedSnapshots = false - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Inputs.Feeds.Should().BeEmpty(); - } - - [Fact] - public async Task ExportAsync_IncludesReachability_WhenEnabled() - { - // Arrange - var manifest = CreateTestManifest(); - manifest.Reachability.Graphs.Add(new ReplayReachabilityGraphReference - { - Kind = "static", - CasUri = "cas://graphs/main", - Hash = "sha256:graphhash", - CallgraphId = "main-entry", - Analyzer = "dotnet-callgraph", - Version = "2.0.0" - }); - - var outputPath = Path.Combine(_tempDir, "replay-reach.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - IncludeReachability = true - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Inputs.Reachability.Should().NotBeNullOrEmpty(); - result.Manifest.Inputs.Reachability![0].EntryPoint.Should().Be("main-entry"); - } - - [Fact] - public async Task ExportAsync_GeneratesVerificationCommand_WhenEnabled() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-verify.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - GenerateVerificationCommand = true - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Verification.Command.Should().Contain("stella replay verify"); - result.Manifest.Verification.Command.Should().Contain("--manifest"); - result.Manifest.Verification.Command.Should().Contain("--fail-on-drift"); - } - - [Fact] - public async Task ExportAsync_SetsCorrectExitCodes() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-exit.json"); - var options = new ReplayExportOptions { OutputPath = outputPath }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - result.Manifest!.Verification.ExitCodes.Should().NotBeNull(); - result.Manifest.Verification.ExitCodes!.Success.Should().Be(0); - result.Manifest.Verification.ExitCodes.Drift.Should().Be(1); - 
result.Manifest.Verification.ExitCodes.Error.Should().Be(2); - } - - [Fact] - public async Task ExportAsync_IsDeterministic_SameInputsProduceSameDigest() - { - // Arrange - var manifest1 = CreateTestManifest("fixed-scan-id", new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)); - var manifest2 = CreateTestManifest("fixed-scan-id", new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)); - - var outputPath1 = Path.Combine(_tempDir, "replay1.json"); - var outputPath2 = Path.Combine(_tempDir, "replay2.json"); - - var options1 = new ReplayExportOptions - { - OutputPath = outputPath1, - IncludeCiEnvironment = false, // Disable CI env to ensure determinism - GenerateVerificationCommand = false // Disable to avoid path-specific commands - }; - var options2 = new ReplayExportOptions - { - OutputPath = outputPath2, - IncludeCiEnvironment = false, - GenerateVerificationCommand = false - }; - - // Act - var result1 = await _exporter.ExportAsync(manifest1, options1); - var result2 = await _exporter.ExportAsync(manifest2, options2); - - // Assert - result1.Success.Should().BeTrue(); - result2.Success.Should().BeTrue(); - result1.ManifestDigest.Should().Be(result2.ManifestDigest); - } - - [Fact] - public async Task ExportAsync_CompactJson_OmitsIndentation() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-compact.json"); - var options = new ReplayExportOptions - { - OutputPath = outputPath, - PrettyPrint = false - }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - var json = await File.ReadAllTextAsync(outputPath); - json.Should().NotContain("\n "); // No indented newlines - } - - [Fact] - public async Task VerifyAsync_WithNonExistentFile_ReturnsError() - { - // Arrange - var options = new ReplayVerifyOptions(); - - // Act - var result = await _exporter.VerifyAsync("/nonexistent/path.json", options); - - // Assert - result.Success.Should().BeFalse(); - result.ExitCode.Should().Be(2); // Error exit code - result.Error.Should().Contain("not found"); - } - - [Fact] - public async Task VerifyAsync_WithValidManifest_ReturnsSuccess() - { - // Arrange - var manifest = CreateTestManifest(); - var outputPath = Path.Combine(_tempDir, "replay-to-verify.json"); - var exportOptions = new ReplayExportOptions { OutputPath = outputPath }; - - await _exporter.ExportAsync(manifest, exportOptions); - - var verifyOptions = new ReplayVerifyOptions - { - FailOnSbomDrift = true, - FailOnVerdictDrift = true - }; - - // Act - var result = await _exporter.VerifyAsync(outputPath, verifyOptions); - - // Assert - result.Success.Should().BeTrue(); - result.ExitCode.Should().Be(0); - } - - [Fact] - public async Task ExportAsync_RoundTrip_PreservesAllFields() - { - // Arrange - var manifest = CreateTestManifest(); - manifest.Scan.PolicyDigest = "sha256:policy123"; - manifest.Scan.ScorePolicyDigest = "sha256:score456"; - manifest.Scan.AnalyzerSetDigest = "sha256:analyzer789"; - - var outputPath = Path.Combine(_tempDir, "replay-roundtrip.json"); - var options = new ReplayExportOptions { OutputPath = outputPath }; - - // Act - var result = await _exporter.ExportAsync(manifest, options); - - // Assert - result.Success.Should().BeTrue(); - - var json = await File.ReadAllTextAsync(outputPath); - var reloaded = JsonSerializer.Deserialize(json); - - reloaded.Should().NotBeNull(); - reloaded!.Outputs.VerdictDigest.Should().Be("sha256:analyzer789"); - 
reloaded.Verification.ExpectedVerdictHash.Should().Be("sha256:analyzer789"); - reloaded.Verification.ExpectedSbomHash.Should().Be("sha256:score456"); - } - private static ReplayManifest CreateTestManifest( string? scanId = null, DateTimeOffset? time = null) @@ -354,8 +37,8 @@ public sealed class ReplayManifestExporterTests : IDisposable SchemaVersion = ReplayManifestVersions.V2, Scan = new ReplayScanMetadata { - Id = scanId ?? $"scan-{Guid.NewGuid():N}", - Time = time ?? DateTimeOffset.UtcNow, + Id = scanId ?? "scan-fixed", + Time = time ?? new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), Toolchain = "stellaops-scanner/1.0.0" }, Reachability = new ReplayReachabilitySection diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FakeSourceProvider.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FakeSourceProvider.cs new file mode 100644 index 000000000..2ca31ff51 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FakeSourceProvider.cs @@ -0,0 +1,48 @@ +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Replay.Core.FeedSnapshot; + +namespace StellaOps.Replay.Core.Tests.FeedSnapshot; + +internal sealed class FakeSourceProvider : IFeedSourceProvider +{ + private readonly string _version; + private readonly string _digest; + private readonly long _recordCount; + + public FakeSourceProvider(string sourceId, string version, string digest, long recordCount) + { + SourceId = sourceId; + _version = version; + _digest = digest; + _recordCount = recordCount; + } + + public string SourceId { get; } + public string DisplayName => $"Fake {SourceId}"; + public int Priority => 0; + + public Task CreateSnapshotAsync(CancellationToken cancellationToken = default) + { + return Task.FromResult(new SourceSnapshot + { + SourceId = SourceId, + Version = _version, + Digest = _digest, + RecordCount = _recordCount + }); + } + + public Task GetCurrentDigestAsync(CancellationToken cancellationToken = default) + => Task.FromResult(_digest); + + public Task GetRecordCountAsync(CancellationToken cancellationToken = default) + => Task.FromResult(_recordCount); + + public Task ExportAsync(SourceSnapshot snapshot, Stream outputStream, CancellationToken cancellationToken = default) + => Task.CompletedTask; + + public Task ImportAsync(Stream inputStream, CancellationToken cancellationToken = default) + => CreateSnapshotAsync(cancellationToken); +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.CreateSnapshot.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.CreateSnapshot.cs new file mode 100644 index 000000000..186ec8d2f --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.CreateSnapshot.cs @@ -0,0 +1,63 @@ +using System; +using Xunit; + +namespace StellaOps.Replay.Core.Tests.FeedSnapshot; + +public sealed partial class FeedSnapshotCoordinatorTests +{ + [Fact] + public async Task CreateSnapshot_WithMultipleSources_ProducesConsistentDigestAsync() + { + var coordinator = CreateCoordinator( + Provider("nvd", "v1", DigestNvd, 100), + Provider("ghsa", "v2", DigestGhsa, 200), + Provider("osv", "v3", DigestOsv, 150)); + + var snapshot1 = await coordinator.CreateSnapshotAsync("test-label"); + var snapshot2 = await coordinator.CreateSnapshotAsync("test-label"); + + Assert.Equal(snapshot1.CompositeDigest, snapshot2.CompositeDigest); + Assert.Equal(3, snapshot1.Sources.Count); + } + + [Fact] + 
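    // A minimal sketch of the determinism these tests assert. The coordinator's real hashing
    // scheme is not shown in this diff, so the format below is an assumption (System.Security.Cryptography
    // and System.Text are assumed for SHA256/Encoding); the key point is ordinal sorting by SourceId
    // before hashing, which is why repeated snapshots of the same providers share a CompositeDigest:
    //
    //     var ordered = snapshots.OrderBy(s => s.SourceId, StringComparer.Ordinal);
    //     var payload = string.Join("\n", ordered.Select(s => $"{s.SourceId}:{s.Digest}"));
    //     var composite = "sha256:" + Convert.ToHexString(
    //         SHA256.HashData(Encoding.UTF8.GetBytes(payload))).ToLowerInvariant();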
public async Task CreateSnapshot_SourcesAreSortedAlphabeticallyAsync() + { + var coordinator = CreateCoordinator( + Provider("zebra", "v1", DigestZebra, 10), + Provider("alpha", "v2", DigestAlpha, 20), + Provider("middle", "v3", DigestMiddle, 30)); + + var snapshot = await coordinator.CreateSnapshotAsync(); + + Assert.Equal("alpha", snapshot.Sources[0].SourceId); + Assert.Equal("middle", snapshot.Sources[1].SourceId); + Assert.Equal("zebra", snapshot.Sources[2].SourceId); + } + + [Fact] + public async Task CreateSnapshot_WithSubsetOfSources_IncludesOnlyRequestedAsync() + { + var coordinator = CreateCoordinator( + Provider("nvd", "v1", DigestNvd, 100), + Provider("ghsa", "v2", DigestGhsa, 200), + Provider("osv", "v3", DigestOsv, 150)); + + var snapshot = await coordinator.CreateSnapshotAsync(["nvd", "osv"]); + + Assert.Equal(2, snapshot.Sources.Count); + Assert.Contains(snapshot.Sources, s => s.SourceId == "nvd"); + Assert.Contains(snapshot.Sources, s => s.SourceId == "osv"); + Assert.DoesNotContain(snapshot.Sources, s => s.SourceId == "ghsa"); + } + + [Fact] + public async Task CreateSnapshot_WithUnknownSource_ThrowsAsync() + { + var coordinator = CreateCoordinator( + Provider("nvd", "v1", DigestNvd, 100)); + + await Assert.ThrowsAsync(() => + coordinator.CreateSnapshotAsync(["nvd", "unknown-source"])); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.GetSnapshot.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.GetSnapshot.cs new file mode 100644 index 000000000..9e1a2fb44 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.GetSnapshot.cs @@ -0,0 +1,35 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.FeedSnapshot; + +public sealed partial class FeedSnapshotCoordinatorTests +{ + [Fact] + public async Task GetSnapshot_ReturnsStoredBundleAsync() + { + var coordinator = CreateCoordinator( + Provider("nvd", "v1", DigestNvd, 100)); + + var created = await coordinator.CreateSnapshotAsync("test"); + var retrieved = await coordinator.GetSnapshotAsync(created.CompositeDigest); + + Assert.NotNull(retrieved); + Assert.Equal(created.SnapshotId, retrieved.SnapshotId); + Assert.Equal(created.CompositeDigest, retrieved.CompositeDigest); + } + + [Fact] + public async Task ValidateSnapshot_WhenNoChanges_ReturnsValidAsync() + { + var coordinator = CreateCoordinator( + Provider("nvd", "v1", DigestNvd, 100)); + + var snapshot = await coordinator.CreateSnapshotAsync(); + var result = await coordinator.ValidateSnapshotAsync(snapshot.CompositeDigest); + + Assert.NotNull(result); + Assert.True(result.IsValid); + Assert.True(result.MissingSources is null or { Count: 0 }); + Assert.True(result.DriftedSources is null or { Count: 0 }); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.RegisteredSources.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.RegisteredSources.cs new file mode 100644 index 000000000..f2dcb7d7c --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.RegisteredSources.cs @@ -0,0 +1,20 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.FeedSnapshot; + +public sealed partial class FeedSnapshotCoordinatorTests +{ + [Fact] + public void RegisteredSources_ReturnsSortedList() + { + var coordinator = CreateCoordinator( + Provider("zebra", "v1", DigestA1, 10), + Provider("alpha", "v2", 
DigestB2, 20)); + + var registered = coordinator.RegisteredSources; + + Assert.Equal(2, registered.Count); + Assert.Equal("alpha", registered[0]); + Assert.Equal("zebra", registered[1]); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.cs index 0e125b1d9..3c9a80b25 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/FeedSnapshotCoordinatorTests.cs @@ -1,257 +1,36 @@ -// ----------------------------------------------------------------------------- -// FeedSnapshotCoordinatorTests.cs -// Sprint: SPRINT_20251226_007_BE_determinism_gaps -// Task: DET-GAP-02 -// Description: Tests for feed snapshot coordinator determinism -// ----------------------------------------------------------------------------- - using StellaOps.Replay.Core.FeedSnapshot; +using StellaOps.TestKit; using Xunit; namespace StellaOps.Replay.Core.Tests.FeedSnapshot; -public sealed class FeedSnapshotCoordinatorTests +[Trait("Category", TestCategories.Unit)] +public sealed partial class FeedSnapshotCoordinatorTests { - [Fact] - public async Task CreateSnapshot_WithMultipleSources_ProducesConsistentDigest() + private const string DigestNvd = + "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1"; + private const string DigestGhsa = + "sha256:def456def456def456def456def456def456def456def456def456def456def4"; + private const string DigestOsv = + "sha256:789012789012789012789012789012789012789012789012789012789012789a"; + private const string DigestZebra = + "sha256:aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1"; + private const string DigestAlpha = + "sha256:bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2"; + private const string DigestMiddle = + "sha256:ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3"; + private const string DigestA1 = + "sha256:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1"; + private const string DigestB2 = + "sha256:b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2"; + + private static FeedSnapshotCoordinatorService CreateCoordinator(params FakeSourceProvider[] providers) { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("nvd", "v1", "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", 100), - new FakeSourceProvider("ghsa", "v2", "sha256:def456def456def456def456def456def456def456def456def456def456def4", 200), - new FakeSourceProvider("osv", "v3", "sha256:789012789012789012789012789012789012789012789012789012789012789a", 150) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - // Act - var snapshot1 = await coordinator.CreateSnapshotAsync("test-label"); - var snapshot2 = await coordinator.CreateSnapshotAsync("test-label"); - - // Assert - same providers should produce same composite digest - Assert.Equal(snapshot1.CompositeDigest, snapshot2.CompositeDigest); - Assert.Equal(3, snapshot1.Sources.Count); + return new FeedSnapshotCoordinatorService(providers, new InMemorySnapshotStore()); } - [Fact] - public async Task CreateSnapshot_SourcesAreSortedAlphabetically() + private static FakeSourceProvider Provider(string sourceId, string version, string digest, long recordCount) { - // Arrange - providers added in 
non-alphabetical order - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("zebra", "v1", "sha256:aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1aaa1", 10), - new FakeSourceProvider("alpha", "v2", "sha256:bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2bbb2", 20), - new FakeSourceProvider("middle", "v3", "sha256:ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3ccc3", 30) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - // Act - var snapshot = await coordinator.CreateSnapshotAsync(); - - // Assert - sources should be sorted alphabetically - Assert.Equal("alpha", snapshot.Sources[0].SourceId); - Assert.Equal("middle", snapshot.Sources[1].SourceId); - Assert.Equal("zebra", snapshot.Sources[2].SourceId); - } - - [Fact] - public async Task CreateSnapshot_WithSubsetOfSources_IncludesOnlyRequested() - { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("nvd", "v1", "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", 100), - new FakeSourceProvider("ghsa", "v2", "sha256:def456def456def456def456def456def456def456def456def456def456def4", 200), - new FakeSourceProvider("osv", "v3", "sha256:789012789012789012789012789012789012789012789012789012789012789a", 150) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - // Act - var snapshot = await coordinator.CreateSnapshotAsync(["nvd", "osv"]); - - // Assert - Assert.Equal(2, snapshot.Sources.Count); - Assert.Contains(snapshot.Sources, s => s.SourceId == "nvd"); - Assert.Contains(snapshot.Sources, s => s.SourceId == "osv"); - Assert.DoesNotContain(snapshot.Sources, s => s.SourceId == "ghsa"); - } - - [Fact] - public async Task RegisteredSources_ReturnsSortedList() - { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("zebra", "v1", "sha256:a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1", 10), - new FakeSourceProvider("alpha", "v2", "sha256:b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2", 20) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - // Act - var registered = coordinator.RegisteredSources; - - // Assert - Assert.Equal(2, registered.Count); - Assert.Equal("alpha", registered[0]); - Assert.Equal("zebra", registered[1]); - } - - [Fact] - public async Task GetSnapshot_ReturnsStoredBundle() - { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("nvd", "v1", "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", 100) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - var created = await coordinator.CreateSnapshotAsync("test"); - - // Act - var retrieved = await coordinator.GetSnapshotAsync(created.CompositeDigest); - - // Assert - Assert.NotNull(retrieved); - Assert.Equal(created.SnapshotId, retrieved.SnapshotId); - Assert.Equal(created.CompositeDigest, retrieved.CompositeDigest); - } - - [Fact] - public async Task ValidateSnapshot_WhenNoChanges_ReturnsValid() - { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("nvd", "v1", "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", 100) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new 
FeedSnapshotCoordinatorService(providers, store); - - var snapshot = await coordinator.CreateSnapshotAsync(); - - // Act - var result = await coordinator.ValidateSnapshotAsync(snapshot.CompositeDigest); - - // Assert - Assert.NotNull(result); - Assert.True(result.IsValid); - // No missing or drifted sources (either null or empty is acceptable) - Assert.True(result.MissingSources is null or { Count: 0 }); - Assert.True(result.DriftedSources is null or { Count: 0 }); - } - - [Fact] - public async Task CreateSnapshot_WithUnknownSource_Throws() - { - // Arrange - var providers = new IFeedSourceProvider[] - { - new FakeSourceProvider("nvd", "v1", "sha256:abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", 100) - }; - var store = new InMemorySnapshotStore(); - var coordinator = new FeedSnapshotCoordinatorService(providers, store); - - // Act & Assert - await Assert.ThrowsAsync(() => - coordinator.CreateSnapshotAsync(["nvd", "unknown-source"])); - } - - private sealed class FakeSourceProvider : IFeedSourceProvider - { - private readonly string _version; - private readonly string _digest; - private readonly long _recordCount; - - public FakeSourceProvider(string sourceId, string version, string digest, long recordCount) - { - SourceId = sourceId; - _version = version; - _digest = digest; - _recordCount = recordCount; - } - - public string SourceId { get; } - public string DisplayName => $"Fake {SourceId}"; - public int Priority => 0; - - public Task CreateSnapshotAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(new SourceSnapshot - { - SourceId = SourceId, - Version = _version, - Digest = _digest, - RecordCount = _recordCount - }); - } - - public Task GetCurrentDigestAsync(CancellationToken cancellationToken = default) => - Task.FromResult(_digest); - - public Task GetRecordCountAsync(CancellationToken cancellationToken = default) => - Task.FromResult(_recordCount); - - public Task ExportAsync(SourceSnapshot snapshot, Stream outputStream, CancellationToken cancellationToken = default) => - Task.CompletedTask; - - public Task ImportAsync(Stream inputStream, CancellationToken cancellationToken = default) => - CreateSnapshotAsync(cancellationToken); - } - - private sealed class InMemorySnapshotStore : IFeedSnapshotStore - { - private readonly Dictionary _byDigest = new(StringComparer.OrdinalIgnoreCase); - private readonly Dictionary _byId = new(StringComparer.OrdinalIgnoreCase); - - public Task SaveAsync(FeedSnapshotBundle bundle, CancellationToken cancellationToken = default) - { - _byDigest[bundle.CompositeDigest] = bundle; - _byId[bundle.SnapshotId] = bundle; - return Task.CompletedTask; - } - - public Task GetByDigestAsync(string compositeDigest, CancellationToken cancellationToken = default) => - Task.FromResult(_byDigest.GetValueOrDefault(compositeDigest)); - - public Task GetByIdAsync(string snapshotId, CancellationToken cancellationToken = default) => - Task.FromResult(_byId.GetValueOrDefault(snapshotId)); - - public async IAsyncEnumerable ListAsync( - DateTimeOffset? from = null, - DateTimeOffset? 
to = null,
-        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
-    {
-        foreach (var bundle in _byDigest.Values.OrderByDescending(b => b.CreatedAt))
-        {
-            if (from.HasValue && bundle.CreatedAt < from.Value) continue;
-            if (to.HasValue && bundle.CreatedAt > to.Value) continue;
-
-            yield return new FeedSnapshotSummary
-            {
-                SnapshotId = bundle.SnapshotId,
-                CompositeDigest = bundle.CompositeDigest,
-                Label = bundle.Label,
-                CreatedAt = bundle.CreatedAt,
-                SourceCount = bundle.Sources.Count,
-                TotalRecordCount = bundle.Sources.Sum(s => s.RecordCount)
-            };
-        }
-    }
-
-    public Task DeleteAsync(string compositeDigest, CancellationToken cancellationToken = default)
-    {
-        var existed = _byDigest.Remove(compositeDigest, out var bundle);
-        if (existed && bundle is not null)
-        {
-            _byId.Remove(bundle.SnapshotId);
-        }
-        return Task.FromResult(existed);
-    }
+        return new FakeSourceProvider(sourceId, version, digest, recordCount);
+    }
 }
diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/InMemorySnapshotStore.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/InMemorySnapshotStore.cs
new file mode 100644
index 000000000..053d2c216
--- /dev/null
+++ b/src/__Libraries/StellaOps.Replay.Core.Tests/FeedSnapshot/InMemorySnapshotStore.cs
@@ -0,0 +1,74 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using StellaOps.Replay.Core.FeedSnapshot;
+
+namespace StellaOps.Replay.Core.Tests.FeedSnapshot;
+
+internal sealed class InMemorySnapshotStore : IFeedSnapshotStore
+{
+    private readonly Dictionary<string, FeedSnapshotBundle> _byDigest =
+        new(StringComparer.OrdinalIgnoreCase);
+    private readonly Dictionary<string, FeedSnapshotBundle> _byId =
+        new(StringComparer.OrdinalIgnoreCase);
+
+    public Task SaveAsync(FeedSnapshotBundle bundle, CancellationToken cancellationToken = default)
+    {
+        _byDigest[bundle.CompositeDigest] = bundle;
+        _byId[bundle.SnapshotId] = bundle;
+        return Task.CompletedTask;
+    }
+
+    public Task<FeedSnapshotBundle?> GetByDigestAsync(
+        string compositeDigest,
+        CancellationToken cancellationToken = default) =>
+        Task.FromResult(_byDigest.GetValueOrDefault(compositeDigest));
+
+    public Task<FeedSnapshotBundle?> GetByIdAsync(
+        string snapshotId,
+        CancellationToken cancellationToken = default) =>
+        Task.FromResult(_byId.GetValueOrDefault(snapshotId));
+
+    public async IAsyncEnumerable<FeedSnapshotSummary> ListAsync(
+        DateTimeOffset? from = null,
+        DateTimeOffset? 
to = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var bundle in _byDigest.Values.OrderByDescending(b => b.CreatedAt)) + { + if (from.HasValue && bundle.CreatedAt < from.Value) + { + continue; + } + + if (to.HasValue && bundle.CreatedAt > to.Value) + { + continue; + } + + yield return new FeedSnapshotSummary + { + SnapshotId = bundle.SnapshotId, + CompositeDigest = bundle.CompositeDigest, + Label = bundle.Label, + CreatedAt = bundle.CreatedAt, + SourceCount = bundle.Sources.Count, + TotalRecordCount = bundle.Sources.Sum(s => s.RecordCount) + }; + } + } + + public Task DeleteAsync(string compositeDigest, CancellationToken cancellationToken = default) + { + var existed = _byDigest.Remove(compositeDigest, out var bundle); + if (existed && bundle is not null) + { + _byId.Remove(bundle.SnapshotId); + } + + return Task.FromResult(existed); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestTests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestTests.cs index 65dd9fbfa..59cea41c2 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestTests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestTests.cs @@ -1,12 +1,15 @@ +using System; using System.Text.Json; using StellaOps.Replay.Core; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; -public class ReplayManifestTests +namespace StellaOps.Replay.Core.Tests; + +public sealed class ReplayManifestTests { [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void SerializesWithNamespacesAndAnalysis_V1() { var manifest = new ReplayManifest @@ -24,7 +27,7 @@ public class ReplayManifestTests CasUri = "cas://reachability_graphs/aa/aagraph.tar.zst", Hash = "sha256:aa", HashAlgorithm = "sha256", - Sha256 = "aa", // Legacy field for v1 compat + Sha256 = "aa", Namespace = "reachability_graphs", CallgraphId = "cg-1", Analyzer = "scanner", @@ -37,9 +40,9 @@ public class ReplayManifestTests CasUri = "cas://runtime_traces/bb/bbtrace.tar.zst", Hash = "sha256:bb", HashAlgorithm = "sha256", - Sha256 = "bb", // Legacy field for v1 compat + Sha256 = "bb", Namespace = "runtime_traces", - RecordedAt = System.DateTimeOffset.Parse("2025-11-26T00:00:00Z") + RecordedAt = DateTimeOffset.Parse("2025-11-26T00:00:00Z") }); var json = JsonSerializer.Serialize(manifest, new JsonSerializerOptions(JsonSerializerDefaults.Web)); @@ -51,7 +54,7 @@ public class ReplayManifestTests } [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void SerializesWithV2HashFields() { var manifest = new ReplayManifest @@ -67,7 +70,7 @@ public class ReplayManifestTests { Kind = "static", CasUri = "cas://reachability/graphs/blake3:abc123", - Hash = "blake3:abc123def456789012345678901234567890123456789012345678901234", + Hash = "blake3:abc123def4567890123456789012345678901234567890123456778901234", HashAlgorithm = "blake3-256", Namespace = "reachability_graphs", Analyzer = "scanner.java@10.0.0", @@ -79,7 +82,6 @@ public class ReplayManifestTests Assert.Contains("\"schemaVersion\":\"2.0\"", json); Assert.Contains("\"hash\":\"blake3:", json); Assert.Contains("\"hashAlg\":\"blake3-256\"", json); - // v2 manifests should not emit legacy sha256 field (JsonIgnore when null) Assert.DoesNotContain("\"sha256\":", json); } } diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.CodeIdCoverage.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.CodeIdCoverage.cs new file mode 100644 index 000000000..392626289 --- 
/dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.CodeIdCoverage.cs @@ -0,0 +1,29 @@ +using System.Text.Json; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CodeIdCoverage_SerializesWithSnakeCaseKeys() + { + var coverage = new CodeIdCoverage + { + TotalNodes = 1247, + NodesWithSymbolId = 1189, + NodesWithCodeId = 58, + CoveragePercent = 100.0 + }; + + var json = JsonSerializer.Serialize(coverage, _jsonOptions); + + Assert.Contains("\"total_nodes\":1247", json); + Assert.Contains("\"nodes_with_symbol_id\":1189", json); + Assert.Contains("\"nodes_with_code_id\":58", json); + Assert.Contains("\"coverage_percent\":100", json); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Reachability.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Reachability.cs new file mode 100644 index 000000000..3b6ca9e24 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Reachability.cs @@ -0,0 +1,103 @@ +using System.Threading.Tasks; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task InvalidManifest_MissingHashAlg_InV2_FailsValidationAsync() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", + HashAlgorithm = null!, + CasUri = "cas://reachability/graphs/blake3:..." 
+ } + ] + } + }; + + var validator = new ReplayManifestValidator(); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.MissingHashAlg); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task InvalidManifest_MissingCasReference_FailsValidationAsync() + { + var casValidator = new InMemoryCasValidator(); + + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:missing" + } + ] + } + }; + + var validator = new ReplayManifestValidator(casValidator); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.CasNotFound); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task InvalidManifest_HashMismatch_FailsValidationAsync() + { + var casValidator = new InMemoryCasValidator(); + casValidator.Register( + "cas://reachability/graphs/blake3:actual", + "blake3:differenthash"); + casValidator.Register( + "cas://reachability/graphs/blake3:actual.dsse", + "blake3:differenthash.dsse"); + + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Hash = "blake3:expected", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:actual" + } + ] + } + }; + + var validator = new ReplayManifestValidator(casValidator); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.HashMismatch); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Schema.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Schema.cs new file mode 100644 index 000000000..4b16fdd68 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Invalid.Schema.cs @@ -0,0 +1,41 @@ +using System.Threading.Tasks; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task InvalidManifest_MissingSchemaVersion_FailsValidationAsync() + { + var manifest = new ReplayManifest + { + SchemaVersion = null! 
+ }; + + var validator = new ReplayManifestValidator(); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.MissingVersion); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task InvalidManifest_VersionMismatch_WhenV2Required_FailsValidationAsync() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V1 + }; + + var validator = new ReplayManifestValidator(requireV2: true); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.VersionMismatch); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Minimal.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Minimal.cs new file mode 100644 index 000000000..3ca06b79c --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Minimal.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void MinimalValidManifestV2_SerializesCorrectly() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Scan = new ReplayScanMetadata + { + Id = "scan-test-001", + Time = DateTimeOffset.Parse("2025-12-13T10:00:00Z") + }, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Kind = "static", + Analyzer = "scanner.java@10.2.0", + Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2" + } + ], + RuntimeTraces = new List(), + CodeIdCoverage = new CodeIdCoverage + { + TotalNodes = 100, + NodesWithSymbolId = 100, + NodesWithCodeId = 0, + CoveragePercent = 100.0 + } + } + }; + + var json = JsonSerializer.Serialize(manifest, _jsonOptions); + + Assert.Contains("\"schemaVersion\":\"2.0\"", json); + Assert.Contains("\"hash\":\"blake3:", json); + Assert.Contains("\"hashAlg\":\"blake3-256\"", json); + Assert.Contains("\"code_id_coverage\"", json); + Assert.Contains("\"total_nodes\":100", json); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void ManifestWithRuntimeTraces_SerializesCorrectly() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Scan = new ReplayScanMetadata + { + Id = "scan-test-002", + Time = DateTimeOffset.Parse("2025-12-13T11:00:00Z") + }, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Kind = "static", + Analyzer = "scanner.java@10.2.0", + Hash = "blake3:1111111111111111111111111111111111111111111111111111111111111111", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:1111111111111111111111111111111111111111111111111111111111111111" + } + ], + RuntimeTraces = + [ + new ReplayReachabilityTraceReference + { + Source = "eventpipe", + Hash = "sha256:2222222222222222222222222222222222222222222222222222222222222222", + HashAlgorithm = "sha256", + CasUri = 
"cas://reachability/runtime/sha256:2222222222222222222222222222222222222222222222222222222222222222", + RecordedAt = DateTimeOffset.Parse("2025-12-13T10:30:00Z") + } + ] + } + }; + + var json = JsonSerializer.Serialize(manifest, _jsonOptions); + + Assert.Contains("\"source\":\"eventpipe\"", json); + Assert.Contains("\"hash\":\"sha256:", json); + Assert.Contains("\"hashAlg\":\"sha256\"", json); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.ReachabilityWriter.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.ReachabilityWriter.cs new file mode 100644 index 000000000..eb2d20a30 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.ReachabilityWriter.cs @@ -0,0 +1,96 @@ +using System; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BuildManifestV2_WithValidGraphs_CreatesSortedManifest() + { + var scan = new ReplayScanMetadata { Id = "test-scan" }; + var graphs = new[] + { + new ReplayReachabilityGraphReference + { + Hash = "blake3:zzzz", + CasUri = "cas://graphs/zzzz" + }, + new ReplayReachabilityGraphReference + { + Hash = "blake3:aaaa", + CasUri = "cas://graphs/aaaa" + } + }; + + var manifest = ReachabilityReplayWriter.BuildManifestV2( + scan, + graphs, + Array.Empty()); + + Assert.Equal(ReplayManifestVersions.V2, manifest.SchemaVersion); + Assert.Equal("cas://graphs/aaaa", manifest.Reachability.Graphs[0].CasUri); + Assert.Equal("cas://graphs/zzzz", manifest.Reachability.Graphs[1].CasUri); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BuildManifestV2_WithLegacySha256_MigratesHashField() + { + var scan = new ReplayScanMetadata { Id = "test-scan" }; + var graphs = new[] + { + new ReplayReachabilityGraphReference + { + Sha256 = "abc123", + CasUri = "cas://graphs/abc123" + } + }; + + var manifest = ReachabilityReplayWriter.BuildManifestV2( + scan, + graphs, + Array.Empty()); + + Assert.Equal("sha256:abc123", manifest.Reachability.Graphs[0].Hash); + Assert.Equal("sha256", manifest.Reachability.Graphs[0].HashAlgorithm); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BuildManifestV2_InfersHashAlgorithmFromPrefix() + { + var scan = new ReplayScanMetadata { Id = "test-scan" }; + var graphs = new[] + { + new ReplayReachabilityGraphReference + { + Hash = "blake3:a1b2c3d4", + CasUri = "cas://graphs/a1b2c3d4" + } + }; + + var manifest = ReachabilityReplayWriter.BuildManifestV2( + scan, + graphs, + Array.Empty()); + + Assert.Equal("blake3-256", manifest.Reachability.Graphs[0].HashAlgorithm); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void BuildManifestV2_RequiresAtLeastOneGraph() + { + var scan = new ReplayScanMetadata { Id = "test-scan" }; + + Assert.Throws(() => + ReachabilityReplayWriter.BuildManifestV2( + scan, + Array.Empty(), + Array.Empty())); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Sorting.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Sorting.cs new file mode 100644 index 000000000..7d377199c --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Sorting.cs @@ -0,0 +1,81 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + 
+public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SortingValidation_UnsortedGraphs_FailsValidationAsync() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Kind = "framework", + Hash = "blake3:zzzz1111111111111111111111111111111111111111111111111111111111", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:zzzz..." + }, + new ReplayReachabilityGraphReference + { + Kind = "static", + Hash = "blake3:aaaa1111111111111111111111111111111111111111111111111111111111", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:aaaa..." + } + ] + } + }; + + var validator = new ReplayManifestValidator(); + var result = await validator.ValidateAsync(manifest); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.UnsortedEntries); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task SortingValidation_SortedGraphs_PassesValidationAsync() + { + var manifest = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V2, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Kind = "static", + Hash = "blake3:aaaa1111111111111111111111111111111111111111111111111111111111", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:aaaa..." + }, + new ReplayReachabilityGraphReference + { + Kind = "framework", + Hash = "blake3:zzzz1111111111111111111111111111111111111111111111111111111111", + HashAlgorithm = "blake3-256", + CasUri = "cas://reachability/graphs/blake3:zzzz..." 
+ } + ] + } + }; + + var validator = new ReplayManifestValidator(); + var result = await validator.ValidateAsync(manifest); + + Assert.True(result.IsValid); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Upgrade.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Upgrade.cs new file mode 100644 index 000000000..28f050e22 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.Upgrade.cs @@ -0,0 +1,65 @@ +using System.Collections.Generic; +using StellaOps.Replay.Core; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayManifestV2Tests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void UpgradeToV2_ConvertsV1ManifestCorrectly() + { + var v1 = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V1, + Scan = new ReplayScanMetadata + { + Id = "scan-legacy" + }, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference + { + Kind = "static", + Sha256 = "abc123", + CasUri = "cas://reachability/graphs/abc123" + } + ] + } + }; + + var v2 = ReplayManifestValidator.UpgradeToV2(v1); + + Assert.Equal(ReplayManifestVersions.V2, v2.SchemaVersion); + Assert.Single(v2.Reachability.Graphs); + Assert.Equal("sha256:abc123", v2.Reachability.Graphs[0].Hash); + Assert.Equal("sha256", v2.Reachability.Graphs[0].HashAlgorithm); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void UpgradeToV2_SortsGraphsByUri() + { + var v1 = new ReplayManifest + { + SchemaVersion = ReplayManifestVersions.V1, + Reachability = new ReplayReachabilitySection + { + Graphs = + [ + new ReplayReachabilityGraphReference { Sha256 = "zzz", CasUri = "cas://graphs/zzz" }, + new ReplayReachabilityGraphReference { Sha256 = "aaa", CasUri = "cas://graphs/aaa" } + ] + } + }; + + var v2 = ReplayManifestValidator.UpgradeToV2(v1); + + Assert.Equal("cas://graphs/aaa", v2.Reachability.Graphs[0].CasUri); + Assert.Equal("cas://graphs/zzz", v2.Reachability.Graphs[1].CasUri); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.cs index cdc11e696..14a7f0622 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayManifestV2Tests.cs @@ -1,500 +1,17 @@ -using System; -using System.Collections.Generic; using System.Text.Json; -using System.Threading.Tasks; using StellaOps.Replay.Core; +using StellaOps.TestKit; using Xunit; -using StellaOps.TestKit; namespace StellaOps.Replay.Core.Tests; /// /// Test vectors from replay-manifest-v2-acceptance.md /// -public class ReplayManifestV2Tests +public sealed partial class ReplayManifestV2Tests { - private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + private static readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.Web) { WriteIndented = false }; - - #region Section 4.1: Minimal Valid Manifest v2 - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void MinimalValidManifestV2_SerializesCorrectly() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Scan = new ReplayScanMetadata - { - Id = "scan-test-001", - Time = DateTimeOffset.Parse("2025-12-13T10:00:00Z") - }, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Kind = "static", - Analyzer = 
"scanner.java@10.2.0", - Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2" - } - }, - RuntimeTraces = new List(), - CodeIdCoverage = new CodeIdCoverage - { - TotalNodes = 100, - NodesWithSymbolId = 100, - NodesWithCodeId = 0, - CoveragePercent = 100.0 - } - } - }; - - var json = JsonSerializer.Serialize(manifest, JsonOptions); - - Assert.Contains("\"schemaVersion\":\"2.0\"", json); - Assert.Contains("\"hash\":\"blake3:", json); - Assert.Contains("\"hashAlg\":\"blake3-256\"", json); - Assert.Contains("\"code_id_coverage\"", json); - Assert.Contains("\"total_nodes\":100", json); - } - - #endregion - - #region Section 4.2: Manifest with Runtime Traces - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void ManifestWithRuntimeTraces_SerializesCorrectly() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Scan = new ReplayScanMetadata - { - Id = "scan-test-002", - Time = DateTimeOffset.Parse("2025-12-13T11:00:00Z") - }, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Kind = "static", - Analyzer = "scanner.java@10.2.0", - Hash = "blake3:1111111111111111111111111111111111111111111111111111111111111111", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:1111111111111111111111111111111111111111111111111111111111111111" - } - }, - RuntimeTraces = new List - { - new() - { - Source = "eventpipe", - Hash = "sha256:2222222222222222222222222222222222222222222222222222222222222222", - HashAlgorithm = "sha256", - CasUri = "cas://reachability/runtime/sha256:2222222222222222222222222222222222222222222222222222222222222222", - RecordedAt = DateTimeOffset.Parse("2025-12-13T10:30:00Z") - } - } - } - }; - - var json = JsonSerializer.Serialize(manifest, JsonOptions); - - Assert.Contains("\"source\":\"eventpipe\"", json); - Assert.Contains("\"hash\":\"sha256:", json); - Assert.Contains("\"hashAlg\":\"sha256\"", json); - } - - #endregion - - #region Section 4.3: Sorting Validation - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void SortingValidation_UnsortedGraphs_FailsValidation() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Kind = "framework", - Hash = "blake3:zzzz1111111111111111111111111111111111111111111111111111111111", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:zzzz..." - }, - new() - { - Kind = "static", - Hash = "blake3:aaaa1111111111111111111111111111111111111111111111111111111111", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:aaaa..." 
- } - } - } - }; - - var validator = new ReplayManifestValidator(); - var result = validator.ValidateAsync(manifest).GetAwaiter().GetResult(); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.UnsortedEntries); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void SortingValidation_SortedGraphs_PassesValidation() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Kind = "static", - Hash = "blake3:aaaa1111111111111111111111111111111111111111111111111111111111", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:aaaa..." - }, - new() - { - Kind = "framework", - Hash = "blake3:zzzz1111111111111111111111111111111111111111111111111111111111", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:zzzz..." - } - } - } - }; - - var validator = new ReplayManifestValidator(); - var result = validator.ValidateAsync(manifest).GetAwaiter().GetResult(); - - Assert.True(result.IsValid); - } - - #endregion - - #region Section 4.4: Invalid Manifest Vectors - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void InvalidManifest_MissingSchemaVersion_FailsValidation() - { - var manifest = new ReplayManifest - { - SchemaVersion = null! - }; - - var validator = new ReplayManifestValidator(); - var result = validator.ValidateAsync(manifest).GetAwaiter().GetResult(); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.MissingVersion); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void InvalidManifest_VersionMismatch_WhenV2Required_FailsValidation() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V1 - }; - - var validator = new ReplayManifestValidator(requireV2: true); - var result = validator.ValidateAsync(manifest).GetAwaiter().GetResult(); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.VersionMismatch); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void InvalidManifest_MissingHashAlg_InV2_FailsValidation() - { - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", - HashAlgorithm = null!, // Missing - CasUri = "cas://reachability/graphs/blake3:..." 
- } - } - } - }; - - var validator = new ReplayManifestValidator(); - var result = validator.ValidateAsync(manifest).GetAwaiter().GetResult(); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.MissingHashAlg); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task InvalidManifest_MissingCasReference_FailsValidation() - { - var casValidator = new InMemoryCasValidator(); - // Don't register any objects - - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Hash = "blake3:a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:missing" - } - } - } - }; - - var validator = new ReplayManifestValidator(casValidator); - var result = await validator.ValidateAsync(manifest); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.CasNotFound); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public async Task InvalidManifest_HashMismatch_FailsValidation() - { - var casValidator = new InMemoryCasValidator(); - casValidator.Register( - "cas://reachability/graphs/blake3:actual", - "blake3:differenthash"); - casValidator.Register( - "cas://reachability/graphs/blake3:actual.dsse", - "blake3:differenthash.dsse"); - - var manifest = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V2, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Hash = "blake3:expected", - HashAlgorithm = "blake3-256", - CasUri = "cas://reachability/graphs/blake3:actual" - } - } - } - }; - - var validator = new ReplayManifestValidator(casValidator); - var result = await validator.ValidateAsync(manifest); - - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.ErrorCode == ReplayManifestErrorCodes.HashMismatch); - } - - #endregion - - #region Section 5: Migration Path - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void UpgradeToV2_ConvertsV1ManifestCorrectly() - { - var v1 = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V1, - Scan = new ReplayScanMetadata - { - Id = "scan-legacy" - }, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() - { - Kind = "static", - Sha256 = "abc123", - CasUri = "cas://reachability/graphs/abc123" - } - } - } - }; - - var v2 = ReplayManifestValidator.UpgradeToV2(v1); - - Assert.Equal(ReplayManifestVersions.V2, v2.SchemaVersion); - Assert.Single(v2.Reachability.Graphs); - Assert.Equal("sha256:abc123", v2.Reachability.Graphs[0].Hash); - Assert.Equal("sha256", v2.Reachability.Graphs[0].HashAlgorithm); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void UpgradeToV2_SortsGraphsByUri() - { - var v1 = new ReplayManifest - { - SchemaVersion = ReplayManifestVersions.V1, - Reachability = new ReplayReachabilitySection - { - Graphs = new List - { - new() { Sha256 = "zzz", CasUri = "cas://graphs/zzz" }, - new() { Sha256 = "aaa", CasUri = "cas://graphs/aaa" } - } - } - }; - - var v2 = ReplayManifestValidator.UpgradeToV2(v1); - - Assert.Equal("cas://graphs/aaa", v2.Reachability.Graphs[0].CasUri); - Assert.Equal("cas://graphs/zzz", v2.Reachability.Graphs[1].CasUri); - } - - #endregion - - #region ReachabilityReplayWriter Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void 
BuildManifestV2_WithValidGraphs_CreatesSortedManifest() - { - var scan = new ReplayScanMetadata { Id = "test-scan" }; - var graphs = new[] - { - new ReplayReachabilityGraphReference - { - Hash = "blake3:zzzz", - CasUri = "cas://graphs/zzzz" - }, - new ReplayReachabilityGraphReference - { - Hash = "blake3:aaaa", - CasUri = "cas://graphs/aaaa" - } - }; - - var manifest = ReachabilityReplayWriter.BuildManifestV2( - scan, - graphs, - Array.Empty()); - - Assert.Equal(ReplayManifestVersions.V2, manifest.SchemaVersion); - Assert.Equal("cas://graphs/aaaa", manifest.Reachability.Graphs[0].CasUri); - Assert.Equal("cas://graphs/zzzz", manifest.Reachability.Graphs[1].CasUri); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BuildManifestV2_WithLegacySha256_MigratesHashField() - { - var scan = new ReplayScanMetadata { Id = "test-scan" }; - var graphs = new[] - { - new ReplayReachabilityGraphReference - { - Sha256 = "abc123", - CasUri = "cas://graphs/abc123" - } - }; - - var manifest = ReachabilityReplayWriter.BuildManifestV2( - scan, - graphs, - Array.Empty()); - - Assert.Equal("sha256:abc123", manifest.Reachability.Graphs[0].Hash); - Assert.Equal("sha256", manifest.Reachability.Graphs[0].HashAlgorithm); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BuildManifestV2_InfersHashAlgorithmFromPrefix() - { - var scan = new ReplayScanMetadata { Id = "test-scan" }; - var graphs = new[] - { - new ReplayReachabilityGraphReference - { - Hash = "blake3:a1b2c3d4", - CasUri = "cas://graphs/a1b2c3d4" - } - }; - - var manifest = ReachabilityReplayWriter.BuildManifestV2( - scan, - graphs, - Array.Empty()); - - Assert.Equal("blake3-256", manifest.Reachability.Graphs[0].HashAlgorithm); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void BuildManifestV2_RequiresAtLeastOneGraph() - { - var scan = new ReplayScanMetadata { Id = "test-scan" }; - - Assert.Throws(() => - ReachabilityReplayWriter.BuildManifestV2( - scan, - Array.Empty(), - Array.Empty())); - } - - #endregion - - #region CodeIdCoverage Tests - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CodeIdCoverage_SerializesWithSnakeCaseKeys() - { - var coverage = new CodeIdCoverage - { - TotalNodes = 1247, - NodesWithSymbolId = 1189, - NodesWithCodeId = 58, - CoveragePercent = 100.0 - }; - - var json = JsonSerializer.Serialize(coverage, JsonOptions); - - Assert.Contains("\"total_nodes\":1247", json); - Assert.Contains("\"nodes_with_symbol_id\":1189", json); - Assert.Contains("\"nodes_with_code_id\":58", json); - Assert.Contains("\"coverage_percent\":100", json); - } - - #endregion } diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Canonical.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Canonical.cs new file mode 100644 index 000000000..a04420fd9 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Canonical.cs @@ -0,0 +1,101 @@ +using System; +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Replay.Core.Models; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayProofTests +{ + [Fact] + public void ToCanonicalJson_SortsKeysDeterministically() + { + var proof = CreateTestProof(); + + var json = proof.ToCanonicalJson(); + + var keys = ExtractJsonKeys(json); + keys.Should().BeInAscendingOrder(StringComparer.Ordinal); + } + + [Fact] + public void ToCanonicalJson_ExcludesNullValues() + { + var proof = ReplayProof.FromExecutionResult( + bundleHash: 
"sha256:abc123", + policyVersion: "1.0.0", + verdictRoot: "sha256:def456", + verdictMatches: true, + durationMs: 150, + replayedAt: _fixedTimestamp, + engineVersion: "1.0.0"); + + var json = proof.ToCanonicalJson(); + + json.Should().NotContain("null"); + json.Should().NotContain("artifactDigest"); + json.Should().NotContain("signatureVerified"); + json.Should().NotContain("signatureKeyId"); + } + + [Fact] + public void ToCanonicalJson_FormatsTimestampCorrectly() + { + var proof = CreateTestProof(); + + var json = proof.ToCanonicalJson(); + + json.Should().Contain("2026-01-05T12:00:00.000Z"); + } + + [Fact] + public void ToCanonicalJson_IncludesMetadataWhenPresent() + { + var proof = ReplayProof.FromExecutionResult( + bundleHash: "sha256:abc123", + policyVersion: "1.0.0", + verdictRoot: "sha256:def456", + verdictMatches: true, + durationMs: 150, + replayedAt: _fixedTimestamp, + engineVersion: "1.0.0", + metadata: ImmutableDictionary.Empty + .Add("tenant", "acme-corp") + .Add("project", "web-app")); + + var json = proof.ToCanonicalJson(); + + json.Should().Contain("metadata"); + json.Should().Contain("tenant"); + json.Should().Contain("acme-corp"); + json.Should().Contain("project"); + json.Should().Contain("web-app"); + } + + [Fact] + public void ToCanonicalJson_SortsMetadataKeys() + { + var proof = ReplayProof.FromExecutionResult( + bundleHash: "sha256:abc123", + policyVersion: "1.0.0", + verdictRoot: "sha256:def456", + verdictMatches: true, + durationMs: 150, + replayedAt: _fixedTimestamp, + engineVersion: "1.0.0", + metadata: ImmutableDictionary.Empty + .Add("zebra", "z-value") + .Add("alpha", "a-value") + .Add("mike", "m-value")); + + var json = proof.ToCanonicalJson(); + + var alphaPos = json.IndexOf("alpha", StringComparison.Ordinal); + var mikePos = json.IndexOf("mike", StringComparison.Ordinal); + var zebraPos = json.IndexOf("zebra", StringComparison.Ordinal); + + alphaPos.Should().BeLessThan(mikePos); + mikePos.Should().BeLessThan(zebraPos); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Compact.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Compact.cs new file mode 100644 index 000000000..1a2f30d66 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Compact.cs @@ -0,0 +1,74 @@ +using FluentAssertions; +using StellaOps.Replay.Core.Models; +using Xunit; + +namespace StellaOps.Replay.Core.Tests; + +public sealed partial class ReplayProofTests +{ + [Fact] + public void ToCompactString_GeneratesCorrectFormat() + { + var proof = CreateTestProof(); + + var compact = proof.ToCompactString(); + + compact.Should().StartWith("replay-proof:"); + compact.Should().HaveLength("replay-proof:".Length + 64); + } + + [Fact] + public void ToCompactString_IsDeterministic() + { + var proof1 = CreateTestProof(); + var proof2 = CreateTestProof(); + + var compact1 = proof1.ToCompactString(); + var compact2 = proof2.ToCompactString(); + + compact1.Should().Be(compact2, "same inputs should produce same compact proof"); + } + + [Fact] + public void ValidateCompactString_ReturnsTrueForValidProof() + { + var proof = CreateTestProof(); + var compact = proof.ToCompactString(); + var canonicalJson = proof.ToCanonicalJson(); + + var isValid = ReplayProof.ValidateCompactString(compact, canonicalJson); + + isValid.Should().BeTrue(); + } + + [Fact] + public void ValidateCompactString_ReturnsFalseForTamperedJson() + { + var proof = CreateTestProof(); + var compact = proof.ToCompactString(); + var tamperedJson = 
proof.ToCanonicalJson().Replace("1.0.0", "2.0.0");
+
+        var isValid = ReplayProof.ValidateCompactString(compact, tamperedJson);
+
+        isValid.Should().BeFalse("tampered JSON should not validate");
+    }
+
+    [Fact]
+    public void ValidateCompactString_ReturnsFalseForInvalidPrefix()
+    {
+        var canonicalJson = CreateTestProof().ToCanonicalJson();
+
+        var isValid = ReplayProof.ValidateCompactString("invalid-proof:abc123", canonicalJson);
+
+        isValid.Should().BeFalse("invalid prefix should not validate");
+    }
+
+    [Fact]
+    public void ValidateCompactString_ReturnsFalseForEmptyInputs()
+    {
+        ReplayProof.ValidateCompactString("", "{}").Should().BeFalse();
+        ReplayProof.ValidateCompactString("replay-proof:abc", "").Should().BeFalse();
+        ReplayProof.ValidateCompactString(null!, "{}").Should().BeFalse();
+        ReplayProof.ValidateCompactString("replay-proof:abc", null!).Should().BeFalse();
+    }
+}
diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Factory.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Factory.cs
new file mode 100644
index 000000000..5ee12a230
--- /dev/null
+++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.Factory.cs
@@ -0,0 +1,88 @@
+using System;
+using FluentAssertions;
+using StellaOps.Replay.Core.Models;
+using Xunit;
+
+namespace StellaOps.Replay.Core.Tests;
+
+public sealed partial class ReplayProofTests
+{
+    [Fact]
+    public void FromExecutionResult_CreatesValidProof()
+    {
+        var proof = ReplayProof.FromExecutionResult(
+            bundleHash: "sha256:abc123",
+            policyVersion: "1.0.0",
+            verdictRoot: "sha256:def456",
+            verdictMatches: true,
+            durationMs: 150,
+            replayedAt: _fixedTimestamp,
+            engineVersion: "1.0.0",
+            artifactDigest: "sha256:image123",
+            signatureVerified: true,
+            signatureKeyId: "key-001");
+
+        proof.BundleHash.Should().Be("sha256:abc123");
+        proof.PolicyVersion.Should().Be("1.0.0");
+        proof.VerdictRoot.Should().Be("sha256:def456");
+        proof.VerdictMatches.Should().BeTrue();
+        proof.DurationMs.Should().Be(150);
+        proof.ReplayedAt.Should().Be(_fixedTimestamp);
+        proof.EngineVersion.Should().Be("1.0.0");
+        proof.ArtifactDigest.Should().Be("sha256:image123");
+        proof.SignatureVerified.Should().BeTrue();
+        proof.SignatureKeyId.Should().Be("key-001");
+    }
+
+    [Fact]
+    public void FromExecutionResult_ThrowsOnNullRequiredParams()
+    {
+        var act1 = () => ReplayProof.FromExecutionResult(
+            bundleHash: null!,
+            policyVersion: "1.0.0",
+            verdictRoot: "sha256:def456",
+            verdictMatches: true,
+            durationMs: 150,
+            replayedAt: _fixedTimestamp,
+            engineVersion: "1.0.0");
+        act1.Should().Throw<ArgumentNullException>().WithParameterName("bundleHash");
+
+        var act2 = () => ReplayProof.FromExecutionResult(
+            bundleHash: "sha256:abc123",
+            policyVersion: null!,
+            verdictRoot: "sha256:def456",
+            verdictMatches: true,
+            durationMs: 150,
+            replayedAt: _fixedTimestamp,
+            engineVersion: "1.0.0");
+        act2.Should().Throw<ArgumentNullException>().WithParameterName("policyVersion");
+
+        var act3 = () => ReplayProof.FromExecutionResult(
+            bundleHash: "sha256:abc123",
+            policyVersion: "1.0.0",
+            verdictRoot: null!,
+            verdictMatches: true,
+            durationMs: 150,
+            replayedAt: _fixedTimestamp,
+            engineVersion: "1.0.0");
+        act3.Should().Throw<ArgumentNullException>().WithParameterName("verdictRoot");
+
+        var act4 = () => ReplayProof.FromExecutionResult(
+            bundleHash: "sha256:abc123",
+            policyVersion: "1.0.0",
+            verdictRoot: "sha256:def456",
+            verdictMatches: true,
+            durationMs: 150,
+            replayedAt: _fixedTimestamp,
+            engineVersion: null!);
+        act4.Should().Throw<ArgumentNullException>().WithParameterName("engineVersion");
+    }
+
+    [Fact]
+    public void 
SchemaVersion_DefaultsTo1_0_0() + { + var proof = CreateTestProof(); + + proof.SchemaVersion.Should().Be("1.0.0"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs index 71385636b..e6e145a54 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs @@ -1,13 +1,10 @@ -// -// Copyright (c) Stella Operations. Licensed under BUSL-1.1. -// - +using System; +using System.Collections.Generic; using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; using System.Text.Json; using FluentAssertions; using StellaOps.Replay.Core.Models; +using StellaOps.TestKit; using Xunit; namespace StellaOps.Replay.Core.Tests; @@ -16,282 +13,10 @@ namespace StellaOps.Replay.Core.Tests; /// Unit tests for ReplayProof model and compact string generation. /// Sprint: SPRINT_20260105_002_001_REPLAY, Tasks RPL-011 through RPL-014. /// -[Trait("Category", "Unit")] -public class ReplayProofTests +[Trait("Category", TestCategories.Unit)] +public sealed partial class ReplayProofTests { - private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero); - - [Fact] - public void FromExecutionResult_CreatesValidProof() - { - // Arrange & Act - var proof = ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0", - artifactDigest: "sha256:image123", - signatureVerified: true, - signatureKeyId: "key-001"); - - // Assert - proof.BundleHash.Should().Be("sha256:abc123"); - proof.PolicyVersion.Should().Be("1.0.0"); - proof.VerdictRoot.Should().Be("sha256:def456"); - proof.VerdictMatches.Should().BeTrue(); - proof.DurationMs.Should().Be(150); - proof.ReplayedAt.Should().Be(FixedTimestamp); - proof.EngineVersion.Should().Be("1.0.0"); - proof.ArtifactDigest.Should().Be("sha256:image123"); - proof.SignatureVerified.Should().BeTrue(); - proof.SignatureKeyId.Should().Be("key-001"); - } - - [Fact] - public void ToCompactString_GeneratesCorrectFormat() - { - // Arrange - var proof = CreateTestProof(); - - // Act - var compact = proof.ToCompactString(); - - // Assert - compact.Should().StartWith("replay-proof:"); - compact.Should().HaveLength("replay-proof:".Length + 64); // SHA-256 hex = 64 chars - } - - [Fact] - public void ToCompactString_IsDeterministic() - { - // Arrange - var proof1 = CreateTestProof(); - var proof2 = CreateTestProof(); - - // Act - var compact1 = proof1.ToCompactString(); - var compact2 = proof2.ToCompactString(); - - // Assert - compact1.Should().Be(compact2, "same inputs should produce same compact proof"); - } - - [Fact] - public void ToCanonicalJson_SortsKeysDeterministically() - { - // Arrange - var proof = CreateTestProof(); - - // Act - var json = proof.ToCanonicalJson(); - - // Assert - Keys should appear in alphabetical order - var keys = ExtractJsonKeys(json); - keys.Should().BeInAscendingOrder(StringComparer.Ordinal); - } - - [Fact] - public void ToCanonicalJson_ExcludesNullValues() - { - // Arrange - var proof = ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0"); - - // Act - var json = proof.ToCanonicalJson(); - - // Assert - 
Should not contain null values - json.Should().NotContain("null"); - json.Should().NotContain("artifactDigest"); // Not set, so excluded - json.Should().NotContain("signatureVerified"); // Not set, so excluded - json.Should().NotContain("signatureKeyId"); // Not set, so excluded - } - - [Fact] - public void ToCanonicalJson_FormatsTimestampCorrectly() - { - // Arrange - var proof = CreateTestProof(); - - // Act - var json = proof.ToCanonicalJson(); - - // Assert - ISO 8601 UTC format - json.Should().Contain("2026-01-05T12:00:00.000Z"); - } - - [Fact] - public void ValidateCompactString_ReturnsTrueForValidProof() - { - // Arrange - var proof = CreateTestProof(); - var compact = proof.ToCompactString(); - var canonicalJson = proof.ToCanonicalJson(); - - // Act - var isValid = ReplayProof.ValidateCompactString(compact, canonicalJson); - - // Assert - isValid.Should().BeTrue(); - } - - [Fact] - public void ValidateCompactString_ReturnsFalseForTamperedJson() - { - // Arrange - var proof = CreateTestProof(); - var compact = proof.ToCompactString(); - var tamperedJson = proof.ToCanonicalJson().Replace("1.0.0", "2.0.0"); - - // Act - var isValid = ReplayProof.ValidateCompactString(compact, tamperedJson); - - // Assert - isValid.Should().BeFalse("tampered JSON should not validate"); - } - - [Fact] - public void ValidateCompactString_ReturnsFalseForInvalidPrefix() - { - // Arrange - var canonicalJson = CreateTestProof().ToCanonicalJson(); - - // Act - var isValid = ReplayProof.ValidateCompactString("invalid-proof:abc123", canonicalJson); - - // Assert - isValid.Should().BeFalse("invalid prefix should not validate"); - } - - [Fact] - public void ValidateCompactString_ReturnsFalseForEmptyInputs() - { - // Act & Assert - ReplayProof.ValidateCompactString("", "{}").Should().BeFalse(); - ReplayProof.ValidateCompactString("replay-proof:abc", "").Should().BeFalse(); - ReplayProof.ValidateCompactString(null!, "{}").Should().BeFalse(); - ReplayProof.ValidateCompactString("replay-proof:abc", null!).Should().BeFalse(); - } - - [Fact] - public void ToCanonicalJson_IncludesMetadataWhenPresent() - { - // Arrange - var proof = ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0", - metadata: ImmutableDictionary.Empty - .Add("tenant", "acme-corp") - .Add("project", "web-app")); - - // Act - var json = proof.ToCanonicalJson(); - - // Assert - json.Should().Contain("metadata"); - json.Should().Contain("tenant"); - json.Should().Contain("acme-corp"); - json.Should().Contain("project"); - json.Should().Contain("web-app"); - } - - [Fact] - public void ToCanonicalJson_SortsMetadataKeys() - { - // Arrange - var proof = ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0", - metadata: ImmutableDictionary.Empty - .Add("zebra", "z-value") - .Add("alpha", "a-value") - .Add("mike", "m-value")); - - // Act - var json = proof.ToCanonicalJson(); - - // Assert - Metadata keys should be in alphabetical order - var alphaPos = json.IndexOf("alpha", StringComparison.Ordinal); - var mikePos = json.IndexOf("mike", StringComparison.Ordinal); - var zebraPos = json.IndexOf("zebra", StringComparison.Ordinal); - - alphaPos.Should().BeLessThan(mikePos); - mikePos.Should().BeLessThan(zebraPos); - } - - [Fact] - 
public void FromExecutionResult_ThrowsOnNullRequiredParams() - { - // Act & Assert - var act1 = () => ReplayProof.FromExecutionResult( - bundleHash: null!, - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0"); - act1.Should().Throw().WithParameterName("bundleHash"); - - var act2 = () => ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: null!, - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0"); - act2.Should().Throw().WithParameterName("policyVersion"); - - var act3 = () => ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: null!, - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: "1.0.0"); - act3.Should().Throw().WithParameterName("verdictRoot"); - - var act4 = () => ReplayProof.FromExecutionResult( - bundleHash: "sha256:abc123", - policyVersion: "1.0.0", - verdictRoot: "sha256:def456", - verdictMatches: true, - durationMs: 150, - replayedAt: FixedTimestamp, - engineVersion: null!); - act4.Should().Throw().WithParameterName("engineVersion"); - } - - [Fact] - public void SchemaVersion_DefaultsTo1_0_0() - { - // Arrange & Act - var proof = CreateTestProof(); - - // Assert - proof.SchemaVersion.Should().Be("1.0.0"); - } + private static readonly DateTimeOffset _fixedTimestamp = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero); private static ReplayProof CreateTestProof() { @@ -301,7 +26,7 @@ public class ReplayProofTests verdictRoot: "sha256:verdict789", verdictMatches: true, durationMs: 150, - replayedAt: FixedTimestamp, + replayedAt: _fixedTimestamp, engineVersion: "1.0.0", artifactDigest: "sha256:image123", signatureVerified: true, diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/TASKS.md b/src/__Libraries/StellaOps.Replay.Core.Tests/TASKS.md index cb9d83fe8..336308344 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/TASKS.md +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/TASKS.md @@ -8,4 +8,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0106-M | DONE | Revalidated 2026-01-08; maintainability audit for Replay.Core.Tests. | | AUDIT-0106-T | DONE | Revalidated 2026-01-08; test coverage audit for Replay.Core.Tests. | | AUDIT-0106-A | TODO | Pending approval (revalidated 2026-01-08). | -| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-06 | DONE | SOLID review notes refreshed 2026-02-04; tests split <=100 lines; dotnet test passed (64). 
| diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Hash.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Hash.cs new file mode 100644 index 000000000..7a84a36dd --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Hash.cs @@ -0,0 +1,66 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_InvalidHashAlgorithm_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "sbom", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "MD5", + "value": "abc123", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "canonicalHash.algorithm"); + } + + [Fact] + public void Validate_InvalidHashValue_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "sbom", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "canonicalHash.value"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.InvalidInputs.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.InvalidInputs.cs new file mode 100644 index 000000000..b98d98d8b --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.InvalidInputs.cs @@ -0,0 +1,50 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_InvalidJson_ReturnsError() + { + var json = "{ invalid json }"; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "$"); + } + + [Fact] + public void Validate_InvalidBaseImageDigest_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "verdict", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [{"name": "test", "version": "1.0"}] + }, + "generatedAt": "2025-12-26T12:00:00Z", + "inputs": { + "baseImageDigest": "def456def456def456def456def456def456def456def456def456def456def4" + } + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "inputs.baseImageDigest"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.RequiredFields.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.RequiredFields.cs new file mode 100644 index 000000000..099edce33 --- /dev/null +++ 
b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.RequiredFields.cs @@ -0,0 +1,61 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_MissingRequiredField_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "artifact"); + } + + [Fact] + public void Validate_InvalidArtifactType_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "invalid-type", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "artifact.type"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.SchemaAndTimestamp.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.SchemaAndTimestamp.cs new file mode 100644 index 000000000..49b9d028f --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.SchemaAndTimestamp.cs @@ -0,0 +1,66 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_UnsupportedSchemaVersion_ReturnsError() + { + var json = """ + { + "schemaVersion": "2.0", + "artifact": { + "type": "sbom", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "schemaVersion"); + } + + [Fact] + public void Validate_InvalidTimestamp_ReturnsError() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "sbom", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "not-a-timestamp" + } + """; + + var result = _validator.Validate(json); + + Assert.False(result.IsValid); + Assert.Contains(result.Errors, e => e.Path == "generatedAt"); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Valid.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Valid.cs new file mode 100644 index 000000000..a3945f447 --- /dev/null +++ 
b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Valid.cs @@ -0,0 +1,72 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_ValidManifest_ReturnsValid() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "sbom", + "name": "alpine-3.18", + "version": "2025-12-26T00:00:00Z", + "format": "SPDX 3.0.1" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [ + {"name": "StellaOps.Scanner", "version": "1.0.0"} + ] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.True(result.IsValid); + Assert.Empty(result.Errors); + } + + [Fact] + public void Validate_WithInputs_ValidatesHashFormats() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "verdict", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [{"name": "test", "version": "1.0"}] + }, + "generatedAt": "2025-12-26T12:00:00Z", + "inputs": { + "feedSnapshotHash": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "baseImageDigest": "sha256:def456def456def456def456def456def456def456def456def456def456def4" + } + } + """; + + var result = _validator.Validate(json); + + Assert.True(result.IsValid); + } +} diff --git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Warnings.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Warnings.cs new file mode 100644 index 000000000..d03d64481 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.Warnings.cs @@ -0,0 +1,68 @@ +using Xunit; + +namespace StellaOps.Replay.Core.Tests.Validation; + +public sealed partial class DeterminismManifestValidatorTests +{ + [Fact] + public void Validate_EmptyComponentsArray_ReturnsWarning() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "verdict", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.True(result.IsValid); + Assert.Contains(result.Warnings, w => w.Path == "toolchain.components"); + } + + [Fact] + public void Validate_SbomWithoutFormat_ReturnsWarning() + { + var json = """ + { + "schemaVersion": "1.0", + "artifact": { + "type": "sbom", + "name": "test", + "version": "1.0" + }, + "canonicalHash": { + "algorithm": "SHA-256", + "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", + "encoding": "hex" + }, + "toolchain": { + "platform": ".NET 10.0.0", + "components": [ + {"name": "test", "version": "1.0"} + ] + }, + "generatedAt": "2025-12-26T12:00:00Z" + } + """; + + var result = _validator.Validate(json); + + Assert.True(result.IsValid); + Assert.Contains(result.Warnings, w => w.Path == "artifact.format"); + } +} diff 
--git a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.cs b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.cs index e5c33f468..23afb7e65 100644 --- a/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.cs +++ b/src/__Libraries/StellaOps.Replay.Core.Tests/Validation/DeterminismManifestValidatorTests.cs @@ -1,399 +1,11 @@ -// ----------------------------------------------------------------------------- -// DeterminismManifestValidatorTests.cs -// Sprint: SPRINT_20251226_007_BE_determinism_gaps -// Task: DET-GAP-10 -// Description: Tests for determinism manifest validator -// ----------------------------------------------------------------------------- - using StellaOps.Replay.Core.Validation; +using StellaOps.TestKit; using Xunit; namespace StellaOps.Replay.Core.Tests.Validation; -public sealed class DeterminismManifestValidatorTests +[Trait("Category", TestCategories.Unit)] +public sealed partial class DeterminismManifestValidatorTests { private readonly DeterminismManifestValidator _validator = new(); - - [Fact] - public void Validate_ValidManifest_ReturnsValid() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "sbom", - "name": "alpine-3.18", - "version": "2025-12-26T00:00:00Z", - "format": "SPDX 3.0.1" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [ - {"name": "StellaOps.Scanner", "version": "1.0.0"} - ] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.True(result.IsValid); - Assert.Empty(result.Errors); - } - - [Fact] - public void Validate_MissingRequiredField_ReturnsError() - { - // Arrange - missing "artifact" - var json = """ - { - "schemaVersion": "1.0", - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "artifact"); - } - - [Fact] - public void Validate_InvalidArtifactType_ReturnsError() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "invalid-type", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "artifact.type"); - } - - [Fact] - public void Validate_InvalidHashAlgorithm_ReturnsError() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "sbom", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "MD5", - "value": "abc123", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = 
_validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "canonicalHash.algorithm"); - } - - [Fact] - public void Validate_InvalidHashValue_ReturnsError() - { - // Arrange - hash value too short - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "sbom", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "canonicalHash.value"); - } - - [Fact] - public void Validate_UnsupportedSchemaVersion_ReturnsError() - { - // Arrange - var json = """ - { - "schemaVersion": "2.0", - "artifact": { - "type": "sbom", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "schemaVersion"); - } - - [Fact] - public void Validate_InvalidTimestamp_ReturnsError() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "sbom", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "not-a-timestamp" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "generatedAt"); - } - - [Fact] - public void Validate_EmptyComponentsArray_ReturnsWarning() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "verdict", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.True(result.IsValid); - Assert.Contains(result.Warnings, w => w.Path == "toolchain.components"); - } - - [Fact] - public void Validate_SbomWithoutFormat_ReturnsWarning() - { - // Arrange - sbom without format specified - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "sbom", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [ - {"name": "test", "version": "1.0"} - ] - }, - "generatedAt": "2025-12-26T12:00:00Z" - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.True(result.IsValid); - Assert.Contains(result.Warnings, w => w.Path == "artifact.format"); - } - - [Fact] - public void Validate_InvalidJson_ReturnsError() - { - // Arrange - var json = "{ 
invalid json }"; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "$"); - } - - [Fact] - public void Validate_WithInputs_ValidatesHashFormats() - { - // Arrange - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "verdict", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [{"name": "test", "version": "1.0"}] - }, - "generatedAt": "2025-12-26T12:00:00Z", - "inputs": { - "feedSnapshotHash": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "baseImageDigest": "sha256:def456def456def456def456def456def456def456def456def456def456def4" - } - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.True(result.IsValid); - } - - [Fact] - public void Validate_InvalidBaseImageDigest_ReturnsError() - { - // Arrange - missing sha256: prefix - var json = """ - { - "schemaVersion": "1.0", - "artifact": { - "type": "verdict", - "name": "test", - "version": "1.0" - }, - "canonicalHash": { - "algorithm": "SHA-256", - "value": "abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1", - "encoding": "hex" - }, - "toolchain": { - "platform": ".NET 10.0.0", - "components": [{"name": "test", "version": "1.0"}] - }, - "generatedAt": "2025-12-26T12:00:00Z", - "inputs": { - "baseImageDigest": "def456def456def456def456def456def456def456def456def456def456def4" - } - } - """; - - // Act - var result = _validator.Validate(json); - - // Assert - Assert.False(result.IsValid); - Assert.Contains(result.Errors, e => e.Path == "inputs.baseImageDigest"); - } } diff --git a/src/__Libraries/StellaOps.Replay.Core/Bundle/StellaReplayBundleWriter.Write.cs b/src/__Libraries/StellaOps.Replay.Core/Bundle/StellaReplayBundleWriter.Write.cs index aef2878f5..18d7abcae 100644 --- a/src/__Libraries/StellaOps.Replay.Core/Bundle/StellaReplayBundleWriter.Write.cs +++ b/src/__Libraries/StellaOps.Replay.Core/Bundle/StellaReplayBundleWriter.Write.cs @@ -4,6 +4,7 @@ using System.IO; using System.IO.Compression; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using StellaOps.Replay.Core.Manifest; using StellaOps.Replay.Core.Models; diff --git a/src/__Libraries/StellaOps.Replay.Core/FeedSnapshot/FeedSnapshotCoordinatorService.ExportHelpers.cs b/src/__Libraries/StellaOps.Replay.Core/FeedSnapshot/FeedSnapshotCoordinatorService.ExportHelpers.cs index 9b74c16bb..a57fc2272 100644 --- a/src/__Libraries/StellaOps.Replay.Core/FeedSnapshot/FeedSnapshotCoordinatorService.ExportHelpers.cs +++ b/src/__Libraries/StellaOps.Replay.Core/FeedSnapshot/FeedSnapshotCoordinatorService.ExportHelpers.cs @@ -19,7 +19,7 @@ public sealed partial class FeedSnapshotCoordinatorService _ => "none" }; - var stream = _options.Compression == CompressionAlgorithm.Gzip + Stream stream = _options.Compression == CompressionAlgorithm.Gzip ? 
new GZipStream(hashStream, CompressionLevel.Optimal, leaveOpen: true) : new ZstdCompressionStream(hashStream); diff --git a/src/__Libraries/StellaOps.Replay.Core/Manifest/ReplayManifestWriter.cs b/src/__Libraries/StellaOps.Replay.Core/Manifest/ReplayManifestWriter.cs index 279b13137..c4d95a451 100644 --- a/src/__Libraries/StellaOps.Replay.Core/Manifest/ReplayManifestWriter.cs +++ b/src/__Libraries/StellaOps.Replay.Core/Manifest/ReplayManifestWriter.cs @@ -1,4 +1,5 @@ +using StellaOps.Replay.Core.Models; using YamlDotNet.Serialization; using YamlDotNet.Serialization.NamingConventions; diff --git a/src/__Libraries/StellaOps.Replay.Core/TASKS.md b/src/__Libraries/StellaOps.Replay.Core/TASKS.md index 675ae7f57..509e0c3d0 100644 --- a/src/__Libraries/StellaOps.Replay.Core/TASKS.md +++ b/src/__Libraries/StellaOps.Replay.Core/TASKS.md @@ -8,4 +8,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0107-M | DONE | Revalidated 2026-01-08; maintainability audit for Replay.Core. | | AUDIT-0107-T | DONE | Revalidated 2026-01-08; test coverage audit for Replay.Core. | | AUDIT-0107-A | TODO | Pending approval (revalidated 2026-01-08). | -| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-06 | DONE | SOLID review notes refreshed 2026-02-04; Replay.Core tests passed (64 + 1). | diff --git a/src/__Libraries/StellaOps.Replay/Engine/IFeedLoader.cs b/src/__Libraries/StellaOps.Replay/Engine/IFeedLoader.cs new file mode 100644 index 000000000..ac4894d5e --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/IFeedLoader.cs @@ -0,0 +1,8 @@ +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Engine; + +public interface IFeedLoader +{ + Task<FeedSnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default); +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/IPolicyLoader.cs b/src/__Libraries/StellaOps.Replay/Engine/IPolicyLoader.cs new file mode 100644 index 000000000..25bf0d593 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/IPolicyLoader.cs @@ -0,0 +1,8 @@ +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Engine; + +public interface IPolicyLoader +{ + Task<PolicySnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default); +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/IReplayEngine.cs b/src/__Libraries/StellaOps.Replay/Engine/IReplayEngine.cs new file mode 100644 index 000000000..45e86859f --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/IReplayEngine.cs @@ -0,0 +1,10 @@ +using StellaOps.Replay.Models; +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Engine; + +public interface IReplayEngine +{ + Task<ReplayResult> ReplayAsync(RunManifest manifest, ReplayOptions options, CancellationToken ct = default); + DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b); +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/IScanner.cs b/src/__Libraries/StellaOps.Replay/Engine/IScanner.cs new file mode 100644 index 000000000..4c39a5ede --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/IScanner.cs @@ -0,0 +1,10 @@ +using StellaOps.Replay.Models; +using StellaOps.Testing.Manifests.Models; +using System.Collections.Immutable; + +namespace StellaOps.Replay.Engine; + +public interface IScanner +{ + Task ScanAsync(ImmutableArray artifacts, CancellationToken ct = default); +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/IScannerFactory.cs b/src/__Libraries/StellaOps.Replay/Engine/IScannerFactory.cs new file mode 100644 index
000000000..abf4f6f63 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/IScannerFactory.cs @@ -0,0 +1,8 @@ +using StellaOps.Replay.Models; + +namespace StellaOps.Replay.Engine; + +public interface IScannerFactory +{ + IScanner Create(ScannerOptions options); +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Determinism.cs b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Determinism.cs new file mode 100644 index 000000000..945a9e4e9 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Determinism.cs @@ -0,0 +1,29 @@ +using StellaOps.Replay.Models; + +namespace StellaOps.Replay.Engine; + +public sealed partial class ReplayEngine +{ + public DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b) + { + if (a.VerdictDigest == b.VerdictDigest) + { + return new DeterminismCheckResult + { + IsDeterministic = true, + DigestA = a.VerdictDigest, + DigestB = b.VerdictDigest, + Differences = [] + }; + } + + var differences = FindJsonDifferences(a.VerdictJson, b.VerdictJson); + return new DeterminismCheckResult + { + IsDeterministic = false, + DigestA = a.VerdictDigest, + DigestB = b.VerdictDigest, + Differences = differences + }; + } +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.JsonDifferences.cs b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.JsonDifferences.cs new file mode 100644 index 000000000..c43b44543 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.JsonDifferences.cs @@ -0,0 +1,17 @@ +using StellaOps.Canonicalization.Verification; +using StellaOps.Replay.Models; + +namespace StellaOps.Replay.Engine; + +public sealed partial class ReplayEngine +{ + private static IReadOnlyList<JsonDifference> FindJsonDifferences(string? a, string? b) + { + if (a is null || b is null) + return [new JsonDifference("$", "One or both values are null")]; + + var verifier = new DeterminismVerifier(); + var result = verifier.Compare(a, b); + return result.Differences.Select(d => new JsonDifference(d, "Value mismatch")).ToList(); + } +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Loading.cs b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Loading.cs new file mode 100644 index 000000000..871cf0de0 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Loading.cs @@ -0,0 +1,40 @@ +using StellaOps.Replay.Models; +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Engine; + +public sealed partial class ReplayEngine +{ + private async Task<LoadResult<FeedSnapshot>> LoadFeedSnapshotAsync( + FeedSnapshot snapshot, + CancellationToken ct) + { + try + { + var feed = await _feedLoader.LoadByDigestAsync(snapshot.Digest, ct).ConfigureAwait(false); + if (!string.Equals(feed.Digest, snapshot.Digest, StringComparison.OrdinalIgnoreCase)) + return LoadResult.Fail($"Feed digest mismatch: expected {snapshot.Digest}"); + return LoadResult.Ok(feed); + } + catch (Exception ex) + { + return LoadResult.Fail($"Failed to load feed: {ex.Message}"); + } + } + + private async Task<LoadResult<PolicySnapshot>> LoadPolicySnapshotAsync( + PolicySnapshot snapshot, + CancellationToken ct) + { + try + { + var policy = await _policyLoader.LoadByDigestAsync(snapshot.LatticeRulesDigest, ct) + .ConfigureAwait(false); + return LoadResult.Ok(policy); + } + catch (Exception ex) + { + return LoadResult.Fail($"Failed to load policy: {ex.Message}"); + } + } +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Validation.cs b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Validation.cs new file mode 100644 index
000000000..dad7f373c --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.Validation.cs @@ -0,0 +1,23 @@ +using StellaOps.Replay.Models; +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Engine; + +public sealed partial class ReplayEngine +{ + private static ValidationResult ValidateManifest(RunManifest manifest) + { + var errors = new List<string>(); + + if (string.IsNullOrWhiteSpace(manifest.RunId)) + errors.Add("RunId is required"); + + if (manifest.ArtifactDigests.Length == 0) + errors.Add("At least one artifact digest required"); + + if (string.IsNullOrWhiteSpace(manifest.FeedSnapshot.Digest)) + errors.Add("Feed snapshot digest required"); + + return new ValidationResult(errors.Count == 0, errors); + } +} diff --git a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.cs b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.cs index 7e99bb34c..ea3546db6 100644 --- a/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.cs +++ b/src/__Libraries/StellaOps.Replay/Engine/ReplayEngine.cs @@ -1,10 +1,8 @@ using Microsoft.Extensions.Logging; using StellaOps.Canonicalization.Json; -using StellaOps.Canonicalization.Verification; using StellaOps.Replay.Models; using StellaOps.Testing.Manifests.Models; -using System.Collections.Immutable; namespace StellaOps.Replay.Engine; @@ -12,7 +10,7 @@ namespace StellaOps.Replay.Engine; /// Executes scans deterministically from run manifests. /// Enables time-travel replay for verification and auditing. /// -public sealed class ReplayEngine : IReplayEngine +public sealed partial class ReplayEngine : IReplayEngine { private readonly IFeedLoader _feedLoader; private readonly IPolicyLoader _policyLoader; @@ -44,16 +42,32 @@ public sealed class ReplayEngine : IReplayEngine var validationResult = ValidateManifest(manifest); if (!validationResult.IsValid) { - return ReplayResult.Failed(manifest.RunId, "Manifest validation failed", validationResult.Errors); + return ReplayResult.Failed( + manifest.RunId, + "Manifest validation failed", + validationResult.Errors, + _timeProvider.GetUtcNow()); } var feedResult = await LoadFeedSnapshotAsync(manifest.FeedSnapshot, ct).ConfigureAwait(false); if (!feedResult.Success) - return ReplayResult.Failed(manifest.RunId, "Failed to load feed snapshot", [feedResult.Error ?? "Unknown error"]); + { + return ReplayResult.Failed( + manifest.RunId, + "Failed to load feed snapshot", + [feedResult.Error ?? "Unknown error"], + _timeProvider.GetUtcNow()); + } var policyResult = await LoadPolicySnapshotAsync(manifest.PolicySnapshot, ct).ConfigureAwait(false); if (!policyResult.Success) - return ReplayResult.Failed(manifest.RunId, "Failed to load policy snapshot", [policyResult.Error ?? "Unknown error"]); + { + return ReplayResult.Failed( + manifest.RunId, + "Failed to load policy snapshot", + [policyResult.Error ??
"Unknown error"], + _timeProvider.GetUtcNow()); + } var scannerOptions = new ScannerOptions { @@ -81,109 +95,4 @@ public sealed class ReplayEngine : IReplayEngine DurationMs = scanResult.DurationMs }; } - - public DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b) - { - if (a.VerdictDigest == b.VerdictDigest) - { - return new DeterminismCheckResult - { - IsDeterministic = true, - DigestA = a.VerdictDigest, - DigestB = b.VerdictDigest, - Differences = [] - }; - } - - var differences = FindJsonDifferences(a.VerdictJson, b.VerdictJson); - return new DeterminismCheckResult - { - IsDeterministic = false, - DigestA = a.VerdictDigest, - DigestB = b.VerdictDigest, - Differences = differences - }; - } - - private static ValidationResult ValidateManifest(RunManifest manifest) - { - var errors = new List(); - - if (string.IsNullOrWhiteSpace(manifest.RunId)) - errors.Add("RunId is required"); - - if (manifest.ArtifactDigests.Length == 0) - errors.Add("At least one artifact digest required"); - - if (string.IsNullOrWhiteSpace(manifest.FeedSnapshot.Digest)) - errors.Add("Feed snapshot digest required"); - - return new ValidationResult(errors.Count == 0, errors); - } - - private async Task> LoadFeedSnapshotAsync( - FeedSnapshot snapshot, CancellationToken ct) - { - try - { - var feed = await _feedLoader.LoadByDigestAsync(snapshot.Digest, ct).ConfigureAwait(false); - if (!string.Equals(feed.Digest, snapshot.Digest, StringComparison.OrdinalIgnoreCase)) - return LoadResult.Fail($"Feed digest mismatch: expected {snapshot.Digest}"); - return LoadResult.Ok(feed); - } - catch (Exception ex) - { - return LoadResult.Fail($"Failed to load feed: {ex.Message}"); - } - } - - private async Task> LoadPolicySnapshotAsync( - PolicySnapshot snapshot, CancellationToken ct) - { - try - { - var policy = await _policyLoader.LoadByDigestAsync(snapshot.LatticeRulesDigest, ct).ConfigureAwait(false); - return LoadResult.Ok(policy); - } - catch (Exception ex) - { - return LoadResult.Fail($"Failed to load policy: {ex.Message}"); - } - } - - private static IReadOnlyList FindJsonDifferences(string? a, string? 
b) - { - if (a is null || b is null) - return [new JsonDifference("$", "One or both values are null")]; - - var verifier = new DeterminismVerifier(); - var result = verifier.Compare(a, b); - return result.Differences.Select(d => new JsonDifference(d, "Value mismatch")).ToList(); - } -} - -public interface IReplayEngine -{ - Task ReplayAsync(RunManifest manifest, ReplayOptions options, CancellationToken ct = default); - DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b); -} - -public interface IScannerFactory -{ - IScanner Create(ScannerOptions options); -} - -public interface IScanner -{ - Task ScanAsync(ImmutableArray artifacts, CancellationToken ct = default); -} - -public interface IFeedLoader -{ - Task LoadByDigestAsync(string digest, CancellationToken ct = default); -} - -public interface IPolicyLoader -{ - Task LoadByDigestAsync(string digest, CancellationToken ct = default); } diff --git a/src/__Libraries/StellaOps.Replay/Loaders/DigestMismatchException.cs b/src/__Libraries/StellaOps.Replay/Loaders/DigestMismatchException.cs new file mode 100644 index 000000000..40ed104a3 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/DigestMismatchException.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Replay.Loaders; + +public sealed class DigestMismatchException : Exception +{ + public DigestMismatchException(string message) : base(message) { } +} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/FeedNotFoundException.cs b/src/__Libraries/StellaOps.Replay/Loaders/FeedNotFoundException.cs new file mode 100644 index 000000000..49f70854e --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/FeedNotFoundException.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Replay.Loaders; + +public sealed class FeedNotFoundException : Exception +{ + public FeedNotFoundException(string message) : base(message) { } +} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/FeedSnapshotLoader.cs b/src/__Libraries/StellaOps.Replay/Loaders/FeedSnapshotLoader.cs index cdd2f8b24..eac0422a2 100644 --- a/src/__Libraries/StellaOps.Replay/Loaders/FeedSnapshotLoader.cs +++ b/src/__Libraries/StellaOps.Replay/Loaders/FeedSnapshotLoader.cs @@ -21,24 +21,25 @@ public sealed class FeedSnapshotLoader : IFeedLoader public async Task LoadByDigestAsync(string digest, CancellationToken ct = default) { - _logger.LogDebug("Loading feed snapshot with digest {Digest}", digest); + var normalizedDigest = SnapshotDigestGuard.EnsureSha256Hex(digest, nameof(digest)); + _logger.LogDebug("Loading feed snapshot with digest {Digest}", normalizedDigest); - var localPath = GetLocalPath(digest); + var localPath = GetLocalPath(normalizedDigest); if (File.Exists(localPath)) { var feed = await LoadFromFileAsync(localPath, ct).ConfigureAwait(false); - VerifyDigest(feed, digest); + VerifyDigest(feed, normalizedDigest); return feed; } - var storedFeed = await _storage.GetByDigestAsync(digest, ct).ConfigureAwait(false); + var storedFeed = await _storage.GetByDigestAsync(normalizedDigest, ct).ConfigureAwait(false); if (storedFeed is not null) { - VerifyDigest(storedFeed, digest); + VerifyDigest(storedFeed, normalizedDigest); return storedFeed; } - throw new FeedNotFoundException($"Feed snapshot not found: {digest}"); + throw new FeedNotFoundException($"Feed snapshot not found: {normalizedDigest}"); } private static void VerifyDigest(FeedSnapshot feed, string expected) @@ -66,18 +67,3 @@ public sealed class FeedSnapshotLoader : IFeedLoader return CanonicalJsonSerializer.Deserialize(json); } } - -public interface IFeedStorage -{ - 
Task GetByDigestAsync(string digest, CancellationToken ct = default); -} - -public sealed class FeedNotFoundException : Exception -{ - public FeedNotFoundException(string message) : base(message) { } -} - -public sealed class DigestMismatchException : Exception -{ - public DigestMismatchException(string message) : base(message) { } -} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/IFeedStorage.cs b/src/__Libraries/StellaOps.Replay/Loaders/IFeedStorage.cs new file mode 100644 index 000000000..a7213a502 --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/IFeedStorage.cs @@ -0,0 +1,8 @@ +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Loaders; + +public interface IFeedStorage +{ + Task GetByDigestAsync(string digest, CancellationToken ct = default); +} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/IPolicyStorage.cs b/src/__Libraries/StellaOps.Replay/Loaders/IPolicyStorage.cs new file mode 100644 index 000000000..0561c4fea --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/IPolicyStorage.cs @@ -0,0 +1,8 @@ +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Loaders; + +public interface IPolicyStorage +{ + Task GetByDigestAsync(string digest, CancellationToken ct = default); +} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/PolicyNotFoundException.cs b/src/__Libraries/StellaOps.Replay/Loaders/PolicyNotFoundException.cs new file mode 100644 index 000000000..aabcf255f --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/PolicyNotFoundException.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Replay.Loaders; + +public sealed class PolicyNotFoundException : Exception +{ + public PolicyNotFoundException(string message) : base(message) { } +} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/PolicySnapshotLoader.cs b/src/__Libraries/StellaOps.Replay/Loaders/PolicySnapshotLoader.cs index 0d1f6e8a9..73caa8640 100644 --- a/src/__Libraries/StellaOps.Replay/Loaders/PolicySnapshotLoader.cs +++ b/src/__Libraries/StellaOps.Replay/Loaders/PolicySnapshotLoader.cs @@ -21,24 +21,25 @@ public sealed class PolicySnapshotLoader : IPolicyLoader public async Task LoadByDigestAsync(string digest, CancellationToken ct = default) { - _logger.LogDebug("Loading policy snapshot with digest {Digest}", digest); + var normalizedDigest = SnapshotDigestGuard.EnsureSha256Hex(digest, nameof(digest)); + _logger.LogDebug("Loading policy snapshot with digest {Digest}", normalizedDigest); - var localPath = GetLocalPath(digest); + var localPath = GetLocalPath(normalizedDigest); if (File.Exists(localPath)) { var policy = await LoadFromFileAsync(localPath, ct).ConfigureAwait(false); - VerifyDigest(policy, digest); + VerifyDigest(policy, normalizedDigest); return policy; } - var stored = await _storage.GetByDigestAsync(digest, ct).ConfigureAwait(false); + var stored = await _storage.GetByDigestAsync(normalizedDigest, ct).ConfigureAwait(false); if (stored is not null) { - VerifyDigest(stored, digest); + VerifyDigest(stored, normalizedDigest); return stored; } - throw new PolicyNotFoundException($"Policy snapshot not found: {digest}"); + throw new PolicyNotFoundException($"Policy snapshot not found: {normalizedDigest}"); } private static void VerifyDigest(PolicySnapshot policy, string expected) @@ -66,13 +67,3 @@ public sealed class PolicySnapshotLoader : IPolicyLoader return CanonicalJsonSerializer.Deserialize(json); } } - -public interface IPolicyStorage -{ - Task GetByDigestAsync(string digest, CancellationToken ct = default); -} - -public sealed 
class PolicyNotFoundException : Exception -{ - public PolicyNotFoundException(string message) : base(message) { } -} diff --git a/src/__Libraries/StellaOps.Replay/Loaders/SnapshotDigestGuard.cs b/src/__Libraries/StellaOps.Replay/Loaders/SnapshotDigestGuard.cs new file mode 100644 index 000000000..fff906c2d --- /dev/null +++ b/src/__Libraries/StellaOps.Replay/Loaders/SnapshotDigestGuard.cs @@ -0,0 +1,33 @@ +namespace StellaOps.Replay.Loaders; + +internal static class SnapshotDigestGuard +{ + private const int HexLength = 64; + + internal static string EnsureSha256Hex(string digest, string parameterName) + { + if (string.IsNullOrWhiteSpace(digest)) + throw new ArgumentException($"{parameterName} is required.", parameterName); + + var trimmed = digest.Trim(); + if (trimmed.Length != HexLength || !IsHex(trimmed)) + throw new FormatException($"{parameterName} must be {HexLength} hexadecimal characters."); + + return trimmed.ToLowerInvariant(); + } + + private static bool IsHex(ReadOnlySpan<char> value) + { + foreach (var c in value) + { + if ((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) + { + continue; + } + + return false; + } + + return true; + } +} diff --git a/src/__Libraries/StellaOps.Replay/TASKS.md b/src/__Libraries/StellaOps.Replay/TASKS.md index 61529b3e7..981bd7066 100644 --- a/src/__Libraries/StellaOps.Replay/TASKS.md +++ b/src/__Libraries/StellaOps.Replay/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0108-T | DONE | Revalidated 2026-01-08; test coverage audit for Replay library. | | AUDIT-0108-A | TODO | Pending approval (revalidated 2026-01-08). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | 2026-02-05; ReplayEngine split into partials/interfaces, loader digest guard and exceptions separated, failure timestamps use TimeProvider; dotnet test src/__Libraries/__Tests/StellaOps.Replay.Tests/StellaOps.Replay.Tests.csproj passed (11 tests). | diff --git a/src/__Libraries/StellaOps.Worker.Health/HostStartedHealthCheck.cs b/src/__Libraries/StellaOps.Worker.Health/HostStartedHealthCheck.cs new file mode 100644 index 000000000..94882ff5a --- /dev/null +++ b/src/__Libraries/StellaOps.Worker.Health/HostStartedHealthCheck.cs @@ -0,0 +1,35 @@ +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Hosting; + +namespace StellaOps.Worker.Health; + +/// <summary> +/// Health check that reports healthy once the host has fully started. +/// Before <see cref="IHostApplicationLifetime.ApplicationStarted"/> fires, +/// this check returns <see cref="HealthCheckResult.Degraded"/> so readiness +/// probes can distinguish startup from running state. +/// </summary> +internal sealed class HostStartedHealthCheck : IHealthCheck, IDisposable +{ + private volatile bool _started; + private CancellationTokenRegistration _registration; + + public HostStartedHealthCheck(IHostApplicationLifetime lifetime) + { + _registration = lifetime.ApplicationStarted.Register(() => _started = true); + } + + public Task<HealthCheckResult> CheckHealthAsync( + HealthCheckContext context, + CancellationToken cancellationToken = default) + { + return Task.FromResult(_started + ?
HealthCheckResult.Healthy("Host started.") + : HealthCheckResult.Degraded("Host is starting.")); + } + + public void Dispose() + { + _registration.Dispose(); + } +} diff --git a/src/__Libraries/StellaOps.Worker.Health/StellaOps.Worker.Health.csproj b/src/__Libraries/StellaOps.Worker.Health/StellaOps.Worker.Health.csproj new file mode 100644 index 000000000..fd70a8d25 --- /dev/null +++ b/src/__Libraries/StellaOps.Worker.Health/StellaOps.Worker.Health.csproj @@ -0,0 +1,13 @@ + + + net10.0 + enable + enable + true + preview + + + + + + diff --git a/src/__Libraries/StellaOps.Worker.Health/WorkerHealthExtensions.cs b/src/__Libraries/StellaOps.Worker.Health/WorkerHealthExtensions.cs new file mode 100644 index 000000000..8c75dbd04 --- /dev/null +++ b/src/__Libraries/StellaOps.Worker.Health/WorkerHealthExtensions.cs @@ -0,0 +1,49 @@ +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Diagnostics.HealthChecks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Diagnostics.HealthChecks; + +namespace StellaOps.Worker.Health; + +/// <summary> +/// Extension methods to add lightweight health endpoints to worker services. +/// </summary> +public static class WorkerHealthExtensions +{ + /// <summary> + /// Registers a health check that tracks whether the host has fully started. + /// Call this on builder.Services before building the application. + /// </summary> + public static IServiceCollection AddWorkerHealthChecks(this IServiceCollection services) + { + services.AddHealthChecks() + .AddCheck<HostStartedHealthCheck>("host_started", tags: ["ready"]); + return services; + } + + /// <summary> + /// Maps /health/liveness (always 200) and /health/readiness + /// (checks registered health checks) endpoints. + /// </summary> + public static WebApplication MapWorkerHealthEndpoints(this WebApplication app) + { + // Liveness: always returns 200 if the process is running + app.MapGet("/health/liveness", () => Results.Ok("Alive")) + .ExcludeFromDescription(); + + // Readiness: runs all registered health checks tagged "ready" + app.MapHealthChecks("/health/readiness", new HealthCheckOptions + { + Predicate = check => check.Tags.Contains("ready"), + ResultStatusCodes = + { + [HealthStatus.Healthy] = StatusCodes.Status200OK, + [HealthStatus.Degraded] = StatusCodes.Status503ServiceUnavailable, + [HealthStatus.Unhealthy] = StatusCodes.Status503ServiceUnavailable, + }, + }); + + return app; + } +} diff --git a/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/ArchiveUtilitiesTests.cs b/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/ArchiveUtilitiesTests.cs new file mode 100644 index 000000000..08daeafcc --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/ArchiveUtilitiesTests.cs @@ -0,0 +1,70 @@ +using System; +using System.IO; +using System.Text; +using System.Threading; +using StellaOps.AuditPack.Services; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.AuditPack.Tests; + +public sealed class ArchiveUtilitiesTests : IDisposable +{ + private static int _tempCounter; + private readonly string _tempDir; + + public ArchiveUtilitiesTests() + { + var suffix = Interlocked.Increment(ref _tempCounter); + _tempDir = Path.Combine(Path.GetTempPath(), $"audit-archive-{suffix:0000}"); + Directory.CreateDirectory(_tempDir); + } + + public void Dispose() + { + if (Directory.Exists(_tempDir)) + { + Directory.Delete(_tempDir, recursive: true); + } + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task ExtractTarGzAsync_WritesFilesAsync() + { + var payload =
Encoding.UTF8.GetBytes("{\"ok\":true}"); + var archivePath = Path.Combine(_tempDir, "bundle.tar.gz"); + var entries = new[] + { + new ArchiveEntry("manifest.json", payload) + }; + + await ArchiveUtilities.WriteTarGzAsync(archivePath, entries, CancellationToken.None); + + var extractDir = Path.Combine(_tempDir, "extract"); + await ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: false, CancellationToken.None); + + var extractedPath = Path.Combine(extractDir, "manifest.json"); + Assert.True(File.Exists(extractedPath)); + var actual = await File.ReadAllBytesAsync(extractedPath); + Assert.Equal(payload, actual); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task ExtractTarGzAsync_RejectsParentTraversalAsync() + { + var payload = Encoding.UTF8.GetBytes("escape"); + var archivePath = Path.Combine(_tempDir, "escape.tar.gz"); + var entries = new[] + { + new ArchiveEntry("../escape.txt", payload) + }; + + await ArchiveUtilities.WriteTarGzAsync(archivePath, entries, CancellationToken.None); + + var extractDir = Path.Combine(_tempDir, "escape"); + await Assert.ThrowsAsync(() => + ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: true, CancellationToken.None)); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/TASKS.md index b6c94421f..24f81ed40 100644 --- a/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/TASKS.md @@ -13,3 +13,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0076-A | DONE | Waived (test project; revalidated 2026-01-06). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-08 | DONE | Split tests <= 100 lines; deterministic fixtures/time/IDs; async naming; replay/attestation coverage expanded; ConfigureAwait(false) skipped per xUnit1030; dotnet test passed 2026-02-02 (46 tests). | +| REMED-05 | DONE | Added ArchiveUtilities extraction tests; dotnet test passed 2026-02-04 (52 tests). 
| diff --git a/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/DpopValidationOptionsTests.cs b/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/DpopValidationOptionsTests.cs new file mode 100644 index 000000000..7cf18b0fd --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/DpopValidationOptionsTests.cs @@ -0,0 +1,70 @@ +using System; +using StellaOps.Auth.Security.Dpop; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Auth.Security.Tests; + +public sealed class DpopValidationOptionsTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Validate_ThrowsWhenProofLifetimeNonPositive() + { + var options = new DpopValidationOptions + { + ProofLifetime = TimeSpan.Zero + }; + + Assert.Throws(() => options.Validate()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Validate_ThrowsWhenClockSkewOutOfRange() + { + var options = new DpopValidationOptions + { + AllowedClockSkew = TimeSpan.FromMinutes(6) + }; + + Assert.Throws(() => options.Validate()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Validate_ThrowsWhenReplayWindowNegative() + { + var options = new DpopValidationOptions + { + ReplayWindow = TimeSpan.FromSeconds(-1) + }; + + Assert.Throws(() => options.Validate()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Validate_ThrowsWhenAllowedAlgorithmsEmpty() + { + var options = new DpopValidationOptions(); + options.AllowedAlgorithms.Clear(); + + Assert.Throws(() => options.Validate()); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void Validate_NormalizesAlgorithms() + { + var options = new DpopValidationOptions(); + options.AllowedAlgorithms.Clear(); + options.AllowedAlgorithms.Add(" es256 "); + options.AllowedAlgorithms.Add("Es384"); + + options.Validate(); + + Assert.Contains("ES256", options.NormalizedAlgorithms); + Assert.Contains("ES384", options.NormalizedAlgorithms); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/TASKS.md index 959c5a959..097018461 100644 --- a/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/TASKS.md @@ -13,3 +13,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0785-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-08 | DONE | Split tests <= 100 lines; deterministic time/IDs; async naming; helper types separated; ConfigureAwait(false) skipped per xUnit1030; dotnet test passed 2026-02-02 (12 tests). | +| REMED-05 | DONE | Added DpopValidationOptions unit coverage; dotnet test passed 2026-02-04 (20 tests). 
| diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/CloudKmsClientTests.CryptoProvider.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/CloudKmsClientTests.CryptoProvider.cs index af51fcd83..74614d52a 100644 --- a/src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/CloudKmsClientTests.CryptoProvider.cs +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Kms.Tests/CloudKmsClientTests.CryptoProvider.cs @@ -1,3 +1,5 @@ +using System; +using System.Collections.Generic; using StellaOps.Cryptography; using StellaOps.Cryptography.Kms; using StellaOps.TestKit; @@ -9,7 +11,7 @@ public sealed partial class CloudKmsClientTests { [Trait("Category", TestCategories.Unit)] [Fact] - public void KmsCryptoProvider_Skips_NonExportable_Keys() + public void KmsCryptoProvider_Returns_VerificationOnly_Keys_When_PublicMaterial_Available() { using var fixture = new EcdsaFixture(); var parameters = fixture.Parameters; @@ -28,6 +30,36 @@ public sealed partial class CloudKmsClientTests provider.UpsertSigningKey(signingKey); + var keys = provider.GetSigningKeys(); + var key = Assert.Single(keys); + Assert.Equal(signingKey.Reference.KeyId, key.Reference.KeyId); + Assert.Null(key.PrivateParameters.D); + Assert.NotNull(key.PublicParameters.Q.X); + Assert.NotNull(key.PublicParameters.Q.Y); + Assert.Equal(signingKey.Metadata["kms.version"], key.Metadata["kms.version"]); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void KmsCryptoProvider_Skips_Keys_Without_PublicMaterial() + { + using var fixture = new EcdsaFixture(); + var parameters = fixture.Parameters; + var kmsClient = new NonExportingKmsClient(parameters, FixedNow); + var provider = new KmsCryptoProvider(kmsClient); + + var signingKey = new CryptoSigningKey( + new CryptoKeyReference("kms-key-no-public", "kms"), + KmsAlgorithms.Es256, + new byte[32], + FixedNow, + metadata: new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["kms.version"] = "kms-key-no-public", + }); + + provider.UpsertSigningKey(signingKey); + var keys = provider.GetSigningKeys(); Assert.Empty(keys); } diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/BouncyCastleKeyNormalizationTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/BouncyCastleKeyNormalizationTests.cs new file mode 100644 index 000000000..769d9dbd2 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/BouncyCastleKeyNormalizationTests.cs @@ -0,0 +1,89 @@ +using FluentAssertions; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Plugin.BouncyCastle; +using System; +using System.Linq; +using Xunit; + +namespace StellaOps.Cryptography.Tests; + +public sealed class BouncyCastleKeyNormalizationTests +{ + private static readonly DateTimeOffset FixedNow = new(2024, 1, 15, 10, 30, 0, TimeSpan.Zero); + + [Fact] + public void UpsertSigningKey_With64BytePrivateKey_NormalizesTo32Bytes() + { + var provider = new BouncyCastleEd25519CryptoProvider(); + var privateKey = Enumerable.Range(1, 64).Select(i => (byte)i).ToArray(); + var keyReference = new CryptoKeyReference("key-64", provider.Name); + var signingKey = new CryptoSigningKey( + keyReference, + SignatureAlgorithms.Ed25519, + privateKey, + createdAt: FixedNow); + + provider.UpsertSigningKey(signingKey); + + var stored = provider.GetSigningKeys().Single(); + stored.PrivateKey.Length.Should().Be(32); + stored.PrivateKey.ToArray().Should().Equal(privateKey.Take(32).ToArray()); + } + + [Fact] + public void UpsertSigningKey_EmptyPublicKey_DerivesPublicKey() + { + var provider = 
new BouncyCastleEd25519CryptoProvider(); + var privateKey = Enumerable.Range(10, 32).Select(i => (byte)i).ToArray(); + var keyReference = new CryptoKeyReference("key-derived-public", provider.Name); + var signingKey = new CryptoSigningKey( + keyReference, + SignatureAlgorithms.Ed25519, + privateKey, + createdAt: FixedNow); + + provider.UpsertSigningKey(signingKey); + + var stored = provider.GetSigningKeys().Single(); + stored.PublicKey.Length.Should().Be(32); + stored.PublicKey.ToArray().Should().NotBeEmpty(); + } + + [Fact] + public void UpsertSigningKey_InvalidPublicKeyLength_Throws() + { + var provider = new BouncyCastleEd25519CryptoProvider(); + var privateKey = Enumerable.Range(0, 32).Select(i => (byte)i).ToArray(); + var publicKey = new byte[31]; + var keyReference = new CryptoKeyReference("key-invalid-public", provider.Name); + var signingKey = new CryptoSigningKey( + keyReference, + SignatureAlgorithms.Ed25519, + privateKey, + createdAt: FixedNow, + publicKey: publicKey); + + Action act = () => provider.UpsertSigningKey(signingKey); + + act.Should().Throw() + .WithMessage("*public key must be 32 bytes*"); + } + + [Fact] + public void UpsertSigningKey_EdDsaAlgorithm_NormalizesToEd25519() + { + var provider = new BouncyCastleEd25519CryptoProvider(); + var privateKey = Enumerable.Range(0, 32).Select(i => (byte)i).ToArray(); + var keyReference = new CryptoKeyReference("key-eddsa", provider.Name); + var signingKey = new CryptoSigningKey( + keyReference, + SignatureAlgorithms.EdDsa, + privateKey, + createdAt: FixedNow); + + provider.UpsertSigningKey(signingKey); + + var stored = provider.GetSigningKeys().Single(); + stored.AlgorithmId.Should().Be(SignatureAlgorithms.Ed25519); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTestProviders.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTestProviders.cs new file mode 100644 index 000000000..45c74c8f6 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTestProviders.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Tests; + +internal sealed class OrderedTestCryptoProvider : ICryptoProvider +{ + internal const string Algorithm = "test-hash"; + private readonly ICryptoHasher _hasher; + + public OrderedTestCryptoProvider(string name) + { + Name = name; + _hasher = new OrderedTestHasher(Algorithm); + } + + public string Name { get; } + + public bool Supports(CryptoCapability capability, string algorithmId) + => capability == CryptoCapability.ContentHashing && + string.Equals(algorithmId, Algorithm, StringComparison.Ordinal); + + public IPasswordHasher GetPasswordHasher(string algorithmId) + => throw new NotSupportedException(); + + public ICryptoHasher GetHasher(string algorithmId) + => Supports(CryptoCapability.ContentHashing, algorithmId) + ? 
_hasher + : throw new InvalidOperationException("Unsupported hash algorithm."); + + public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference) + => throw new NotSupportedException(); + + public void UpsertSigningKey(CryptoSigningKey signingKey) + => throw new NotSupportedException(); + + public bool RemoveSigningKey(string keyId) => false; + + public IReadOnlyCollection GetSigningKeys() + => Array.Empty(); + + private sealed class OrderedTestHasher : ICryptoHasher + { + public OrderedTestHasher(string algorithmId) + { + AlgorithmId = algorithmId; + } + + public string AlgorithmId { get; } + + public byte[] ComputeHash(ReadOnlySpan data) => Array.Empty(); + + public string ComputeHashHex(ReadOnlySpan data) => string.Empty; + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTests.cs new file mode 100644 index 000000000..7231d1fcb --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoDependencyInjectionTests.cs @@ -0,0 +1,144 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Runtime.InteropServices; +using System.Text.Json; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Cryptography.Tests; + +public sealed class CryptoDependencyInjectionTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddStellaOpsCrypto_ResolvesPreferredProviderOrder() + { + var services = new ServiceCollection(); + services.AddSingleton(new OrderedTestCryptoProvider("alpha")); + services.AddSingleton(new OrderedTestCryptoProvider("beta")); + services.AddStellaOpsCrypto(options => + { + options.PreferredProviders.Add("beta"); + options.PreferredProviders.Add("alpha"); + }); + + using var provider = services.BuildServiceProvider(); + var registry = provider.GetRequiredService(); + + var resolved = registry.ResolveOrThrow(CryptoCapability.ContentHashing, OrderedTestCryptoProvider.Algorithm); + + Assert.Equal("beta", resolved.Name); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void AddStellaOpsCryptoFromConfiguration_LoadsPluginProvidersByPriority() + { + var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-crypto-tests", $"crypto-di-{Environment.ProcessId}"); + Directory.CreateDirectory(tempRoot); + + try + { + var assemblyPath = typeof(TestPluginAlphaProvider).Assembly.Location; + var assemblyFileName = Path.GetFileName(assemblyPath); + File.Copy(assemblyPath, Path.Combine(tempRoot, assemblyFileName), overwrite: true); + + var manifestPath = Path.Combine(tempRoot, "crypto-plugins-manifest.json"); + WriteManifest(manifestPath, assemblyFileName, GetCurrentPlatform()); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["StellaOps:Crypto:Plugins:ManifestPath"] = manifestPath, + ["StellaOps:Crypto:Plugins:DiscoveryMode"] = "explicit", + ["StellaOps:Crypto:Plugins:Enabled:0:Id"] = "test.plugin.alpha", + ["StellaOps:Crypto:Plugins:Enabled:0:Priority"] = "10", + ["StellaOps:Crypto:Plugins:Enabled:1:Id"] = "test.plugin.beta", + ["StellaOps:Crypto:Plugins:Enabled:1:Priority"] = "90", + }) + .Build(); + + var services = new ServiceCollection(); + services.AddStellaOpsCryptoFromConfiguration(configuration, tempRoot); + + using var 
provider = services.BuildServiceProvider(); + var registry = provider.GetRequiredService(); + + var resolved = registry.ResolveOrThrow(CryptoCapability.ContentHashing, TestPluginCryptoProviderBase.TestAlgorithm); + + Assert.Equal("test.plugin.beta", resolved.Name); + } + finally + { + // Cleanup is best-effort because the plugin assembly can remain locked on Windows. + try + { + if (Directory.Exists(tempRoot)) + { + Directory.Delete(tempRoot, recursive: true); + } + } + catch (IOException) + { + } + catch (UnauthorizedAccessException) + { + } + } + } + + private static void WriteManifest(string manifestPath, string assemblyFileName, string platform) + { + var manifest = new + { + version = "1.0", + plugins = new[] + { + new + { + id = "test.plugin.alpha", + name = "Test Plugin Alpha", + assembly = assemblyFileName, + type = typeof(TestPluginAlphaProvider).FullName!, + platforms = new[] { platform }, + }, + new + { + id = "test.plugin.beta", + name = "Test Plugin Beta", + assembly = assemblyFileName, + type = typeof(TestPluginBetaProvider).FullName!, + platforms = new[] { platform }, + }, + }, + }; + + var json = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = true }); + File.WriteAllText(manifestPath, json); + } + + private static string GetCurrentPlatform() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + return "linux"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return "windows"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return "osx"; + } + + return "unknown"; + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoPluginTestProviders.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoPluginTestProviders.cs new file mode 100644 index 000000000..d5a550c07 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoPluginTestProviders.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Tests; + +public abstract class TestPluginCryptoProviderBase : ICryptoProvider +{ + public const string TestAlgorithm = "plugin-hash"; + private static readonly ICryptoHasher Hasher = new TestPluginHasher(); + + protected TestPluginCryptoProviderBase(string name) + { + Name = name; + } + + public string Name { get; } + + public bool Supports(CryptoCapability capability, string algorithmId) + => capability == CryptoCapability.ContentHashing && + string.Equals(algorithmId, TestAlgorithm, StringComparison.Ordinal); + + public IPasswordHasher GetPasswordHasher(string algorithmId) + => throw new NotSupportedException(); + + public ICryptoHasher GetHasher(string algorithmId) + => Supports(CryptoCapability.ContentHashing, algorithmId) + ? 
Hasher + : throw new InvalidOperationException("Unsupported hash algorithm."); + + public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference) + => throw new NotSupportedException(); + + public void UpsertSigningKey(CryptoSigningKey signingKey) + => throw new NotSupportedException(); + + public bool RemoveSigningKey(string keyId) => false; + + public IReadOnlyCollection GetSigningKeys() + => Array.Empty(); + + private sealed class TestPluginHasher : ICryptoHasher + { + public string AlgorithmId => TestAlgorithm; + public byte[] ComputeHash(ReadOnlySpan data) => Array.Empty(); + public string ComputeHashHex(ReadOnlySpan data) => string.Empty; + } +} + +public sealed class TestPluginAlphaProvider : TestPluginCryptoProviderBase +{ + public TestPluginAlphaProvider() + : base("test.plugin.alpha") + { + } +} + +public sealed class TestPluginBetaProvider : TestPluginCryptoProviderBase +{ + public TestPluginBetaProvider() + : base("test.plugin.beta") + { + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/TASKS.md index 93b4e10e8..064791f03 100644 --- a/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/TASKS.md @@ -13,3 +13,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0271-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-08 | DONE | Remediated (deterministic fixtures, async naming, file split <= 100 lines); `dotnet test` passed (312 tests). | +| REMED-05 | DONE | Added DI ordering and plugin-loading tests for crypto registration paths; `dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -p:BuildInParallel=false -p:UseSharedCompilation=false` passed (326 tests). | diff --git a/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/PostgresEvidenceStoreIntegrationTests.Store.cs b/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/PostgresEvidenceStoreIntegrationTests.Store.cs index 6326f6b24..416158b8e 100644 --- a/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/PostgresEvidenceStoreIntegrationTests.Store.cs +++ b/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/PostgresEvidenceStoreIntegrationTests.Store.cs @@ -67,4 +67,13 @@ public sealed partial class PostgresEvidenceStoreIntegrationTests storedCount.Should().Be(1); } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task StoreBatchAsync_WithEmptyInput_ReturnsZeroAsync() + { + var storedCount = await _store.StoreBatchAsync(Array.Empty()); + + storedCount.Should().Be(0); + } } diff --git a/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/TASKS.md index b8e61e109..405691dac 100644 --- a/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Evidence.Persistence.Tests/TASKS.md @@ -13,3 +13,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0285-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-2026-02-03 | DONE | Split integration tests into partials, normalized async naming, deterministic fixtures; tests passed 2026-02-03. 
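The StoreBatchAsync empty-input test added above encodes a simple contract: a zero-length batch returns 0 without touching the database. A plausible guard is sketched below purely as an illustration of that contract; the parameter type, the PersistBatchAsync helper, and the rest of the method body are assumptions, since the real PostgresEvidenceStore internals are not part of this diff.

    // Hypothetical guard; everything beyond the empty-batch short-circuit is assumed.
    public async Task<int> StoreBatchAsync(IReadOnlyCollection<EvidenceItem> items, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(items);

        if (items.Count == 0)
        {
            return 0; // nothing to persist; skip opening a connection or transaction
        }

        // Batched insert happens here in the real store.
        return await PersistBatchAsync(items, ct).ConfigureAwait(false);
    }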
| +| REMED-07 | DONE | Added StoreBatch empty-input coverage; tests passed 2026-02-04 (35 tests). | diff --git a/src/__Libraries/__Tests/StellaOps.Evidence.Tests/Retention/RetentionTierManagerTests.CurrentTier.cs b/src/__Libraries/__Tests/StellaOps.Evidence.Tests/Retention/RetentionTierManagerTests.CurrentTier.cs new file mode 100644 index 000000000..eb49e29a5 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Evidence.Tests/Retention/RetentionTierManagerTests.CurrentTier.cs @@ -0,0 +1,56 @@ +using FluentAssertions; +using Microsoft.Extensions.Options; +using Moq; +using StellaOps.Evidence.Budgets; +using StellaOps.Evidence.Retention; +using StellaOps.Evidence.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.Evidence.Tests.Retention; + +public partial class RetentionTierManagerTests +{ + [Fact] + public void GetCurrentTier_UsesRetentionBoundaries() + { + var budget = new EvidenceBudget + { + MaxScanSizeBytes = 100, + RetentionPolicies = new Dictionary + { + [RetentionTier.Hot] = new RetentionPolicy { Duration = TimeSpan.FromDays(1) }, + [RetentionTier.Warm] = new RetentionPolicy { Duration = TimeSpan.FromDays(3) }, + [RetentionTier.Cold] = new RetentionPolicy { Duration = TimeSpan.FromDays(5) }, + [RetentionTier.Archive] = new RetentionPolicy { Duration = TimeSpan.FromDays(7) } + } + }; + var options = new Mock>(); + options.Setup(o => o.CurrentValue).Returns(budget); + var repository = new Mock(); + var archiveStorage = new Mock(); + + var manager = new RetentionTierManager( + repository.Object, + archiveStorage.Object, + options.Object, + new FixedTimeProvider(_fixedNow)); + + var hotItem = new EvidenceItem + { + Id = Guid.Parse("00000000-0000-0000-0000-000000000310"), + ScanId = _defaultScanId, + Type = EvidenceType.CallGraph, + SizeBytes = 1, + Tier = RetentionTier.Hot, + CreatedAt = _fixedNow.AddHours(-12) + }; + var warmItem = hotItem with { Id = Guid.Parse("00000000-0000-0000-0000-000000000311"), CreatedAt = _fixedNow.AddDays(-2) }; + var coldItem = hotItem with { Id = Guid.Parse("00000000-0000-0000-0000-000000000312"), CreatedAt = _fixedNow.AddDays(-4) }; + var archiveItem = hotItem with { Id = Guid.Parse("00000000-0000-0000-0000-000000000313"), CreatedAt = _fixedNow.AddDays(-10) }; + + manager.GetCurrentTier(hotItem).Should().Be(RetentionTier.Hot); + manager.GetCurrentTier(warmItem).Should().Be(RetentionTier.Warm); + manager.GetCurrentTier(coldItem).Should().Be(RetentionTier.Cold); + manager.GetCurrentTier(archiveItem).Should().Be(RetentionTier.Archive); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Evidence.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Evidence.Tests/TASKS.md index e55d4abf8..53504f6fd 100644 --- a/src/__Libraries/__Tests/StellaOps.Evidence.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Evidence.Tests/TASKS.md @@ -10,3 +10,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0026-A | DONE | Waived (test project; revalidated 2026-01-08). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-2026-02-02 | DONE | Remediated csproj audit findings; tests passed 2026-02-02. | +| REMED-07 | DONE | Added retention tier boundary coverage; tests passed 2026-02-04 (24 tests). 
| diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/ReachabilityReplayWriterTests.cs b/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/ReachabilityReplayWriterTests.cs index 2c7cd9696..c1baa02f6 100644 --- a/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/ReachabilityReplayWriterTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/ReachabilityReplayWriterTests.cs @@ -1,8 +1,8 @@ +using System; +using System.Linq; using StellaOps.Cryptography; using StellaOps.Replay.Core; using StellaOps.TestKit; -using System.Linq; -using System; using Xunit; namespace StellaOps.Replay.Core.Tests; @@ -10,7 +10,7 @@ namespace StellaOps.Replay.Core.Tests; public class ReachabilityReplayWriterTests { [Trait("Category", TestCategories.Unit)] - [Fact] + [Fact] public void BuildManifestV2_SortsGraphsAndTraces_Deterministically() { var scan = new ReplayScanMetadata diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/TASKS.md index c4cf6ec0c..8af814600 100644 --- a/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Replay.Core.Tests/TASKS.md @@ -8,4 +8,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0035-M | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0035-T | DONE | Revalidated 2026-01-08; open findings tracked in audit report. | | AUDIT-0035-A | DONE | Waived (test project; revalidated 2026-01-08). | -| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-06 | DONE | SOLID review notes refreshed 2026-02-04; dotnet test passed (1). | diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/FeedSnapshotLoaderTests.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/FeedSnapshotLoaderTests.cs new file mode 100644 index 000000000..7089e5464 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/FeedSnapshotLoaderTests.cs @@ -0,0 +1,48 @@ +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Replay.Loaders; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Tests; + +public sealed class FeedSnapshotLoaderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_InvalidDigest_ThrowsFormatException() + { + var loader = new FeedSnapshotLoader(new FeedStorageStub(null), NullLogger.Instance); + var digest = SnapshotTestData.CreateInvalidDigest('a'); + + var action = () => loader.LoadByDigestAsync(digest); + + await action.Should().ThrowAsync(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_ShortDigest_ThrowsFormatException() + { + var loader = new FeedSnapshotLoader(new FeedStorageStub(null), NullLogger.Instance); + var digest = new string('a', 63); + + var action = () => loader.LoadByDigestAsync(digest); + + await action.Should().ThrowAsync(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_DigestMismatch_ThrowsException() + { + var snapshot = SnapshotTestData.CreateFeedSnapshot(SnapshotTestData.CreateValidDigest('b')); + var actualDigest = SnapshotTestData.ComputeDigest(snapshot); + var expectedDigest = SnapshotTestData.CreateDifferentDigest(actualDigest); + var loader = new FeedSnapshotLoader(new FeedStorageStub(snapshot), NullLogger.Instance); + + var action = () => loader.LoadByDigestAsync(expectedDigest); + + await 
action.Should().ThrowAsync(); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/FixedTimeProvider.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/FixedTimeProvider.cs new file mode 100644 index 000000000..b2bea0a02 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/FixedTimeProvider.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Replay.Tests; + +internal sealed class FixedTimeProvider : TimeProvider +{ + private readonly DateTimeOffset _now; + + public FixedTimeProvider(DateTimeOffset now) => _now = now; + + public override DateTimeOffset GetUtcNow() => _now; +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/PolicySnapshotLoaderTests.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/PolicySnapshotLoaderTests.cs new file mode 100644 index 000000000..75f622a30 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/PolicySnapshotLoaderTests.cs @@ -0,0 +1,48 @@ +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Replay.Loaders; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Tests; + +public sealed class PolicySnapshotLoaderTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_InvalidDigest_ThrowsFormatException() + { + var loader = new PolicySnapshotLoader(new PolicyStorageStub(null), NullLogger.Instance); + var digest = SnapshotTestData.CreateInvalidDigest('c'); + + var action = () => loader.LoadByDigestAsync(digest); + + await action.Should().ThrowAsync(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_ShortDigest_ThrowsFormatException() + { + var loader = new PolicySnapshotLoader(new PolicyStorageStub(null), NullLogger.Instance); + var digest = new string('c', 10); + + var action = () => loader.LoadByDigestAsync(digest); + + await action.Should().ThrowAsync(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task LoadByDigestAsync_DigestMismatch_ThrowsException() + { + var snapshot = SnapshotTestData.CreatePolicySnapshot(SnapshotTestData.CreateValidDigest('d')); + var actualDigest = SnapshotTestData.ComputeDigest(snapshot); + var expectedDigest = SnapshotTestData.CreateDifferentDigest(actualDigest); + var loader = new PolicySnapshotLoader(new PolicyStorageStub(snapshot), NullLogger.Instance); + + var action = () => loader.LoadByDigestAsync(expectedDigest); + + await action.Should().ThrowAsync(); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTestFixtures.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTestFixtures.cs index e3ea99e20..35f6f0870 100644 --- a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTestFixtures.cs +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTestFixtures.cs @@ -16,13 +16,17 @@ internal static class ReplayEngineTestFixtures new DateTimeOffset(2026, 1, 1, 0, 0, 0, TimeSpan.Zero); internal static readonly DateTimeOffset FixedFeedSnapshotAt = FixedTimestamp.AddHours(-1); - internal static ReplayEngine CreateEngine() + internal static ReplayEngine CreateEngine() => + CreateEngine(new FixedTimeProvider(FixedTimestamp)); + + internal static ReplayEngine CreateEngine(TimeProvider timeProvider) { return new ReplayEngine( new FakeFeedLoader(), new FakePolicyLoader(), new FakeScannerFactory(), - NullLogger.Instance); + NullLogger.Instance, + timeProvider); } internal static RunManifest CreateManifest() diff --git 
a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Determinism.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Determinism.cs new file mode 100644 index 000000000..daac0be6c --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Determinism.cs @@ -0,0 +1,63 @@ +using FluentAssertions; +using StellaOps.Replay.Engine; +using StellaOps.Replay.Models; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Tests; + +public partial class ReplayEngineTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CheckDeterminism_IdenticalResults_ReturnsTrue() + { + var engine = ReplayEngineTestFixtures.CreateEngine(); + var result1 = new ReplayResult + { + RunId = "1", + VerdictDigest = "abc123", + Success = true, + ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp + }; + var result2 = new ReplayResult + { + RunId = "1", + VerdictDigest = "abc123", + Success = true, + ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp + }; + + var check = engine.CheckDeterminism(result1, result2); + + check.IsDeterministic.Should().BeTrue(); + } + + [Trait("Category", TestCategories.Unit)] + [Fact] + public void CheckDeterminism_DifferentResults_ReturnsDifferences() + { + var engine = ReplayEngineTestFixtures.CreateEngine(); + var result1 = new ReplayResult + { + RunId = "1", + VerdictJson = "{\"score\":100}", + VerdictDigest = "abc123", + Success = true, + ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp + }; + var result2 = new ReplayResult + { + RunId = "1", + VerdictJson = "{\"score\":99}", + VerdictDigest = "def456", + Success = true, + ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp + }; + + var check = engine.CheckDeterminism(result1, result2); + + check.IsDeterministic.Should().BeFalse(); + check.Differences.Should().NotBeEmpty(); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Failures.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Failures.cs new file mode 100644 index 000000000..c70ada187 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.Failures.cs @@ -0,0 +1,23 @@ +using FluentAssertions; +using StellaOps.Replay.Models; +using StellaOps.TestKit; +using Xunit; + +namespace StellaOps.Replay.Tests; + +public partial class ReplayEngineTests +{ + [Trait("Category", TestCategories.Unit)] + [Fact] + public async Task Replay_InvalidManifest_UsesTimeProviderAsync() + { + var fixedTime = new DateTimeOffset(2026, 1, 2, 0, 0, 0, TimeSpan.Zero); + var engine = ReplayEngineTestFixtures.CreateEngine(new FixedTimeProvider(fixedTime)); + var manifest = ReplayEngineTestFixtures.CreateManifest() with { RunId = "" }; + + var result = await engine.ReplayAsync(manifest, new ReplayOptions()); + + result.Success.Should().BeFalse(); + result.ExecutedAt.Should().Be(fixedTime); + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.cs index b4f3395ae..314aa4096 100644 --- a/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/ReplayEngineTests.cs @@ -6,7 +6,7 @@ using Xunit; namespace StellaOps.Replay.Tests; -public class ReplayEngineTests +public partial class ReplayEngineTests { [Trait("Category", TestCategories.Unit)] [Fact] @@ -38,57 +38,4 @@ public class ReplayEngineTests result1.VerdictDigest.Should().NotBe(result2.VerdictDigest); } - - [Trait("Category", 
TestCategories.Unit)] - [Fact] - public void CheckDeterminism_IdenticalResults_ReturnsTrue() - { - var engine = ReplayEngineTestFixtures.CreateEngine(); - var result1 = new ReplayResult - { - RunId = "1", - VerdictDigest = "abc123", - Success = true, - ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp - }; - var result2 = new ReplayResult - { - RunId = "1", - VerdictDigest = "abc123", - Success = true, - ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp - }; - - var check = engine.CheckDeterminism(result1, result2); - - check.IsDeterministic.Should().BeTrue(); - } - - [Trait("Category", TestCategories.Unit)] - [Fact] - public void CheckDeterminism_DifferentResults_ReturnsDifferences() - { - var engine = ReplayEngineTestFixtures.CreateEngine(); - var result1 = new ReplayResult - { - RunId = "1", - VerdictJson = "{\"score\":100}", - VerdictDigest = "abc123", - Success = true, - ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp - }; - var result2 = new ReplayResult - { - RunId = "1", - VerdictJson = "{\"score\":99}", - VerdictDigest = "def456", - Success = true, - ExecutedAt = ReplayEngineTestFixtures.FixedTimestamp - }; - - var check = engine.CheckDeterminism(result1, result2); - - check.IsDeterministic.Should().BeFalse(); - check.Differences.Should().NotBeEmpty(); - } } diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotStorageStubs.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotStorageStubs.cs new file mode 100644 index 000000000..44abfbe88 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotStorageStubs.cs @@ -0,0 +1,24 @@ +using StellaOps.Replay.Loaders; +using StellaOps.Testing.Manifests.Models; + +namespace StellaOps.Replay.Tests; + +internal sealed class FeedStorageStub : IFeedStorage +{ + private readonly FeedSnapshot? _snapshot; + + public FeedStorageStub(FeedSnapshot? snapshot) => _snapshot = snapshot; + + public Task GetByDigestAsync(string digest, CancellationToken ct = default) + => Task.FromResult(_snapshot); +} + +internal sealed class PolicyStorageStub : IPolicyStorage +{ + private readonly PolicySnapshot? _snapshot; + + public PolicyStorageStub(PolicySnapshot? 
snapshot) => _snapshot = snapshot; + + public Task GetByDigestAsync(string digest, CancellationToken ct = default) + => Task.FromResult(_snapshot); +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotTestData.cs b/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotTestData.cs new file mode 100644 index 000000000..f38ead033 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/SnapshotTestData.cs @@ -0,0 +1,36 @@ +using StellaOps.Canonicalization.Json; +using StellaOps.Testing.Manifests.Models; +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Replay.Tests; + +internal static class SnapshotTestData +{ + internal static readonly DateTimeOffset FixedSnapshotAt = + new DateTimeOffset(2026, 1, 1, 0, 0, 0, TimeSpan.Zero); + + internal static FeedSnapshot CreateFeedSnapshot(string digest) => + new FeedSnapshot("nvd", "v1", digest, FixedSnapshotAt); + + internal static PolicySnapshot CreatePolicySnapshot(string digest) => + new PolicySnapshot("1.0.0", digest, ImmutableArray.Empty); + + internal static string ComputeDigest(T value) + { + var json = CanonicalJsonSerializer.Serialize(value); + return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json))).ToLowerInvariant(); + } + + internal static string CreateValidDigest(char fill) => new string(fill, 64); + + internal static string CreateInvalidDigest(char fill) => new string(fill, 63) + "g"; + + internal static string CreateDifferentDigest(string digest) + { + var last = digest[^1]; + var replacement = last == 'a' ? 'b' : 'a'; + return digest[..^1] + replacement; + } +} diff --git a/src/__Libraries/__Tests/StellaOps.Replay.Tests/TASKS.md b/src/__Libraries/__Tests/StellaOps.Replay.Tests/TASKS.md index a2180d4d8..70c3aab66 100644 --- a/src/__Libraries/__Tests/StellaOps.Replay.Tests/TASKS.md +++ b/src/__Libraries/__Tests/StellaOps.Replay.Tests/TASKS.md @@ -12,3 +12,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | REMED-03 | DONE | Tier 0 remediation (usings sorted, deterministic test data, warnings as errors); dotnet test passed 2026-02-02. | | REMED-04 | DONE | Async naming updates; ConfigureAwait(false) skipped in tests per xUnit1030; dotnet test passed 2026-02-02. | | REMED-05 | DONE | File split to keep tests <= 100 lines; dotnet test passed 2026-02-02. | +| REMED-07 | DONE | 2026-02-05; replay tests split into partials, loader validation/digest mismatch coverage + failure timestamp test added; dotnet test src/__Libraries/__Tests/StellaOps.Replay.Tests/StellaOps.Replay.Tests.csproj passed (11 tests). 
| diff --git a/src/__Tests/StellaOps.Evidence.Bundle.Tests/BinaryDiffEvidenceTests.cs b/src/__Tests/StellaOps.Evidence.Bundle.Tests/BinaryDiffEvidenceTests.cs index 0a5326f8f..84cc40695 100644 --- a/src/__Tests/StellaOps.Evidence.Bundle.Tests/BinaryDiffEvidenceTests.cs +++ b/src/__Tests/StellaOps.Evidence.Bundle.Tests/BinaryDiffEvidenceTests.cs @@ -5,6 +5,7 @@ using System; using System.Collections.Immutable; +using System.Text.Json; using Microsoft.Extensions.Time.Testing; using StellaOps.TestKit; using Xunit; @@ -281,6 +282,18 @@ public sealed class BinaryDiffEvidenceTests } } + [Fact] + public void BinaryDiffEnums_SerializeAsStrings() + { + var diffType = JsonSerializer.Serialize(BinaryDiffType.Semantic); + var operation = JsonSerializer.Serialize(BinaryDiffOperation.Modified); + var securityType = JsonSerializer.Serialize(BinarySecurityChangeType.HardeningChange); + + Assert.Equal("\"Semantic\"", diffType); + Assert.Equal("\"Modified\"", operation); + Assert.Equal("\"HardeningChange\"", securityType); + } + [Fact] public void SchemaVersion_UpdatedForBinaryDiff() { diff --git a/src/__Tests/StellaOps.Evidence.Bundle.Tests/TASKS.md b/src/__Tests/StellaOps.Evidence.Bundle.Tests/TASKS.md index 536f98b0a..0e2bb4a7a 100644 --- a/src/__Tests/StellaOps.Evidence.Bundle.Tests/TASKS.md +++ b/src/__Tests/StellaOps.Evidence.Bundle.Tests/TASKS.md @@ -9,3 +9,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-0281-T | DONE | Revalidated 2026-01-07; open findings tracked in audit report. | | AUDIT-0281-A | DONE | Waived (test project; revalidated 2026-01-07). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | +| REMED-07 | DONE | Added enum serialization coverage; dotnet test 2026-02-04 (29 tests). | diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingDifference.cs b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingDifference.cs new file mode 100644 index 000000000..b919b7c4c --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingDifference.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Interop.Tests.Analysis; + +public sealed record FindingDifference( + string Category, + string Description, + bool IsAcceptable, + string? Reason = null); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingsParityAnalyzer.cs b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingsParityAnalyzer.cs index b1bf5844a..0485e0606 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingsParityAnalyzer.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/FindingsParityAnalyzer.cs @@ -1,3 +1,6 @@ +using System.Collections.Generic; +using System.Linq; + namespace StellaOps.Interop.Tests.Analysis; /// @@ -98,20 +101,3 @@ public sealed class FindingsParityAnalyzer return differences; } } - -public sealed class ParityAnalysisReport -{ - public int TotalDifferences { get; init; } - public IReadOnlyList VersionMatchingDifferences { get; init; } = []; - public IReadOnlyList FeedCoverageDifferences { get; init; } = []; - public IReadOnlyList PurlDifferences { get; init; } = []; - public IReadOnlyList VexDifferences { get; init; } = []; - public int AcceptableDifferences { get; init; } - public int RequiresInvestigation { get; init; } -} - -public sealed record FindingDifference( - string Category, - string Description, - bool IsAcceptable, - string? 
Reason = null); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/ParityAnalysisReport.cs b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/ParityAnalysisReport.cs new file mode 100644 index 000000000..40edb5b5d --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/Analysis/ParityAnalysisReport.cs @@ -0,0 +1,14 @@ +using System.Collections.Generic; + +namespace StellaOps.Interop.Tests.Analysis; + +public sealed class ParityAnalysisReport +{ + public int TotalDifferences { get; init; } + public IReadOnlyList VersionMatchingDifferences { get; init; } = []; + public IReadOnlyList FeedCoverageDifferences { get; init; } = []; + public IReadOnlyList PurlDifferences { get; init; } = []; + public IReadOnlyList VexDifferences { get; init; } = []; + public int AcceptableDifferences { get; init; } + public int RequiresInvestigation { get; init; } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Attestation.cs b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Attestation.cs new file mode 100644 index 000000000..171a76ec9 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Attestation.cs @@ -0,0 +1,32 @@ +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Interop.Tests.CycloneDx; + +public partial class CycloneDxRoundTripTests +{ + [Theory] + [MemberData(nameof(TestImages))] + [Trait("Category", "Attestation")] + public async Task CycloneDx_Attestation_RoundTripAsync(string imageRef) + { + if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI"))) + { + throw SkipException.ForSkip("Cosign attestation requires CI credentials."); + } + + var sbomResult = await _harness.GenerateSbomWithStellaAsync( + imageRef, SbomFormat.CycloneDx16); + sbomResult.Success.Should().BeTrue(); + + var attestResult = await _harness.AttestWithCosignAsync( + sbomResult.Path!, imageRef); + attestResult.Success.Should().BeTrue("Cosign should attest SBOM"); + + // TODO: Verify attestation + // var verifyResult = await _harness.VerifyCosignAttestation(imageRef); + // verifyResult.Success.Should().BeTrue(); + // var attestedDigest = verifyResult.PredicateDigest; + // attestedDigest.Should().Be(sbomResult.Digest); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Consume.cs b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Consume.cs new file mode 100644 index 000000000..8631b595d --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Consume.cs @@ -0,0 +1,32 @@ +using Xunit; + +namespace StellaOps.Interop.Tests.CycloneDx; + +public partial class CycloneDxRoundTripTests +{ + [Theory] + [MemberData(nameof(TestImages))] + public async Task Syft_GeneratesCycloneDx_GrypeCanConsumeAsync(string imageRef) + { + var sbomResult = await _harness.GenerateSbomWithSyftAsync( + imageRef, SbomFormat.CycloneDx16); + sbomResult.Success.Should().BeTrue("Syft should generate CycloneDX SBOM"); + + var grypeResult = await _harness.ScanWithGrypeFromSbomAsync(sbomResult.Path!); + grypeResult.Success.Should().BeTrue("Grype should consume Syft-generated CycloneDX SBOM"); + + grypeResult.Findings.Should().NotBeNull(); + } + + [Theory] + [MemberData(nameof(TestImages))] + public async Task Stella_GeneratesCycloneDx_GrypeCanConsumeAsync(string imageRef) + { + var sbomResult = await _harness.GenerateSbomWithStellaAsync( + imageRef, SbomFormat.CycloneDx16); + 
sbomResult.Success.Should().BeTrue("Stella should generate CycloneDX SBOM"); + + var grypeResult = await _harness.ScanWithGrypeFromSbomAsync(sbomResult.Path!); + grypeResult.Success.Should().BeTrue("Grype should consume Stella-generated CycloneDX SBOM"); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Parity.cs b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Parity.cs new file mode 100644 index 000000000..e24dcd1ef --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Parity.cs @@ -0,0 +1,31 @@ +using StellaOps.Interop.Tests; +using Xunit; + +namespace StellaOps.Interop.Tests.CycloneDx; + +public partial class CycloneDxRoundTripTests +{ + [Theory] + [MemberData(nameof(TestImages))] + [Trait("Category", "Parity")] + public async Task Stella_And_Grype_FindingsParity_Above95PercentAsync(string imageRef) + { + var stellaSbom = await _harness.GenerateSbomWithStellaAsync( + imageRef, SbomFormat.CycloneDx16); + stellaSbom.Success.Should().BeTrue(); + + var stellaFindings = new List(); + + var grypeResult = await _harness.ScanWithGrypeFromSbomAsync(stellaSbom.Path!); + grypeResult.Success.Should().BeTrue(); + + var comparison = FindingsComparer.Compare( + stellaFindings, + grypeResult.Findings!, + tolerancePercent: 5); + + comparison.ParityPercent.Should().BeGreaterThanOrEqualTo(95, + $"Findings parity {comparison.ParityPercent:F2}% is below 95% threshold. " + + $"Only in Stella: {comparison.OnlyInStella}, Only in Grype: {comparison.OnlyInGrype}"); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Schema.cs b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Schema.cs new file mode 100644 index 000000000..ca2fe2f1f --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.Schema.cs @@ -0,0 +1,22 @@ +using Xunit; + +namespace StellaOps.Interop.Tests.CycloneDx; + +public partial class CycloneDxRoundTripTests +{ + [Fact] + [Trait("Category", "Schema")] + public async Task Stella_CycloneDx_ValidatesAgainstSchemaAsync() + { + var imageRef = "alpine:3.18"; + + var sbomResult = await _harness.GenerateSbomWithStellaAsync( + imageRef, SbomFormat.CycloneDx16); + sbomResult.Success.Should().BeTrue(); + + // TODO: Validate against CycloneDX 1.6 JSON schema + sbomResult.Content.Should().NotBeNullOrEmpty(); + sbomResult.Content.Should().Contain("\"bomFormat\": \"CycloneDX\""); + sbomResult.Content.Should().Contain("\"specVersion\": \"1.6\""); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.cs b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.cs index e859f65cd..67c0d2ef6 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/CycloneDx/CycloneDxRoundTripTests.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Interop.Tests.CycloneDx; +using Xunit; -using Xunit.Sdk; +namespace StellaOps.Interop.Tests.CycloneDx; [Trait("Category", "Interop")] [Trait("Format", "CycloneDX")] -public class CycloneDxRoundTripTests : IClassFixture +public partial class CycloneDxRoundTripTests : IClassFixture { private readonly InteropTestHarness _harness; @@ -13,112 +13,6 @@ public class CycloneDxRoundTripTests : IClassFixture _harness = harness; } - [Theory] - [MemberData(nameof(TestImages))] - public async Task 
Syft_GeneratesCycloneDx_GrypeCanConsume(string imageRef) - { - // Generate SBOM with Syft - var sbomResult = await _harness.GenerateSbomWithSyft( - imageRef, SbomFormat.CycloneDx16); - sbomResult.Success.Should().BeTrue("Syft should generate CycloneDX SBOM"); - - // Scan from SBOM with Grype - var grypeResult = await _harness.ScanWithGrypeFromSbom(sbomResult.Path!); - grypeResult.Success.Should().BeTrue("Grype should consume Syft-generated CycloneDX SBOM"); - - // Grype should be able to parse and find vulnerabilities - grypeResult.Findings.Should().NotBeNull(); - } - - [Theory] - [MemberData(nameof(TestImages))] - public async Task Stella_GeneratesCycloneDx_GrypeCanConsume(string imageRef) - { - // Generate SBOM with Stella - var sbomResult = await _harness.GenerateSbomWithStella( - imageRef, SbomFormat.CycloneDx16); - sbomResult.Success.Should().BeTrue("Stella should generate CycloneDX SBOM"); - - // Scan from SBOM with Grype - var grypeResult = await _harness.ScanWithGrypeFromSbom(sbomResult.Path!); - grypeResult.Success.Should().BeTrue("Grype should consume Stella-generated CycloneDX SBOM"); - } - - [Theory] - [MemberData(nameof(TestImages))] - [Trait("Category", "Parity")] - public async Task Stella_And_Grype_FindingsParity_Above95Percent(string imageRef) - { - // Generate SBOM with Stella - var stellaSbom = await _harness.GenerateSbomWithStella( - imageRef, SbomFormat.CycloneDx16); - stellaSbom.Success.Should().BeTrue(); - - // TODO: Get Stella findings from scan result - var stellaFindings = new List(); - - // Scan SBOM with Grype - var grypeResult = await _harness.ScanWithGrypeFromSbom(stellaSbom.Path!); - grypeResult.Success.Should().BeTrue(); - - // Compare findings - var comparison = _harness.CompareFindings( - stellaFindings, - grypeResult.Findings!, - tolerancePercent: 5); - - comparison.ParityPercent.Should().BeGreaterThanOrEqualTo(95, - $"Findings parity {comparison.ParityPercent:F2}% is below 95% threshold. 
" + - $"Only in Stella: {comparison.OnlyInStella}, Only in Grype: {comparison.OnlyInGrype}"); - } - - [Theory] - [MemberData(nameof(TestImages))] - [Trait("Category", "Attestation")] - public async Task CycloneDx_Attestation_RoundTrip(string imageRef) - { - // Skip if not in CI - cosign requires credentials - if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI"))) - { - throw SkipException.ForSkip("Cosign attestation requires CI credentials."); - } - - // Generate SBOM - var sbomResult = await _harness.GenerateSbomWithStella( - imageRef, SbomFormat.CycloneDx16); - sbomResult.Success.Should().BeTrue(); - - // Attest with cosign - var attestResult = await _harness.AttestWithCosign( - sbomResult.Path!, imageRef); - attestResult.Success.Should().BeTrue("Cosign should attest SBOM"); - - // TODO: Verify attestation - // var verifyResult = await _harness.VerifyCosignAttestation(imageRef); - // verifyResult.Success.Should().BeTrue(); - - // Digest should match - // var attestedDigest = verifyResult.PredicateDigest; - // attestedDigest.Should().Be(sbomResult.Digest); - } - - [Fact] - [Trait("Category", "Schema")] - public async Task Stella_CycloneDx_ValidatesAgainstSchema() - { - var imageRef = "alpine:3.18"; - - // Generate SBOM - var sbomResult = await _harness.GenerateSbomWithStella( - imageRef, SbomFormat.CycloneDx16); - sbomResult.Success.Should().BeTrue(); - - // TODO: Validate against CycloneDX 1.6 JSON schema - sbomResult.Content.Should().NotBeNullOrEmpty(); - sbomResult.Content.Should().Contain("\"bomFormat\": \"CycloneDX\""); - sbomResult.Content.Should().Contain("\"specVersion\": \"1.6\""); - } - public static IEnumerable TestImages => [ ["alpine:3.18"], diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparer.cs b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparer.cs new file mode 100644 index 000000000..99c59fe2e --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparer.cs @@ -0,0 +1,41 @@ +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Interop.Tests; + +public static class FindingsComparer +{ + public static FindingsComparisonResult Compare( + IReadOnlyList stellaFindings, + IReadOnlyList grypeFindings, + decimal tolerancePercent = 5) + { + var stellaVulns = stellaFindings + .Select(f => (f.VulnerabilityId, f.PackagePurl)) + .ToHashSet(); + + var grypeVulns = grypeFindings + .Select(f => (f.VulnerabilityId, f.PackagePurl)) + .ToHashSet(); + + var onlyInStella = stellaVulns.Except(grypeVulns).ToList(); + var onlyInGrype = grypeVulns.Except(stellaVulns).ToList(); + var inBoth = stellaVulns.Intersect(grypeVulns).ToList(); + + var totalUnique = stellaVulns.Union(grypeVulns).Count(); + var parityPercent = totalUnique > 0 + ? 
(decimal)inBoth.Count / totalUnique * 100 + : 100; + + return new FindingsComparisonResult( + ParityPercent: parityPercent, + IsWithinTolerance: parityPercent >= (100 - tolerancePercent), + StellaTotalFindings: stellaFindings.Count, + GrypeTotalFindings: grypeFindings.Count, + MatchingFindings: inBoth.Count, + OnlyInStella: onlyInStella.Count, + OnlyInGrype: onlyInGrype.Count, + OnlyInStellaDetails: onlyInStella, + OnlyInGrypeDetails: onlyInGrype); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparerTests.cs b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparerTests.cs new file mode 100644 index 000000000..0869896be --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsComparerTests.cs @@ -0,0 +1,66 @@ +using FluentAssertions; +using Xunit; + +namespace StellaOps.Interop.Tests; + +public sealed class FindingsComparerTests +{ + [Fact] + public void Compare_ReturnsFullParity_WhenSetsMatch() + { + var stella = new[] + { + new Finding("CVE-2024-0001", "pkg:apk/alpine/zlib@1.2.13", "High"), + new Finding("CVE-2024-0002", "pkg:apk/alpine/openssl@3.0.0", "Medium") + }; + var grype = new[] + { + new GrypeFinding("CVE-2024-0001", "pkg:apk/alpine/zlib@1.2.13", "High"), + new GrypeFinding("CVE-2024-0002", "pkg:apk/alpine/openssl@3.0.0", "Medium") + }; + + var result = FindingsComparer.Compare(stella, grype); + + result.ParityPercent.Should().Be(100); + result.IsWithinTolerance.Should().BeTrue(); + result.MatchingFindings.Should().Be(2); + result.OnlyInStella.Should().Be(0); + result.OnlyInGrype.Should().Be(0); + } + + [Fact] + public void Compare_ReturnsExpectedCounts_WhenSetsDiffer() + { + var stella = new[] + { + new Finding("CVE-2024-0001", "pkg:apk/alpine/zlib@1.2.13", "High"), + new Finding("CVE-2024-0002", "pkg:apk/alpine/openssl@3.0.0", "Medium") + }; + var grype = new[] + { + new GrypeFinding("CVE-2024-0002", "pkg:apk/alpine/openssl@3.0.0", "Medium"), + new GrypeFinding("CVE-2024-0003", "pkg:apk/alpine/busybox@1.36.0", "Low") + }; + + var result = FindingsComparer.Compare(stella, grype); + + var expectedParity = (decimal)1 / 3 * 100; + result.ParityPercent.Should().Be(expectedParity); + result.IsWithinTolerance.Should().BeFalse(); + result.MatchingFindings.Should().Be(1); + result.OnlyInStella.Should().Be(1); + result.OnlyInGrype.Should().Be(1); + } + + [Fact] + public void Compare_ReturnsFullParity_WhenInputsEmpty() + { + var result = FindingsComparer.Compare(Array.Empty(), Array.Empty()); + + result.ParityPercent.Should().Be(100); + result.IsWithinTolerance.Should().BeTrue(); + result.MatchingFindings.Should().Be(0); + result.OnlyInStella.Should().Be(0); + result.OnlyInGrype.Should().Be(0); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/FindingsModels.cs b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsModels.cs new file mode 100644 index 000000000..817ac14d3 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/FindingsModels.cs @@ -0,0 +1,23 @@ +namespace StellaOps.Interop.Tests; + +public sealed record GrypeFinding( + string VulnerabilityId, + string PackagePurl, + string Severity, + string? 
FixedIn = null); + +public sealed record Finding( + string VulnerabilityId, + string PackagePurl, + string Severity); + +public sealed record FindingsComparisonResult( + decimal ParityPercent, + bool IsWithinTolerance, + int StellaTotalFindings, + int GrypeTotalFindings, + int MatchingFindings, + int OnlyInStella, + int OnlyInGrype, + IReadOnlyList<(string VulnId, string Purl)> OnlyInStellaDetails, + IReadOnlyList<(string VulnId, string Purl)> OnlyInGrypeDetails); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/InteropResults.cs b/src/__Tests/interop/StellaOps.Interop.Tests/InteropResults.cs new file mode 100644 index 000000000..6a877bff2 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/InteropResults.cs @@ -0,0 +1,28 @@ +namespace StellaOps.Interop.Tests; + +public sealed record AttestationResult( + bool Success, + string? ImageRef = null, + string? Error = null) +{ + public static AttestationResult Failed(string error) => new(false, Error: error); +} + +public sealed record GrypeScanResult( + bool Success, + IReadOnlyList? Findings = null, + string? RawOutput = null, + string? Error = null) +{ + public static GrypeScanResult Failed(string error) => new(false, Error: error); +} + +public sealed record ToolResult( + bool Success, + string Output, + string? Error = null); + +public sealed record VerifyResult( + bool Success, + string? PredicateDigest = null, + string? Error = null); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Attestation.cs b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Attestation.cs new file mode 100644 index 000000000..a4df875d0 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Attestation.cs @@ -0,0 +1,27 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Interop.Tests; + +public sealed partial class InteropTestHarness +{ + /// + /// Attest SBOM using cosign. + /// + public async Task AttestWithCosignAsync( + string sbomPath, + string imageRef, + CancellationToken ct = default) + { + EnsureToolsAvailable(); + var result = await _toolManager.RunAsync( + "cosign", + $"attest --predicate {sbomPath} --type cyclonedx {imageRef} --yes", + ct).ConfigureAwait(false); + + if (!result.Success) + return AttestationResult.Failed(result.Error ?? "Cosign attestation failed"); + + return new AttestationResult(Success: true, ImageRef: imageRef); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Grype.cs b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Grype.cs new file mode 100644 index 000000000..f4cd0ab6d --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Grype.cs @@ -0,0 +1,42 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Interop.Tests; + +public sealed partial class InteropTestHarness +{ + /// + /// Scan using Grype from SBOM (no image pull). + /// + public async Task ScanWithGrypeFromSbomAsync( + string sbomPath, + CancellationToken ct = default) + { + EnsureToolsAvailable(); + var outputPath = Path.Combine(_workDir, "grype-findings.json"); + var result = await _toolManager.RunAsync( + "grype", + $"sbom:{sbomPath} -o json --file {outputPath}", + ct).ConfigureAwait(false); + + if (!result.Success) + return GrypeScanResult.Failed(result.Error ?? 
"Grype scan failed"); + + var content = await File.ReadAllTextAsync(outputPath, ct).ConfigureAwait(false); + var findings = ParseGrypeFindings(content); + + return new GrypeScanResult( + Success: true, + Findings: findings, + RawOutput: content); + } + + private static IReadOnlyList ParseGrypeFindings(string json) + { + // Placeholder: In real implementation, parse Grype JSON output + // For now, return empty list + return Array.Empty(); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Sbom.cs b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Sbom.cs new file mode 100644 index 000000000..35c133e73 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.Sbom.cs @@ -0,0 +1,86 @@ +using System; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Interop.Tests; + +public sealed partial class InteropTestHarness +{ + /// + /// Generate SBOM using Syft. + /// + public async Task GenerateSbomWithSyftAsync( + string imageRef, + SbomFormat format, + CancellationToken ct = default) + { + EnsureToolsAvailable(); + var formatArg = format switch + { + SbomFormat.CycloneDx16 => "cyclonedx-json", + SbomFormat.Spdx30 => "spdx-json", + _ => throw new ArgumentException($"Unsupported format: {format}") + }; + + var outputPath = Path.Combine(_workDir, $"sbom-syft-{format}.json"); + var result = await _toolManager.RunAsync( + "syft", + $"{imageRef} -o {formatArg}={outputPath}", + ct).ConfigureAwait(false); + + if (!result.Success) + return SbomResult.Failed(result.Error ?? "Syft execution failed"); + + var content = await File.ReadAllTextAsync(outputPath, ct).ConfigureAwait(false); + var digest = ComputeDigest(content); + + return new SbomResult( + Success: true, + Path: outputPath, + Format: format, + Content: content, + Digest: digest); + } + + /// + /// Generate SBOM using Stella scanner. + /// + public async Task GenerateSbomWithStellaAsync( + string imageRef, + SbomFormat format, + CancellationToken ct = default) + { + EnsureToolsAvailable(); + var formatArg = format switch + { + SbomFormat.CycloneDx16 => "cyclonedx", + SbomFormat.Spdx30 => "spdx", + _ => throw new ArgumentException($"Unsupported format: {format}") + }; + + var outputPath = Path.Combine(_workDir, $"stella-sbom-{format}.json"); + var result = await _toolManager.RunAsync( + "stella", + $"scan {imageRef} --sbom-format {formatArg} --sbom-output {outputPath}", + ct).ConfigureAwait(false); + + if (!result.Success) + return SbomResult.Failed(result.Error ?? 
"Stella execution failed"); + + var content = await File.ReadAllTextAsync(outputPath, ct).ConfigureAwait(false); + var digest = ComputeDigest(content); + + return new SbomResult( + Success: true, + Path: outputPath, + Format: format, + Content: content, + Digest: digest); + } + + private static string ComputeDigest(string content) => + Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(content))).ToLowerInvariant(); +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.cs b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.cs index bec33b2fe..f378742f7 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/InteropTestHarness.cs @@ -1,16 +1,17 @@ -namespace StellaOps.Interop.Tests; - -using System.Diagnostics; -using System.Security.Cryptography; -using System.Text; +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; using StellaOps.Interop; using Xunit.Sdk; +namespace StellaOps.Interop.Tests; + /// /// Test harness for SBOM interoperability testing. /// Coordinates Syft, Grype, Trivy, and cosign tools. /// -public sealed class InteropTestHarness : IAsyncLifetime +public sealed partial class InteropTestHarness : IAsyncLifetime { private readonly ToolManager _toolManager; private readonly string _workDir; @@ -40,165 +41,9 @@ public sealed class InteropTestHarness : IAsyncLifetime return; } - // Verify tools are available - await _toolManager.VerifyToolAsync("syft", "--version"); - await _toolManager.VerifyToolAsync("grype", "--version"); - await _toolManager.VerifyToolAsync("cosign", "version"); - } - - /// - /// Generate SBOM using Syft. - /// - public async Task GenerateSbomWithSyft( - string imageRef, - SbomFormat format, - CancellationToken ct = default) - { - EnsureToolsAvailable(); - var formatArg = format switch - { - SbomFormat.CycloneDx16 => "cyclonedx-json", - SbomFormat.Spdx30 => "spdx-json", - _ => throw new ArgumentException($"Unsupported format: {format}") - }; - - var outputPath = Path.Combine(_workDir, $"sbom-syft-{format}.json"); - var result = await _toolManager.RunAsync( - "syft", - $"{imageRef} -o {formatArg}={outputPath}", - ct); - - if (!result.Success) - return SbomResult.Failed(result.Error ?? "Syft execution failed"); - - var content = await File.ReadAllTextAsync(outputPath, ct); - var digest = ComputeDigest(content); - - return new SbomResult( - Success: true, - Path: outputPath, - Format: format, - Content: content, - Digest: digest); - } - - /// - /// Generate SBOM using Stella scanner. - /// - public async Task GenerateSbomWithStella( - string imageRef, - SbomFormat format, - CancellationToken ct = default) - { - EnsureToolsAvailable(); - var formatArg = format switch - { - SbomFormat.CycloneDx16 => "cyclonedx", - SbomFormat.Spdx30 => "spdx", - _ => throw new ArgumentException($"Unsupported format: {format}") - }; - - var outputPath = Path.Combine(_workDir, $"stella-sbom-{format}.json"); - var result = await _toolManager.RunAsync( - "stella", - $"scan {imageRef} --sbom-format {formatArg} --sbom-output {outputPath}", - ct); - - if (!result.Success) - return SbomResult.Failed(result.Error ?? "Stella execution failed"); - - var content = await File.ReadAllTextAsync(outputPath, ct); - var digest = ComputeDigest(content); - - return new SbomResult( - Success: true, - Path: outputPath, - Format: format, - Content: content, - Digest: digest); - } - - /// - /// Attest SBOM using cosign. 
- /// - public async Task AttestWithCosign( - string sbomPath, - string imageRef, - CancellationToken ct = default) - { - EnsureToolsAvailable(); - var result = await _toolManager.RunAsync( - "cosign", - $"attest --predicate {sbomPath} --type cyclonedx {imageRef} --yes", - ct); - - if (!result.Success) - return AttestationResult.Failed(result.Error ?? "Cosign attestation failed"); - - return new AttestationResult(Success: true, ImageRef: imageRef); - } - - /// - /// Scan using Grype from SBOM (no image pull). - /// - public async Task ScanWithGrypeFromSbom( - string sbomPath, - CancellationToken ct = default) - { - EnsureToolsAvailable(); - var outputPath = Path.Combine(_workDir, "grype-findings.json"); - var result = await _toolManager.RunAsync( - "grype", - $"sbom:{sbomPath} -o json --file {outputPath}", - ct); - - if (!result.Success) - return GrypeScanResult.Failed(result.Error ?? "Grype scan failed"); - - var content = await File.ReadAllTextAsync(outputPath, ct); - var findings = ParseGrypeFindings(content); - - return new GrypeScanResult( - Success: true, - Findings: findings, - RawOutput: content); - } - - /// - /// Compare findings between Stella and Grype. - /// - public FindingsComparisonResult CompareFindings( - IReadOnlyList stellaFindings, - IReadOnlyList grypeFindings, - decimal tolerancePercent = 5) - { - var stellaVulns = stellaFindings - .Select(f => (f.VulnerabilityId, f.PackagePurl)) - .ToHashSet(); - - var grypeVulns = grypeFindings - .Select(f => (f.VulnerabilityId, f.PackagePurl)) - .ToHashSet(); - - var onlyInStella = stellaVulns.Except(grypeVulns).ToList(); - var onlyInGrype = grypeVulns.Except(stellaVulns).ToList(); - var inBoth = stellaVulns.Intersect(grypeVulns).ToList(); - - var totalUnique = stellaVulns.Union(grypeVulns).Count(); - var parityPercent = totalUnique > 0 - ? 
(decimal)inBoth.Count / totalUnique * 100 - : 100; - - return new FindingsComparisonResult( - ParityPercent: parityPercent, - IsWithinTolerance: parityPercent >= (100 - tolerancePercent), - StellaTotalFindings: stellaFindings.Count, - GrypeTotalFindings: grypeFindings.Count, - MatchingFindings: inBoth.Count, - OnlyInStella: onlyInStella.Count, - OnlyInGrype: onlyInGrype.Count, - OnlyInStellaDetails: onlyInStella, - OnlyInGrypeDetails: onlyInGrype); + await _toolManager.VerifyToolAsync("syft", "--version").ConfigureAwait(false); + await _toolManager.VerifyToolAsync("grype", "--version").ConfigureAwait(false); + await _toolManager.VerifyToolAsync("cosign", "version").ConfigureAwait(false); } public ValueTask DisposeAsync() @@ -208,22 +53,9 @@ public sealed class InteropTestHarness : IAsyncLifetime return ValueTask.CompletedTask; } - private static string ComputeDigest(string content) => - Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(content))).ToLowerInvariant(); - - private static IReadOnlyList ParseGrypeFindings(string json) - { - // Placeholder: In real implementation, parse Grype JSON output - // For now, return empty list - return Array.Empty(); - } - private void EnsureToolsAvailable() { if (!string.IsNullOrWhiteSpace(_skipReason)) throw SkipException.ForSkip(_skipReason); } } - - - diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/Models.cs b/src/__Tests/interop/StellaOps.Interop.Tests/Models.cs index 9a7f0b17c..f546c60fa 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/Models.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/Models.cs @@ -4,11 +4,6 @@ // Task: T1, T7 - Interop Test Harness & Project Setup // Description: Models for SBOM interoperability testing. // ----------------------------------------------------------------------------- - -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; - namespace StellaOps.Interop.Tests; public enum SbomFormat @@ -27,52 +22,3 @@ public sealed record SbomResult( { public static SbomResult Failed(string error) => new(false, Error: error); } - -public sealed record AttestationResult( - bool Success, - string? ImageRef = null, - string? Error = null) -{ - public static AttestationResult Failed(string error) => new(false, Error: error); -} - -public sealed record GrypeScanResult( - bool Success, - IReadOnlyList? Findings = null, - string? RawOutput = null, - string? Error = null) -{ - public static GrypeScanResult Failed(string error) => new(false, Error: error); -} - -public sealed record GrypeFinding( - string VulnerabilityId, - string PackagePurl, - string Severity, - string? FixedIn = null); - -public sealed record Finding( - string VulnerabilityId, - string PackagePurl, - string Severity); - -public sealed record ToolResult( - bool Success, - string Output, - string? Error = null); - -public sealed record FindingsComparisonResult( - decimal ParityPercent, - bool IsWithinTolerance, - int StellaTotalFindings, - int GrypeTotalFindings, - int MatchingFindings, - int OnlyInStella, - int OnlyInGrype, - IReadOnlyList<(string VulnId, string Purl)> OnlyInStellaDetails, - IReadOnlyList<(string VulnId, string Purl)> OnlyInGrypeDetails); - -public sealed record VerifyResult( - bool Success, - string? PredicateDigest = null, - string? 
Error = null); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/Spdx/SpdxRoundTripTests.cs b/src/__Tests/interop/StellaOps.Interop.Tests/Spdx/SpdxRoundTripTests.cs index 270b99475..3b4c9c8f0 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/Spdx/SpdxRoundTripTests.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/Spdx/SpdxRoundTripTests.cs @@ -13,10 +13,10 @@ public class SpdxRoundTripTests : IClassFixture [Theory] [MemberData(nameof(TestImages))] - public async Task Syft_GeneratesSpdx_CanBeParsed(string imageRef) + public async Task Syft_GeneratesSpdx_CanBeParsedAsync(string imageRef) { // Generate SBOM with Syft - var sbomResult = await _harness.GenerateSbomWithSyft( + var sbomResult = await _harness.GenerateSbomWithSyftAsync( imageRef, SbomFormat.Spdx30); sbomResult.Success.Should().BeTrue("Syft should generate SPDX SBOM"); @@ -27,10 +27,10 @@ public class SpdxRoundTripTests : IClassFixture [Theory] [MemberData(nameof(TestImages))] - public async Task Stella_GeneratesSpdx_CanBeParsed(string imageRef) + public async Task Stella_GeneratesSpdx_CanBeParsedAsync(string imageRef) { // Generate SBOM with Stella - var sbomResult = await _harness.GenerateSbomWithStella( + var sbomResult = await _harness.GenerateSbomWithStellaAsync( imageRef, SbomFormat.Spdx30); sbomResult.Success.Should().BeTrue("Stella should generate SPDX SBOM"); @@ -42,10 +42,10 @@ public class SpdxRoundTripTests : IClassFixture [Theory] [MemberData(nameof(TestImages))] [Trait("Category", "Schema")] - public async Task Stella_Spdx_ValidatesAgainstSchema(string imageRef) + public async Task Stella_Spdx_ValidatesAgainstSchemaAsync(string imageRef) { // Generate SBOM - var sbomResult = await _harness.GenerateSbomWithStella( + var sbomResult = await _harness.GenerateSbomWithStellaAsync( imageRef, SbomFormat.Spdx30); sbomResult.Success.Should().BeTrue(); @@ -57,12 +57,12 @@ public class SpdxRoundTripTests : IClassFixture [Fact] [Trait("Category", "EvidenceChain")] - public async Task Spdx_IncludesEvidenceChain() + public async Task Spdx_IncludesEvidenceChainAsync() { var imageRef = "alpine:3.18"; // Generate SBOM with evidence - var sbomResult = await _harness.GenerateSbomWithStella( + var sbomResult = await _harness.GenerateSbomWithStellaAsync( imageRef, SbomFormat.Spdx30); sbomResult.Success.Should().BeTrue(); @@ -73,12 +73,12 @@ public class SpdxRoundTripTests : IClassFixture [Fact] [Trait("Category", "Interop")] - public async Task Spdx_CompatibleWithConsumers() + public async Task Spdx_CompatibleWithConsumersAsync() { var imageRef = "debian:12-slim"; // Generate SBOM - var sbomResult = await _harness.GenerateSbomWithStella( + var sbomResult = await _harness.GenerateSbomWithStellaAsync( imageRef, SbomFormat.Spdx30); sbomResult.Success.Should().BeTrue(); diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/TASKS.md b/src/__Tests/interop/StellaOps.Interop.Tests/TASKS.md index 2aef38335..5bb2ad64d 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/TASKS.md +++ b/src/__Tests/interop/StellaOps.Interop.Tests/TASKS.md @@ -11,3 +11,4 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229 | AUDIT-TESTGAP-CORELIB-INTEROP-0001 | DONE | Added ToolManager unit tests + skip gating (2026-01-13). | | REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. | | REMED-08 | DONE | Added stubbed ToolManager unit tests for deterministic path/process checks. 
| +| REMED-09 | DONE | Async naming + file splits (<=100 lines), harness/model refactors, FindingsComparer tests added; ConfigureAwait(false) skipped in xUnit tests per xUnit1030; `dotnet test src/__Tests/interop/StellaOps.Interop.Tests/StellaOps.Interop.Tests.csproj` passed (11 tests, 38 skipped) 2026-02-04. | diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.Helpers.cs b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.Helpers.cs new file mode 100644 index 000000000..18f615e93 --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.Helpers.cs @@ -0,0 +1,36 @@ +using System.Text; + +namespace StellaOps.Interop.Tests; + +public sealed partial class ToolManagerTests +{ + private static string ResolveShellPath() + => OperatingSystem.IsWindows() + ? Environment.GetEnvironmentVariable("ComSpec") ?? string.Empty + : "/bin/sh"; + + private static string WriteShellScript(string directory) + { + var scriptName = OperatingSystem.IsWindows() ? "interop-tool.cmd" : "interop-tool.sh"; + var scriptPath = Path.Combine(directory, scriptName); + var content = OperatingSystem.IsWindows() + ? "@echo off\r\necho ok\r\nexit /b 0\r\n" + : "#!/bin/sh\n\necho ok\nexit 0\n"; + + File.WriteAllText(scriptPath, content, Encoding.ASCII); + return scriptPath; + } + + private static string CreateTempDirectory() + { + var path = Path.Combine(Path.GetTempPath(), $"interop-tool-{Guid.NewGuid():N}"); + Directory.CreateDirectory(path); + return path; + } + + private static void DeleteDirectory(string path) + { + if (Directory.Exists(path)) + Directory.Delete(path, recursive: true); + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.RunAsync.cs b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.RunAsync.cs new file mode 100644 index 000000000..7386f5bfb --- /dev/null +++ b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.RunAsync.cs @@ -0,0 +1,62 @@ +using FluentAssertions; +using StellaOps.Interop; +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Interop.Tests; + +public sealed partial class ToolManagerTests +{ + [Fact] + public async Task RunAsync_ReturnsFailure_WhenToolMissingAsync() + { + var workDir = CreateTempDirectory(); + try + { + var manager = new ToolManager(workDir); + + var result = await manager.RunAsync("missing-tool", "--version", CancellationToken.None); + + result.Success.Should().BeFalse(); + result.Error.Should().Contain("Tool not found"); + result.ExitCode.Should().Be(-1); + } + finally + { + DeleteDirectory(workDir); + } + } + + [Fact] + public async Task RunAsync_ReturnsSuccess_WhenShellExecutesScriptAsync() + { + var workDir = CreateTempDirectory(); + try + { + var scriptPath = WriteShellScript(workDir); + var shellPath = ResolveShellPath(); + + if (string.IsNullOrWhiteSpace(shellPath) || !File.Exists(shellPath)) + throw SkipException.ForSkip("Shell not available for interop tool test."); + + var toolPaths = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["shell"] = shellPath + }; + + var args = OperatingSystem.IsWindows() + ? 
$"/c \"{scriptPath}\"" + : $"\"{scriptPath}\""; + + var manager = new ToolManager(workDir, toolPaths); + var result = await manager.RunAsync("shell", args, CancellationToken.None); + + result.Success.Should().BeTrue(); + result.StdOut.Should().Contain("ok"); + } + finally + { + DeleteDirectory(workDir); + } + } +} diff --git a/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.cs b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.cs index 54e3bcd04..fde323ee7 100644 --- a/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.cs +++ b/src/__Tests/interop/StellaOps.Interop.Tests/ToolManagerTests.cs @@ -2,11 +2,9 @@ using System.Text; using FluentAssertions; using StellaOps.Interop; using Xunit; -using Xunit.Sdk; - namespace StellaOps.Interop.Tests; -public sealed class ToolManagerTests +public sealed partial class ToolManagerTests { [Fact] public void ResolveToolPath_UsesConfiguredPath() @@ -73,86 +71,4 @@ public sealed class ToolManagerTests } } - [Fact] - public async Task RunAsync_ReturnsFailure_WhenToolMissing() - { - var workDir = CreateTempDirectory(); - try - { - var manager = new ToolManager(workDir); - - var result = await manager.RunAsync("missing-tool", "--version", CancellationToken.None); - - result.Success.Should().BeFalse(); - result.Error.Should().Contain("Tool not found"); - result.ExitCode.Should().Be(-1); - } - finally - { - DeleteDirectory(workDir); - } - } - - [Fact] - public async Task RunAsync_ReturnsSuccess_WhenShellExecutesScript() - { - var workDir = CreateTempDirectory(); - try - { - var scriptPath = WriteShellScript(workDir); - var shellPath = ResolveShellPath(); - - if (string.IsNullOrWhiteSpace(shellPath) || !File.Exists(shellPath)) - throw SkipException.ForSkip("Shell not available for interop tool test."); - - var toolPaths = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - ["shell"] = shellPath - }; - - var args = OperatingSystem.IsWindows() - ? $"/c \"{scriptPath}\"" - : $"\"{scriptPath}\""; - - var manager = new ToolManager(workDir, toolPaths); - var result = await manager.RunAsync("shell", args, CancellationToken.None); - - result.Success.Should().BeTrue(); - result.StdOut.Should().Contain("ok"); - } - finally - { - DeleteDirectory(workDir); - } - } - - private static string ResolveShellPath() - => OperatingSystem.IsWindows() - ? Environment.GetEnvironmentVariable("ComSpec") ?? string.Empty - : "/bin/sh"; - - private static string WriteShellScript(string directory) - { - var scriptName = OperatingSystem.IsWindows() ? "interop-tool.cmd" : "interop-tool.sh"; - var scriptPath = Path.Combine(directory, scriptName); - var content = OperatingSystem.IsWindows() - ? "@echo off\r\necho ok\r\nexit /b 0\r\n" - : "#!/bin/sh\n\necho ok\nexit 0\n"; - - File.WriteAllText(scriptPath, content, Encoding.ASCII); - return scriptPath; - } - - private static string CreateTempDirectory() - { - var path = Path.Combine(Path.GetTempPath(), $"interop-tool-{Guid.NewGuid():N}"); - Directory.CreateDirectory(path); - return path; - } - - private static void DeleteDirectory(string path) - { - if (Directory.Exists(path)) - Directory.Delete(path, recursive: true); - } }