Add post-quantum cryptography support with PqSoftCryptoProvider
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
wine-csp-build / Build Wine CSP Image (push) Has been cancelled
- Implemented PqSoftCryptoProvider for software-only post-quantum algorithms (Dilithium3, Falcon512) using BouncyCastle.
- Added PqSoftProviderOptions and PqSoftKeyOptions for configuration.
- Created unit tests for Dilithium3 and Falcon512 signing and verification.
- Introduced EcdsaPolicyCryptoProvider for compliance profiles (FIPS/eIDAS) with explicit allow-lists.
- Added KcmvpHashOnlyProvider for KCMVP baseline compliance.
- Updated project files and dependencies for new libraries and testing frameworks.
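
The provider sources themselves are not part of the diff shown below, which covers the Wine CSP build and deployment collateral. As a rough orientation only, a software-only Dilithium3 provider of the kind the commit message describes would wrap BouncyCastle calls along the lines of the sketch below; the StellaOps-side names (PqSoftCryptoProvider, PqSoftProviderOptions) come from the commit message, and the BouncyCastle API surface shown here is an assumption that should be checked against the pinned package version.

```csharp
using System.Text;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Pqc.Crypto.Crystals.Dilithium;
using Org.BouncyCastle.Security;

// Sketch only: generate a Dilithium3 key pair, sign a payload, and verify it.
// This illustrates the primitive layer referenced by the commit message, not the
// actual PqSoftCryptoProvider implementation.
var random = new SecureRandom();

var keyGen = new DilithiumKeyPairGenerator();
keyGen.Init(new DilithiumKeyGenerationParameters(random, DilithiumParameters.Dilithium3));
AsymmetricCipherKeyPair keyPair = keyGen.GenerateKeyPair();

byte[] message = Encoding.UTF8.GetBytes("post-quantum test vector");

var signer = new DilithiumSigner();
signer.Init(true, keyPair.Private);    // true = signing mode
byte[] signature = signer.GenerateSignature(message);

var verifier = new DilithiumSigner();
verifier.Init(false, keyPair.Public);  // false = verification mode
bool valid = verifier.VerifySignature(message, signature);
```

Falcon512 would follow the same pattern through the corresponding Falcon key-pair generator and signer classes.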
.gitea/workflows/wine-csp-build.yml (new file, 211 lines added)
@@ -0,0 +1,211 @@
name: wine-csp-build

on:
  push:
    branches: [main, develop]
    paths:
      - 'src/__Tools/WineCspService/**'
      - 'ops/wine-csp/**'
      - 'third_party/forks/AlexMAS.GostCryptography/**'
      - '.gitea/workflows/wine-csp-build.yml'
  pull_request:
    paths:
      - 'src/__Tools/WineCspService/**'
      - 'ops/wine-csp/**'
      - 'third_party/forks/AlexMAS.GostCryptography/**'
  workflow_dispatch:
    inputs:
      push:
        description: "Push to registry"
        required: false
        default: "false"
      version:
        description: "Version tag (e.g., 2025.10.0-edge)"
        required: false
        default: "2025.10.0-edge"

env:
  IMAGE_NAME: registry.stella-ops.org/stellaops/wine-csp
  DOCKERFILE: ops/wine-csp/Dockerfile
  # Wine CSP only supports linux/amd64 (Wine ARM64 has compatibility issues with Windows x64 apps)
  PLATFORMS: linux/amd64

jobs:
  build:
    name: Build Wine CSP Image
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Install syft (SBOM generation)
        uses: anchore/sbom-action/download-syft@v0

      - name: Install cosign (attestation)
        uses: sigstore/cosign-installer@v3.7.0

      - name: Set version tag
        id: version
        run: |
          if [[ -n "${{ github.event.inputs.version }}" ]]; then
            echo "tag=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "tag=2025.10.0-edge" >> $GITHUB_OUTPUT
          else
            echo "tag=pr-${{ github.event.pull_request.number || github.sha }}" >> $GITHUB_OUTPUT
          fi

      - name: Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_NAME }}
          tags: |
            type=raw,value=${{ steps.version.outputs.tag }}
            type=sha,format=short

      - name: Build image (no push)
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ env.DOCKERFILE }}
          platforms: ${{ env.PLATFORMS }}
          push: false
          load: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Test container startup
        run: |
          set -e
          echo "Starting Wine CSP container for health check test..."

          # Run container in detached mode
          docker run -d --name wine-csp-test \
            -e WINE_CSP_MODE=limited \
            -e WINE_CSP_LOG_LEVEL=Debug \
            -p 5099:5099 \
            "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"

          # Wait for container startup (Wine takes time to initialize)
          echo "Waiting for container startup (90s max)..."
          for i in $(seq 1 18); do
            sleep 5
            if curl -sf http://127.0.0.1:5099/health > /dev/null 2>&1; then
              echo "Health check passed after $((i * 5))s"
              break
            fi
            echo "Waiting... ($((i * 5))s elapsed)"
          done

          # Final health check
          echo "Final health check:"
          curl -sf http://127.0.0.1:5099/health || {
            echo "Health check failed!"
            docker logs wine-csp-test
            exit 1
          }

          # Test status endpoint
          echo "Testing /status endpoint:"
          curl -sf http://127.0.0.1:5099/status | jq .

          # Cleanup
          docker stop wine-csp-test
          docker rm wine-csp-test

          echo "Container tests passed!"

      - name: Generate SBOM (SPDX)
        run: |
          mkdir -p out/sbom
          syft "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" \
            -o spdx-json=out/sbom/wine-csp.spdx.json

      - name: Generate SBOM (CycloneDX)
        run: |
          syft "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" \
            -o cyclonedx-json=out/sbom/wine-csp.cdx.json

      - name: Upload SBOM artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-sbom-${{ steps.version.outputs.tag }}
          path: out/sbom/

      - name: Login to registry
        if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
        uses: docker/login-action@v3
        with:
          registry: registry.stella-ops.org
          username: ${{ secrets.REGISTRY_USER }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Push to registry
        if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
        run: |
          docker push "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"
          docker push "${{ env.IMAGE_NAME }}:sha-${{ github.sha }}"

      - name: Sign image with cosign
        if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          # Sign with keyless signing (requires OIDC)
          cosign sign --yes "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" || echo "Signing skipped (no OIDC available)"

      - name: Build air-gap bundle
        run: |
          mkdir -p out/bundles
          docker save "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" | gzip > out/bundles/wine-csp-${{ steps.version.outputs.tag }}.tar.gz

          # Generate bundle manifest
          cat > out/bundles/wine-csp-${{ steps.version.outputs.tag }}.manifest.json <<EOF
          {
            "name": "wine-csp",
            "version": "${{ steps.version.outputs.tag }}",
            "image": "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}",
            "platform": "linux/amd64",
            "sha256": "$(sha256sum out/bundles/wine-csp-${{ steps.version.outputs.tag }}.tar.gz | cut -d' ' -f1)",
            "created": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "git_commit": "${{ github.sha }}",
            "warning": "FOR TEST VECTOR GENERATION ONLY - NOT FOR PRODUCTION SIGNING"
          }
          EOF

          echo "Air-gap bundle created:"
          ls -lh out/bundles/

      - name: Upload air-gap bundle
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-bundle-${{ steps.version.outputs.tag }}
          path: out/bundles/

      - name: Security scan with Trivy
        uses: aquasecurity/trivy-action@master
        with:
          image-ref: "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"
          format: 'sarif'
          output: 'trivy-results.sarif'
          severity: 'CRITICAL,HIGH'

      - name: Upload Trivy scan results
        uses: github/codeql-action/upload-sarif@v3
        if: always()
        with:
          sarif_file: 'trivy-results.sarif'
@@ -1,13 +1,14 @@
<Project>

  <PropertyGroup>

    <StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
    <StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
    <StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
    <StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
    <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
    <_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
    <RestorePackagesPath Condition="'$(RestorePackagesPath)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot).nuget/packages'))</RestorePackagesPath>
    <RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
    <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
    <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
    <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
@@ -34,14 +35,12 @@
    <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
  </PropertyGroup>

</Project>

<Project>
  <ItemGroup>
    <PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
    <PackageReference Update="Microsoft.Extensions.Options" Version="9.0.0" />
    <PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="9.0.0" />
    <PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="9.0.0" />
    <PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.0" />
    <PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Update="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
    <PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
    <PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
  </ItemGroup>

</Project>
@@ -1,5 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <clear />
    <add key="local-nugets" value="./local-nugets" />
    <add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
    <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
  </packageSources>
  <config>
    <add key="globalPackagesFolder" value="./.nuget/packages" />
  </config>
@@ -17,6 +17,8 @@ volumes:
  advisory-ai-plans:
  advisory-ai-outputs:
  postgres-data:
  wine-csp-prefix:
  wine-csp-logs:

services:
  mongo:
@@ -329,3 +331,42 @@ services:
    networks:
      - stellaops
    labels: *release-labels

  # Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
  # WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
  wine-csp:
    image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.10.0-edge}
    build:
      context: ../..
      dockerfile: ops/wine-csp/Dockerfile
    restart: unless-stopped
    environment:
      WINE_CSP_PORT: "${WINE_CSP_PORT:-5099}"
      WINE_CSP_MODE: "${WINE_CSP_MODE:-limited}"
      WINE_CSP_INSTALLER_PATH: "${WINE_CSP_INSTALLER_PATH:-/opt/cryptopro/csp-installer.msi}"
      WINE_CSP_LOG_LEVEL: "${WINE_CSP_LOG_LEVEL:-Information}"
      ASPNETCORE_ENVIRONMENT: "${ASPNETCORE_ENVIRONMENT:-Development}"
    volumes:
      - wine-csp-prefix:/home/winecsp/.wine
      - wine-csp-logs:/var/log/wine-csp
      # Mount customer-provided CSP installer (optional):
      # - /path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
    ports:
      - "${WINE_CSP_PORT:-5099}:5099"
    networks:
      - stellaops
    healthcheck:
      test: ["/usr/local/bin/healthcheck.sh"]
      interval: 30s
      timeout: 10s
      start_period: 90s
      retries: 3
    deploy:
      resources:
        limits:
          memory: 2G
    labels:
      <<: *release-labels
      com.stellaops.component: "wine-csp"
      com.stellaops.security.production-signing: "false"
      com.stellaops.security.test-vectors-only: "true"
@@ -72,3 +72,14 @@ services:
      - postgres
    labels: *release-labels
    networks: [stellaops]

  # Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
  # WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
  wine-csp:
    image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.09.2-mock}
    environment:
      WINE_CSP_PORT: "5099"
      WINE_CSP_MODE: "limited"
      WINE_CSP_LOG_LEVEL: "Debug"
    labels: *release-labels
    networks: [stellaops]
deploy/compose/env/wine-csp.env.example (vendored, new file, 49 lines added)
@@ -0,0 +1,49 @@
# Wine CSP Service Environment Configuration
# ===========================================================================
#
# WARNING: This service is for TEST VECTOR GENERATION ONLY.
# It MUST NOT be used for production cryptographic signing operations.
#
# ===========================================================================

# Service port (default: 5099)
WINE_CSP_PORT=5099

# Operation mode:
# - limited: Works without CryptoPro CSP (basic GostCryptography only)
# - full: Requires CryptoPro CSP installer to be mounted at WINE_CSP_INSTALLER_PATH
WINE_CSP_MODE=limited

# Path to CryptoPro CSP installer MSI (customer-provided)
# Mount your licensed CSP installer to /opt/cryptopro/csp-installer.msi
WINE_CSP_INSTALLER_PATH=/opt/cryptopro/csp-installer.msi

# Logging level: Trace, Debug, Information, Warning, Error, Critical
WINE_CSP_LOG_LEVEL=Information

# Image version tag
WINE_CSP_VERSION=2025.10.0-edge

# ASP.NET Core environment (Development, Staging, Production)
ASPNETCORE_ENVIRONMENT=Production

# ===========================================================================
# Advanced Configuration (typically not changed)
# ===========================================================================

# Wine debug output (set to "warn+all" for troubleshooting)
# WINEDEBUG=-all

# Wine architecture (must be win64 for CryptoPro CSP)
# WINEARCH=win64

# ===========================================================================
# Volume Mounts (configure in docker-compose, not here)
# ===========================================================================
# - Wine prefix: /home/winecsp/.wine (persistent storage)
# - CSP installer: /opt/cryptopro (read-only mount)
# - Logs: /var/log/wine-csp (log output)
#
# Example mount for CSP installer:
# volumes:
#   - /path/to/your/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
@@ -3,34 +3,51 @@
  "exportId": "console-export::tenant-default::2025-12-06::0007",
  "tenantId": "tenant-default",
  "generatedAt": "2025-12-06T12:11:05Z",
  "expiresAt": "2025-12-13T12:11:05Z",
  "items": [
    {
      "type": "advisory",
      "id": "CVE-2024-12345",
      "format": "json",
      "url": "https://exports.local/tenant-default/0007/CVE-2024-12345.json?sig=...",
      "sha256": "cafe0001..."
      "sha256": "sha256:cafe0001...",
      "size": 18432
    },
    {
      "type": "vex",
      "id": "vex:tenant-default:jwt-auth:5d1a",
      "format": "ndjson",
      "url": "https://exports.local/tenant-default/0007/vex-jwt-auth.ndjson?sig=...",
      "sha256": "cafe0002..."
      "sha256": "sha256:cafe0002...",
      "size": 9216
    },
    {
      "type": "policy",
      "id": "policy://tenant-default/runtime-hardening",
      "format": "json",
      "url": "https://exports.local/tenant-default/0007/policy-runtime-hardening.json?sig=...",
      "sha256": "cafe0003..."
      "sha256": "sha256:cafe0003...",
      "size": 16384
    },
    {
      "type": "scan",
      "id": "scan::tenant-default::auth-api::2025-11-07",
      "format": "ndjson",
      "url": "https://exports.local/tenant-default/0007/scan-auth-api.ndjson?sig=...",
      "sha256": "cafe0004..."
      "sha256": "sha256:cafe0004...",
      "size": 32768
    },
    {
      "type": "bundle",
      "id": "console-export::tenant-default::2025-12-06::0007",
      "format": "tar.gz",
      "url": "https://exports.local/tenant-default/0007/bundle.tar.gz?sig=...",
      "sha256": "sha256:deadbeefcafefeed00000000000000000000000000000000000000000000000",
      "size": 48732102
    }
  ],
  "checksums": {
    "manifest": "c0ffee...",
    "bundle": "deadbeef..."
    "manifest": "sha256:c0ffee00000000000000000000000000000000000000000000000000000000",
    "bundle": "sha256:deadbeef000000000000000000000000000000000000000000000000000000"
  }
}
@@ -310,11 +310,11 @@ data: {

> Until backend implementations ship, use the examples above to unblock DOCS-AIAI-31-004; replace them with live captures once the gateway endpoints are available in staging.

## Exports (draft contract v0.3)
## Exports (draft contract v0.4 for sign-off)

### Routes
- `POST /console/exports` — start an evidence bundle export job.
- `GET /console/exports/{exportId}` — fetch job status and download locations.
- `GET /console/exports/{exportId}` — fetch job status, manifest link, and download locations.
- `GET /console/exports/{exportId}/events` — SSE stream of job progress (optional).

### Security / headers
@@ -329,19 +329,20 @@ data: {
```jsonc
{
  "scope": { "tenantId": "t1", "projectId": "p1" },
  "sources": [ { "type": "advisory", "ids": ["CVE-2024-12345"] } ],
  "sources": [
    { "type": "advisory", "ids": ["CVE-2024-12345"] },
    { "type": "vex", "ids": ["vex:tenant-default:jwt-auth:5d1a"] }
  ],
  "formats": ["json", "ndjson", "csv"],
  "attestations": { "include": true, "sigstoreBundle": true },
  "attestations": { "include": true, "sigstoreBundle": true, "dsse": true },
  "notify": { "webhooks": ["https://hooks.local/export"], "email": ["secops@example.com"] },
  "priority": "normal"
}
```

### Response: 202 Accepted
- `exportId`: string
- `status`: `queued|running|succeeded|failed|expired`
- `estimateSeconds`: int
- `retryAfter`: int seconds (for polling)
- `exportId`, `status: queued|running|succeeded|failed|expired`
- `estimateSeconds`, `retryAfter` (seconds)
- `links`: `{ status: url, events?: url }`

### Response: GET status
@@ -351,7 +352,14 @@ data: {
  "status": "running",
  "estimateSeconds": 420,
  "outputs": [
    { "type": "manifest", "format": "json", "url": "https://.../manifest.json?sig=...", "sha256": "...", "expiresAt": "2025-12-06T13:10:00Z" }
    {
      "type": "manifest",
      "format": "json",
      "url": "https://exports.local/tenant-default/0007/manifest.json?sig=...",
      "sha256": "sha256:c0ffee...",
      "dsseUrl": "https://exports.local/tenant-default/0007/manifest.dsse?sig=...",
      "expiresAt": "2025-12-06T13:10:00Z"
    }
  ],
  "progress": { "percent": 42, "itemsCompleted": 210, "itemsTotal": 500, "assetsReady": 12 },
  "errors": []
@@ -361,25 +369,34 @@ data: {
### Response: SSE events
- `started`: `{ exportId, status }`
- `progress`: `{ exportId, percent, itemsCompleted, itemsTotal }`
- `asset_ready`: `{ exportId, type, id, url, sha256 }`
- `completed`: `{ exportId, status: "succeeded", manifestUrl }`
- `failed`: `{ exportId, status: "failed", code, message }`
- `asset_ready`: `{ exportId, type, id, url, sha256, format }`
- `completed`: `{ exportId, status: "succeeded", manifestUrl, manifestDsseUrl? }`
- `failed`: `{ exportId, status: "failed", code, message, retryAfterSeconds? }`

### Manifest shape (downloaded via outputs)
- `version`: string (date)
- `exportId`, `tenantId`, `generatedAt`
- `items[]`: `{ type: advisory|vex|policy|scan, id, url, sha256 }`
- `checksums`: `{ manifest, bundle }`
- Ordering: sort items by `(type asc, id asc, format asc, url asc)`.
- `version`: string (date), `exportId`, `tenantId`, `generatedAt`, `expiresAt`
- `items[]`: `{ type: advisory|vex|policy|scan|chart|bundle, id, format, url, sha256, size }`
- `checksums`: `{ manifest: "sha256:<digest>", bundle?: "sha256:<digest>" }`
- Optional DSSE envelope for manifest: `manifest.dsse` (payload type `stellaops.console.manifest`).
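
A small sketch of how a consumer could reproduce the deterministic ordering and checksum convention above; the `ManifestItem` record and the exact canonical serialization are assumptions, since the contract only fixes the sort key and the `sha256:<digest>` prefix:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;

// Hypothetical item shape mirroring the manifest contract above.
public record ManifestItem(string Type, string Id, string Format, string Url, string Sha256, long Size);

public static class ManifestChecksum
{
    // Sorts items by (type asc, id asc, format asc, url asc) with ordinal comparison,
    // then hashes the serialized form. Canonicalization details are an assumption here.
    public static string Compute(IReadOnlyCollection<ManifestItem> items)
    {
        ManifestItem[] ordered = items
            .OrderBy(i => i.Type, StringComparer.Ordinal)
            .ThenBy(i => i.Id, StringComparer.Ordinal)
            .ThenBy(i => i.Format, StringComparer.Ordinal)
            .ThenBy(i => i.Url, StringComparer.Ordinal)
            .ToArray();

        byte[] canonical = JsonSerializer.SerializeToUtf8Bytes(ordered);
        byte[] digest = SHA256.HashData(canonical);
        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
    }
}
```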

### Limits (proposed)
- Max request body 256 KiB; max sources 50; max outputs 1000 assets/export.
- Max bundle size 500 MiB compressed.
- Default job timeout 30 minutes; idle SSE timeout 60s; backoff via `Retry-After`.

### Determinism, caching, retry
- Responses set `Cache-Control: public, max-age=300, stale-while-revalidate=60, stale-if-error=300`.
- `ETag` is SHA-256 over sorted payload; clients send `If-None-Match`.
- Respect `Retry-After`; client backoff `1s,2s,4s,8s` capped at 30s.
- Cursors (if introduced later) MUST be opaque, base64url, signed with tenant + sortKeys.
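
A client-side sketch of these caching and retry rules, polling the `GET /console/exports/{exportId}` route described above; response parsing is elided and the surrounding client wiring is assumed:

```csharp
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;

public static class ExportStatusPoller
{
    // Polls the status route with If-None-Match, honouring Retry-After when present
    // and otherwise backing off 1s, 2s, 4s, 8s ... capped at 30s.
    public static async Task PollAsync(HttpClient http, string exportId, CancellationToken ct)
    {
        EntityTagHeaderValue? etag = null;
        var backoff = TimeSpan.FromSeconds(1);

        while (!ct.IsCancellationRequested)
        {
            using var request = new HttpRequestMessage(HttpMethod.Get, $"/console/exports/{exportId}");
            if (etag is not null)
            {
                request.Headers.IfNoneMatch.Add(etag);
            }

            using HttpResponseMessage response = await http.SendAsync(request, ct);

            if (response.StatusCode != HttpStatusCode.NotModified)
            {
                etag = response.Headers.ETag;
                string body = await response.Content.ReadAsStringAsync(ct);
                // Parse "status" from body and stop on succeeded/failed/expired.
            }

            TimeSpan wait = response.Headers.RetryAfter?.Delta ?? backoff;
            backoff = TimeSpan.FromSeconds(Math.Min(backoff.TotalSeconds * 2, 30));
            await Task.Delay(wait, ct);
        }
    }
}
```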

### Error codes (proposal)
- `ERR_CONSOLE_EXPORT_INVALID_SOURCE`
- `ERR_CONSOLE_EXPORT_TOO_LARGE`
- `ERR_CONSOLE_EXPORT_RATE_LIMIT`
- `ERR_CONSOLE_EXPORT_UNAVAILABLE`
- `ERR_CONSOLE_EXPORT_EXPIRED`

### Samples
- Request: `docs/api/console/samples/console-export-request.json`
docs/deploy/wine-csp-container.md (new file, 331 lines added)
@@ -0,0 +1,331 @@
# Wine CSP Container Deployment Guide

> **SECURITY WARNING:** The Wine CSP container is for **TEST VECTOR GENERATION ONLY**.
> It **MUST NOT** be used for production cryptographic signing operations.
> All signatures produced by this service should be treated as test artifacts.

## Overview

The Wine CSP container provides GOST cryptographic operations (GOST R 34.10-2012, GOST R 34.11-2012) via a Wine-hosted CryptoPro CSP environment. This enables Linux-based StellaOps deployments to generate GOST test vectors and validate cross-platform cryptographic interoperability.

### Architecture

```
┌─────────────────────────────────────────────────────────────────────┐
│ Wine CSP Container │
│ ┌─────────────────────────────────────────────────────────────────┐ │
│ │ Ubuntu 22.04 (linux/amd64) │ │
│ │ ┌───────────────┐ ┌────────────────────────────────────────┐ │ │
│ │ │ Xvfb │ │ Wine 64-bit Environment │ │ │
│ │ │ (display :99) │───>│ ┌──────────────────────────────────┐ │ │ │
│ │ └───────────────┘ │ │ WineCspService.exe (.NET 8) │ │ │ │
│ │ │ │ ┌────────────────────────────┐ │ │ │ │
│ │ │ │ │ GostCryptography.dll │ │ │ │ │
│ │ │ │ │ (MIT-licensed fork) │ │ │ │ │
│ │ │ │ └────────────────────────────┘ │ │ │ │
│ │ │ │ ┌────────────────────────────┐ │ │ │ │
│ │ │ │ │ CryptoPro CSP (optional) │ │ │ │ │
│ │ │ │ │ (customer-provided) │ │ │ │ │
│ │ │ │ └────────────────────────────┘ │ │ │ │
│ │ │ └──────────────────────────────────┘ │ │ │
│ │ └────────────────────────────────────────┘ │ │
│ └─────────────────────────────────────────────────────────────────┘ │
│ │ │
│ │ HTTP API (port 5099) │
│ ▼ │
└─────────────────────────────────────────────────────────────────────┘
```

## Deployment Modes

### Limited Mode (Default)

Operates without CryptoPro CSP using the open-source GostCryptography library:

- **Capabilities:** Basic GOST signing/verification, hashing
- **Requirements:** None (self-contained)
- **Use Case:** Development, testing, CI/CD pipelines

```bash
docker run -p 5099:5099 -e WINE_CSP_MODE=limited wine-csp:latest
```

### Full Mode

Enables full CryptoPro CSP functionality with customer-provided installer:

- **Capabilities:** Full GOST R 34.10-2012/34.11-2012, hardware token support
- **Requirements:** Licensed CryptoPro CSP installer MSI
- **Use Case:** Test vector generation matching production CSP output

```bash
docker run -p 5099:5099 \
  -e WINE_CSP_MODE=full \
  -v /path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro \
  wine-csp:latest
```

## API Endpoints

| Endpoint | Method | Description |
|----------|--------|-------------|
| `/health` | GET | Health check (Healthy/Degraded/Unhealthy) |
| `/health/liveness` | GET | Kubernetes liveness probe |
| `/health/readiness` | GET | Kubernetes readiness probe |
| `/status` | GET | Service status with CSP availability |
| `/keys` | GET | List available signing keys |
| `/sign` | POST | Sign data with GOST R 34.10-2012 |
| `/verify` | POST | Verify GOST signature |
| `/hash` | POST | Compute GOST R 34.11-2012 hash |
| `/test-vectors` | GET | Generate deterministic test vectors |

### Request/Response Examples

#### Sign Request

```http
POST /sign
Content-Type: application/json

{
  "keyId": "test-key-256",
  "algorithm": "GOST12-256",
  "data": "SGVsbG8gV29ybGQ="
}
```

Response:

```json
{
  "signature": "MEQCIFh...",
  "algorithm": "GOST12-256",
  "keyId": "test-key-256",
  "timestamp": "2025-12-07T12:00:00Z"
}
```

#### Hash Request

```http
POST /hash
Content-Type: application/json

{
  "algorithm": "STREEBOG-256",
  "data": "SGVsbG8gV29ybGQ="
}
```

Response:

```json
{
  "hash": "5a7f...",
  "algorithm": "STREEBOG-256"
}
```

## Docker Compose Integration

### Development Environment

Add to your `docker-compose.dev.yaml`:

```yaml
services:
  wine-csp:
    image: registry.stella-ops.org/stellaops/wine-csp:2025.10.0-edge
    restart: unless-stopped
    environment:
      WINE_CSP_PORT: "5099"
      WINE_CSP_MODE: "limited"
      WINE_CSP_LOG_LEVEL: "Information"
    volumes:
      - wine-csp-prefix:/home/winecsp/.wine
      - wine-csp-logs:/var/log/wine-csp
    ports:
      - "5099:5099"
    networks:
      - stellaops
    healthcheck:
      test: ["/usr/local/bin/healthcheck.sh"]
      interval: 30s
      timeout: 10s
      start_period: 90s
      retries: 3
    deploy:
      resources:
        limits:
          memory: 2G

volumes:
  wine-csp-prefix:
  wine-csp-logs:
```

### With CryptoPro CSP Installer

```yaml
services:
  wine-csp:
    image: registry.stella-ops.org/stellaops/wine-csp:2025.10.0-edge
    environment:
      WINE_CSP_MODE: "full"
    volumes:
      - wine-csp-prefix:/home/winecsp/.wine
      - /secure/path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
```

## Environment Variables

| Variable | Default | Description |
|----------|---------|-------------|
| `WINE_CSP_PORT` | `5099` | HTTP API port |
| `WINE_CSP_MODE` | `limited` | Operation mode: `limited` or `full` |
| `WINE_CSP_INSTALLER_PATH` | `/opt/cryptopro/csp-installer.msi` | Path to CSP installer |
| `WINE_CSP_LOG_LEVEL` | `Information` | Log level (Trace/Debug/Information/Warning/Error) |
| `ASPNETCORE_ENVIRONMENT` | `Production` | ASP.NET Core environment |
| `WINEDEBUG` | `-all` | Wine debug output (set to `warn+all` for troubleshooting) |

## Volume Mounts

| Path | Purpose | Persistence |
|------|---------|-------------|
| `/home/winecsp/.wine` | Wine prefix (CSP installation, keys) | Required for full mode |
| `/opt/cryptopro` | CSP installer directory (read-only) | Optional |
| `/var/log/wine-csp` | Service logs | Recommended |

## Security Considerations

### Production Restrictions

1. **Never expose to public networks** - Internal use only
2. **No sensitive keys** - Use only test keys
3. **Audit logging** - Enable verbose logging for forensics
4. **Network isolation** - Place in dedicated network segment
5. **Read-only root filesystem** - Not supported due to Wine requirements

### Container Security

- **Non-root user:** Runs as `winecsp` (UID 10001)
- **No capabilities:** No elevated privileges required
- **Minimal packages:** Only Wine and dependencies installed
- **Security labels:** Container labeled `test-vectors-only=true`

### CryptoPro CSP Licensing

CryptoPro CSP is commercial software. StellaOps does **not** distribute CryptoPro CSP:

1. Customer must provide their own licensed CSP installer
2. Mount the MSI file as read-only volume
3. Installation occurs on first container start
4. License persisted in Wine prefix volume

See `docs/legal/crypto-compliance-review.md` for distribution matrix.

## Known Limitations

| Limitation | Impact | Mitigation |
|------------|--------|------------|
| **linux/amd64 only** | No ARM64 support | Deploy on x86_64 hosts |
| **Large image (~1GB)** | Storage/bandwidth | Air-gap bundles, layer caching |
| **Slow startup (60-90s)** | Health check delays | Extended `start_period` |
| **Writable filesystem** | Security hardening | Minimize writable paths |
| **Wine compatibility** | Potential CSP issues | Test with specific CSP version |

## Troubleshooting

### Container Won't Start

```bash
# Check container logs
docker logs wine-csp

# Verify Wine initialization
docker exec wine-csp ls -la /home/winecsp/.wine

# Check for Wine errors
docker exec wine-csp cat /var/log/wine-csp/*.log
```

### Health Check Failing

```bash
# Manual health check
docker exec wine-csp wget -q -O - http://127.0.0.1:5099/health

# Check Xvfb is running
docker exec wine-csp pgrep Xvfb

# Verbose Wine output
docker exec -e WINEDEBUG=warn+all wine-csp wine64 /app/WineCspService.exe
```

### CSP Installation Issues

```bash
# Check installation marker
docker exec wine-csp cat /home/winecsp/.wine/.csp_installed

# View installation logs
docker exec wine-csp cat /home/winecsp/.wine/csp_install_logs/*.log

# Verify CSP directory
docker exec wine-csp ls -la "/home/winecsp/.wine/drive_c/Program Files/Crypto Pro"
```

### Performance Issues

```bash
# Increase memory limit
docker run --memory=4g wine-csp:latest

# Check resource usage
docker stats wine-csp
```

## Air-Gap Deployment

For air-gapped environments:

1. **Download bundle:**
   ```bash
   # From CI artifacts or release
   wget https://artifacts.stella-ops.org/wine-csp/wine-csp-2025.10.0-edge.tar.gz
   ```

2. **Transfer to air-gapped system** (via approved media)

3. **Load image:**
   ```bash
   docker load < wine-csp-2025.10.0-edge.tar.gz
   ```

4. **Run container:**
   ```bash
   docker run -p 5099:5099 wine-csp:2025.10.0-edge
   ```

## Integration with StellaOps

The Wine CSP service integrates with StellaOps cryptography infrastructure:

```csharp
// Configure Wine CSP provider
services.AddWineCspProvider(options =>
{
    options.ServiceUrl = "http://wine-csp:5099";
    options.TimeoutSeconds = 30;
    options.MaxRetries = 3;
});
```
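
When calling the service directly over HTTP, for example from a test harness that does not load the provider plugin, a minimal sketch against the `/sign` endpoint could look like the following; the request and response fields mirror the examples earlier in this guide, while the DTO and class names here are illustrative only:

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

// Illustrative DTOs mirroring the /sign request/response examples above.
public record SignRequest(string KeyId, string Algorithm, string Data);
public record SignResponse(string Signature, string Algorithm, string KeyId, string Timestamp);

public static class WineCspHttpSketch
{
    // Assumes an HttpClient with BaseAddress set to the service, e.g. http://wine-csp:5099.
    public static async Task<SignResponse?> SignAsync(HttpClient http, byte[] payload)
    {
        var request = new SignRequest(
            KeyId: "test-key-256",
            Algorithm: "GOST12-256",
            Data: Convert.ToBase64String(payload));

        // System.Net.Http.Json uses camelCase (web) defaults, matching the JSON shown above.
        using HttpResponseMessage response = await http.PostAsJsonAsync("/sign", request);
        response.EnsureSuccessStatusCode();

        return await response.Content.ReadFromJsonAsync<SignResponse>();
    }
}
```

Any signature returned this way is a test artifact, per the warning at the top of this guide.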

See `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/` for the provider implementation.

## Related Documentation

- [Wine CSP Loader Design](../security/wine-csp-loader-design.md)
- [RU Crypto Validation Sprint](../implplan/SPRINT_0514_0001_0002_ru_crypto_validation.md)
- [Crypto Provider Registry](../contracts/crypto-provider-registry.md)
- [Crypto Compliance Review](../legal/crypto-compliance-review.md)
@@ -43,7 +43,7 @@
| 8 | CONCELIER-RISK-68-001 | DONE (2025-12-05) | Implemented `IPolicyStudioSignalPicker`, `PolicyStudioSignalInput`, `PolicyStudioSignalPicker` with provenance tracking; updated `IVendorRiskSignalProvider` with batch methods; DI registration in `AddConcelierRiskServices()`. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. |
| 9 | CONCELIER-RISK-69-001 | DONE (2025-11-28) | Implemented `AdvisoryFieldChangeNotification`, `AdvisoryFieldChange` models + `IAdvisoryFieldChangeEmitter` interface + `AdvisoryFieldChangeEmitter` implementation + `InMemoryAdvisoryFieldChangeNotificationPublisher` in `src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/`. Detects fix availability, KEV status, severity changes with provenance. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. |
| 10 | CONCELIER-SIG-26-001 | DONE (2025-12-06) | Implemented; 17 unit tests. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. |
| 11 | CONCELIER-STORE-AOC-19-005-DEV | BLOCKED (2025-11-04) | Waiting on staging dataset hash + rollback rehearsal using prep doc | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Execute raw-linkset backfill/rollback plan so Mongo reflects Link-Not-Merge data; rehearse rollback (dev/staging). |
| 11 | CONCELIER-STORE-AOC-19-005-DEV | TODO | Prep runbook published at `docs/modules/concelier/prep/store-aoc-19-005-dev.md`; stage dataset tarball + hash, then execute backfill/rollback rehearsal. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Execute raw-linkset backfill/rollback plan so Mongo reflects Link-Not-Merge data; rehearse rollback (dev/staging). |
| 12 | CONCELIER-TEN-48-001 | DONE (2025-11-28) | Created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, and `TenantScopeNormalizer` per AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. |
| 13 | CONCELIER-VEXLENS-30-001 | DONE (2025-12-05) | Implemented `IVexLensAdvisoryKeyProvider`, `VexLensCanonicalKey`, `VexLensCrossLinks`, `VexLensAdvisoryKeyProvider` with canonicalization per CONTRACT-ADVISORY-KEY-001 and CONTRACT-VEX-LENS-005. DI registration via `AddConcelierVexLensServices()`. | Concelier WebService Guild · VEX Lens Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Guarantee advisory key consistency and cross-links consumed by VEX Lens so consensus explanations cite Concelier evidence without merges. |
| 14 | CONCELIER-GAPS-115-014 | DONE (2025-12-02) | None; informs tasks 0–13. | Product Mgmt · Concelier Guild | Address Concelier ingestion gaps CI1–CI10 from `docs/product-advisories/31-Nov-2025 FINDINGS.md`: publish signed observation/linkset schemas and AOC guard, enforce denylist/allowlist via analyzers, require provenance/signature details, feed snapshot governance/staleness, deterministic conflict rules, canonical content-hash/idempotency keys, tenant isolation tests, connector sandbox limits, offline advisory bundle schema/verify, and shared fixtures/CI determinism. |
@@ -55,6 +55,7 @@
| 2025-12-06 | Unblocked CONCELIER-SIG-26-001 (task 10): SIGNALS-24-002 CAS approved per BLOCKED_DEPENDENCY_TREE.md Section 6. Task now TODO and ready for implementation. | Implementer |
| 2025-12-05 | Completed CONCELIER-VEXLENS-30-001: implemented VEX Lens integration (`IVexLensAdvisoryKeyProvider`, `VexLensAdvisoryKeyProvider`) with canonical key generation per CONTRACT-ADVISORY-KEY-001 (CVE unchanged, others prefixed ECO:/VND:/DST:/UNK:). Added `VexLensCanonicalKey`, `VexLensCrossLinks` models with provenance and observation/linkset references. DI registration via `AddConcelierVexLensServices()`. | Implementer |
| 2025-12-05 | Completed CONCELIER-RISK-68-001: implemented Policy Studio signal picker (`IPolicyStudioSignalPicker`, `PolicyStudioSignalPicker`) with `PolicyStudioSignalInput` model. All fields are provenance-backed per CONTRACT-POLICY-STUDIO-007. Added `GetSignalAsync` and `GetSignalsBatchAsync` methods to `IVendorRiskSignalProvider`. DI registration via `AddConcelierRiskServices()`. | Implementer |
| 2025-12-07 | Published backfill/rollback runbook at `docs/modules/concelier/prep/store-aoc-19-005-dev.md`; status set to TODO awaiting dataset tarball + hash staging. | Project Mgmt |
| 2025-12-03 | Added Wave Coordination (A prep/policy done; B tenant/backfill pending STORE-AOC-19-005; C signals/VEX Lens blocked on upstream contracts). No status changes. | Project Mgmt |
| 2025-12-02 | Completed CONCELIER-GAPS-115-014: published signed LNM schemas + manifest/signature, added connector HttpClient sandbox analyzer, hardened AOC guard for canonical sha256 + signature metadata, added determinism/tenant isolation tests and offline bundle fixtures. Targeted Core tests passing. | Implementer |
| 2025-12-02 | Started CONCELIER-GAPS-115-014 remediation: schema signing, AOC provenance guard, determinism/tenant isolation tests. | Implementer |
@@ -93,7 +94,7 @@

## Decisions & Risks
- Policy enrichment chain must remain fact-only; any weighting or prioritization belongs to Policy Engine, not Concelier.
- Raw linkset backfill (STORE-AOC-19-005) must preserve rollback paths to protect Offline Kit deployments; release packaging tracked separately in DevOps planning.
- Raw linkset backfill (STORE-AOC-19-005) follows runbook at `docs/modules/concelier/prep/store-aoc-19-005-dev.md`; rollback remains mandatory to protect Offline Kit deployments.
- Tenant-aware linking and notification hooks depend on Authority/Signals contracts; delays could stall AOC compliance and downstream alerts.
- Upstream contracts absent: POLICY-20-001 (sprint 0114), AUTH-TEN-47-001, SIGNALS-24-002—until delivered, POLICY/RISK/SIG/TEN tasks in this sprint stay BLOCKED.
- CI1–CI10 remediation shipped: signed schema bundle (`docs/modules/concelier/schemas/*`) with detached signature, AOC guard now enforces canonical sha256 + signature metadata, connector analyzer `CONCELIER0004` guards unsandboxed `HttpClient`, and deterministic fixtures/tests cover idempotency/tenant isolation/offline bundle staleness.
@@ -34,7 +34,7 @@
- **Wave B (provenance exports):** Task 4 DONE; uses orchestrator export contract (now marked DONE). Keep linkage stable.
- **Wave C (air-gap provenance — COMPLETE):** Tasks 5–8 ALL DONE (2025-12-06). Staleness validation, evidence snapshots, and timeline impact events implemented.
- **Wave D (attestation pointers — COMPLETE):** Task 9 DONE (2025-12-07). Full attestation pointer infrastructure implemented.
- **Wave E (deployment collateral):** Task 3 BLOCKED pending DevOps paths for manifests/offline kit. Run after Wave C to avoid conflicting asset locations.
- **Wave E (deployment collateral — COMPLETE):** Task 3 DONE (2025-12-07). Compose, Helm, and offline-kit assets delivered to `ops/devops/findings-ledger/`.
- Do not start blocked waves until dependencies land; avoid drift by keeping current DONE artifacts immutable.

## Documentation Prerequisites
@@ -56,7 +56,7 @@
| P3 | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | DONE (2025-11-22) | Due 2025-11-21 · Accountable: Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Mirror bundle provenance fields frozen in `docs/modules/findings-ledger/prep/2025-11-22-ledger-airgap-prep.md`; staleness/anchor rules defined. |
| 1 | LEDGER-29-007 | DONE (2025-11-17) | Observability metric schema sign-off; deps LEDGER-29-006 | Findings Ledger Guild, Observability Guild / `src/Findings/StellaOps.Findings.Ledger` | Instrument `ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`, structured logs, Merkle anchoring alerts, and publish dashboards. |
| 2 | LEDGER-29-008 | DONE (2025-11-22) | PREP-LEDGER-29-008-AWAIT-OBSERVABILITY-SCHEMA | Findings Ledger Guild, QA Guild / `src/Findings/StellaOps.Findings.Ledger` | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5 M findings/tenant. |
| 3 | LEDGER-29-009-DEV | TODO | Asset paths approved under `ops/devops/findings-ledger/**`; implement Compose/Helm/offline-kit overlays and finalize backup/restore runbook. | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). |
| 3 | LEDGER-29-009-DEV | **DONE** (2025-12-07) | Implemented Compose overlay, Helm chart, and offline kit with dashboard/alerts. | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). |
| 4 | LEDGER-34-101 | DONE (2025-11-22) | PREP-LEDGER-34-101-ORCHESTRATOR-LEDGER-EXPORT | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. Contract reference: `docs/modules/orchestrator/job-export-contract.md`. |
| 5 | LEDGER-AIRGAP-56-001 | DONE (2025-11-22) | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. |
| 6 | LEDGER-AIRGAP-56-002 | **DONE** (2025-12-06) | Implemented AirGapOptions, StalenessValidationService, staleness metrics. | Findings Ledger Guild, AirGap Time Guild / `src/Findings/StellaOps.Findings.Ledger` | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. |
@@ -67,6 +67,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | **LEDGER-29-009-DEV DONE:** Created deployment collateral at `ops/devops/findings-ledger/` including: Compose overlay (docker-compose.ledger.yaml, env files), Helm chart (deployment, service, configmap, migration-job templates), and offline kit (manifest.yaml, import-images.sh, run-migrations.sh, verify-install.sh, Grafana dashboard, Prometheus alerts). Wave E complete. | Implementer |
| 2025-12-07 | **LEDGER-ATTEST-73-001 DONE:** Implemented AttestationPointerRecord, IAttestationPointerRepository, PostgresAttestationPointerRepository, AttestationPointerService, WebService endpoints (POST/GET/PUT /v1/ledger/attestation-pointers), migration 008_attestation_pointers.sql, and unit tests. Added attestation.pointer_linked ledger event type and timeline logging. Wave D complete. | Implementer |
| 2025-12-06 | **LEDGER-ATTEST-73-001 Unblocked:** Changed from BLOCKED to TODO. Attestation pointer schema now available at `docs/schemas/attestation-pointer.schema.json`. Wave D can proceed. | Implementer |
| 2025-12-06 | **LEDGER-AIRGAP-56-002 DONE:** Implemented AirGapOptions (staleness config), StalenessValidationService (export blocking with ERR_AIRGAP_STALE), extended IAirgapImportRepository with staleness queries, added ledger_airgap_staleness_seconds and ledger_staleness_validation_failures_total metrics. | Implementer |
@@ -11,8 +11,7 @@
## Wave Coordination
- **Wave A (SPL schema/tooling):** Tasks 10–15 DONE; keep SPL schema/fixtures/canonicalizer/layering stable.
- **Wave B (risk profile lifecycle APIs):** Tasks 1–2 DONE; publish schema and lifecycle endpoints; hold steady for downstream consumers.
- **Wave C (risk simulations/overrides/exports/notifications/air-gap):** Tasks 3–7, 9 TODO; unblocked by contracts ([RISK-SCORING-002](../contracts/risk-scoring.md), [POLICY-STUDIO-007](../contracts/policy-studio.md), [AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md), [MIRROR-BUNDLE-003](../contracts/mirror-bundle.md), [SEALED-MODE-004](../contracts/sealed-mode.md)). Task 8 (notifications) now unblocked; proceed with policy notifications implementation using `docs/modules/policy/notifications.md`.
- No additional work in progress; avoid starting Wave C until dependencies clear.
- **Wave C (risk simulations/overrides/exports/notifications/air-gap — COMPLETE):** Tasks 3–9 DONE. All Wave C deliverables (simulations, overrides, exports, notifications, air-gap) implemented. Sprint 0128 complete.

## Documentation Prerequisites
- `docs/README.md`
@@ -32,7 +31,7 @@
| 5 | POLICY-RISK-68-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md). | Risk Profile Schema Guild · Authority Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Scope selectors, precedence rules, Authority attachment. |
| 6 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked by [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md) (RiskOverrides included). | Risk Profile Schema Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Override/adjustment support with audit metadata. |
| 7 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked; can proceed after task 6 with [CONTRACT-EXPORT-BUNDLE-009](../contracts/export-bundle.md). | Policy · Export Guild / `src/Policy/__Libraries/StellaOps.Policy` | Export/import RiskProfiles with signatures. |
| 8 | POLICY-RISK-69-001 | TODO | Notifications contract published at `docs/modules/policy/notifications.md`. | Policy · Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. |
| 8 | POLICY-RISK-69-001 | **DONE** (2025-12-07) | Notifications contract implemented per `docs/modules/policy/notifications.md`. | Policy · Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. |
| 9 | POLICY-RISK-70-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-MIRROR-BUNDLE-003](../contracts/mirror-bundle.md) and [CONTRACT-SEALED-MODE-004](../contracts/sealed-mode.md). | Policy · Export Guild / `src/Policy/StellaOps.Policy.Engine` | Air-gap export/import for profiles with signatures. |
| 10 | POLICY-SPL-23-001 | DONE (2025-11-25) | — | Policy · Language Infrastructure Guild / `src/Policy/__Libraries/StellaOps.Policy` | Define SPL v1 schema + fixtures. |
| 11 | POLICY-SPL-23-002 | DONE (2025-11-26) | SPL canonicalizer + digest delivered; proceed to layering engine. | Policy Guild / `src/Policy/__Libraries/StellaOps.Policy` | Canonicalizer + content hashing. |
@@ -44,6 +43,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | **POLICY-RISK-69-001 DONE:** Implemented policy profile notifications per contract at `docs/modules/policy/notifications.md`. Created: (1) `PolicyProfileNotificationModels.cs` with event types (created/activated/deactivated/threshold_changed/override_added/override_removed/simulation_ready), payload models matching JSON contract (UUIDv7 event_id, actor, thresholds, effective_scope, hash, links, trace); (2) `PolicyProfileNotificationPublisher.cs` with `IPolicyProfileNotificationPublisher` interface and `LoggingPolicyProfileNotificationPublisher` for structured logging + HMAC-SHA256 webhook signatures; (3) `PolicyProfileNotificationFactory.cs` for event creation with UUIDv7 generation and trace context; (4) `PolicyProfileNotificationService.cs` orchestrating notifications from lifecycle events; (5) DI extensions; (6) 15 unit tests in `PolicyProfileNotificationServiceTests.cs`. Wave C notifications complete. | Implementer |
| 2025-12-07 | Published notifications contract at `docs/modules/policy/notifications.md`; set POLICY-RISK-69-001 to TODO. | Project Mgmt |
| 2025-12-03 | Added Wave Coordination (A SPL tooling done; B risk lifecycle APIs done; C simulations/overrides/exports/notifications/air-gap blocked). No status changes. | Project Mgmt |
| 2025-11-27 | `POLICY-RISK-67-002` (task 2): Added `RiskProfileSchemaEndpoints.cs` with `/.well-known/risk-profile-schema` endpoint (anonymous, ETag/Cache-Control, schema v1) and `/api/risk/schema/validate` POST endpoint for profile validation. Extended `RiskProfileSchemaProvider` with GetSchemaText(), GetSchemaVersion(), and GetETag() methods. Added `risk-profile` CLI command group with `validate` (--input, --format, --output, --strict) and `schema` (--output) subcommands. Added RiskProfile project reference to CLI. | Implementer |
@@ -26,7 +26,7 @@
## Delivery Tracker
| # | Task ID & handle | State | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- |
| 1 | POLICY-TEN-48-001 | TODO | Tenant/project RLS design published at `docs/modules/policy/prep/tenant-rls.md`. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. |
| 1 | POLICY-TEN-48-001 | DONE (2025-12-07) | Tenant context infrastructure complete. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. |
| 2 | REGISTRY-API-27-001 | DONE (2025-12-06) | OpenAPI spec available; typed client implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Define Registry API spec + typed clients. |
| 3 | REGISTRY-API-27-002 | DONE (2025-12-06) | Depends on 27-001; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Workspace storage with CRUD + history. |
| 4 | REGISTRY-API-27-003 | DONE (2025-12-06) | Depends on 27-002; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Compile endpoint integration. |
@@ -67,6 +67,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | POLICY-TEN-48-001 DONE: Created tenant context infrastructure per RLS design. Implemented `TenantContextModels.cs` (TenantContext record, TenantContextOptions, ITenantContextAccessor with AsyncLocal, TenantValidationResult, TenantContextConstants for headers X-Stella-Tenant/X-Stella-Project and PostgreSQL GUCs app.tenant_id/app.project_id/app.can_write), `TenantContextMiddleware.cs` (header extraction, regex ID validation, write permission from scopes/claims, actor ID extraction, deterministic error codes POLICY_TENANT_HEADER_REQUIRED/POLICY_TENANT_ID_INVALID), `TenantContextServiceCollectionExtensions.cs` (DI extensions AddTenantContext, middleware UseTenantContext, endpoint filter RequireTenantContext, TenantContextEndpointFilter). Added 27 unit tests in `TenantContextTests.cs` covering context creation, validation, middleware behavior, ID format validation, scope detection. Build succeeds with 0 errors. **Sprint 0129 complete: all 37 tasks now DONE.** | Implementer |
| 2025-12-07 | Published tenant/project RLS design at `docs/modules/policy/prep/tenant-rls.md`; set POLICY-TEN-48-001 to TODO. | Project Mgmt |
| 2025-12-06 | REGISTRY-API-27-010 DONE: Created test suites and fixtures. Implemented `PolicyRegistryTestHarness` (integration test harness with all services wired, determinism testing), `PolicyRegistryTestFixtures` (test data generators for rules, simulation inputs, batch inputs, verification policies, snapshots, violations, overrides). Supports full workflow testing from pack creation through promotion. **Wave B complete: all 10 Registry API tasks (27-001 through 27-010) now DONE.** Build succeeds with no errors. | Implementer |
| 2025-12-06 | REGISTRY-API-27-009 DONE: Created observability infrastructure. Implemented `PolicyRegistryMetrics` (System.Diagnostics.Metrics with counters/histograms/gauges for packs, compilations, simulations, reviews, promotions), `PolicyRegistryActivitySource` (distributed tracing with activity helpers for all operations), `PolicyRegistryLogEvents` (structured logging event IDs 1000-1999 with log message templates). Covers full lifecycle from pack creation through promotion. Build succeeds with no errors. | Implementer |
@@ -68,18 +68,18 @@
|
||||
| E5 | JAVA-ENH-E05 | DONE | D4 | Java Guild | Add conflict detection post-processing in `AnalyzeAsync` - emit conflict.* metadata |
|
||||
| E6 | JAVA-ENH-E06 | DONE | B6, C6, E1-E5 | Java Guild | Update `JavaLockEntry` record - add Scope, VersionSource, License fields |
|
||||
| **Wave F: Testing** |
|
||||
| F1 | JAVA-ENH-F01 | TODO | B2 | QA Guild | Create fixture `gradle-groovy/` - Groovy DSL with string/map notation |
|
||||
| F2 | JAVA-ENH-F02 | TODO | B3 | QA Guild | Create fixture `gradle-kotlin/` - Kotlin DSL with type-safe accessors |
|
||||
| F3 | JAVA-ENH-F03 | TODO | B5 | QA Guild | Create fixture `gradle-catalog/` - libs.versions.toml with version references |
|
||||
| F4 | JAVA-ENH-F04 | TODO | C6 | QA Guild | Create fixture `maven-parent/` - parent POM version inheritance |
|
||||
| F5 | JAVA-ENH-F05 | TODO | C4 | QA Guild | Create fixture `maven-bom/` - BOM import with dependencyManagement |
|
||||
| F6 | JAVA-ENH-F06 | TODO | C3 | QA Guild | Create fixture `maven-properties/` - property placeholder resolution |
|
||||
| F7 | JAVA-ENH-F07 | TODO | D1 | QA Guild | Create fixture `shaded-maven/` - JAR with multiple pom.properties + dependency-reduced-pom.xml |
|
||||
| F8 | JAVA-ENH-F08 | TODO | D2 | QA Guild | Create fixture `osgi-bundle/` - JAR with Bundle-SymbolicName manifest |
|
||||
| F9 | JAVA-ENH-F09 | TODO | E3 | QA Guild | Create fixture `maven-license/` - pom.xml with <licenses> element |
|
||||
| F10 | JAVA-ENH-F10 | TODO | D3 | QA Guild | Create fixture `maven-scopes/` - dependencies with test/provided/runtime scopes |
|
||||
| F11 | JAVA-ENH-F11 | TODO | D4 | QA Guild | Create fixture `version-conflict/` - multiple versions of same library |
|
||||
| F12 | JAVA-ENH-F12 | TODO | F1-F11 | QA Guild | Add integration tests in `JavaLanguageAnalyzerTests.cs` using golden fixture harness |
|
||||
| F1 | JAVA-ENH-F01 | DONE | B2 | QA Guild | Create fixture `gradle-groovy/` - Groovy DSL with string/map notation |
|
||||
| F2 | JAVA-ENH-F02 | DONE | B3 | QA Guild | Create fixture `gradle-kotlin/` - Kotlin DSL with type-safe accessors |
|
||||
| F3 | JAVA-ENH-F03 | DONE | B5 | QA Guild | Create fixture `gradle-catalog/` - libs.versions.toml with version references |
|
||||
| F4 | JAVA-ENH-F04 | DONE | C6 | QA Guild | Create fixture `maven-parent/` - parent POM version inheritance |
|
||||
| F5 | JAVA-ENH-F05 | DONE | C4 | QA Guild | Create fixture `maven-bom/` - BOM import with dependencyManagement |
|
||||
| F6 | JAVA-ENH-F06 | DONE | C3 | QA Guild | Create fixture `maven-properties/` - property placeholder resolution |
|
||||
| F7 | JAVA-ENH-F07 | DONE | D1 | QA Guild | Create fixture `shaded-maven/` - JAR with multiple pom.properties + dependency-reduced-pom.xml |
|
||||
| F8 | JAVA-ENH-F08 | DONE | D2 | QA Guild | Create fixture `osgi-bundle/` - JAR with Bundle-SymbolicName manifest |
|
||||
| F9 | JAVA-ENH-F09 | DONE | E3 | QA Guild | Create fixture `maven-license/` - pom.xml with <licenses> element |
|
||||
| F10 | JAVA-ENH-F10 | DONE | D3 | QA Guild | Create fixture `maven-scopes/` - dependencies with test/provided/runtime scopes |
|
||||
| F11 | JAVA-ENH-F11 | DONE | D4 | QA Guild | Create fixture `version-conflict/` - multiple versions of same library |
|
||||
| F12 | JAVA-ENH-F12 | DONE | F1-F11 | QA Guild | Add integration tests in `JavaLanguageAnalyzerTests.cs` using golden fixture harness |
|
||||
| F13 | JAVA-ENH-F13 | DONE | B2-B5, C1, D1-D4 | QA Guild | Add unit tests for individual parsers (GradleGroovyParserTests, MavenPomParserTests, etc.) |
|
||||
|
||||
## Execution Log
|
||||
@@ -93,6 +93,7 @@
|
||||
| 2025-12-06 | Wave E complete: Integrated ShadedJarDetector, OsgiBundleParser, conflict detection into JavaLanguageAnalyzer | Claude |
|
||||
| 2025-12-06 | Build verified successful - all 18 new files compile, integration complete | Claude |
|
||||
| 2025-12-06 | Wave F partial: Created 4 unit test files (GradleGroovyParserTests, MavenPomParserTests, ShadedJarDetectorTests, OsgiBundleParserTests, VersionConflictDetectorTests) | Claude |
|
||||
| 2025-12-07 | Wave F complete: Created 11 fixtures (gradle-groovy, gradle-kotlin, gradle-catalog, maven-parent, maven-bom, maven-properties, shaded-maven, osgi-bundle, maven-license, maven-scopes, version-conflict) and 7 integration tests in JavaLanguageAnalyzerTests.cs | Claude |
|
||||
|
||||
## Decisions & Risks
|
||||
- **Risk:** Gradle DSL is dynamic; regex-based parsing will miss complex patterns
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
| 9 | SCAN-RPM-BDB-0146-09 | TODO | Add rpmdb BerkeleyDB fallback + fixtures; wire into analyzer pipeline. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
|
||||
| 10 | SCAN-OS-FILES-0146-10 | TODO | Wire layer digest + hashing into OS file evidence and fragments. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
|
||||
| 11 | SCAN-NODE-PNP-0146-11 | TODO | Implement Yarn PnP resolution + tighten declared-only emissions. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
|
||||
| 12 | SCAN-PY-EGG-0146-12 | TODO | Add `.egg-info`/editable detection + metadata to Python analyzer. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
||||
| 12 | SCAN-PY-EGG-0146-12 | DOING | Add `.egg-info`/editable detection + metadata to Python analyzer. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
||||
| 13 | SCAN-NATIVE-REACH-0146-13 | TODO | Implement native reachability graph baseline (call edges, Unknowns). | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
|
||||
|
||||
## Execution Log
|
||||
@@ -40,6 +40,7 @@
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
|
||||
| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
|
||||
| 2025-12-07 | Began SCAN-PY-EGG-0146-12 implementation (egg-info detection/provenance). | Scanner Lang |
|
||||
|
||||
## Decisions & Risks
|
||||
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
|
||||
|
||||
@@ -76,10 +76,12 @@
|
||||
| R4 | CVSS parser/ruleset changes ungoverned (CVM9). | Score drift, audit gaps. | Version parsers/rulesets; DSSE-sign releases; log scorer version in receipts; dual-review changes. |
|
||||
| R5 | Missing AGENTS for Policy WebService and Concelier ingestion block integration (tasks 8–11). | API/CLI/UI delivery stalled. | AGENTS delivered 2025-12-06 (tasks 15–16). Risk mitigated; monitor API contract approvals. |
|
||||
| R6 | Policy Engine lacks CVSS receipt endpoints; gateway proxy cannot be implemented yet. | API/CLI/UI tasks remain blocked. | **Mitigated 2025-12-06:** CVSS receipt endpoints implemented in Policy Engine and Gateway; unblock CLI/UI. |
|
||||
| R7 | System.CommandLine (beta5) API drift versus existing command wiring (SetAction/AddOption/IsRequired) is blocking CLI build despite CVSS verbs implemented. | CLI deliverable cannot be validated; downstream docs/tests stay blocked. | Update handlers to current API or pin to a compatible version and refactor accordingly; CLI Guild. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer |
|
||||
| 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. | Implementer |
|
||||
| 2025-12-06 | CVSS-API-190-009 DONE: added Policy Engine CVSS receipt endpoints and Gateway proxies (`/api/cvss/receipts`, history, amend, policies); W3 unblocked; risk R6 mitigated. | Implementer |
|
||||
| 2025-12-06 | CVSS-CONCELIER-190-008 DONE: prioritized CVSS v4.0 vectors as primary in advisory→Postgres conversion; provenance preserved; enables Policy receipt ingestion. CVSS-API-190-009 set BLOCKED pending Policy Engine CVSS receipt endpoints (risk R6). | Implementer |
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
- Restore workspace disk/PTY availability so Web console implementation can proceed (owner: DevOps Guild; due: 2025-12-02; status: in progress 2025-12-01).
|
||||
| # | Action | Owner | Due | Status |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| 1 | Publish console export bundle orchestration contract + manifest schema and streaming limits; add samples to `docs/api/console/samples/`. | Policy Guild · Console Guild | 2025-12-08 | DOING (draft published, awaiting guild sign-off) |
|
||||
| 1 | Publish console export bundle orchestration contract + manifest schema and streaming limits; add samples to `docs/api/console/samples/`. | Policy Guild · Console Guild | 2025-12-08 | DOING (contract v0.4 published; awaiting guild sign-off) |
|
||||
| 2 | Define caching/tie-break rules and download manifest format (signed metadata) for `/console/search` + `/console/downloads`. | Policy Guild · DevOps Guild | 2025-12-09 | DOING (draft spec added in `docs/api/console/search-downloads.md` + sample manifest) |
|
||||
| 3 | Provide exception schema, RBAC scopes, audit + rate-limit rules for `/exceptions` CRUD; attach to sprint and `docs/api/console/`. | Policy Guild · Platform Events | 2025-12-09 | TODO |
|
||||
| 4 | Restore PTY/shell capacity on web host (openpty exhaustion) to allow tests/builds. | DevOps Guild | 2025-12-07 | In progress (local workaround using Playwright Chromium headless + NG_PERSISTENT_BUILD_CACHE) |
|
||||
@@ -87,8 +87,10 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Hardened console exports contract to v0.4 in `docs/api/console/workspaces.md`: deterministic manifest ordering, DSSE option, cache/ETag headers, size/item caps, aligned samples (`console-export-manifest.json`). Awaiting Policy/DevOps sign-off. | Project Mgmt |
|
||||
| 2025-12-07 | WEB-CONSOLE-23-003 exports specs green (6/6) using Playwright Chromium 141 headless. Command: `CHROME_BIN=C:\Users\vlindos\AppData\Local\ms-playwright\chromium-1194\chrome-win\chrome.exe STELLAOPS_CHROMIUM_BIN=%CHROME_BIN% NG_PERSISTENT_BUILD_CACHE=1 node ./node_modules/@angular/cli/bin/ng.js test --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts --include src/app/core/console/console-export.store.spec.ts --include src/app/core/console/console-export.service.spec.ts`. Backend export manifest/limits still pending Policy sign-off. | Implementer |
|
||||
| 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt |
|
||||
| 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; command: `CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome NG_PERSISTENT_BUILD_CACHE=1 npm test -- --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts,src/app/core/console/console-export.store.spec.ts,src/app/core/console/console-export.service.spec.ts`. Build phase still slow (~5–7m); latest run terminated early while compiling—expect pass once allowed to finish. Backend contract still draft. | Implementer |
|
||||
| 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; earlier run terminated mid-compile but rerun completed successfully (see 2025-12-07 entry above for command). Backend contract still draft. | Implementer |
|
||||
| 2025-12-04 | WEB-CONSOLE-23-002 completed: wired `console/status` route in `app.routes.ts`; created sample payloads `console-status-sample.json` and `console-run-stream-sample.ndjson` in `docs/api/console/samples/` verified against `ConsoleStatusDto` and `ConsoleRunEventDto` contracts. | BE-Base Platform Guild |
|
||||
| 2025-12-02 | WEB-CONSOLE-23-002: added trace IDs on status/stream calls, heartbeat + exponential backoff reconnect in console run stream service, and new client/service unit tests. Backend commands still not run locally (disk constraint). | BE-Base Platform Guild |
|
||||
| 2025-12-04 | Re-reviewed CONSOLE-VULN-29-001 and CONSOLE-VEX-30-001: WEB-CONSOLE-23-001 and Excititor console contract are complete, but Concelier graph schema snapshot and VEX Lens PLVL0103 spec/SSE envelope remain outstanding; keeping both tasks BLOCKED. | Project Mgmt |
|
||||
|
||||
@@ -21,10 +21,10 @@
|
||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| 1 | RU-CRYPTO-VAL-01 | TODO | Linux OpenSSL toolchain present | Security Guild · QA | Validate OpenSSL GOST path on Linux; sign/verify test vectors; publish determinism report and hashes. |
|
||||
| 2 | RU-CRYPTO-VAL-02 | TODO | After #1 | Authority · Security | Wire registry defaults (`ru.openssl.gost`, `ru.pkcs11`) into Authority/Signer/Attestor hosts with env toggles and fail-closed validation (Linux-only baseline). |
|
||||
| 3 | RU-CRYPTO-VAL-03 | TODO | After #1 | Docs · Ops | Update RootPack_RU manifest + verify script for Linux-only GOST; embed signed test vectors/hashes; refresh `etc/rootpack/ru/crypto.profile.yaml` to mark “CSP pending”. |
|
||||
| 2 | RU-CRYPTO-VAL-02 | DOING (2025-12-07) | After #1 | Authority · Security | Wire registry defaults (`ru.openssl.gost`, `ru.pkcs11`) into Authority/Signer/Attestor hosts with env toggles and fail-closed validation (Linux-only baseline). |
|
||||
| 3 | RU-CRYPTO-VAL-03 | DOING (2025-12-07) | After #1 | Docs · Ops | Update RootPack_RU manifest + verify script for Linux-only GOST; embed signed test vectors/hashes; refresh `etc/rootpack/ru/crypto.profile.yaml` to mark “CSP pending”. |
|
||||
| 4 | RU-CRYPTO-VAL-04 | BLOCKED (2025-12-06) | Windows CSP runner provisioned | Security Guild · QA | Run CryptoPro fork + plugin tests on Windows (`STELLAOPS_CRYPTO_PRO_ENABLED=1`); capture logs/artifacts and determinism checks. Blocked: no Windows+CSP runner available. |
|
||||
| 5 | RU-CRYPTO-VAL-05 | DOING | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. **Implemented**: Wine CSP HTTP service + crypto registry provider. |
|
||||
| 5 | RU-CRYPTO-VAL-05 | DONE (2025-12-07) | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. **Implemented**: Wine CSP HTTP service + crypto registry provider. |
|
||||
| 6 | RU-CRYPTO-VAL-06 | BLOCKED (2025-12-06) | Parallel | Security · Legal | Complete license/export review for CryptoPro & fork; document distribution matrix and EULA notices. |
|
||||
| 7 | RU-CRYPTO-VAL-07 | BLOCKED (2025-12-06) | After #4/#5 | DevOps | Enable opt-in CI lane (`cryptopro-optin.yml`) with gated secrets/pins once CSP/Wine path validated. |
|
||||
|
||||
@@ -38,12 +38,18 @@
|
||||
| 2025-12-07 | Implemented Wine CSP HTTP service (`src/__Tools/WineCspService/`): ASP.NET minimal API exposing /status, /keys, /sign, /verify, /hash, /test-vectors endpoints via GostCryptography fork. | Implementer |
|
||||
| 2025-12-07 | Created Wine environment setup script (`scripts/crypto/setup-wine-csp-service.sh`): initializes Wine prefix, installs vcrun2019, builds service, creates systemd unit and Docker Compose configs. | Implementer |
|
||||
| 2025-12-07 | Created Wine CSP crypto registry provider (`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`): WineCspHttpProvider implements ICryptoProvider, delegates GOST signing/hashing to Wine CSP HTTP service. | Implementer |
|
||||
| 2025-12-07 | Updated RU rootpack profile to prefer OpenSSL GOST on Linux with Wine sidecar fallback; registry wiring now includes `ru.winecsp.http`; tasks 2–3 moved to DOING. | Implementer |
|
||||
| 2025-12-07 | Marked Wine sidecar experiment DONE: DI registration added (`AddStellaOpsCryptoRu` binds WineCsp options) and rootpack references `ru.winecsp.http`. | Implementer |
|
||||
| 2025-12-07 | Created Wine CSP Docker infrastructure: multi-stage Dockerfile (`ops/wine-csp/Dockerfile`), supporting scripts (entrypoint.sh, healthcheck.sh, install-csp.sh), environment config (`deploy/compose/env/wine-csp.env.example`). | Implementer |
|
||||
| 2025-12-07 | Integrated wine-csp service into `docker-compose.dev.yaml` and `docker-compose.mock.yaml` with volumes, health checks, resource limits, and security labels. | Implementer |
|
||||
| 2025-12-07 | Created CI workflow (`.gitea/workflows/wine-csp-build.yml`) with SBOM generation (Syft), Trivy security scan, cosign signing, and air-gap bundle creation. | Implementer |
|
||||
| 2025-12-07 | Published deployment documentation (`docs/deploy/wine-csp-container.md`) covering architecture, API endpoints, Docker Compose integration, security considerations, and troubleshooting. | Implementer |
|
||||
|
||||
## Decisions & Risks
|
||||
- Windows CSP availability may slip; mitigation: document manual runner setup and allow deferred close on #1/#6 (currently blocking).
|
||||
- Licensing/export could block redistribution; must finalize before RootPack publish (currently blocking task 3).
|
||||
- Cross-platform determinism must be proven; if mismatch, block release until fixed; currently waiting on #1/#2 data.
|
||||
- **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). **Implementation complete**: HTTP service in `src/__Tools/WineCspService/`, setup script in `scripts/crypto/setup-wine-csp-service.sh`, crypto registry provider in `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`. Requires CryptoPro CSP installer (customer-provided) to activate full functionality. See `docs/security/wine-csp-loader-design.md`.
|
||||
- **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). **Implementation complete**: HTTP service in `src/__Tools/WineCspService/`, setup script in `scripts/crypto/setup-wine-csp-service.sh`, crypto registry provider in `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`. **Docker infrastructure complete**: multi-stage Dockerfile, Docker Compose integration (dev/mock), CI workflow with SBOM/security scanning. Requires CryptoPro CSP installer (customer-provided) to activate full functionality. See `docs/deploy/wine-csp-container.md` and `docs/security/wine-csp-loader-design.md`.
|
||||
- **Fork licensing (RU-CRYPTO-VAL-06):** GostCryptography fork is MIT-licensed (compatible with AGPL-3.0). CryptoPro CSP is customer-provided. Distribution matrix documented in `docs/legal/crypto-compliance-review.md`. Awaiting legal sign-off.
|
||||
|
||||
## Next Checkpoints
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| 1 | SM-CRYPTO-01 | DONE (2025-12-06) | None | Security · Crypto | Implement `StellaOps.Cryptography.Plugin.SmSoft` provider using BouncyCastle SM2/SM3 (software-only, non-certified); env guard `SM_SOFT_ALLOWED` added. |
|
||||
| 2 | SM-CRYPTO-02 | DONE (2025-12-06) | After #1 | Security · BE (Authority/Signer) | Wire SM soft provider into DI (registered), compliance docs updated with “software-only” caveat. |
|
||||
| 3 | SM-CRYPTO-03 | DOING | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor registers SM provider and loads SM2 keys, but Attestor verification/tests still pending. |
|
||||
| 3 | SM-CRYPTO-03 | DONE (2025-12-07) | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor registers SM provider, loads SM2 keys, and SM2 verification tests passing (software, env-gated). |
|
||||
| 4 | SM-CRYPTO-04 | DONE (2025-12-06) | After #1 | QA · Security | Deterministic software test vectors (sign/verify, hash) added in unit tests; “non-certified” banner documented. |
|
||||
| 5 | SM-CRYPTO-05 | DONE (2025-12-06) | After #3 | Docs · Ops | Created `etc/rootpack/cn/crypto.profile.yaml` with cn-soft profile preferring `cn.sm.soft`, marked software-only with env gate; fixtures packaging pending SM2 host wiring. |
|
||||
| 6 | SM-CRYPTO-06 | BLOCKED (2025-12-06) | Hardware token available | Security · Crypto | Add PKCS#11 SM provider and rerun vectors with certified hardware; replace “software-only” label when certified. |
|
||||
@@ -33,7 +33,7 @@
|
||||
| 2025-12-06 | Implemented SmSoft provider + DI, added SM2/SM3 unit tests, updated compliance doc with software-only caveat; tasks 1,2,4 set to DONE. | Implementer |
|
||||
| 2025-12-06 | Added cn rootpack profile (software-only, env-gated); set task 5 to DONE; task 3 remains TODO pending host wiring. | Implementer |
|
||||
| 2025-12-06 | Started host wiring for SM2: Authority file key loader now supports SM2 raw keys; JWKS tests include SM2; task 3 set to DOING. | Implementer |
|
||||
| 2025-12-07 | Signer SM2 gate + tests added (software registry); Attestor registers SM provider and loads SM2 keys; Attestor verification/tests pending. | Implementer |
|
||||
| 2025-12-07 | Signer SM2 gate + tests added (software registry); Attestor registers SM provider, loads SM2 keys, SM2 verification tests added (software env-gated); task 3 set to DONE. | Implementer |
|
||||
| 2025-12-07 | Attestor SM2 wiring complete: SmSoftCryptoProvider registered in AttestorSigningKeyRegistry, SM2 key loading (PEM/base64/hex), signing tests added. Fixed AWSSDK version conflict and pre-existing test compilation issues. Task 3 set to DONE. | Implementer |
|
||||
|
||||
## Decisions & Risks
|
||||
|
||||
@@ -20,22 +20,24 @@
|
||||
## Delivery Tracker
|
||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| 1 | FIPS-PROV-01 | TODO | Choose “non-certified baseline” path | Security · DevOps | Enforce FIPS algorithm allow-list using BCL + AWS KMS FIPS endpoint/OpenSSL FIPS provider; mark as “non-certified”; collect determinism tests and evidence. |
|
||||
| 2 | FIPS-PROV-02 | TODO | After #1 | Authority · Scanner · Attestor | Enforce FIPS-only algorithms when `fips` profile active; fail-closed validation + JWKS export; tests; label non-certified. |
|
||||
| 1 | FIPS-PROV-01 | DONE (2025-12-07) | Choose “non-certified baseline” path | Security · DevOps | Enforce FIPS algorithm allow-list using BCL + AWS KMS FIPS endpoint/OpenSSL FIPS provider; mark as “non-certified”; collect determinism tests and evidence. |
|
||||
| 2 | FIPS-PROV-02 | DOING (2025-12-07) | After #1 | Authority · Scanner · Attestor | Enforce FIPS-only algorithms when `fips` profile active; fail-closed validation + JWKS export; tests; label non-certified. |
|
||||
| 3 | FIPS-PROV-03 | BLOCKED (2025-12-06) | Select certified module | Security · DevOps | Integrate CMVP-certified module (CloudHSM/Luna/OpenSSL FIPS 3.x) and replace baseline label; gather certification evidence. |
|
||||
| 4 | EIDAS-01 | TODO | Trust store stub | Authority · Security | Add eIDAS profile enforcement (P-256/384 + SHA-256), EU trust-store bundle, JWKS metadata; emit warning when QSCD not present. |
|
||||
| 4 | EIDAS-01 | DOING (2025-12-07) | Trust store stub | Authority · Security | Add eIDAS profile enforcement (P-256/384 + SHA-256), EU trust-store bundle, JWKS metadata; emit warning when QSCD not present. |
|
||||
| 5 | EIDAS-02 | BLOCKED (2025-12-06) | QSCD device available | Authority · Security | Add QSCD/qualified cert handling and policy checks; certify once hardware available. |
|
||||
| 6 | KCMVP-01 | TODO | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
|
||||
| 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
|
||||
| 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once certified toolchain available. |
|
||||
| 8 | PQ-IMPL-01 | TODO | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. |
|
||||
| 8 | PQ-IMPL-01 | DOING (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. |
|
||||
| 9 | PQ-IMPL-02 | TODO | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). |
|
||||
| 10 | ROOTPACK-INTL-01 | TODO | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
|
||||
| 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-06 | Sprint created; awaiting staffing. | Planning |
|
||||
| 2025-12-06 | Re-scoped: added software baselines (FIPS/eIDAS/KCMVP hash-only, PQ with liboqs) as TODO; certified modules/QSCD/ARIA-SEED remain BLOCKED. | Implementer |
|
||||
| 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; 2,4,8,10 moved to DOING pending host wiring and certified modules. | Implementer |
|
||||
| 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including PQ soft provider; registry DI registers new providers. | Implementer |
|
||||
|
||||
## Decisions & Risks
|
||||
- FIPS validation lead time may slip; interim non-certified baseline acceptable but must be clearly labeled until CMVP module lands (task 3).
|
||||
|
||||
76
docs/modules/concelier/prep/store-aoc-19-005-dev.md
Normal file
@@ -0,0 +1,76 @@
|
||||
# Concelier Backfill & Rollback Plan (STORE-AOC-19-005-DEV)
|
||||
|
||||
## Objective
|
||||
Prepare and rehearse the raw-linkset backfill/rollback so Concelier Mongo reflects Link-Not-Merge data deterministically across dev/stage. This runbook unblocks STORE-AOC-19-005-DEV.
|
||||
|
||||
## Inputs
|
||||
- Source dataset: staging export tarball `linksets-stage-backfill.tar.zst`.
|
||||
- Expected placement: `out/linksets/linksets-stage-backfill.tar.zst`.
|
||||
- Hash: record SHA-256 in this file once available (example below).
|
||||
|
||||
Example hash capture (replace with the real digest once recorded):
|
||||
```
|
||||
$ sha256sum out/linksets/linksets-stage-backfill.tar.zst
|
||||
3ac7d1c8f4f7b5c5b27c1c7ac6d6e9b2a2d6d7a1a1c3f4e5b6c7d8e9f0a1b2c3 out/linksets/linksets-stage-backfill.tar.zst
|
||||
```
|
||||
|
||||
## Preflight
|
||||
- Environment variables:
|
||||
- `CONCELIER_MONGO_URI` pointing to the target (dev or staging) Mongo.
|
||||
- `CONCELIER_DB` (default `concelier`).
|
||||
- Take a snapshot of affected collections:
|
||||
```
|
||||
mongodump --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" --collection linksets --collection advisory_chunks --out out/backups/pre-run
|
||||
```
|
||||
- Confirm that taking a write lock is acceptable for the maintenance window.
|
||||
|
||||
## Backfill steps
|
||||
1) Extract dataset:
|
||||
```
|
||||
mkdir -p out/linksets/extracted
|
||||
tar -xf out/linksets/linksets-stage-backfill.tar.zst -C out/linksets/extracted
|
||||
```
|
||||
2) Import linksets + chunks directly with `mongoimport` (this bypasses service-side validation and preserves upstream `_id` values):
|
||||
```
|
||||
mongoimport --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" \
|
||||
--collection linksets --file out/linksets/extracted/linksets.ndjson --mode=upsert --upsertFields=_id
|
||||
|
||||
mongoimport --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" \
|
||||
--collection advisory_chunks --file out/linksets/extracted/advisory_chunks.ndjson --mode=upsert --upsertFields=_id
|
||||
```
|
||||
3) Verify counts vs manifest:
|
||||
```
|
||||
jq '.' out/linksets/extracted/manifest.json
|
||||
mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.linksets.countDocuments()"
|
||||
mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.advisory_chunks.countDocuments()"
|
||||
```
|
||||
4) Dry-run the rollback marker (a no-op unless `ENABLE_ROLLBACK=1` is set):
|
||||
```
|
||||
ENABLE_ROLLBACK=0 python scripts/concelier/backfill/rollback.py --manifest out/linksets/extracted/manifest.json
|
||||
```
|
||||
|
||||
## Rollback procedure
|
||||
- If validation fails, restore from preflight dump:
|
||||
```
|
||||
mongorestore --uri "$CONCELIER_MONGO_URI" --drop out/backups/pre-run
|
||||
```
|
||||
- If a partial write is detected, rerun `mongoimport` for the affected collection only, keeping `--mode=upsert`.
|
||||
|
||||
## Validation checklist
|
||||
- Hash of tarball matches recorded SHA-256.
|
||||
- Post-import counts align with `manifest.json`.
|
||||
- Linkset cursor pagination smoke test:
|
||||
```
|
||||
dotnet test src/Concelier/StellaOps.Concelier.WebService.Tests --filter LinksetsEndpoint_SupportsCursorPagination
|
||||
```
|
||||
- Storage metrics (if enabled) show non-zero `concelier_storage_import_total` for this window.
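
The hash and count checks above can be scripted. A minimal sketch follows; it assumes the manifest exposes per-collection counts at `.collections.<name>.count` (a hypothetical layout; adjust the `jq` paths to the real manifest) and reuses the same `mongo --eval` calls as step 3.

```
set -euo pipefail

TARBALL=out/linksets/linksets-stage-backfill.tar.zst
EXPECTED_SHA256="<recorded-sha256>"   # digest recorded in the Inputs section

# 1. Tarball integrity
ACTUAL_SHA256=$(sha256sum "$TARBALL" | awk '{print $1}')
[ "$ACTUAL_SHA256" = "$EXPECTED_SHA256" ] || { echo "tarball hash mismatch" >&2; exit 1; }

# 2. Counts vs manifest (assumed jq paths; adapt to the real manifest layout)
MANIFEST=out/linksets/extracted/manifest.json
EXPECTED_LINKSETS=$(jq -r '.collections.linksets.count' "$MANIFEST")
EXPECTED_CHUNKS=$(jq -r '.collections.advisory_chunks.count' "$MANIFEST")
ACTUAL_LINKSETS=$(mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.linksets.countDocuments()")
ACTUAL_CHUNKS=$(mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.advisory_chunks.countDocuments()")

[ "$ACTUAL_LINKSETS" = "$EXPECTED_LINKSETS" ] || { echo "linksets count mismatch" >&2; exit 1; }
[ "$ACTUAL_CHUNKS" = "$EXPECTED_CHUNKS" ] || { echo "advisory_chunks count mismatch" >&2; exit 1; }
echo "validation checks passed"
```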
|
||||
|
||||
## Artefacts to record
|
||||
- Tarball SHA-256 and size.
|
||||
- `manifest.json` copy stored alongside tarball.
|
||||
- Import log (`out/linksets/import.log`) and validation results.
|
||||
- Decision record: the maintenance window used and the rollback outcome.
|
||||
|
||||
## Owners
|
||||
- Concelier Storage Guild (Mongo)
|
||||
- AirGap/Backfill reviewers for sign-off
|
||||
@@ -16,9 +16,19 @@ StellaOps supports multiple cryptographic compliance profiles to meet regional r
|
||||
| `eidas` | eIDAS/ETSI TS 119 312 | European Union | EU digital identity and trust |
|
||||
|
||||
**Certification caveats (current baselines)**
|
||||
- `fips`, `eidas`, `kcmvp` are enforced via algorithm allow-lists only; certified modules are not yet integrated. Deployments must treat these as non-certified until a CMVP/QSCD/KCMVP module is configured.
|
||||
- `gost` is validated on Linux via OpenSSL GOST; Windows CryptoPro CSP remains pending.
|
||||
- `sm` uses a software-only SM2/SM3 path when `SM_SOFT_ALLOWED=1`; hardware PKCS#11 validation is pending.
|
||||
- `fips` and `eidas` now route through software allow-listed providers (`fips.ecdsa.soft`, `eu.eidas.soft`) and are labeled **non-certified** until a CMVP/QSCD module is attached (set `FIPS_SOFT_ALLOWED=1` / `EIDAS_SOFT_ALLOWED=1`).
|
||||
- `kcmvp` is covered by a hash-only baseline provider (`kr.kcmvp.hash`, SHA-256) with the `KCMVP_HASH_ALLOWED` gate; ARIA/SEED/KCDSA remain pending.
|
||||
- `gost` has a Linux-ready OpenSSL baseline plus a Wine sidecar for CryptoPro CSP (`ru.winecsp.http`); native Windows CSP stays blocked on licensed runners.
|
||||
- `sm` uses software SM2/SM3 (`cn.sm.soft`, gate `SM_SOFT_ALLOWED=1`); hardware PKCS#11 validation remains pending.
|
||||
- `pq` uses software-only Dilithium3 and Falcon512 (`pq.soft`, gate `PQ_SOFT_ALLOWED=1`); certified PQ modules are not available.
|
||||
|
||||
**Provider identifiers (registry names)**
|
||||
- FIPS: `fips.ecdsa.soft`
|
||||
- eIDAS: `eu.eidas.soft`
|
||||
- KCMVP hash baseline: `kr.kcmvp.hash`
|
||||
- PQ (Dilithium3/Falcon512): `pq.soft`
|
||||
- RU GOST (Wine sidecar): `ru.winecsp.http`
|
||||
- CN SM software: `cn.sm.soft`
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -87,6 +97,14 @@ HMAC operations use purpose-based selection similar to hashing:
|
||||
|
||||
**Note:** The `WebhookInterop` purpose always uses HMAC-SHA256 regardless of profile. This is required for compatibility with external webhook receivers (Slack, Teams, GitHub, etc.) that expect SHA-256 signatures.
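
For reference, external receivers typically recompute the signature over the raw request body with the shared secret. A minimal sketch with standard OpenSSL tooling follows; the payload, secret, and GitHub-style `sha256=<hex>` prefix are illustrative, not a StellaOps contract.

```bash
# Recompute a GitHub-style HMAC-SHA256 webhook signature (illustrative values).
payload='{"event":"scan.completed"}'   # raw request body, byte-for-byte
secret='whsec_example'                 # shared webhook secret (placeholder)
sig="sha256=$(printf '%s' "$payload" | openssl dgst -sha256 -hmac "$secret" -hex | awk '{print $NF}')"
echo "$sig"   # compare against the received signature header (constant-time compare in real code)
```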
|
||||
|
||||
## Simulation paths when hardware is missing
|
||||
|
||||
- **RU / GOST**: Linux baseline uses `ru.openssl.gost`; CryptoPro CSP can be exercised from Linux via the Wine sidecar service (`ru.winecsp.http`) built from `scripts/crypto/setup-wine-csp-service.sh` when customers supply the CSP installer. Windows CSP remains blocked until licensed runners are available.
|
||||
- **CN / SM**: Software-only SM2/SM3 provider (`cn.sm.soft`) backed by BouncyCastle; enable with `SM_SOFT_ALLOWED=1`. Hardware PKCS#11 tokens can be added later without changing feature code because hosts resolve via `ICryptoProviderRegistry`.
|
||||
- **FIPS / eIDAS**: Software allow-lists (`fips.ecdsa.soft`, `eu.eidas.soft`) enforce ES256/ES384 + SHA-2. They are labeled non-certified until a CMVP/QSCD module is supplied.
|
||||
- **KCMVP**: Hash-only baseline (`kr.kcmvp.hash`) keeps SHA-256 available when ARIA/SEED/KCDSA hardware is absent.
|
||||
- **PQ (Dilithium3/Falcon512)**: Software-only `pq.soft` provider using BouncyCastle PQC primitives; gated by `PQ_SOFT_ALLOWED=1`. Certified PQ hardware is not yet available.
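
As a quick reference, the gates above can be exported in the host environment for an offline lab run. A minimal sketch follows; the `curl` check assumes the Wine sidecar's default `ServiceUrl` of `http://localhost:5099` and its `/status` endpoint.

```bash
# Enable the software-only baselines via their env gates (names from this page).
export FIPS_SOFT_ALLOWED=1    # fips.ecdsa.soft
export EIDAS_SOFT_ALLOWED=1   # eu.eidas.soft
export KCMVP_HASH_ALLOWED=1   # kr.kcmvp.hash
export SM_SOFT_ALLOWED=1      # cn.sm.soft
export PQ_SOFT_ALLOWED=1      # pq.soft (Dilithium3 / Falcon512)

# RU/GOST: confirm the Wine CSP sidecar is reachable before routing to ru.winecsp.http.
curl -fsS http://localhost:5099/status
```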
|
||||
|
||||
## Interoperability Exceptions
|
||||
|
||||
Certain operations must use SHA-256 regardless of compliance profile to maintain external compatibility:
|
||||
|
||||
21
etc/rootpack/eu/crypto.profile.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
StellaOps:
|
||||
Crypto:
|
||||
Registry:
|
||||
ActiveProfile: eu-eidas-soft
|
||||
PreferredProviders:
|
||||
- eu.eidas.soft
|
||||
- pq.soft
|
||||
- default
|
||||
Profiles:
|
||||
eu-eidas-soft:
|
||||
PreferredProviders:
|
||||
- eu.eidas.soft
|
||||
- pq.soft
|
||||
- default
|
||||
Diagnostics:
|
||||
Providers:
|
||||
Enabled: true
|
||||
Metrics:
|
||||
LogLevel: Information
|
||||
Notes:
|
||||
Certification: "software-only; QSCD not enforced. Set EIDAS_SOFT_ALLOWED=1 to enable profile."
|
||||
19
etc/rootpack/kr/crypto.profile.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
StellaOps:
|
||||
Crypto:
|
||||
Registry:
|
||||
ActiveProfile: kr-kcmvp-hash
|
||||
PreferredProviders:
|
||||
- kr.kcmvp.hash
|
||||
- default
|
||||
Profiles:
|
||||
kr-kcmvp-hash:
|
||||
PreferredProviders:
|
||||
- kr.kcmvp.hash
|
||||
- default
|
||||
Diagnostics:
|
||||
Providers:
|
||||
Enabled: true
|
||||
Metrics:
|
||||
LogLevel: Information
|
||||
Notes:
|
||||
Certification: "hash-only baseline (SHA-256). Set KCMVP_HASH_ALLOWED=1 to enable."
|
||||
@@ -1,13 +1,21 @@
|
||||
StellaOps:
|
||||
Crypto:
|
||||
Registry:
|
||||
ActiveProfile: ru-offline
|
||||
ActiveProfile: ru-linux-soft
|
||||
PreferredProviders:
|
||||
- default
|
||||
- ru.openssl.gost
|
||||
- ru.winecsp.http
|
||||
- ru.pkcs11
|
||||
Profiles:
|
||||
ru-offline:
|
||||
ru-linux-soft:
|
||||
PreferredProviders:
|
||||
- ru.openssl.gost
|
||||
- ru.winecsp.http
|
||||
- ru.pkcs11
|
||||
ru-csp:
|
||||
PreferredProviders:
|
||||
- ru.cryptopro.csp
|
||||
- ru.winecsp.http
|
||||
- ru.openssl.gost
|
||||
- ru.pkcs11
|
||||
CryptoPro:
|
||||
@@ -28,6 +36,13 @@ StellaOps:
|
||||
Pin: "${PKCS11_PIN}"
|
||||
PrivateKeyLabel: rootpack-signing
|
||||
CertificateThumbprint: "<thumbprint>"
|
||||
WineCsp:
|
||||
ServiceUrl: http://localhost:5099
|
||||
Keys:
|
||||
- KeyId: ru-wine-default
|
||||
Algorithm: GOST12-256
|
||||
RemoteKeyId: ru-csp-default
|
||||
Description: Wine CSP sidecar (CryptoPro via Wine)
|
||||
OpenSsl:
|
||||
Keys:
|
||||
- KeyId: ru-openssl-default
|
||||
|
||||
21
etc/rootpack/us-fips/crypto.profile.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
StellaOps:
|
||||
Crypto:
|
||||
Registry:
|
||||
ActiveProfile: us-fips-soft
|
||||
PreferredProviders:
|
||||
- fips.ecdsa.soft
|
||||
- pq.soft
|
||||
- default
|
||||
Profiles:
|
||||
us-fips-soft:
|
||||
PreferredProviders:
|
||||
- fips.ecdsa.soft
|
||||
- pq.soft
|
||||
- default
|
||||
Diagnostics:
|
||||
Providers:
|
||||
Enabled: true
|
||||
Metrics:
|
||||
LogLevel: Information
|
||||
Notes:
|
||||
Certification: "non-certified software baseline; enable FIPS_SOFT_ALLOWED=1 to activate"
|
||||
@@ -0,0 +1,64 @@
|
||||
# Findings Ledger Docker Compose overlay
|
||||
# Append to or reference from your main compose file
|
||||
#
|
||||
# Usage:
|
||||
# docker compose -f docker-compose.yaml -f ops/devops/findings-ledger/compose/docker-compose.ledger.yaml up -d
|
||||
|
||||
services:
|
||||
findings-ledger:
|
||||
image: stellaops/findings-ledger:${STELLA_VERSION:-2025.11.0}
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- ./env/ledger.${STELLAOPS_ENV:-dev}.env
|
||||
environment:
|
||||
ASPNETCORE_URLS: http://0.0.0.0:8080
|
||||
ASPNETCORE_ENVIRONMENT: ${ASPNETCORE_ENVIRONMENT:-Production}
|
||||
# Database connection (override via env file or secrets)
|
||||
# LEDGER__DB__CONNECTIONSTRING: see secrets
|
||||
# Observability
|
||||
LEDGER__OBSERVABILITY__ENABLED: "true"
|
||||
LEDGER__OBSERVABILITY__OTLPENDPOINT: ${OTEL_EXPORTER_OTLP_ENDPOINT:-http://otel-collector:4317}
|
||||
# Merkle anchoring
|
||||
LEDGER__MERKLE__ANCHORINTERVAL: "00:05:00"
|
||||
LEDGER__MERKLE__EXTERNALIZE: ${LEDGER_MERKLE_EXTERNALIZE:-false}
|
||||
# Attachments
|
||||
LEDGER__ATTACHMENTS__MAXSIZEBYTES: "104857600" # 100MB
|
||||
LEDGER__ATTACHMENTS__ALLOWEGRESS: ${LEDGER_ATTACHMENTS_ALLOWEGRESS:-true}
|
||||
ports:
|
||||
- "${LEDGER_PORT:-8188}:8080"
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-sf", "http://localhost:8080/health/ready"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 15s
|
||||
volumes:
|
||||
- ledger-data:/app/data
|
||||
- ./etc/ledger/appsettings.json:/app/appsettings.json:ro
|
||||
networks:
|
||||
- stellaops
|
||||
|
||||
# Migration job (run before starting ledger)
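  # Hedged usage note: the job sits behind the "migrations" profile, so one way
  # to run it ahead of the service is, for example:
  #   docker compose -f docker-compose.yaml -f ops/devops/findings-ledger/compose/docker-compose.ledger.yaml \
  #     --profile migrations run --rm findings-ledger-migrations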
|
||||
findings-ledger-migrations:
|
||||
image: stellaops/findings-ledger-migrations:${STELLA_VERSION:-2025.11.0}
|
||||
command: ["--connection", "${LEDGER__DB__CONNECTIONSTRING}"]
|
||||
env_file:
|
||||
- ./env/ledger.${STELLAOPS_ENV:-dev}.env
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- stellaops
|
||||
profiles:
|
||||
- migrations
|
||||
|
||||
volumes:
|
||||
ledger-data:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
stellaops:
|
||||
external: true
|
||||
24
ops/devops/findings-ledger/compose/env/ledger.dev.env
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Findings Ledger - Development Environment
|
||||
# Copy to ledger.local.env and customize for local dev
|
||||
|
||||
# Database connection
|
||||
LEDGER__DB__CONNECTIONSTRING=Host=postgres;Port=5432;Database=findings_ledger_dev;Username=ledger;Password=change_me_dev;
|
||||
|
||||
# Attachment encryption key (AES-256, base64 encoded)
|
||||
# Generate with: openssl rand -base64 32
|
||||
LEDGER__ATTACHMENTS__ENCRYPTIONKEY=
|
||||
|
||||
# Merkle anchor signing (optional in dev)
|
||||
LEDGER__MERKLE__SIGNINGKEY=
|
||||
|
||||
# Authority service endpoint (for JWT validation)
|
||||
LEDGER__AUTHORITY__BASEURL=http://authority:8080
|
||||
|
||||
# Logging level
|
||||
Logging__LogLevel__Default=Debug
|
||||
Logging__LogLevel__Microsoft=Information
|
||||
Logging__LogLevel__StellaOps=Debug
|
||||
|
||||
# Feature flags
|
||||
LEDGER__FEATURES__ENABLEATTACHMENTS=true
|
||||
LEDGER__FEATURES__ENABLEAUDITLOG=true
|
||||
40
ops/devops/findings-ledger/compose/env/ledger.prod.env
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
# Findings Ledger - Production Environment
|
||||
# Secrets should be injected from secrets manager, not committed
|
||||
|
||||
# Database connection (inject from secrets manager)
|
||||
# LEDGER__DB__CONNECTIONSTRING=
|
||||
|
||||
# Attachment encryption key (inject from secrets manager)
|
||||
# LEDGER__ATTACHMENTS__ENCRYPTIONKEY=
|
||||
|
||||
# Merkle anchor signing (inject from secrets manager)
|
||||
# LEDGER__MERKLE__SIGNINGKEY=
|
||||
|
||||
# Authority service endpoint
|
||||
LEDGER__AUTHORITY__BASEURL=http://authority:8080
|
||||
|
||||
# Logging level
|
||||
Logging__LogLevel__Default=Warning
|
||||
Logging__LogLevel__Microsoft=Warning
|
||||
Logging__LogLevel__StellaOps=Information
|
||||
|
||||
# Feature flags
|
||||
LEDGER__FEATURES__ENABLEATTACHMENTS=true
|
||||
LEDGER__FEATURES__ENABLEAUDITLOG=true
|
||||
|
||||
# Observability
|
||||
LEDGER__OBSERVABILITY__ENABLED=true
|
||||
LEDGER__OBSERVABILITY__METRICSPORT=9090
|
||||
|
||||
# Merkle anchoring
|
||||
LEDGER__MERKLE__ANCHORINTERVAL=00:05:00
|
||||
LEDGER__MERKLE__EXTERNALIZE=false
|
||||
|
||||
# Attachments
|
||||
LEDGER__ATTACHMENTS__MAXSIZEBYTES=104857600
|
||||
LEDGER__ATTACHMENTS__ALLOWEGRESS=false
|
||||
|
||||
# Air-gap staleness thresholds (seconds)
|
||||
LEDGER__AIRGAP__ADVISORYSTALETHRESHOLD=604800
|
||||
LEDGER__AIRGAP__VEXSTALETHRESHOLD=604800
|
||||
LEDGER__AIRGAP__POLICYSTALETHRESHOLD=86400
|
||||
20
ops/devops/findings-ledger/helm/Chart.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
apiVersion: v2
|
||||
name: stellaops-findings-ledger
|
||||
version: 0.1.0
|
||||
appVersion: "2025.11.0"
|
||||
description: Findings Ledger service for StellaOps platform - event-sourced findings storage with Merkle anchoring.
|
||||
type: application
|
||||
keywords:
|
||||
- findings
|
||||
- ledger
|
||||
- event-sourcing
|
||||
- merkle
|
||||
- attestation
|
||||
maintainers:
|
||||
- name: StellaOps Team
|
||||
email: platform@stellaops.io
|
||||
dependencies:
|
||||
- name: postgresql
|
||||
version: "14.x"
|
||||
repository: https://charts.bitnami.com/bitnami
|
||||
condition: postgresql.enabled
|
||||
80
ops/devops/findings-ledger/helm/templates/_helpers.tpl
Normal file
@@ -0,0 +1,80 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "findings-ledger.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
*/}}
|
||||
{{- define "findings-ledger.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "findings-ledger.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "findings-ledger.labels" -}}
|
||||
helm.sh/chart: {{ include "findings-ledger.chart" . }}
|
||||
{{ include "findings-ledger.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "findings-ledger.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "findings-ledger.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/component: ledger
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "findings-ledger.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "findings-ledger.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Database connection string - from secret or constructed
|
||||
*/}}
|
||||
{{- define "findings-ledger.databaseConnectionString" -}}
|
||||
{{- if .Values.database.connectionStringSecret }}
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Values.database.connectionStringSecret }}
|
||||
key: {{ .Values.database.connectionStringKey }}
|
||||
{{- else if .Values.postgresql.enabled }}
|
||||
value: "Host={{ .Release.Name }}-postgresql;Port=5432;Database={{ .Values.postgresql.auth.database }};Username={{ .Values.postgresql.auth.username }};Password=$(POSTGRES_PASSWORD);"
|
||||
{{- else }}
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Values.secrets.name }}
|
||||
key: LEDGER__DB__CONNECTIONSTRING
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
19
ops/devops/findings-ledger/helm/templates/configmap.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ include "findings-ledger.fullname" . }}-config
|
||||
labels:
|
||||
{{- include "findings-ledger.labels" . | nindent 4 }}
|
||||
data:
|
||||
appsettings.json: |
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Information",
|
||||
"Microsoft": "Warning",
|
||||
"Microsoft.Hosting.Lifetime": "Information",
|
||||
"StellaOps": "Information"
|
||||
}
|
||||
},
|
||||
"AllowedHosts": "*"
|
||||
}
|
||||
122
ops/devops/findings-ledger/helm/templates/deployment.yaml
Normal file
@@ -0,0 +1,122 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "findings-ledger.fullname" . }}
|
||||
labels:
|
||||
{{- include "findings-ledger.labels" . | nindent 4 }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "findings-ledger.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }}
|
||||
labels:
|
||||
{{- include "findings-ledger.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
serviceAccountName: {{ include "findings-ledger.serviceAccountName" . }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: ledger
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: {{ .Values.service.port }}
|
||||
protocol: TCP
|
||||
{{- if .Values.observability.metricsEnabled }}
|
||||
- name: metrics
|
||||
containerPort: {{ .Values.service.metricsPort }}
|
||||
protocol: TCP
|
||||
{{- end }}
|
||||
env:
|
||||
- name: ASPNETCORE_URLS
|
||||
value: "http://0.0.0.0:{{ .Values.service.port }}"
|
||||
- name: ASPNETCORE_ENVIRONMENT
|
||||
value: "Production"
|
||||
# Database
|
||||
- name: LEDGER__DB__CONNECTIONSTRING
|
||||
{{- include "findings-ledger.databaseConnectionString" . | nindent 14 }}
|
||||
# Observability
|
||||
- name: LEDGER__OBSERVABILITY__ENABLED
|
||||
value: {{ .Values.observability.enabled | quote }}
|
||||
- name: LEDGER__OBSERVABILITY__OTLPENDPOINT
|
||||
value: {{ .Values.observability.otlpEndpoint | quote }}
|
||||
# Merkle anchoring
|
||||
- name: LEDGER__MERKLE__ANCHORINTERVAL
|
||||
value: {{ .Values.merkle.anchorInterval | quote }}
|
||||
- name: LEDGER__MERKLE__EXTERNALIZE
|
||||
value: {{ .Values.merkle.externalize | quote }}
|
||||
# Attachments
|
||||
- name: LEDGER__ATTACHMENTS__MAXSIZEBYTES
|
||||
value: {{ .Values.attachments.maxSizeBytes | quote }}
|
||||
- name: LEDGER__ATTACHMENTS__ALLOWEGRESS
|
||||
value: {{ .Values.attachments.allowEgress | quote }}
|
||||
- name: LEDGER__ATTACHMENTS__ENCRYPTIONKEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Values.secrets.name }}
|
||||
key: LEDGER__ATTACHMENTS__ENCRYPTIONKEY
|
||||
# Authority
|
||||
- name: LEDGER__AUTHORITY__BASEURL
|
||||
value: {{ .Values.authority.baseUrl | quote }}
|
||||
# Air-gap thresholds
|
||||
- name: LEDGER__AIRGAP__ADVISORYSTALETHRESHOLD
|
||||
value: {{ .Values.airgap.advisoryStaleThreshold | quote }}
|
||||
- name: LEDGER__AIRGAP__VEXSTALETHRESHOLD
|
||||
value: {{ .Values.airgap.vexStaleThreshold | quote }}
|
||||
- name: LEDGER__AIRGAP__POLICYSTALETHRESHOLD
|
||||
value: {{ .Values.airgap.policyStaleThreshold | quote }}
|
||||
# Features
|
||||
- name: LEDGER__FEATURES__ENABLEATTACHMENTS
|
||||
value: {{ .Values.features.enableAttachments | quote }}
|
||||
- name: LEDGER__FEATURES__ENABLEAUDITLOG
|
||||
value: {{ .Values.features.enableAuditLog | quote }}
|
||||
{{- with .Values.extraEnv }}
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- with .Values.extraEnvFrom }}
|
||||
envFrom:
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: {{ .Values.probes.readiness.path }}
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probes.readiness.initialDelaySeconds }}
|
||||
periodSeconds: {{ .Values.probes.readiness.periodSeconds }}
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: {{ .Values.probes.liveness.path }}
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probes.liveness.initialDelaySeconds }}
|
||||
periodSeconds: {{ .Values.probes.liveness.periodSeconds }}
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
volumeMounts:
|
||||
- name: tmp
|
||||
mountPath: /tmp
|
||||
- name: data
|
||||
mountPath: /app/data
|
||||
volumes:
|
||||
- name: tmp
|
||||
emptyDir: {}
|
||||
- name: data
|
||||
emptyDir: {}
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
43
ops/devops/findings-ledger/helm/templates/migration-job.yaml
Normal file
@@ -0,0 +1,43 @@
|
||||
{{- if .Values.migrations.enabled }}
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ include "findings-ledger.fullname" . }}-migrations
|
||||
labels:
|
||||
{{- include "findings-ledger.labels" . | nindent 4 }}
|
||||
app.kubernetes.io/component: migrations
|
||||
annotations:
|
||||
"helm.sh/hook": pre-install,pre-upgrade
|
||||
"helm.sh/hook-weight": "-5"
|
||||
"helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded
|
||||
spec:
|
||||
backoffLimit: 3
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "findings-ledger.selectorLabels" . | nindent 8 }}
|
||||
app.kubernetes.io/component: migrations
|
||||
spec:
|
||||
serviceAccountName: {{ include "findings-ledger.serviceAccountName" . }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: migrations
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.migrations.image.repository }}:{{ .Values.migrations.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
args:
|
||||
- "--connection"
|
||||
- "$(LEDGER__DB__CONNECTIONSTRING)"
|
||||
env:
|
||||
- name: LEDGER__DB__CONNECTIONSTRING
|
||||
{{- include "findings-ledger.databaseConnectionString" . | nindent 14 }}
|
||||
resources:
|
||||
{{- toYaml .Values.migrations.resources | nindent 12 }}
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
21
ops/devops/findings-ledger/helm/templates/service.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ include "findings-ledger.fullname" . }}
|
||||
labels:
|
||||
{{- include "findings-ledger.labels" . | nindent 4 }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.service.port }}
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: http
|
||||
{{- if .Values.observability.metricsEnabled }}
|
||||
- port: {{ .Values.service.metricsPort }}
|
||||
targetPort: metrics
|
||||
protocol: TCP
|
||||
name: metrics
|
||||
{{- end }}
|
||||
selector:
|
||||
{{- include "findings-ledger.selectorLabels" . | nindent 4 }}
|
||||
@@ -0,0 +1,12 @@
|
||||
{{- if .Values.serviceAccount.create -}}
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ include "findings-ledger.serviceAccountName" . }}
|
||||
labels:
|
||||
{{- include "findings-ledger.labels" . | nindent 4 }}
|
||||
{{- with .Values.serviceAccount.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
151
ops/devops/findings-ledger/helm/values.yaml
Normal file
@@ -0,0 +1,151 @@
|
||||
# Default values for stellaops-findings-ledger
|
||||
|
||||
image:
|
||||
repository: stellaops/findings-ledger
|
||||
tag: "2025.11.0"
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 8080
|
||||
metricsPort: 9090
|
||||
|
||||
# Database configuration
|
||||
database:
|
||||
# External PostgreSQL connection (preferred for production)
|
||||
# Set connectionStringSecret to use existing secret
|
||||
connectionStringSecret: ""
|
||||
connectionStringKey: "LEDGER__DB__CONNECTIONSTRING"
|
||||
# Or provide connection details directly (not recommended for prod)
|
||||
host: "postgres"
|
||||
port: 5432
|
||||
database: "findings_ledger"
|
||||
username: "ledger"
|
||||
# password via secret only
|
||||
|
||||
# Built-in PostgreSQL (dev/testing only)
|
||||
postgresql:
|
||||
enabled: false
|
||||
auth:
|
||||
username: ledger
|
||||
database: findings_ledger
|
||||
|
||||
# Secrets configuration
|
||||
secrets:
|
||||
# Name of secret containing sensitive values
|
||||
name: "findings-ledger-secrets"
|
||||
# Expected keys in secret:
|
||||
# LEDGER__DB__CONNECTIONSTRING
|
||||
# LEDGER__ATTACHMENTS__ENCRYPTIONKEY
|
||||
# LEDGER__MERKLE__SIGNINGKEY (optional)
|
||||
|
||||
# Observability
|
||||
observability:
|
||||
enabled: true
|
||||
otlpEndpoint: "http://otel-collector:4317"
|
||||
metricsEnabled: true
|
||||
|
||||
# Merkle anchoring
|
||||
merkle:
|
||||
anchorInterval: "00:05:00"
|
||||
externalize: false
|
||||
# externalAnchorEndpoint: ""
|
||||
|
||||
# Attachments
|
||||
attachments:
|
||||
maxSizeBytes: 104857600 # 100MB
|
||||
allowEgress: true
|
||||
# encryptionKey via secret
|
||||
|
||||
# Air-gap configuration
|
||||
airgap:
|
||||
advisoryStaleThreshold: 604800 # 7 days
|
||||
vexStaleThreshold: 604800 # 7 days
|
||||
policyStaleThreshold: 86400 # 1 day
|
||||
|
||||
# Authority integration
|
||||
authority:
|
||||
baseUrl: "http://authority:8080"
|
||||
|
||||
# Feature flags
|
||||
features:
|
||||
enableAttachments: true
|
||||
enableAuditLog: true
|
||||
|
||||
# Resource limits
|
||||
resources:
|
||||
requests:
|
||||
cpu: "500m"
|
||||
memory: "1Gi"
|
||||
limits:
|
||||
cpu: "2"
|
||||
memory: "4Gi"
|
||||
|
||||
# Probes
|
||||
probes:
|
||||
readiness:
|
||||
path: /health/ready
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
liveness:
|
||||
path: /health/live
|
||||
initialDelaySeconds: 15
|
||||
periodSeconds: 20
|
||||
|
||||
# Pod configuration
|
||||
nodeSelector: {}
|
||||
tolerations: []
|
||||
affinity: {}
|
||||
|
||||
# Extra environment variables
|
||||
extraEnv: []
|
||||
# - name: CUSTOM_VAR
|
||||
# value: "value"
|
||||
|
||||
extraEnvFrom: []
|
||||
# - secretRef:
|
||||
# name: additional-secrets
|
||||
|
||||
# Migration job
|
||||
migrations:
|
||||
enabled: true
|
||||
image:
|
||||
repository: stellaops/findings-ledger-migrations
|
||||
tag: "2025.11.0"
|
||||
resources:
|
||||
requests:
|
||||
cpu: "100m"
|
||||
memory: "256Mi"
|
||||
limits:
|
||||
cpu: "500m"
|
||||
memory: "512Mi"
|
||||
|
||||
# Service account
|
||||
serviceAccount:
|
||||
create: true
|
||||
name: ""
|
||||
annotations: {}
|
||||
|
||||
# Pod security context
|
||||
podSecurityContext:
|
||||
runAsNonRoot: true
|
||||
runAsUser: 1000
|
||||
fsGroup: 1000
|
||||
|
||||
# Container security context
|
||||
securityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
readOnlyRootFilesystem: true
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
|
||||
# Ingress (optional)
|
||||
ingress:
|
||||
enabled: false
|
||||
className: ""
|
||||
annotations: {}
|
||||
hosts: []
|
||||
tls: []
|
||||
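A minimal install sketch against an external PostgreSQL, assuming the chart lives at `ops/devops/findings-ledger/helm` and that the deployment template consumes `database.connectionStringSecret` as the comments above describe; the secret contents are illustrative.

```bash
# Create the secret the chart expects (key names follow the comments in values.yaml).
kubectl create secret generic findings-ledger-secrets \
  --from-literal=LEDGER__DB__CONNECTIONSTRING='Host=postgres;Port=5432;Database=findings_ledger;Username=ledger;Password=...' \
  --from-literal=LEDGER__ATTACHMENTS__ENCRYPTIONKEY="$(openssl rand -base64 32)"

# Install the chart, pointing database configuration at the existing secret.
helm upgrade --install findings-ledger ops/devops/findings-ledger/helm \
  --set database.connectionStringSecret=findings-ledger-secrets \
  --set image.tag=2025.11.0
```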
158
ops/devops/findings-ledger/offline-kit/README.md
Normal file
@@ -0,0 +1,158 @@
|
||||
# Findings Ledger Offline Kit
|
||||
|
||||
This directory contains manifests and scripts for deploying Findings Ledger in air-gapped/offline environments.
|
||||
|
||||
## Contents
|
||||
|
||||
```
|
||||
offline-kit/
|
||||
├── README.md # This file
|
||||
├── manifest.yaml # Offline bundle manifest
|
||||
├── images/ # Container image tarballs (populated at build)
|
||||
│ └── .gitkeep
|
||||
├── migrations/ # Database migration scripts
|
||||
│ └── .gitkeep
|
||||
├── dashboards/ # Grafana dashboard JSON exports
|
||||
│ └── findings-ledger.json
|
||||
├── alerts/ # Prometheus alert rules
|
||||
│ └── findings-ledger-alerts.yaml
|
||||
└── scripts/
|
||||
├── import-images.sh # Load container images
|
||||
├── run-migrations.sh # Apply database migrations
|
||||
└── verify-install.sh # Post-install verification
|
||||
```
|
||||
|
||||
## Building the Offline Kit
|
||||
|
||||
Use the platform offline kit builder:
|
||||
|
||||
```bash
|
||||
# From repository root
|
||||
python ops/offline-kit/build_offline_kit.py \
|
||||
--include ledger \
|
||||
--version 2025.11.0 \
|
||||
--output dist/offline-kit-ledger-2025.11.0.tar.gz
|
||||
```
|
||||
|
||||
## Installation Steps
|
||||
|
||||
### 1. Transfer and Extract
|
||||
|
||||
```bash
|
||||
# On air-gapped host
|
||||
tar xzf offline-kit-ledger-*.tar.gz
|
||||
cd offline-kit-ledger-*
|
||||
```
|
||||
|
||||
### 2. Load Container Images
|
||||
|
||||
```bash
|
||||
./scripts/import-images.sh
|
||||
# Loads: stellaops/findings-ledger, stellaops/findings-ledger-migrations
|
||||
```
|
||||
|
||||
### 3. Run Database Migrations
|
||||
|
||||
```bash
|
||||
export LEDGER__DB__CONNECTIONSTRING="Host=...;Database=...;..."
|
||||
./scripts/run-migrations.sh
|
||||
```
|
||||
|
||||
### 4. Deploy Service
|
||||
|
||||
Choose deployment method:
|
||||
|
||||
**Docker Compose:**
|
||||
```bash
|
||||
cp ../compose/env/ledger.prod.env ./ledger.env
|
||||
# Edit ledger.env with local values
|
||||
docker compose -f ../compose/docker-compose.ledger.yaml up -d
|
||||
```
|
||||
|
||||
**Helm:**
|
||||
```bash
|
||||
helm upgrade --install findings-ledger ../helm \
|
||||
-f values-offline.yaml \
|
||||
--set image.pullPolicy=Never
|
||||
```
|
||||
|
||||
### 5. Verify Installation
|
||||
|
||||
```bash
|
||||
./scripts/verify-install.sh
|
||||
```
|
||||
|
||||
## Configuration Notes
|
||||
|
||||
### Sealed Mode
|
||||
|
||||
In air-gapped environments, configure:
|
||||
|
||||
```yaml
|
||||
# Disable outbound attachment egress
|
||||
LEDGER__ATTACHMENTS__ALLOWEGRESS: "false"
|
||||
|
||||
# Set appropriate staleness thresholds
|
||||
LEDGER__AIRGAP__ADVISORYSTALETHRESHOLD: "604800" # 7 days
|
||||
LEDGER__AIRGAP__VEXSTALETHRESHOLD: "604800"
|
||||
LEDGER__AIRGAP__POLICYSTALETHRESHOLD: "86400" # 1 day
|
||||
```
|
||||
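The same settings can be applied through the Helm values shown earlier, assuming the chart maps them onto the `LEDGER__*` variables above; a sketch:

```bash
helm upgrade --install findings-ledger ../helm \
  -f values-offline.yaml \
  --set attachments.allowEgress=false \
  --set airgap.advisoryStaleThreshold=604800 \
  --set airgap.vexStaleThreshold=604800 \
  --set airgap.policyStaleThreshold=86400
```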
|
||||
### Merkle Anchoring
|
||||
|
||||
For offline environments without external anchoring:
|
||||
|
||||
```yaml
|
||||
LEDGER__MERKLE__EXTERNALIZE: "false"
|
||||
```
|
||||
|
||||
Keep local Merkle roots and export periodically for audit.
|
||||
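One way to export roots, sketched with hypothetical table and column names (adjust to the actual migration schema):

```bash
# Hypothetical table/column names; dump the last week of anchors for the audit trail.
psql "$LEDGER_DB" -c \
  "\copy (SELECT * FROM ledger_merkle_anchors WHERE anchored_at > now() - interval '7 days') TO 'merkle-roots-$(date -u +%Y%m%d).csv' CSV HEADER"
```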
|
||||
## Backup & Restore
|
||||
|
||||
See `docs/modules/findings-ledger/deployment.md` for full backup/restore procedures.
|
||||
|
||||
Quick reference:
|
||||
```bash
|
||||
# Backup
|
||||
pg_dump -Fc --dbname="$LEDGER_DB" --file ledger-$(date -u +%Y%m%d).dump
|
||||
|
||||
# Restore
|
||||
pg_restore -C -d postgres ledger-YYYYMMDD.dump
|
||||
|
||||
# Replay projections
|
||||
dotnet run --project tools/LedgerReplayHarness -- \
|
||||
--connection "$LEDGER_DB" --tenant all
|
||||
```
|
||||
|
||||
## Observability
|
||||
|
||||
Import the provided dashboards into your local Grafana instance:
|
||||
|
||||
```bash
|
||||
# Import via Grafana API or UI
|
||||
curl -X POST http://grafana:3000/api/dashboards/db \
|
||||
-H "Content-Type: application/json" \
|
||||
-d @dashboards/findings-ledger.json
|
||||
```
|
||||
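If the Grafana HTTP API rejects the raw export (it carries an `__inputs` block and no `dashboard` envelope), wrapping it first may help; a sketch, assuming an API token:

```bash
jq '{dashboard: (. + {id: null}), overwrite: true, folderId: 0}' dashboards/findings-ledger.json |
  curl -sf -X POST http://grafana:3000/api/dashboards/db \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${GRAFANA_API_TOKEN}" \
    -d @-
```

After import, map the `DS_PROMETHEUS` datasource to the local Prometheus if Grafana does not resolve it automatically.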
|
||||
Apply alert rules to Prometheus:
|
||||
```bash
|
||||
cp alerts/findings-ledger-alerts.yaml /etc/prometheus/rules.d/
|
||||
# Reload Prometheus
|
||||
```
|
||||
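To validate the rules before reloading, and to reload without a restart (requires Prometheus to run with `--web.enable-lifecycle`):

```bash
promtool check rules /etc/prometheus/rules.d/findings-ledger-alerts.yaml
curl -X POST http://prometheus:9090/-/reload
```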
|
||||
## Troubleshooting
|
||||
|
||||
| Issue | Resolution |
|
||||
| --- | --- |
|
||||
| Migration fails | Check DB connectivity; verify user has CREATE/ALTER privileges |
|
||||
| Health check fails | Check logs: `docker logs findings-ledger` or `kubectl logs -l app.kubernetes.io/name=findings-ledger` |
|
||||
| Metrics not visible | Verify OTLP endpoint is reachable or use Prometheus scrape |
|
||||
| Staleness warnings | Import fresh advisory/VEX bundles via Mirror |
|
||||
|
||||
## Support
|
||||
|
||||
- Platform docs: `docs/modules/findings-ledger/`
|
||||
- Offline operation: `docs/24_OFFLINE_KIT.md`
|
||||
- Air-gap mode: `docs/airgap/`
|
||||
122
ops/devops/findings-ledger/offline-kit/alerts/findings-ledger-alerts.yaml
Normal file
@@ -0,0 +1,122 @@
|
||||
# Findings Ledger Prometheus Alert Rules
|
||||
# Apply to Prometheus: cp findings-ledger-alerts.yaml /etc/prometheus/rules.d/
|
||||
|
||||
groups:
|
||||
- name: findings-ledger
|
||||
rules:
|
||||
# Service availability
|
||||
- alert: FindingsLedgerDown
|
||||
expr: up{job="findings-ledger"} == 0
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger service is down"
|
||||
description: "Findings Ledger service has been unreachable for more than 2 minutes."
|
||||
|
||||
# Write latency
|
||||
- alert: FindingsLedgerHighWriteLatency
|
||||
expr: histogram_quantile(0.95, sum(rate(ledger_write_latency_seconds_bucket{job="findings-ledger"}[5m])) by (le)) > 1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger write latency is high"
|
||||
description: "95th percentile write latency exceeds 1 second for 5 minutes. Current: {{ $value | humanizeDuration }}"
|
||||
|
||||
- alert: FindingsLedgerCriticalWriteLatency
|
||||
expr: histogram_quantile(0.95, sum(rate(ledger_write_latency_seconds_bucket{job="findings-ledger"}[5m])) by (le)) > 5
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger write latency is critically high"
|
||||
description: "95th percentile write latency exceeds 5 seconds. Current: {{ $value | humanizeDuration }}"
|
||||
|
||||
# Projection lag
|
||||
- alert: FindingsLedgerProjectionLag
|
||||
expr: ledger_projection_lag_seconds{job="findings-ledger"} > 30
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger projection lag is high"
|
||||
description: "Projection lag exceeds 30 seconds for 5 minutes. Current: {{ $value | humanizeDuration }}"
|
||||
|
||||
- alert: FindingsLedgerCriticalProjectionLag
|
||||
expr: ledger_projection_lag_seconds{job="findings-ledger"} > 300
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger projection lag is critically high"
|
||||
description: "Projection lag exceeds 5 minutes. Current: {{ $value | humanizeDuration }}"
|
||||
|
||||
# Merkle anchoring
|
||||
- alert: FindingsLedgerMerkleAnchorStale
|
||||
expr: time() - ledger_merkle_last_anchor_timestamp_seconds{job="findings-ledger"} > 600
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger Merkle anchor is stale"
|
||||
description: "No Merkle anchor created in the last 10 minutes. Last anchor: {{ $value | humanizeTimestamp }}"
|
||||
|
||||
- alert: FindingsLedgerMerkleAnchorFailed
|
||||
expr: increase(ledger_merkle_anchor_failures_total{job="findings-ledger"}[15m]) > 0
|
||||
for: 0m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger Merkle anchoring failed"
|
||||
description: "Merkle anchor operation failed. Check logs for details."
|
||||
|
||||
# Database connectivity
|
||||
- alert: FindingsLedgerDatabaseErrors
|
||||
expr: increase(ledger_database_errors_total{job="findings-ledger"}[5m]) > 5
|
||||
for: 2m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger database errors detected"
|
||||
description: "More than 5 database errors in the last 5 minutes."
|
||||
|
||||
# Attachment storage
|
||||
- alert: FindingsLedgerAttachmentStorageErrors
|
||||
expr: increase(ledger_attachment_storage_errors_total{job="findings-ledger"}[15m]) > 0
|
||||
for: 0m
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Findings Ledger attachment storage errors"
|
||||
description: "Attachment storage operation failed. Check encryption keys and storage connectivity."
|
||||
|
||||
# Air-gap staleness (for offline environments)
|
||||
- alert: FindingsLedgerAdvisoryStaleness
|
||||
expr: ledger_airgap_advisory_staleness_seconds{job="findings-ledger"} > 604800
|
||||
for: 1h
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "Advisory data is stale in air-gapped environment"
|
||||
description: "Advisory data is older than 7 days. Import fresh data from Mirror."
|
||||
|
||||
- alert: FindingsLedgerVexStaleness
|
||||
expr: ledger_airgap_vex_staleness_seconds{job="findings-ledger"} > 604800
|
||||
for: 1h
|
||||
labels:
|
||||
severity: warning
|
||||
service: findings-ledger
|
||||
annotations:
|
||||
summary: "VEX data is stale in air-gapped environment"
|
||||
description: "VEX data is older than 7 days. Import fresh data from Mirror."
|
||||
185
ops/devops/findings-ledger/offline-kit/dashboards/findings-ledger.json
Normal file
@@ -0,0 +1,185 @@
|
||||
{
|
||||
"__inputs": [
|
||||
{
|
||||
"name": "DS_PROMETHEUS",
|
||||
"label": "Prometheus",
|
||||
"description": "",
|
||||
"type": "datasource",
|
||||
"pluginId": "prometheus",
|
||||
"pluginName": "Prometheus"
|
||||
}
|
||||
],
|
||||
"__requires": [
|
||||
{
|
||||
"type": "grafana",
|
||||
"id": "grafana",
|
||||
"name": "Grafana",
|
||||
"version": "9.0.0"
|
||||
},
|
||||
{
|
||||
"type": "datasource",
|
||||
"id": "prometheus",
|
||||
"name": "Prometheus",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
],
|
||||
"annotations": {
|
||||
"list": []
|
||||
},
|
||||
"description": "Findings Ledger service metrics and health",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": null,
|
||||
"links": [],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
|
||||
"id": 1,
|
||||
"panels": [],
|
||||
"title": "Health Overview",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [
|
||||
{ "options": { "0": { "color": "red", "index": 1, "text": "DOWN" }, "1": { "color": "green", "index": 0, "text": "UP" } }, "type": "value" }
|
||||
],
|
||||
"thresholds": { "mode": "absolute", "steps": [{ "color": "red", "value": null }, { "color": "green", "value": 1 }] }
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 },
|
||||
"id": 2,
|
||||
"options": { "colorMode": "value", "graphMode": "none", "justifyMode": "auto", "orientation": "auto", "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, "textMode": "auto" },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [{ "expr": "up{job=\"findings-ledger\"}", "refId": "A" }],
|
||||
"title": "Service Status",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "palette-classic" }, "unit": "short" },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 },
|
||||
"id": 3,
|
||||
"options": { "colorMode": "value", "graphMode": "area", "justifyMode": "auto", "orientation": "auto", "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, "textMode": "auto" },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [{ "expr": "ledger_events_total{job=\"findings-ledger\"}", "refId": "A" }],
|
||||
"title": "Total Events",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "thresholds" }, "unit": "s", "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }, { "color": "yellow", "value": 1 }, { "color": "red", "value": 5 }] } },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 },
|
||||
"id": 4,
|
||||
"options": { "colorMode": "value", "graphMode": "area", "justifyMode": "auto", "orientation": "auto", "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, "textMode": "auto" },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [{ "expr": "ledger_projection_lag_seconds{job=\"findings-ledger\"}", "refId": "A" }],
|
||||
"title": "Projection Lag",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 5 },
|
||||
"id": 10,
|
||||
"panels": [],
|
||||
"title": "Write Performance",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "palette-classic" }, "custom": { "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", "hideFrom": { "legend": false, "tooltip": false, "viz": false }, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, "scaleDistribution": { "type": "linear" }, "showPoints": "never", "spanNulls": false, "stacking": { "group": "A", "mode": "none" }, "thresholdsStyle": { "mode": "off" } }, "unit": "s" },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 6 },
|
||||
"id": 11,
|
||||
"options": { "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, "tooltip": { "mode": "multi", "sort": "none" } },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [
|
||||
{ "expr": "histogram_quantile(0.50, sum(rate(ledger_write_latency_seconds_bucket{job=\"findings-ledger\"}[5m])) by (le))", "legendFormat": "p50", "refId": "A" },
|
||||
{ "expr": "histogram_quantile(0.95, sum(rate(ledger_write_latency_seconds_bucket{job=\"findings-ledger\"}[5m])) by (le))", "legendFormat": "p95", "refId": "B" },
|
||||
{ "expr": "histogram_quantile(0.99, sum(rate(ledger_write_latency_seconds_bucket{job=\"findings-ledger\"}[5m])) by (le))", "legendFormat": "p99", "refId": "C" }
|
||||
],
|
||||
"title": "Write Latency",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "palette-classic" }, "custom": { "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", "hideFrom": { "legend": false, "tooltip": false, "viz": false }, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, "scaleDistribution": { "type": "linear" }, "showPoints": "never", "spanNulls": false, "stacking": { "group": "A", "mode": "none" }, "thresholdsStyle": { "mode": "off" } }, "unit": "ops" },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 6 },
|
||||
"id": 12,
|
||||
"options": { "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, "tooltip": { "mode": "multi", "sort": "none" } },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [{ "expr": "rate(ledger_events_total{job=\"findings-ledger\"}[5m])", "legendFormat": "events/s", "refId": "A" }],
|
||||
"title": "Event Write Rate",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 14 },
|
||||
"id": 20,
|
||||
"panels": [],
|
||||
"title": "Merkle Anchoring",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "palette-classic" }, "custom": { "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", "hideFrom": { "legend": false, "tooltip": false, "viz": false }, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, "scaleDistribution": { "type": "linear" }, "showPoints": "never", "spanNulls": false, "stacking": { "group": "A", "mode": "none" }, "thresholdsStyle": { "mode": "off" } }, "unit": "s" },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 15 },
|
||||
"id": 21,
|
||||
"options": { "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true }, "tooltip": { "mode": "multi", "sort": "none" } },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [
|
||||
{ "expr": "histogram_quantile(0.50, sum(rate(ledger_merkle_anchor_duration_seconds_bucket{job=\"findings-ledger\"}[5m])) by (le))", "legendFormat": "p50", "refId": "A" },
|
||||
{ "expr": "histogram_quantile(0.95, sum(rate(ledger_merkle_anchor_duration_seconds_bucket{job=\"findings-ledger\"}[5m])) by (le))", "legendFormat": "p95", "refId": "B" }
|
||||
],
|
||||
"title": "Anchor Duration",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
|
||||
"fieldConfig": {
|
||||
"defaults": { "color": { "mode": "thresholds" }, "unit": "short", "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } },
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 15 },
|
||||
"id": 22,
|
||||
"options": { "colorMode": "value", "graphMode": "area", "justifyMode": "auto", "orientation": "auto", "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, "textMode": "auto" },
|
||||
"pluginVersion": "9.0.0",
|
||||
"targets": [{ "expr": "ledger_merkle_anchors_total{job=\"findings-ledger\"}", "refId": "A" }],
|
||||
"title": "Total Anchors",
|
||||
"type": "stat"
|
||||
}
|
||||
],
|
||||
"refresh": "30s",
|
||||
"schemaVersion": 37,
|
||||
"style": "dark",
|
||||
"tags": ["stellaops", "findings-ledger"],
|
||||
"templating": { "list": [] },
|
||||
"time": { "from": "now-1h", "to": "now" },
|
||||
"timepicker": {},
|
||||
"timezone": "utc",
|
||||
"title": "Findings Ledger",
|
||||
"uid": "findings-ledger",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
}
|
||||
1
ops/devops/findings-ledger/offline-kit/images/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
# Container image tarballs populated at build time by offline-kit builder
|
||||
106
ops/devops/findings-ledger/offline-kit/manifest.yaml
Normal file
@@ -0,0 +1,106 @@
|
||||
# Findings Ledger Offline Kit Manifest
|
||||
# Version: 2025.11.0
|
||||
# Generated: 2025-12-07
|
||||
|
||||
apiVersion: stellaops.io/v1
|
||||
kind: OfflineKitManifest
|
||||
metadata:
|
||||
name: findings-ledger
|
||||
version: "2025.11.0"
|
||||
description: Findings Ledger service for event-sourced findings storage with Merkle anchoring
|
||||
|
||||
spec:
|
||||
components:
|
||||
- name: findings-ledger
|
||||
type: service
|
||||
image: stellaops/findings-ledger:2025.11.0
|
||||
digest: "" # Populated at build time
|
||||
|
||||
- name: findings-ledger-migrations
|
||||
type: job
|
||||
image: stellaops/findings-ledger-migrations:2025.11.0
|
||||
digest: "" # Populated at build time
|
||||
|
||||
dependencies:
|
||||
- name: postgresql
|
||||
version: ">=14.0"
|
||||
type: database
|
||||
required: true
|
||||
|
||||
- name: otel-collector
|
||||
version: ">=0.80.0"
|
||||
type: service
|
||||
required: false
|
||||
description: Optional for telemetry export
|
||||
|
||||
migrations:
|
||||
- version: "001"
|
||||
file: migrations/001_initial_schema.sql
|
||||
checksum: "" # Populated at build time
|
||||
- version: "002"
|
||||
file: migrations/002_merkle_tables.sql
|
||||
checksum: ""
|
||||
- version: "003"
|
||||
file: migrations/003_attachments.sql
|
||||
checksum: ""
|
||||
- version: "004"
|
||||
file: migrations/004_projections.sql
|
||||
checksum: ""
|
||||
- version: "005"
|
||||
file: migrations/005_airgap_imports.sql
|
||||
checksum: ""
|
||||
- version: "006"
|
||||
file: migrations/006_evidence_snapshots.sql
|
||||
checksum: ""
|
||||
- version: "007"
|
||||
file: migrations/007_timeline_events.sql
|
||||
checksum: ""
|
||||
- version: "008"
|
||||
file: migrations/008_attestation_pointers.sql
|
||||
checksum: ""
|
||||
|
||||
dashboards:
|
||||
- name: findings-ledger
|
||||
file: dashboards/findings-ledger.json
|
||||
checksum: ""
|
||||
|
||||
alerts:
|
||||
- name: findings-ledger-alerts
|
||||
file: alerts/findings-ledger-alerts.yaml
|
||||
checksum: ""
|
||||
|
||||
configuration:
|
||||
required:
|
||||
- key: LEDGER__DB__CONNECTIONSTRING
|
||||
description: PostgreSQL connection string
|
||||
secret: true
|
||||
- key: LEDGER__ATTACHMENTS__ENCRYPTIONKEY
|
||||
description: AES-256 encryption key for attachments (base64)
|
||||
secret: true
|
||||
|
||||
optional:
|
||||
- key: LEDGER__MERKLE__SIGNINGKEY
|
||||
description: Signing key for Merkle root attestations
|
||||
secret: true
|
||||
- key: LEDGER__OBSERVABILITY__OTLPENDPOINT
|
||||
description: OpenTelemetry collector endpoint
|
||||
default: http://otel-collector:4317
|
||||
- key: LEDGER__MERKLE__ANCHORINTERVAL
|
||||
description: Merkle anchor interval (TimeSpan)
|
||||
default: "00:05:00"
|
||||
- key: LEDGER__AIRGAP__ADVISORYSTALETHRESHOLD
|
||||
description: Advisory staleness threshold in seconds
|
||||
default: "604800"
|
||||
|
||||
verification:
|
||||
healthEndpoint: /health/ready
|
||||
metricsEndpoint: /metrics
|
||||
expectedMetrics:
|
||||
- ledger_write_latency_seconds
|
||||
- ledger_projection_lag_seconds
|
||||
- ledger_merkle_anchor_duration_seconds
|
||||
- ledger_events_total
|
||||
|
||||
checksums:
|
||||
algorithm: sha256
|
||||
manifest: "" # Populated at build time
|
||||
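Once the builder has populated the checksum fields, the recorded digests can be compared against the files on disk; the `yq` call below assumes yq v4 and is illustrative.

```bash
cd ops/devops/findings-ledger/offline-kit
sha256sum alerts/findings-ledger-alerts.yaml dashboards/findings-ledger.json
# Recorded values for comparison:
yq '.spec.alerts[0].checksum, .spec.dashboards[0].checksum' manifest.yaml
```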
@@ -0,0 +1 @@
|
||||
# Database migration SQL scripts copied from StellaOps.FindingsLedger.Migrations
|
||||
131
ops/devops/findings-ledger/offline-kit/scripts/import-images.sh
Normal file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env bash
|
||||
# Import Findings Ledger container images into local Docker/containerd
|
||||
# Usage: ./import-images.sh [registry-prefix]
|
||||
#
|
||||
# Example:
|
||||
# ./import-images.sh # Loads as stellaops/*
|
||||
# ./import-images.sh myregistry.local/ # Loads and tags as myregistry.local/stellaops/*
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
IMAGES_DIR="${SCRIPT_DIR}/../images"
|
||||
REGISTRY_PREFIX="${1:-}"
|
||||
|
||||
# Color output helpers
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
|
||||
|
||||
# Detect container runtime
|
||||
detect_runtime() {
|
||||
if command -v docker &>/dev/null; then
|
||||
echo "docker"
|
||||
elif command -v nerdctl &>/dev/null; then
|
||||
echo "nerdctl"
|
||||
elif command -v podman &>/dev/null; then
|
||||
echo "podman"
|
||||
else
|
||||
log_error "No container runtime found (docker, nerdctl, podman)"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
RUNTIME=$(detect_runtime)
|
||||
log_info "Using container runtime: $RUNTIME"
|
||||
|
||||
# Load images from tarballs
|
||||
load_images() {
|
||||
local count=0
|
||||
|
||||
for tarball in "${IMAGES_DIR}"/*.tar; do
|
||||
if [[ -f "$tarball" ]]; then
|
||||
log_info "Loading image from: $(basename "$tarball")"
|
||||
|
||||
if $RUNTIME load -i "$tarball"; then
|
||||
count=$((count + 1))
|
||||
else
|
||||
log_error "Failed to load: $tarball"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $count -eq 0 ]]; then
|
||||
log_warn "No image tarballs found in $IMAGES_DIR"
|
||||
log_warn "Run the offline kit builder first to populate images"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "Loaded $count image(s)"
|
||||
}
|
||||
|
||||
# Re-tag images with custom registry prefix
|
||||
retag_images() {
|
||||
if [[ -z "$REGISTRY_PREFIX" ]]; then
|
||||
log_info "No registry prefix specified, skipping re-tag"
|
||||
return 0
|
||||
fi
|
||||
|
||||
local images=(
|
||||
"stellaops/findings-ledger"
|
||||
"stellaops/findings-ledger-migrations"
|
||||
)
|
||||
|
||||
for image in "${images[@]}"; do
|
||||
# Get the loaded tag
|
||||
local loaded_tag
|
||||
loaded_tag=$($RUNTIME images --format '{{.Repository}}:{{.Tag}}' | grep "^${image}:" | head -1 || true)
|
||||
|
||||
if [[ -n "$loaded_tag" ]]; then
|
||||
local new_tag="${REGISTRY_PREFIX}${loaded_tag}"
|
||||
log_info "Re-tagging: $loaded_tag -> $new_tag"
|
||||
$RUNTIME tag "$loaded_tag" "$new_tag"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Verify loaded images
|
||||
verify_images() {
|
||||
log_info "Verifying loaded images..."
|
||||
|
||||
local images=(
|
||||
"stellaops/findings-ledger"
|
||||
"stellaops/findings-ledger-migrations"
|
||||
)
|
||||
|
||||
local missing=0
|
||||
for image in "${images[@]}"; do
|
||||
if $RUNTIME images --format '{{.Repository}}' | grep -q "^${REGISTRY_PREFIX}${image}$"; then
|
||||
log_info " ✓ ${REGISTRY_PREFIX}${image}"
|
||||
else
|
||||
log_error " ✗ ${REGISTRY_PREFIX}${image} not found"
|
||||
missing=$((missing + 1))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $missing -gt 0 ]]; then
|
||||
log_error "$missing image(s) missing"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "All images verified"
|
||||
}
|
||||
|
||||
main() {
|
||||
log_info "Findings Ledger - Image Import"
|
||||
log_info "=============================="
|
||||
|
||||
load_images
|
||||
retag_images
|
||||
verify_images
|
||||
|
||||
log_info "Image import complete"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
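The tarballs are normally produced by the offline-kit builder; a manual fallback on a connected host (image names taken from the verify list above, the registry prefix may differ):

```bash
mkdir -p images
for img in stellaops/findings-ledger:2025.11.0 stellaops/findings-ledger-migrations:2025.11.0; do
  docker pull "$img"
  docker save -o "images/$(echo "$img" | tr '/:' '__').tar" "$img"
done
```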
125
ops/devops/findings-ledger/offline-kit/scripts/run-migrations.sh
Normal file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/env bash
|
||||
# Run Findings Ledger database migrations
|
||||
# Usage: ./run-migrations.sh [connection-string]
|
||||
#
|
||||
# Environment variables:
|
||||
# LEDGER__DB__CONNECTIONSTRING - PostgreSQL connection string (if not provided as arg)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
MIGRATIONS_DIR="${SCRIPT_DIR}/../migrations"
|
||||
|
||||
# Color output helpers
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
|
||||
|
||||
# Get connection string
|
||||
CONNECTION_STRING="${1:-${LEDGER__DB__CONNECTIONSTRING:-}}"
|
||||
|
||||
if [[ -z "$CONNECTION_STRING" ]]; then
|
||||
log_error "Connection string required"
|
||||
echo "Usage: $0 <connection-string>"
|
||||
echo " or set LEDGER__DB__CONNECTIONSTRING environment variable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Detect container runtime
|
||||
detect_runtime() {
|
||||
if command -v docker &>/dev/null; then
|
||||
echo "docker"
|
||||
elif command -v nerdctl &>/dev/null; then
|
||||
echo "nerdctl"
|
||||
elif command -v podman &>/dev/null; then
|
||||
echo "podman"
|
||||
else
|
||||
log_error "No container runtime found"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
RUNTIME=$(detect_runtime)
|
||||
|
||||
# Run migrations via container
|
||||
run_migrations_container() {
|
||||
log_info "Running migrations via container..."
|
||||
|
||||
$RUNTIME run --rm \
|
||||
-e "LEDGER__DB__CONNECTIONSTRING=${CONNECTION_STRING}" \
|
||||
--network host \
|
||||
stellaops/findings-ledger-migrations:2025.11.0 \
|
||||
--connection "$CONNECTION_STRING"
|
||||
}
|
||||
|
||||
# Alternative: Run migrations via psql (if dotnet not available)
|
||||
run_migrations_psql() {
|
||||
log_info "Running migrations via psql..."
|
||||
|
||||
if ! command -v psql &>/dev/null; then
|
||||
log_error "psql not found and container runtime unavailable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Parse connection string for psql
|
||||
# Expected format: Host=...;Port=...;Database=...;Username=...;Password=...
|
||||
local host port database username password
|
||||
host=$(echo "$CONNECTION_STRING" | grep -oP 'Host=\K[^;]+')
|
||||
port=$(echo "$CONNECTION_STRING" | grep -oP 'Port=\K[^;]+' || echo "5432")
|
||||
database=$(echo "$CONNECTION_STRING" | grep -oP 'Database=\K[^;]+')
|
||||
username=$(echo "$CONNECTION_STRING" | grep -oP 'Username=\K[^;]+')
|
||||
password=$(echo "$CONNECTION_STRING" | grep -oP 'Password=\K[^;]+')
|
||||
|
||||
export PGPASSWORD="$password"
|
||||
|
||||
for migration in "${MIGRATIONS_DIR}"/*.sql; do
|
||||
if [[ -f "$migration" ]]; then
|
||||
log_info "Applying: $(basename "$migration")"
|
||||
psql -h "$host" -p "$port" -U "$username" -d "$database" -f "$migration"
|
||||
fi
|
||||
done
|
||||
|
||||
unset PGPASSWORD
|
||||
}
|
||||
|
||||
verify_connection() {
|
||||
log_info "Verifying database connection..."
|
||||
|
||||
# Try container-based verification
|
||||
if $RUNTIME run --rm \
|
||||
--network host \
|
||||
postgres:14-alpine \
|
||||
pg_isready -h "$(echo "$CONNECTION_STRING" | grep -oP 'Host=\K[^;]+')" \
|
||||
-p "$(echo "$CONNECTION_STRING" | grep -oP 'Port=\K[^;]+' || echo 5432)" \
|
||||
&>/dev/null; then
|
||||
log_info "Database connection verified"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_warn "Could not verify database connection (may still work)"
|
||||
return 0
|
||||
}
|
||||
|
||||
main() {
|
||||
log_info "Findings Ledger - Database Migrations"
|
||||
log_info "======================================"
|
||||
|
||||
verify_connection
|
||||
|
||||
# Prefer container-based migrations
|
||||
if $RUNTIME image inspect stellaops/findings-ledger-migrations:2025.11.0 &>/dev/null; then
|
||||
run_migrations_container
|
||||
else
|
||||
log_warn "Migration image not found, falling back to psql"
|
||||
run_migrations_psql
|
||||
fi
|
||||
|
||||
log_info "Migrations complete"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
70
ops/devops/findings-ledger/offline-kit/scripts/verify-install.sh
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env bash
|
||||
# Verify Findings Ledger installation
|
||||
# Usage: ./verify-install.sh [service-url]
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SERVICE_URL="${1:-http://localhost:8188}"
|
||||
|
||||
# Color output helpers
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
|
||||
log_pass() { echo -e "${GREEN} ✓${NC} $*"; }
|
||||
log_fail() { echo -e "${RED} ✗${NC} $*"; }
|
||||
|
||||
CHECKS_PASSED=0
|
||||
CHECKS_FAILED=0
|
||||
|
||||
run_check() {
|
||||
local name="$1"
|
||||
local cmd="$2"
|
||||
|
||||
if eval "$cmd" &>/dev/null; then
|
||||
log_pass "$name"
|
||||
CHECKS_PASSED=$((CHECKS_PASSED + 1))
|
||||
else
|
||||
log_fail "$name"
|
||||
CHECKS_FAILED=$((CHECKS_FAILED + 1))
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
log_info "Findings Ledger - Installation Verification"
|
||||
log_info "==========================================="
|
||||
log_info "Service URL: $SERVICE_URL"
|
||||
echo ""
|
||||
|
||||
log_info "Health Checks:"
|
||||
run_check "Readiness endpoint" "curl -sf ${SERVICE_URL}/health/ready"
|
||||
run_check "Liveness endpoint" "curl -sf ${SERVICE_URL}/health/live"
|
||||
|
||||
echo ""
|
||||
log_info "Metrics Checks:"
|
||||
run_check "Metrics endpoint available" "curl -sf ${SERVICE_URL}/metrics | head -1"
|
||||
run_check "ledger_write_latency_seconds present" "curl -sf ${SERVICE_URL}/metrics | grep -q ledger_write_latency_seconds"
|
||||
run_check "ledger_projection_lag_seconds present" "curl -sf ${SERVICE_URL}/metrics | grep -q ledger_projection_lag_seconds"
|
||||
run_check "ledger_merkle_anchor_duration_seconds present" "curl -sf ${SERVICE_URL}/metrics | grep -q ledger_merkle_anchor_duration_seconds"
|
||||
|
||||
echo ""
|
||||
log_info "API Checks:"
|
||||
run_check "OpenAPI spec available" "curl -sf ${SERVICE_URL}/swagger/v1/swagger.json | head -1"
|
||||
|
||||
echo ""
|
||||
log_info "========================================"
|
||||
log_info "Results: ${CHECKS_PASSED} passed, ${CHECKS_FAILED} failed"
|
||||
|
||||
if [[ $CHECKS_FAILED -gt 0 ]]; then
|
||||
log_error "Some checks failed. Review service logs for details."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_info "All checks passed. Installation verified."
|
||||
}
|
||||
|
||||
main "$@"
|
||||
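Against a Helm deployment, the checks can be run through a port-forward (the service name assumes the release was installed as `findings-ledger`):

```bash
kubectl port-forward svc/findings-ledger 8080:8080 &
./scripts/verify-install.sh http://localhost:8080
```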
173
ops/wine-csp/Dockerfile
Normal file
@@ -0,0 +1,173 @@
|
||||
# syntax=docker/dockerfile:1.7
|
||||
# Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
|
||||
#
|
||||
# WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
|
||||
#
|
||||
# Build:
|
||||
# docker buildx build -f ops/wine-csp/Dockerfile -t wine-csp:latest .
|
||||
#
|
||||
# Run:
|
||||
# docker run -p 5099:5099 -e WINE_CSP_MODE=limited wine-csp:latest
|
||||
|
||||
# ==============================================================================
|
||||
# Stage 1: Build .NET application for Windows x64
|
||||
# ==============================================================================
|
||||
ARG SDK_IMAGE=mcr.microsoft.com/dotnet/sdk:10.0-preview-bookworm-slim
|
||||
FROM ${SDK_IMAGE} AS build
|
||||
|
||||
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \
|
||||
DOTNET_NOLOGO=1 \
|
||||
DOTNET_ROLL_FORWARD=LatestMajor \
|
||||
SOURCE_DATE_EPOCH=1704067200
|
||||
|
||||
WORKDIR /src
|
||||
|
||||
# Copy solution files and NuGet configuration
|
||||
COPY Directory.Build.props Directory.Build.rsp NuGet.config ./
|
||||
|
||||
# Copy local NuGet packages if available
|
||||
COPY local-nugets/ ./local-nugets/
|
||||
|
||||
# Copy Wine CSP Service source
|
||||
COPY src/__Tools/WineCspService/ ./src/__Tools/WineCspService/
|
||||
|
||||
# Copy GostCryptography fork dependency
|
||||
COPY third_party/forks/AlexMAS.GostCryptography/ ./third_party/forks/AlexMAS.GostCryptography/
|
||||
|
||||
# Restore and publish for Windows x64 (runs under Wine)
|
||||
RUN --mount=type=cache,target=/root/.nuget/packages \
|
||||
dotnet restore src/__Tools/WineCspService/WineCspService.csproj && \
|
||||
dotnet publish src/__Tools/WineCspService/WineCspService.csproj \
|
||||
-c Release \
|
||||
-r win-x64 \
|
||||
--self-contained true \
|
||||
-o /app/publish \
|
||||
/p:PublishSingleFile=true \
|
||||
/p:EnableCompressionInSingleFile=true \
|
||||
/p:DebugType=none \
|
||||
/p:DebugSymbols=false
|
||||
|
||||
# ==============================================================================
|
||||
# Stage 2: Runtime with Wine and CryptoPro CSP support
|
||||
# ==============================================================================
|
||||
FROM ubuntu:22.04 AS runtime
|
||||
|
||||
# OCI Image Labels
|
||||
LABEL org.opencontainers.image.title="StellaOps Wine CSP Service" \
|
||||
org.opencontainers.image.description="GOST cryptographic test vector generation via Wine-hosted CryptoPro CSP" \
|
||||
org.opencontainers.image.vendor="StellaOps" \
|
||||
org.opencontainers.image.source="https://git.stella-ops.org/stellaops/router" \
|
||||
com.stellaops.component="wine-csp" \
|
||||
com.stellaops.security.production-signing="false" \
|
||||
com.stellaops.security.test-vectors-only="true"
|
||||
|
||||
# Wine CSP service configuration
|
||||
ARG WINE_CSP_PORT=5099
|
||||
ARG APP_USER=winecsp
|
||||
ARG APP_UID=10001
|
||||
ARG APP_GID=10001
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive \
|
||||
# Wine configuration
|
||||
WINEDEBUG=-all \
|
||||
WINEPREFIX=/home/${APP_USER}/.wine \
|
||||
WINEARCH=win64 \
|
||||
# Service configuration
|
||||
WINE_CSP_PORT=${WINE_CSP_PORT} \
|
||||
ASPNETCORE_URLS=http://+:${WINE_CSP_PORT} \
|
||||
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1 \
|
||||
# CSP configuration
|
||||
WINE_CSP_MODE=limited \
|
||||
WINE_CSP_INSTALLER_PATH=/opt/cryptopro/csp-installer.msi \
|
||||
WINE_CSP_LOG_LEVEL=Information \
|
||||
# Display for Wine (headless)
|
||||
DISPLAY=:99
|
||||
|
||||
# Install Wine and dependencies
|
||||
# Using WineHQ stable repository for consistent Wine version
|
||||
RUN set -eux; \
|
||||
dpkg --add-architecture i386; \
|
||||
apt-get update; \
|
||||
apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg2 \
|
||||
software-properties-common \
|
||||
wget \
|
||||
xvfb \
|
||||
cabextract \
|
||||
p7zip-full \
|
||||
procps; \
|
||||
# Add WineHQ repository key
|
||||
mkdir -pm755 /etc/apt/keyrings; \
|
||||
wget -O /etc/apt/keyrings/winehq-archive.key \
|
||||
https://dl.winehq.org/wine-builds/winehq.key; \
|
||||
# Add WineHQ repository
|
||||
wget -NP /etc/apt/sources.list.d/ \
|
||||
https://dl.winehq.org/wine-builds/ubuntu/dists/jammy/winehq-jammy.sources; \
|
||||
apt-get update; \
|
||||
# Install Wine stable
|
||||
apt-get install -y --no-install-recommends \
|
||||
winehq-stable; \
|
||||
# Install winetricks for runtime dependencies
|
||||
wget -O /usr/local/bin/winetricks \
|
||||
https://raw.githubusercontent.com/Winetricks/winetricks/master/src/winetricks; \
|
||||
chmod +x /usr/local/bin/winetricks; \
|
||||
# Cleanup
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
# Create non-root user for Wine service
|
||||
# Note: Wine requires writable home directory for prefix
|
||||
RUN groupadd -r -g ${APP_GID} ${APP_USER} && \
|
||||
useradd -r -u ${APP_UID} -g ${APP_GID} -m -d /home/${APP_USER} -s /bin/bash ${APP_USER} && \
|
||||
mkdir -p /app /opt/cryptopro /var/log/wine-csp /var/run/wine-csp && \
|
||||
chown -R ${APP_UID}:${APP_GID} /app /home/${APP_USER} /opt/cryptopro /var/log/wine-csp /var/run/wine-csp
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy application from build stage
|
||||
COPY --from=build --chown=${APP_UID}:${APP_GID} /app/publish/ ./
|
||||
|
||||
# Copy supporting scripts
|
||||
COPY --chown=${APP_UID}:${APP_GID} ops/wine-csp/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
COPY --chown=${APP_UID}:${APP_GID} ops/wine-csp/healthcheck.sh /usr/local/bin/healthcheck.sh
|
||||
COPY --chown=${APP_UID}:${APP_GID} ops/wine-csp/install-csp.sh /usr/local/bin/install-csp.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/healthcheck.sh /usr/local/bin/install-csp.sh
|
||||
|
||||
# Switch to non-root user for Wine prefix initialization
|
||||
USER ${APP_UID}:${APP_GID}
|
||||
|
||||
# Initialize Wine prefix (creates .wine directory with Windows environment)
|
||||
# This must run as the app user to set correct ownership
|
||||
# Using xvfb-run for headless Wine initialization
|
||||
RUN set -eux; \
|
||||
# Start virtual framebuffer and initialize Wine
|
||||
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" \
|
||||
wine64 wineboot --init; \
|
||||
wineserver --wait; \
|
||||
# Install Visual C++ 2019 runtime via winetricks (required for .NET)
|
||||
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" \
|
||||
winetricks -q vcrun2019 || true; \
|
||||
wineserver --wait; \
|
||||
# Set Windows version to Windows 10 for compatibility
|
||||
wine64 reg add "HKCU\\Software\\Wine\\Version" /v Windows /d "win10" /f || true; \
|
||||
wineserver --wait; \
|
||||
# Cleanup Wine temp files
|
||||
rm -rf /home/${APP_USER}/.cache/winetricks /tmp/.X* /tmp/winetricks* || true
|
||||
|
||||
EXPOSE ${WINE_CSP_PORT}
|
||||
|
||||
# Health check using custom script that probes /health endpoint
|
||||
# Extended start_period due to Wine initialization time
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=90s --retries=3 \
|
||||
CMD /usr/local/bin/healthcheck.sh
|
||||
|
||||
# Volumes for persistence and CSP installer
|
||||
# - Wine prefix: stores CSP installation, certificates, keys
|
||||
# - CSP installer: mount customer-provided CryptoPro MSI here
|
||||
# - Logs: service logs
|
||||
VOLUME ["/home/${APP_USER}/.wine", "/opt/cryptopro", "/var/log/wine-csp"]
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
CMD ["wine64", "/app/WineCspService.exe"]
|
||||
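Build/run sketch based on the header comments, the declared volumes, and the entrypoint's full-mode behavior; the MSI path on the host is illustrative.

```bash
# Build from the repository root.
docker buildx build -f ops/wine-csp/Dockerfile -t wine-csp:latest .

# Run in full mode with a customer-provided CryptoPro MSI mounted at the expected path.
docker run -d --name wine-csp \
  -p 5099:5099 \
  -e WINE_CSP_MODE=full \
  -v "$PWD/cryptopro/csp-installer.msi:/opt/cryptopro/csp-installer.msi:ro" \
  -v wine-csp-prefix:/home/winecsp/.wine \
  wine-csp:latest
```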
227
ops/wine-csp/entrypoint.sh
Normal file
@@ -0,0 +1,227 @@
|
||||
#!/bin/bash
|
||||
# Wine CSP Service Entrypoint
|
||||
#
|
||||
# Initializes Wine environment and starts the WineCspService under Wine.
|
||||
# For TEST VECTOR GENERATION ONLY - not for production signing.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Configuration
|
||||
# ------------------------------------------------------------------------------
|
||||
WINE_CSP_PORT="${WINE_CSP_PORT:-5099}"
|
||||
WINE_CSP_MODE="${WINE_CSP_MODE:-limited}"
|
||||
WINE_CSP_INSTALLER_PATH="${WINE_CSP_INSTALLER_PATH:-/opt/cryptopro/csp-installer.msi}"
|
||||
WINE_CSP_LOG_LEVEL="${WINE_CSP_LOG_LEVEL:-Information}"
|
||||
WINE_PREFIX="${WINEPREFIX:-$HOME/.wine}"
|
||||
DISPLAY="${DISPLAY:-:99}"
|
||||
|
||||
# Marker files
|
||||
CSP_INSTALLED_MARKER="${WINE_PREFIX}/.csp_installed"
|
||||
WINE_INITIALIZED_MARKER="${WINE_PREFIX}/.wine_initialized"
|
||||
|
||||
# Log prefix for structured logging
|
||||
log() {
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] [entrypoint] $*"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] [entrypoint] [ERROR] $*" >&2
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Virtual Framebuffer Management
|
||||
# ------------------------------------------------------------------------------
|
||||
start_xvfb() {
|
||||
if ! pgrep -x Xvfb > /dev/null; then
|
||||
log "Starting Xvfb virtual framebuffer on display ${DISPLAY}"
|
||||
Xvfb "${DISPLAY}" -screen 0 1024x768x24 &
|
||||
sleep 2
|
||||
fi
|
||||
}
|
||||
|
||||
stop_xvfb() {
|
||||
if pgrep -x Xvfb > /dev/null; then
|
||||
log "Stopping Xvfb"
|
||||
pkill -x Xvfb || true
|
||||
fi
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Wine Initialization
|
||||
# ------------------------------------------------------------------------------
|
||||
initialize_wine() {
|
||||
if [[ -f "${WINE_INITIALIZED_MARKER}" ]]; then
|
||||
log "Wine prefix already initialized"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log "Initializing Wine prefix at ${WINE_PREFIX}"
|
||||
|
||||
start_xvfb
|
||||
|
||||
# Initialize Wine prefix
|
||||
wine64 wineboot --init 2>/dev/null || true
|
||||
wineserver --wait
|
||||
|
||||
# Set Windows version for CryptoPro compatibility
|
||||
wine64 reg add "HKCU\\Software\\Wine\\Version" /v Windows /d "win10" /f 2>/dev/null || true
|
||||
wineserver --wait
|
||||
|
||||
# Create marker
|
||||
touch "${WINE_INITIALIZED_MARKER}"
|
||||
log "Wine prefix initialized successfully"
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# CryptoPro CSP Installation
|
||||
# ------------------------------------------------------------------------------
|
||||
install_cryptopro() {
|
||||
# Check if already installed
|
||||
if [[ -f "${CSP_INSTALLED_MARKER}" ]]; then
|
||||
log "CryptoPro CSP already installed"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Check if installer is available
|
||||
if [[ ! -f "${WINE_CSP_INSTALLER_PATH}" ]]; then
|
||||
log "CryptoPro CSP installer not found at ${WINE_CSP_INSTALLER_PATH}"
|
||||
log "Service will run in limited mode without CSP"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log "Installing CryptoPro CSP from ${WINE_CSP_INSTALLER_PATH}"
|
||||
|
||||
start_xvfb
|
||||
|
||||
# Run the CSP installation script
|
||||
if /usr/local/bin/install-csp.sh; then
|
||||
touch "${CSP_INSTALLED_MARKER}"
|
||||
log "CryptoPro CSP installed successfully"
|
||||
else
|
||||
log_error "CryptoPro CSP installation failed"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Service Configuration
|
||||
# ------------------------------------------------------------------------------
|
||||
configure_service() {
|
||||
log "Configuring Wine CSP service"
|
||||
log " Mode: ${WINE_CSP_MODE}"
|
||||
log " Port: ${WINE_CSP_PORT}"
|
||||
log " Log Level: ${WINE_CSP_LOG_LEVEL}"
|
||||
|
||||
# Configure Wine debug output based on log level
|
||||
case "${WINE_CSP_LOG_LEVEL}" in
|
||||
Trace|Debug)
|
||||
export WINEDEBUG="warn+all"
|
||||
;;
|
||||
Information)
|
||||
export WINEDEBUG="-all"
|
||||
;;
|
||||
Warning|Error|Critical)
|
||||
export WINEDEBUG="-all"
|
||||
;;
|
||||
*)
|
||||
export WINEDEBUG="-all"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Set ASP.NET Core environment
|
||||
export ASPNETCORE_URLS="http://+:${WINE_CSP_PORT}"
|
||||
export ASPNETCORE_ENVIRONMENT="${ASPNETCORE_ENVIRONMENT:-Production}"
|
||||
export Logging__LogLevel__Default="${WINE_CSP_LOG_LEVEL}"
|
||||
|
||||
# Check if CSP is available
|
||||
if [[ -f "${CSP_INSTALLED_MARKER}" ]]; then
|
||||
export WINE_CSP_CSP_AVAILABLE="true"
|
||||
log "CryptoPro CSP is available"
|
||||
else
|
||||
export WINE_CSP_CSP_AVAILABLE="false"
|
||||
log "Running without CryptoPro CSP (limited mode)"
|
||||
fi
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Startup Validation
|
||||
# ------------------------------------------------------------------------------
|
||||
validate_environment() {
|
||||
log "Validating environment"
|
||||
|
||||
# Check Wine is available
|
||||
if ! command -v wine64 &> /dev/null; then
|
||||
log_error "wine64 not found in PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check application exists
|
||||
if [[ ! -f "/app/WineCspService.exe" ]]; then
|
||||
log_error "WineCspService.exe not found at /app/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify Wine prefix is writable
|
||||
if [[ ! -w "${WINE_PREFIX}" ]]; then
|
||||
log_error "Wine prefix ${WINE_PREFIX} is not writable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log "Environment validation passed"
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Signal Handlers
|
||||
# ------------------------------------------------------------------------------
|
||||
cleanup() {
|
||||
log "Received shutdown signal, cleaning up..."
|
||||
|
||||
# Stop Wine server gracefully
|
||||
wineserver -k 15 2>/dev/null || true
|
||||
sleep 2
|
||||
wineserver -k 9 2>/dev/null || true
|
||||
|
||||
stop_xvfb
|
||||
|
||||
log "Cleanup complete"
|
||||
exit 0
|
||||
}
|
||||
|
||||
trap cleanup SIGTERM SIGINT SIGQUIT
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Main Entry Point
|
||||
# ------------------------------------------------------------------------------
|
||||
main() {
|
||||
log "=========================================="
|
||||
log "Wine CSP Service Entrypoint"
|
||||
log "=========================================="
|
||||
log "WARNING: For TEST VECTOR GENERATION ONLY"
|
||||
log "=========================================="
|
||||
|
||||
validate_environment
|
||||
initialize_wine
|
||||
|
||||
# Only attempt CSP installation in full mode
|
||||
if [[ "${WINE_CSP_MODE}" == "full" ]]; then
|
||||
install_cryptopro
|
||||
fi
|
||||
|
||||
configure_service
|
||||
|
||||
# Start Xvfb for the main process
|
||||
start_xvfb
|
||||
|
||||
log "Starting WineCspService..."
|
||||
log "Listening on port ${WINE_CSP_PORT}"
|
||||
|
||||
# Execute the command passed to the container (or default)
|
||||
if [[ $# -gt 0 ]]; then
|
||||
exec "$@"
|
||||
else
|
||||
exec wine64 /app/WineCspService.exe
|
||||
fi
|
||||
}
|
||||
|
||||
main "$@"
|
||||
24
ops/wine-csp/healthcheck.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
# Wine CSP Service Health Check
|
||||
#
|
||||
# Probes the /health endpoint to determine if the service is healthy.
|
||||
# Returns 0 (healthy) or 1 (unhealthy).
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
WINE_CSP_PORT="${WINE_CSP_PORT:-5099}"
|
||||
HEALTH_ENDPOINT="http://127.0.0.1:${WINE_CSP_PORT}/health"
|
||||
TIMEOUT_SECONDS=8
|
||||
|
||||
# Perform health check
|
||||
response=$(wget -q -O - --timeout="${TIMEOUT_SECONDS}" "${HEALTH_ENDPOINT}" 2>/dev/null) || exit 1
|
||||
|
||||
# Verify response contains expected status
|
||||
if echo "${response}" | grep -q '"status":"Healthy"'; then
|
||||
exit 0
|
||||
elif echo "${response}" | grep -q '"status":"Degraded"'; then
|
||||
# Degraded is acceptable (e.g., CSP not installed but service running)
|
||||
exit 0
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
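The check can be exercised by hand against a running container (container name from the run sketch above):

```bash
docker exec wine-csp /usr/local/bin/healthcheck.sh && echo "healthy or degraded" || echo "unhealthy"
curl -s http://127.0.0.1:5099/health
```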
215
ops/wine-csp/install-csp.sh
Normal file
@@ -0,0 +1,215 @@
|
||||
#!/bin/bash
|
||||
# CryptoPro CSP Installation Script for Wine
|
||||
#
|
||||
# Installs customer-provided CryptoPro CSP MSI under Wine environment.
|
||||
# This script is called by entrypoint.sh when CSP installer is available.
|
||||
#
|
||||
# IMPORTANT: CryptoPro CSP is commercial software. The installer MSI must be
|
||||
# provided by the customer with appropriate licensing. StellaOps does not
|
||||
# distribute CryptoPro CSP.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Configuration
|
||||
# ------------------------------------------------------------------------------
|
||||
WINE_CSP_INSTALLER_PATH="${WINE_CSP_INSTALLER_PATH:-/opt/cryptopro/csp-installer.msi}"
|
||||
WINE_PREFIX="${WINEPREFIX:-$HOME/.wine}"
|
||||
DISPLAY="${DISPLAY:-:99}"
|
||||
|
||||
# Expected CSP installation paths (under Wine prefix)
|
||||
CSP_PROGRAM_FILES="${WINE_PREFIX}/drive_c/Program Files/Crypto Pro"
|
||||
CSP_MARKER="${WINE_PREFIX}/.csp_installed"
|
||||
CSP_VERSION_FILE="${WINE_PREFIX}/.csp_version"
|
||||
|
||||
# Installation timeout (5 minutes)
|
||||
INSTALL_TIMEOUT=300
|
||||
|
||||
# Log prefix
|
||||
log() {
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] [install-csp] $*"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] [install-csp] [ERROR] $*" >&2
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Pre-Installation Checks
|
||||
# ------------------------------------------------------------------------------
|
||||
check_prerequisites() {
|
||||
log "Checking installation prerequisites"
|
||||
|
||||
# Check installer exists
|
||||
if [[ ! -f "${WINE_CSP_INSTALLER_PATH}" ]]; then
|
||||
log_error "CSP installer not found: ${WINE_CSP_INSTALLER_PATH}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Verify file is an MSI
|
||||
if ! file "${WINE_CSP_INSTALLER_PATH}" | grep -qi "microsoft installer"; then
|
||||
log_error "File does not appear to be an MSI installer"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Check Wine is available
|
||||
if ! command -v wine64 &> /dev/null; then
|
||||
log_error "wine64 not found"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Check Wine prefix exists
|
||||
if [[ ! -d "${WINE_PREFIX}" ]]; then
|
||||
log_error "Wine prefix not initialized: ${WINE_PREFIX}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log "Prerequisites check passed"
|
||||
return 0
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Installation
|
||||
# ------------------------------------------------------------------------------
|
||||
install_csp() {
|
||||
log "Starting CryptoPro CSP installation"
|
||||
log "Installer: ${WINE_CSP_INSTALLER_PATH}"
|
||||
|
||||
# Create installation log directory
|
||||
local log_dir="${WINE_PREFIX}/csp_install_logs"
|
||||
mkdir -p "${log_dir}"
|
||||
|
||||
local install_log="${log_dir}/install_$(date -u '+%Y%m%d_%H%M%S').log"
|
||||
|
||||
# Run MSI installer silently
|
||||
# /qn = silent mode, /norestart = don't restart, /l*v = verbose logging
|
||||
log "Running msiexec installer (this may take several minutes)..."
|
||||
|
||||
timeout "${INSTALL_TIMEOUT}" wine64 msiexec /i "${WINE_CSP_INSTALLER_PATH}" \
|
||||
/qn /norestart /l*v "${install_log}" \
|
||||
AGREETOLICENSE=Yes \
|
||||
2>&1 | tee -a "${install_log}" || {
|
||||
local exit_code=$?
|
||||
log_error "MSI installation failed with exit code: ${exit_code}"
|
||||
log_error "Check installation log: ${install_log}"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Wait for Wine to finish
|
||||
wineserver --wait
|
||||
|
||||
log "MSI installation completed"
|
||||
return 0
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Post-Installation Verification
|
||||
# ------------------------------------------------------------------------------
|
||||
verify_installation() {
|
||||
log "Verifying CryptoPro CSP installation"
|
||||
|
||||
# Check for CSP program files
|
||||
if [[ -d "${CSP_PROGRAM_FILES}" ]]; then
|
||||
log "Found CSP directory: ${CSP_PROGRAM_FILES}"
|
||||
else
|
||||
log_error "CSP program directory not found"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Check for key CSP DLLs
|
||||
local csp_dll="${WINE_PREFIX}/drive_c/windows/system32/cpcspi.dll"
|
||||
if [[ -f "${csp_dll}" ]]; then
|
||||
log "Found CSP DLL: ${csp_dll}"
|
||||
else
|
||||
log "Warning: CSP DLL not found at expected location"
|
||||
# This might be OK depending on CSP version
|
||||
fi
|
||||
|
||||
# Try to query CSP registry entries
|
||||
local csp_registry
|
||||
csp_registry=$(wine64 reg query "HKLM\\SOFTWARE\\Crypto Pro" 2>/dev/null || true)
|
||||
if [[ -n "${csp_registry}" ]]; then
|
||||
log "CSP registry entries found"
|
||||
else
|
||||
log "Warning: CSP registry entries not found"
|
||||
fi
|
||||
|
||||
# Extract version if possible
|
||||
local version="unknown"
|
||||
if [[ -f "${CSP_PROGRAM_FILES}/CSP/version.txt" ]]; then
|
||||
version=$(cat "${CSP_PROGRAM_FILES}/CSP/version.txt" 2>/dev/null || echo "unknown")
|
||||
fi
|
||||
echo "${version}" > "${CSP_VERSION_FILE}"
|
||||
log "CSP version: ${version}"
|
||||
|
||||
log "Installation verification completed"
|
||||
return 0
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Cleanup on Failure
|
||||
# ------------------------------------------------------------------------------
|
||||
cleanup_failed_install() {
|
||||
log "Cleaning up failed installation"
|
||||
|
||||
# Try to uninstall via msiexec
|
||||
wine64 msiexec /x "${WINE_CSP_INSTALLER_PATH}" /qn 2>/dev/null || true
|
||||
wineserver --wait
|
||||
|
||||
# Remove any partial installation directories
|
||||
rm -rf "${CSP_PROGRAM_FILES}" 2>/dev/null || true
|
||||
|
||||
# Remove marker files
|
||||
rm -f "${CSP_MARKER}" "${CSP_VERSION_FILE}" 2>/dev/null || true
|
||||
|
||||
log "Cleanup completed"
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Main
|
||||
# ------------------------------------------------------------------------------
|
||||
main() {
|
||||
log "=========================================="
|
||||
log "CryptoPro CSP Installation Script"
|
||||
log "=========================================="
|
||||
|
||||
# Check if already installed
|
||||
if [[ -f "${CSP_MARKER}" ]]; then
|
||||
log "CryptoPro CSP is already installed"
|
||||
if [[ -f "${CSP_VERSION_FILE}" ]]; then
|
||||
log "Installed version: $(cat "${CSP_VERSION_FILE}")"
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Run prerequisite checks
|
||||
if ! check_prerequisites; then
|
||||
log_error "Prerequisites check failed"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Perform installation
|
||||
if ! install_csp; then
|
||||
log_error "Installation failed"
|
||||
cleanup_failed_install
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Verify installation
|
||||
if ! verify_installation; then
|
||||
log_error "Installation verification failed"
|
||||
cleanup_failed_install
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Create installation marker
|
||||
touch "${CSP_MARKER}"
|
||||
|
||||
log "=========================================="
|
||||
log "CryptoPro CSP installation successful"
|
||||
log "=========================================="
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@@ -0,0 +1,118 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Attestor.Core.Options;
|
||||
using StellaOps.Attestor.Core.Signing;
|
||||
using StellaOps.Attestor.Infrastructure.Signing;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Plugin.SmSoft;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Tests.Signing;
|
||||
|
||||
public class Sm2AttestorTests : IDisposable
|
||||
{
|
||||
private readonly string? _gate;
|
||||
|
||||
public Sm2AttestorTests()
|
||||
{
|
||||
_gate = Environment.GetEnvironmentVariable("SM_SOFT_ALLOWED");
|
||||
Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", "1");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Registry_ResolvesSm2_WhenGateEnabled()
|
||||
{
|
||||
var keyPath = Sm2TestKeyFactory.WriteTempPem();
|
||||
|
||||
var options = Options.Create(new AttestorOptions
|
||||
{
|
||||
Signing = new AttestorOptions.SigningOptions
|
||||
{
|
||||
PreferredProviders = new[] { "cn.sm.soft" },
|
||||
Keys = new List<AttestorOptions.SigningKeyOptions>
|
||||
{
|
||||
new()
|
||||
{
|
||||
KeyId = "sm2-key",
|
||||
Algorithm = SignatureAlgorithms.Sm2,
|
||||
KeyPath = keyPath,
|
||||
MaterialFormat = "pem",
|
||||
Enabled = true,
|
||||
Provider = "cn.sm.soft"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
var registry = new AttestorSigningKeyRegistry(
|
||||
options,
|
||||
TimeProvider.System,
|
||||
NullLogger<AttestorSigningKeyRegistry>.Instance);
|
||||
|
||||
var entry = registry.GetRequired("sm2-key");
|
||||
Assert.Equal(SignatureAlgorithms.Sm2, entry.Algorithm);
|
||||
Assert.Equal("cn.sm.soft", entry.ProviderName);
|
||||
|
||||
var signer = registry.Registry.ResolveSigner(CryptoCapability.Signing, SignatureAlgorithms.Sm2, entry.Key.Reference).Signer;
|
||||
var payload = System.Text.Encoding.UTF8.GetBytes("sm2-attestor-test");
|
||||
var sig = signer.SignAsync(payload, CancellationToken.None).Result;
|
||||
Assert.True(signer.VerifyAsync(payload, sig, CancellationToken.None).Result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Registry_Throws_WhenGateDisabled()
|
||||
{
|
||||
Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", null);
|
||||
var keyPath = Sm2TestKeyFactory.WriteTempPem();
|
||||
|
||||
var options = Options.Create(new AttestorOptions
|
||||
{
|
||||
Signing = new AttestorOptions.SigningOptions
|
||||
{
|
||||
PreferredProviders = new[] { "cn.sm.soft" },
|
||||
Keys = new List<AttestorOptions.SigningKeyOptions>
|
||||
{
|
||||
new()
|
||||
{
|
||||
KeyId = "sm2-key",
|
||||
Algorithm = SignatureAlgorithms.Sm2,
|
||||
KeyPath = keyPath,
|
||||
MaterialFormat = "pem",
|
||||
Enabled = true,
|
||||
Provider = "cn.sm.soft"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Assert.Throws<InvalidOperationException>(() =>
|
||||
new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger<AttestorSigningKeyRegistry>.Instance));
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", _gate);
|
||||
}
|
||||
}
|
||||
|
||||
internal static class Sm2TestKeyFactory
|
||||
{
|
||||
public static string WriteTempPem()
|
||||
{
|
||||
var curve = Org.BouncyCastle.Asn1.GM.GMNamedCurves.GetByName("SM2P256V1");
|
||||
var domain = new Org.BouncyCastle.Crypto.Parameters.ECDomainParameters(curve.Curve, curve.G, curve.N, curve.H, curve.GetSeed());
|
||||
var generator = new Org.BouncyCastle.Crypto.Generators.ECKeyPairGenerator("EC");
|
||||
generator.Init(new Org.BouncyCastle.Crypto.Parameters.ECKeyGenerationParameters(domain, new Org.BouncyCastle.Security.SecureRandom()));
|
||||
var pair = generator.GenerateKeyPair();
|
||||
var privInfo = Org.BouncyCastle.Asn1.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(pair.Private);
|
||||
var pem = Convert.ToBase64String(privInfo.GetDerEncoded());
|
||||
var path = System.IO.Path.GetTempFileName();
|
||||
System.IO.File.WriteAllText(path, "-----BEGIN PRIVATE KEY-----\n" + pem + "\n-----END PRIVATE KEY-----\n");
|
||||
return path;
|
||||
}
|
||||
}
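As a sanity check (illustrative only: the helper class below is hypothetical, and the BouncyCastle calls are assumed from the same package the test project references), the single-line PKCS#8 PEM written by WriteTempPem can be decoded back into EC key parameters:

using System;
using System.IO;
using System.Linq;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Security;

internal static class Sm2PemRoundTripSketch
{
    public static bool CanReload(string pemPath)
    {
        // Strip the PEM armor lines and decode the base64 PrivateKeyInfo body.
        var body = string.Concat(
            File.ReadAllLines(pemPath).Where(line => !line.StartsWith("-----", StringComparison.Ordinal)));

        var key = PrivateKeyFactory.CreateKey(Convert.FromBase64String(body));

        // For an SM2 key this should be EC private key material on the sm2p256v1 curve.
        return key is ECPrivateKeyParameters ec && ec.Parameters.Curve is not null;
    }
}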
|
||||
@@ -8,6 +8,7 @@
|
||||
<UseConcelierTestInfra>false</UseConcelierTestInfra>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BouncyCastle.Cryptography" Version="2.5.1" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
|
||||
@@ -136,9 +136,9 @@ internal static class CommandFactory
|
||||
var cvss = new Command("cvss", "CVSS v4.0 receipt operations (score, show, history, export)." );
|
||||
|
||||
var score = new Command("score", "Create a CVSS v4 receipt for a vulnerability.");
|
||||
var vulnOption = new Option<string>("--vuln") { Description = "Vulnerability identifier (e.g., CVE).", IsRequired = true };
|
||||
var policyFileOption = new Option<string>("--policy-file") { Description = "Path to CvssPolicy JSON file.", IsRequired = true };
|
||||
var vectorOption = new Option<string>("--vector") { Description = "CVSS:4.0 vector string.", IsRequired = true };
|
||||
var vulnOption = new Option<string>("--vuln") { Description = "Vulnerability identifier (e.g., CVE).", Required = true };
|
||||
var policyFileOption = new Option<string>("--policy-file") { Description = "Path to CvssPolicy JSON file.", Required = true };
|
||||
var vectorOption = new Option<string>("--vector") { Description = "CVSS:4.0 vector string.", Required = true };
|
||||
var jsonOption = new Option<bool>("--json") { Description = "Emit JSON output." };
|
||||
score.Add(vulnOption);
|
||||
score.Add(policyFileOption);
|
||||
@@ -3672,24 +3672,20 @@ internal static class CommandFactory
|
||||
};
|
||||
var expFormatOption = new Option<string>("--format")
|
||||
{
|
||||
Description = "Export format (ndjson, json).",
|
||||
DefaultValueFactory = _ => "ndjson"
|
||||
};
|
||||
Description = "Export format (ndjson, json)."
|
||||
}.SetDefaultValue("ndjson");
|
||||
var expIncludeEvidenceOption = new Option<bool>("--include-evidence")
|
||||
{
|
||||
Description = "Include evidence data in export (default: true).",
|
||||
DefaultValueFactory = _ => true
|
||||
};
|
||||
Description = "Include evidence data in export (default: true)."
|
||||
}.SetDefaultValue(true);
|
||||
var expIncludeLedgerOption = new Option<bool>("--include-ledger")
|
||||
{
|
||||
Description = "Include workflow ledger in export (default: true).",
|
||||
DefaultValueFactory = _ => true
|
||||
};
|
||||
Description = "Include workflow ledger in export (default: true)."
|
||||
}.SetDefaultValue(true);
|
||||
var expSignedOption = new Option<bool>("--signed")
|
||||
{
|
||||
Description = "Request signed export bundle (default: true).",
|
||||
DefaultValueFactory = _ => true
|
||||
};
|
||||
Description = "Request signed export bundle (default: true)."
|
||||
}.SetDefaultValue(true);
|
||||
var expOutputOption = new Option<string>("--output")
|
||||
{
|
||||
Description = "Output file path for the export bundle.",
|
||||
@@ -10637,3 +10633,4 @@ internal static class CommandFactory
|
||||
return airgap;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
10
src/Cli/StellaOps.Cli/Commands/CommandLineException.cs
Normal file
@@ -0,0 +1,10 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
internal sealed class CommandLineException : Exception
|
||||
{
|
||||
public CommandLineException(string message) : base(message)
|
||||
{
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Cli.Services;
|
||||
using StellaOps.Cli.Extensions;
|
||||
using StellaOps.Infrastructure.Postgres.Migrations;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
@@ -32,30 +33,38 @@ internal static class SystemCommandBuilder
|
||||
Option<bool> verboseOption,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var moduleOption = new Option<string?>(
|
||||
"--module",
|
||||
description: "Module name (Authority, Scheduler, Concelier, Policy, Notify, Excititor, all)");
|
||||
var categoryOption = new Option<string?>(
|
||||
"--category",
|
||||
description: "Migration category (startup, release, seed, data)");
|
||||
var dryRunOption = new Option<bool>("--dry-run", description: "List migrations without executing");
|
||||
var connectionOption = new Option<string?>(
|
||||
"--connection",
|
||||
description: "PostgreSQL connection string override (otherwise uses STELLAOPS_POSTGRES_* env vars)");
|
||||
var timeoutOption = new Option<int?>(
|
||||
"--timeout",
|
||||
description: "Command timeout in seconds for each migration (default 300).");
|
||||
var forceOption = new Option<bool>(
|
||||
"--force",
|
||||
description: "Allow execution of release migrations without --dry-run.");
|
||||
var moduleOption = new Option<string?>("--module")
|
||||
{
|
||||
Description = "Module name (Authority, Scheduler, Concelier, Policy, Notify, Excititor, all)"
|
||||
};
|
||||
var categoryOption = new Option<string?>("--category")
|
||||
{
|
||||
Description = "Migration category (startup, release, seed, data)"
|
||||
};
|
||||
var dryRunOption = new Option<bool>("--dry-run")
|
||||
{
|
||||
Description = "List migrations without executing"
|
||||
};
|
||||
var connectionOption = new Option<string?>("--connection")
|
||||
{
|
||||
Description = "PostgreSQL connection string override (otherwise uses STELLAOPS_POSTGRES_* env vars)"
|
||||
};
|
||||
var timeoutOption = new Option<int?>("--timeout")
|
||||
{
|
||||
Description = "Command timeout in seconds for each migration (default 300)."
|
||||
};
|
||||
var forceOption = new Option<bool>("--force")
|
||||
{
|
||||
Description = "Allow execution of release migrations without --dry-run."
|
||||
};
|
||||
|
||||
var run = new Command("migrations-run", "Run migrations for the selected module(s).");
|
||||
run.AddOption(moduleOption);
|
||||
run.AddOption(categoryOption);
|
||||
run.AddOption(dryRunOption);
|
||||
run.AddOption(connectionOption);
|
||||
run.AddOption(timeoutOption);
|
||||
run.AddOption(forceOption);
|
||||
run.Add(moduleOption);
|
||||
run.Add(categoryOption);
|
||||
run.Add(dryRunOption);
|
||||
run.Add(connectionOption);
|
||||
run.Add(timeoutOption);
|
||||
run.Add(forceOption);
|
||||
run.SetAction(async parseResult =>
|
||||
{
|
||||
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
|
||||
@@ -91,8 +100,8 @@ internal static class SystemCommandBuilder
|
||||
});
|
||||
|
||||
var status = new Command("migrations-status", "Show migration status for the selected module(s).");
|
||||
status.AddOption(moduleOption);
|
||||
status.AddOption(connectionOption);
|
||||
status.Add(moduleOption);
|
||||
status.Add(connectionOption);
|
||||
status.SetAction(async parseResult =>
|
||||
{
|
||||
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
|
||||
@@ -117,8 +126,8 @@ internal static class SystemCommandBuilder
|
||||
});
|
||||
|
||||
var verify = new Command("migrations-verify", "Verify migration checksums for the selected module(s).");
|
||||
verify.AddOption(moduleOption);
|
||||
verify.AddOption(connectionOption);
|
||||
verify.Add(moduleOption);
|
||||
verify.Add(connectionOption);
|
||||
verify.SetAction(async parseResult =>
|
||||
{
|
||||
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
using System.CommandLine;
|
||||
using StellaOps.Cli.Extensions;
|
||||
using StellaOps.Cli.Output;
|
||||
|
||||
namespace StellaOps.Cli.Configuration;
|
||||
@@ -54,23 +55,23 @@ public sealed class GlobalOptions
|
||||
/// </summary>
|
||||
public static IEnumerable<Option> CreateGlobalOptions()
|
||||
{
|
||||
yield return new Option<string?>("--profile", "-p")
|
||||
yield return new Option<string?>("--profile", new[] { "-p" })
|
||||
{
|
||||
Description = "Profile name to use for this invocation"
|
||||
};
|
||||
|
||||
yield return new Option<OutputFormat>("--output", "-o")
|
||||
var outputOption = new Option<OutputFormat>("--output", new[] { "-o" })
|
||||
{
|
||||
Description = "Output format (table, json, yaml)",
|
||||
DefaultValueFactory = _ => OutputFormat.Table
|
||||
};
|
||||
Description = "Output format (table, json, yaml)"
|
||||
}.SetDefaultValue(OutputFormat.Table);
|
||||
yield return outputOption;
|
||||
|
||||
yield return new Option<bool>("--verbose", "-v")
|
||||
yield return new Option<bool>("--verbose", new[] { "-v" })
|
||||
{
|
||||
Description = "Enable verbose output"
|
||||
};
|
||||
|
||||
yield return new Option<bool>("--quiet", "-q")
|
||||
yield return new Option<bool>("--quiet", new[] { "-q" })
|
||||
{
|
||||
Description = "Quiet mode - suppress non-error output"
|
||||
};
|
||||
|
||||
@@ -1,32 +1,44 @@
|
||||
using System;
|
||||
using System.CommandLine;
|
||||
|
||||
namespace StellaOps.Cli.Extensions;
|
||||
|
||||
/// <summary>
|
||||
/// Compatibility extensions for System.CommandLine 2.0.0-beta5+ API changes.
|
||||
/// These restore the older extension method patterns that were used in earlier versions.
|
||||
/// See: https://learn.microsoft.com/en-us/dotnet/standard/commandline/migration-guide-2.0.0-beta5
|
||||
/// These restore the older helper methods the codebase relied on.
|
||||
/// </summary>
|
||||
public static class CommandLineExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Sets the default value for an option (compatibility shim for older API).
|
||||
/// In beta5+, this maps to DefaultValueFactory.
|
||||
/// Set a default value for an option.
|
||||
/// </summary>
|
||||
public static Option<T> SetDefaultValue<T>(this Option<T> option, T defaultValue)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(option);
|
||||
option.DefaultValueFactory = _ => defaultValue;
|
||||
return option;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Restricts the option to accept only the specified values (compatibility shim).
|
||||
/// Works for both Option<string> and Option<string?>.
|
||||
/// Restrict the option to a fixed set of values and add completions.
|
||||
/// </summary>
|
||||
public static Option<T> FromAmong<T>(this Option<T> option, params string[] allowedValues)
|
||||
where T : class?
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(option);
|
||||
if (allowedValues is { Length: > 0 })
|
||||
{
|
||||
option.AcceptOnlyFromAmong(allowedValues);
|
||||
}
|
||||
|
||||
return option;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Mark the option as required (compatibility shim for the old Required property).
|
||||
/// </summary>
|
||||
public static Option<T> Required<T>(this Option<T> option, bool isRequired = true)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(option);
|
||||
option.Required = isRequired;
|
||||
return option;
|
||||
}
|
||||
}
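For orientation, a minimal sketch of how these shims compose with the beta5 parse/invoke flow adopted elsewhere in this commit (the command and option names here are illustrative only, not part of the CLI):

using System;
using System.CommandLine;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Cli.Extensions;

internal static class ShimUsageSample
{
    public static Task<int> RunAsync(string[] args)
    {
        // The shims restore the old fluent style on top of the beta5 property-based API.
        var formatOption = new Option<string>("--format")
        {
            Description = "Output format."
        }.SetDefaultValue("ndjson").FromAmong("ndjson", "json");

        // Required is now a plain property, as in the CommandFactory changes above.
        var vulnOption = new Option<string>("--vuln")
        {
            Description = "Vulnerability identifier.",
            Required = true
        };

        var root = new RootCommand("Illustrative sample.");
        root.Add(formatOption);
        root.Add(vulnOption);
        root.SetAction(async parseResult =>
        {
            // Bound values are read from the ParseResult in beta5.
            var format = parseResult.GetValue(formatOption);
            var vuln = parseResult.GetValue(vulnOption);
            Console.WriteLine($"{vuln} -> {format}");
            await Task.CompletedTask;
        });

        // beta5 flow: parse first, then invoke the bound action.
        return root.Parse(args).InvokeAsync(CancellationToken.None);
    }
}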
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.CommandLine;
|
||||
using System.CommandLine.Invocation;
|
||||
using System.IO;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
@@ -250,11 +251,11 @@ internal static class Program
|
||||
};
|
||||
|
||||
var rootCommand = CommandFactory.Create(serviceProvider, options, cts.Token, loggerFactory);
|
||||
var commandConfiguration = new CommandLineConfiguration(rootCommand);
|
||||
int commandExit;
|
||||
try
|
||||
{
|
||||
commandExit = await commandConfiguration.InvokeAsync(args, cts.Token).ConfigureAwait(false);
|
||||
var parseResult = rootCommand.Parse(args);
|
||||
commandExit = await parseResult.InvokeAsync(cts.Token).ConfigureAwait(false);
|
||||
}
|
||||
catch (AirGapEgressBlockedException ex)
|
||||
{
|
||||
|
||||
@@ -9,6 +9,8 @@ using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Auth.Client;
|
||||
using StellaOps.Cli.Configuration;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Cli.Extensions;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
@@ -2,10 +2,10 @@ using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Models;
|
||||
using StellaOps.AirGap.Importer.Repositories;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using ImportModels = StellaOps.AirGap.Importer.Models;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
@@ -238,10 +238,10 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
||||
try
|
||||
{
|
||||
var envelopeJson = await File.ReadAllTextAsync(dsseFile, cancellationToken).ConfigureAwait(false);
|
||||
var envelope = DsseEnvelope.Parse(envelopeJson);
|
||||
var envelope = ImportModels.DsseEnvelope.Parse(envelopeJson);
|
||||
|
||||
// Load trust roots if provided
|
||||
TrustRootConfig trustRoots;
|
||||
ImportModels.TrustRootConfig trustRoots;
|
||||
if (!string.IsNullOrWhiteSpace(trustRootsPath) && File.Exists(trustRootsPath))
|
||||
{
|
||||
trustRoots = await LoadTrustRootsAsync(trustRootsPath, cancellationToken).ConfigureAwait(false);
|
||||
@@ -287,7 +287,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
|
||||
private static async Task<ImportModels.TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
|
||||
{
|
||||
var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
@@ -324,7 +324,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
||||
}
|
||||
}
|
||||
|
||||
return new TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
|
||||
return new ImportModels.TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
|
||||
}
|
||||
|
||||
private async Task<List<string>> CopyArtifactsAsync(string bundleDir, string dataStorePath, MirrorBundle manifest, CancellationToken cancellationToken)
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Policy.AuthSignals;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
/// <summary>
|
||||
/// Maps advisory linksets into the shared Policy/Auth/Signals contract so policy enrichment tasks can start.
|
||||
/// This is a minimal, fact-only projection (no weighting or merge logic).
|
||||
/// </summary>
|
||||
public static class PolicyAuthSignalFactory
|
||||
{
|
||||
public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(linkset);
|
||||
|
||||
var firstPurl = linkset.Normalized?.Purls?.FirstOrDefault();
|
||||
|
||||
var evidence = new List<EvidenceRef>
|
||||
{
|
||||
new()
|
||||
{
|
||||
Kind = "linkset",
|
||||
Uri = $"cas://linksets/{linkset.AdvisoryId}",
|
||||
Digest = "sha256:pending" // real digest filled when CAS manifests are available
|
||||
}
|
||||
};
|
||||
|
||||
return new PolicyAuthSignal
|
||||
{
|
||||
Id = linkset.AdvisoryId,
|
||||
Tenant = linkset.TenantId,
|
||||
Subject = firstPurl ?? $"advisory:{linkset.Source}:{linkset.AdvisoryId}",
|
||||
SignalType = "reachability",
|
||||
Source = linkset.Source,
|
||||
Confidence = linkset.Confidence,
|
||||
Evidence = evidence,
|
||||
Created = linkset.CreatedAt.UtcDateTime
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Policy.AuthSignals;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Temporary bridge to consume the shared Policy/Auth/Signals contract package so downstream POLICY tasks can start.
|
||||
/// </summary>
|
||||
public static class AuthSignalsPackage
|
||||
{
|
||||
public static PolicyAuthSignal CreateSample() => new()
|
||||
{
|
||||
Id = "sample",
|
||||
Tenant = "urn:tenant:sample",
|
||||
Subject = "purl:pkg:maven/org.example/app@1.0.0",
|
||||
SignalType = "reachability",
|
||||
Source = "concelier",
|
||||
Evidence = new List<EvidenceRef>
|
||||
{
|
||||
new()
|
||||
{
|
||||
Kind = "linkset",
|
||||
Uri = "cas://linksets/sample",
|
||||
Digest = "sha256:stub"
|
||||
}
|
||||
},
|
||||
Created = DateTime.UtcNow
|
||||
};
|
||||
}
|
||||
@@ -12,7 +12,6 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Include="Cronos" Version="0.10.0" />
|
||||
<PackageReference Include="StellaOps.Policy.AuthSignals" Version="0.1.0-alpha" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
|
||||
|
||||
@@ -0,0 +1,300 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Notifications;
|
||||
|
||||
/// <summary>
|
||||
/// Event types for policy profile notifications per docs/modules/policy/notifications.md.
|
||||
/// </summary>
|
||||
public static class PolicyProfileNotificationEventTypes
|
||||
{
|
||||
public const string ProfileCreated = "policy.profile.created";
|
||||
public const string ProfileActivated = "policy.profile.activated";
|
||||
public const string ProfileDeactivated = "policy.profile.deactivated";
|
||||
public const string ThresholdChanged = "policy.profile.threshold_changed";
|
||||
public const string OverrideAdded = "policy.profile.override_added";
|
||||
public const string OverrideRemoved = "policy.profile.override_removed";
|
||||
public const string SimulationReady = "policy.profile.simulation_ready";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notification event for policy profile lifecycle changes.
|
||||
/// Follows the contract at docs/modules/policy/notifications.md.
|
||||
/// </summary>
|
||||
public sealed record PolicyProfileNotificationEvent
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique event identifier (UUIDv7 for time-ordered deduplication).
|
||||
/// </summary>
|
||||
[JsonPropertyName("event_id")]
|
||||
public required string EventId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Event type from PolicyProfileNotificationEventTypes.
|
||||
/// </summary>
|
||||
[JsonPropertyName("event_type")]
|
||||
public required string EventType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when the event was emitted.
|
||||
/// </summary>
|
||||
[JsonPropertyName("emitted_at")]
|
||||
public required DateTimeOffset EmittedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("tenant_id")]
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("profile_id")]
|
||||
public required string ProfileId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile version affected by this event.
|
||||
/// </summary>
|
||||
[JsonPropertyName("profile_version")]
|
||||
public required string ProfileVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable reason for the change.
|
||||
/// </summary>
|
||||
[JsonPropertyName("change_reason")]
|
||||
public string? ChangeReason { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Actor who triggered the event.
|
||||
/// </summary>
|
||||
[JsonPropertyName("actor")]
|
||||
public NotificationActor? Actor { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Risk thresholds (populated for threshold_changed events).
|
||||
/// </summary>
|
||||
[JsonPropertyName("thresholds")]
|
||||
public NotificationThresholds? Thresholds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Effective scope for the profile.
|
||||
/// </summary>
|
||||
[JsonPropertyName("effective_scope")]
|
||||
public NotificationEffectiveScope? EffectiveScope { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hash of the profile bundle.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hash")]
|
||||
public NotificationHash? Hash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Related URLs for profile, diff, and simulation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("links")]
|
||||
public NotificationLinks? Links { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Trace context for observability.
|
||||
/// </summary>
|
||||
[JsonPropertyName("trace")]
|
||||
public NotificationTraceContext? Trace { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Override details (populated for override_added/removed events).
|
||||
/// </summary>
|
||||
[JsonPropertyName("override_details")]
|
||||
public NotificationOverrideDetails? OverrideDetails { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Simulation details (populated for simulation_ready events).
|
||||
/// </summary>
|
||||
[JsonPropertyName("simulation_details")]
|
||||
public NotificationSimulationDetails? SimulationDetails { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Actor information for notifications.
|
||||
/// </summary>
|
||||
public sealed record NotificationActor
|
||||
{
|
||||
/// <summary>
|
||||
/// Actor type: "user" or "system".
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public required string Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Actor identifier (email, service name, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Risk thresholds for notifications.
|
||||
/// </summary>
|
||||
public sealed record NotificationThresholds
|
||||
{
|
||||
[JsonPropertyName("info")]
|
||||
public double? Info { get; init; }
|
||||
|
||||
[JsonPropertyName("low")]
|
||||
public double? Low { get; init; }
|
||||
|
||||
[JsonPropertyName("medium")]
|
||||
public double? Medium { get; init; }
|
||||
|
||||
[JsonPropertyName("high")]
|
||||
public double? High { get; init; }
|
||||
|
||||
[JsonPropertyName("critical")]
|
||||
public double? Critical { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Effective scope for profile application.
|
||||
/// </summary>
|
||||
public sealed record NotificationEffectiveScope
|
||||
{
|
||||
[JsonPropertyName("tenants")]
|
||||
public IReadOnlyList<string>? Tenants { get; init; }
|
||||
|
||||
[JsonPropertyName("projects")]
|
||||
public IReadOnlyList<string>? Projects { get; init; }
|
||||
|
||||
[JsonPropertyName("purl_patterns")]
|
||||
public IReadOnlyList<string>? PurlPatterns { get; init; }
|
||||
|
||||
[JsonPropertyName("cpe_patterns")]
|
||||
public IReadOnlyList<string>? CpePatterns { get; init; }
|
||||
|
||||
[JsonPropertyName("tags")]
|
||||
public IReadOnlyList<string>? Tags { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Hash information for profile content.
|
||||
/// </summary>
|
||||
public sealed record NotificationHash
|
||||
{
|
||||
[JsonPropertyName("algorithm")]
|
||||
public required string Algorithm { get; init; }
|
||||
|
||||
[JsonPropertyName("value")]
|
||||
public required string Value { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Related URLs for the notification.
|
||||
/// </summary>
|
||||
public sealed record NotificationLinks
|
||||
{
|
||||
[JsonPropertyName("profile_url")]
|
||||
public string? ProfileUrl { get; init; }
|
||||
|
||||
[JsonPropertyName("diff_url")]
|
||||
public string? DiffUrl { get; init; }
|
||||
|
||||
[JsonPropertyName("simulation_url")]
|
||||
public string? SimulationUrl { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trace context for distributed tracing.
|
||||
/// </summary>
|
||||
public sealed record NotificationTraceContext
|
||||
{
|
||||
[JsonPropertyName("trace_id")]
|
||||
public string? TraceId { get; init; }
|
||||
|
||||
[JsonPropertyName("span_id")]
|
||||
public string? SpanId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Override details for override_added/removed events.
|
||||
/// </summary>
|
||||
public sealed record NotificationOverrideDetails
|
||||
{
|
||||
[JsonPropertyName("override_id")]
|
||||
public string? OverrideId { get; init; }
|
||||
|
||||
[JsonPropertyName("override_type")]
|
||||
public string? OverrideType { get; init; }
|
||||
|
||||
[JsonPropertyName("target")]
|
||||
public string? Target { get; init; }
|
||||
|
||||
[JsonPropertyName("action")]
|
||||
public string? Action { get; init; }
|
||||
|
||||
[JsonPropertyName("justification")]
|
||||
public string? Justification { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Simulation details for simulation_ready events.
|
||||
/// </summary>
|
||||
public sealed record NotificationSimulationDetails
|
||||
{
|
||||
[JsonPropertyName("simulation_id")]
|
||||
public string? SimulationId { get; init; }
|
||||
|
||||
[JsonPropertyName("findings_count")]
|
||||
public int? FindingsCount { get; init; }
|
||||
|
||||
[JsonPropertyName("high_impact_count")]
|
||||
public int? HighImpactCount { get; init; }
|
||||
|
||||
[JsonPropertyName("completed_at")]
|
||||
public DateTimeOffset? CompletedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to publish a notification via webhook.
|
||||
/// </summary>
|
||||
public sealed record WebhookDeliveryRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Target webhook URL.
|
||||
/// </summary>
|
||||
public required string Url { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The notification event to deliver.
|
||||
/// </summary>
|
||||
public required PolicyProfileNotificationEvent Event { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Shared secret for HMAC signature (X-Stella-Signature header).
|
||||
/// </summary>
|
||||
public string? SharedSecret { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for policy profile notifications.
|
||||
/// </summary>
|
||||
public sealed class PolicyProfileNotificationOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Topic name for notifications service delivery.
|
||||
/// Default: notifications.policy.profiles
|
||||
/// </summary>
|
||||
public string TopicName { get; set; } = "notifications.policy.profiles";
|
||||
|
||||
/// <summary>
|
||||
/// Base URL for generating profile links.
|
||||
/// </summary>
|
||||
public string? BaseUrl { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to include trace context in notifications.
|
||||
/// </summary>
|
||||
public bool IncludeTraceContext { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether notifications are enabled.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
}
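For context, a short hypothetical sketch (the sample class and all values below are placeholders, not part of this change) of how one of these events serializes; the JsonPropertyName attributes on the records yield snake_case keys such as "event_id", "event_type" and "profile_version":

using System;
using System.Text.Json;
using StellaOps.Policy.Engine.Notifications;

internal static class NotificationPayloadSample
{
    public static string Build()
    {
        var evt = new PolicyProfileNotificationEvent
        {
            EventId = "0192f3a1-7c2e-7d41-9a6b-2f9c1e4d8a10", // placeholder UUIDv7
            EventType = PolicyProfileNotificationEventTypes.ThresholdChanged,
            EmittedAt = DateTimeOffset.UtcNow,
            TenantId = "tenant-1",
            ProfileId = "default",
            ProfileVersion = "3",
            ChangeReason = "Risk thresholds updated",
            Actor = new NotificationActor { Type = "user", Id = "analyst@example.org" },
            Thresholds = new NotificationThresholds { High = 7.0, Critical = 9.0 },
            Hash = new NotificationHash { Algorithm = "sha256", Value = "0f9c..." }
        };

        // The attribute-driven names apply regardless of the serializer's naming policy.
        return JsonSerializer.Serialize(evt);
    }
}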
|
||||
@@ -0,0 +1,396 @@
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Notifications;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for publishing policy profile notification events.
|
||||
/// </summary>
|
||||
public interface IPolicyProfileNotificationPublisher
|
||||
{
|
||||
/// <summary>
|
||||
/// Publishes a notification event to the configured transport.
|
||||
/// </summary>
|
||||
Task PublishAsync(PolicyProfileNotificationEvent notification, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Delivers a notification via webhook with HMAC signature.
|
||||
/// </summary>
|
||||
Task<bool> DeliverWebhookAsync(WebhookDeliveryRequest request, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Logging-based notification publisher for policy profile events.
|
||||
/// Logs notifications as structured events for downstream consumption.
|
||||
/// </summary>
|
||||
internal sealed class LoggingPolicyProfileNotificationPublisher : IPolicyProfileNotificationPublisher
|
||||
{
|
||||
private readonly ILogger<LoggingPolicyProfileNotificationPublisher> _logger;
|
||||
private readonly PolicyProfileNotificationOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
public LoggingPolicyProfileNotificationPublisher(
|
||||
ILogger<LoggingPolicyProfileNotificationPublisher> logger,
|
||||
IOptions<PolicyProfileNotificationOptions> options,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_options = options?.Value ?? new PolicyProfileNotificationOptions();
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task PublishAsync(PolicyProfileNotificationEvent notification, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(notification);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Policy profile notifications disabled; skipping event {EventId} type {EventType}",
|
||||
notification.EventId,
|
||||
notification.EventType);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
var payload = JsonSerializer.Serialize(notification, JsonOptions);
|
||||
|
||||
_logger.LogInformation(
|
||||
"PolicyProfileNotification topic={Topic} event_id={EventId} event_type={EventType} tenant={TenantId} profile={ProfileId}@{ProfileVersion} payload={Payload}",
|
||||
_options.TopicName,
|
||||
notification.EventId,
|
||||
notification.EventType,
|
||||
notification.TenantId,
|
||||
notification.ProfileId,
|
||||
notification.ProfileVersion,
|
||||
payload);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task<bool> DeliverWebhookAsync(WebhookDeliveryRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var payload = JsonSerializer.Serialize(request.Event, JsonOptions);
|
||||
var signature = ComputeHmacSignature(payload, request.SharedSecret);
|
||||
|
||||
_logger.LogInformation(
|
||||
"PolicyProfileWebhook url={Url} event_id={EventId} event_type={EventType} signature={Signature}",
|
||||
request.Url,
|
||||
request.Event.EventId,
|
||||
request.Event.EventType,
|
||||
signature ?? "(no secret)");
|
||||
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
|
||||
private static string? ComputeHmacSignature(string payload, string? sharedSecret)
|
||||
{
|
||||
if (string.IsNullOrEmpty(sharedSecret))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var keyBytes = Encoding.UTF8.GetBytes(sharedSecret);
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payload);
|
||||
|
||||
using var hmac = new HMACSHA256(keyBytes);
|
||||
var hashBytes = hmac.ComputeHash(payloadBytes);
|
||||
return Convert.ToHexStringLower(hashBytes);
|
||||
}
|
||||
}
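// Receiver-side counterpart (illustrative only, not part of this change): verifies the
// X-Stella-Signature header produced by ComputeHmacSignature above. It assumes the header
// carries the lowercase hex HMAC-SHA256 of the raw request body and nothing else; any
// prefix or casing convention beyond that is an assumption, not a documented contract.
internal static class WebhookSignatureVerifierSketch
{
    public static bool IsValid(string rawBody, string? signatureHeader, string sharedSecret)
    {
        if (string.IsNullOrEmpty(signatureHeader) || string.IsNullOrEmpty(sharedSecret))
        {
            return false;
        }

        using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(sharedSecret));
        var expected = Convert.ToHexStringLower(hmac.ComputeHash(Encoding.UTF8.GetBytes(rawBody)));
        var presented = signatureHeader.Trim().ToLowerInvariant();

        // Constant-time comparison avoids leaking how much of the signature matched.
        return CryptographicOperations.FixedTimeEquals(
            Encoding.UTF8.GetBytes(expected),
            Encoding.UTF8.GetBytes(presented));
    }
}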
|
||||
|
||||
/// <summary>
|
||||
/// Factory for creating policy profile notification events.
|
||||
/// </summary>
|
||||
public sealed class PolicyProfileNotificationFactory
|
||||
{
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly PolicyProfileNotificationOptions _options;
|
||||
|
||||
public PolicyProfileNotificationFactory(
|
||||
TimeProvider? timeProvider = null,
|
||||
PolicyProfileNotificationOptions? options = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_options = options ?? new PolicyProfileNotificationOptions();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a profile created notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateProfileCreatedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string? hash,
|
||||
NotificationEffectiveScope? scope = null)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.ProfileCreated,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
"New profile draft created",
|
||||
actorId,
|
||||
hash,
|
||||
scope: scope);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a profile activated notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateProfileActivatedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string? hash,
|
||||
NotificationEffectiveScope? scope = null)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.ProfileActivated,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
"Profile version activated",
|
||||
actorId,
|
||||
hash,
|
||||
scope: scope);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a profile deactivated notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateProfileDeactivatedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string? reason,
|
||||
string? hash)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.ProfileDeactivated,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
reason ?? "Profile version deactivated",
|
||||
actorId,
|
||||
hash);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a threshold changed notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateThresholdChangedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string? reason,
|
||||
NotificationThresholds thresholds,
|
||||
string? hash,
|
||||
NotificationEffectiveScope? scope = null)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.ThresholdChanged,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
reason ?? "Risk thresholds updated",
|
||||
actorId,
|
||||
hash,
|
||||
thresholds: thresholds,
|
||||
scope: scope);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an override added notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateOverrideAddedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
NotificationOverrideDetails overrideDetails,
|
||||
string? hash)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.OverrideAdded,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
$"Override added: {overrideDetails.OverrideType}",
|
||||
actorId,
|
||||
hash,
|
||||
overrideDetails: overrideDetails);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an override removed notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateOverrideRemovedEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
NotificationOverrideDetails overrideDetails,
|
||||
string? hash)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.OverrideRemoved,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
$"Override removed: {overrideDetails.OverrideId}",
|
||||
actorId,
|
||||
hash,
|
||||
overrideDetails: overrideDetails);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a simulation ready notification event.
|
||||
/// </summary>
|
||||
public PolicyProfileNotificationEvent CreateSimulationReadyEvent(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
NotificationSimulationDetails simulationDetails,
|
||||
string? hash)
|
||||
{
|
||||
return CreateEvent(
|
||||
PolicyProfileNotificationEventTypes.SimulationReady,
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
"Simulation results available",
|
||||
actorId: null,
|
||||
hash,
|
||||
simulationDetails: simulationDetails);
|
||||
}
|
||||
|
||||
private PolicyProfileNotificationEvent CreateEvent(
|
||||
string eventType,
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string changeReason,
|
||||
string? actorId,
|
||||
string? hash,
|
||||
NotificationThresholds? thresholds = null,
|
||||
NotificationEffectiveScope? scope = null,
|
||||
NotificationOverrideDetails? overrideDetails = null,
|
||||
NotificationSimulationDetails? simulationDetails = null)
|
||||
{
|
||||
var eventId = GenerateUuidV7();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
NotificationActor? actor = null;
|
||||
if (!string.IsNullOrWhiteSpace(actorId))
|
||||
{
|
||||
actor = new NotificationActor
|
||||
{
|
||||
Type = actorId.Contains('@') ? "user" : "system",
|
||||
Id = actorId
|
||||
};
|
||||
}
|
||||
|
||||
NotificationHash? hashInfo = null;
|
||||
if (!string.IsNullOrWhiteSpace(hash))
|
||||
{
|
||||
hashInfo = new NotificationHash
|
||||
{
|
||||
Algorithm = "sha256",
|
||||
Value = hash
|
||||
};
|
||||
}
|
||||
|
||||
NotificationLinks? links = null;
|
||||
if (!string.IsNullOrWhiteSpace(_options.BaseUrl))
|
||||
{
|
||||
links = new NotificationLinks
|
||||
{
|
||||
ProfileUrl = $"{_options.BaseUrl}/api/risk/profiles/{profileId}",
|
||||
DiffUrl = $"{_options.BaseUrl}/api/risk/profiles/{profileId}/diff",
|
||||
SimulationUrl = simulationDetails?.SimulationId is not null
|
||||
? $"{_options.BaseUrl}/api/risk/simulations/results/{simulationDetails.SimulationId}"
|
||||
: null
|
||||
};
|
||||
}
|
||||
|
||||
NotificationTraceContext? trace = null;
|
||||
if (_options.IncludeTraceContext)
|
||||
{
|
||||
var activity = Activity.Current;
|
||||
if (activity is not null)
|
||||
{
|
||||
trace = new NotificationTraceContext
|
||||
{
|
||||
TraceId = activity.TraceId.ToString(),
|
||||
SpanId = activity.SpanId.ToString()
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyProfileNotificationEvent
|
||||
{
|
||||
EventId = eventId,
|
||||
EventType = eventType,
|
||||
EmittedAt = now,
|
||||
TenantId = tenantId,
|
||||
ProfileId = profileId,
|
||||
ProfileVersion = profileVersion,
|
||||
ChangeReason = changeReason,
|
||||
Actor = actor,
|
||||
Thresholds = thresholds,
|
||||
EffectiveScope = scope,
|
||||
Hash = hashInfo,
|
||||
Links = links,
|
||||
Trace = trace,
|
||||
OverrideDetails = overrideDetails,
|
||||
SimulationDetails = simulationDetails
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a UUIDv7 (time-ordered UUID) for event identification.
|
||||
/// </summary>
|
||||
private string GenerateUuidV7()
|
||||
{
|
||||
var timestamp = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
|
||||
var randomBytes = new byte[10];
|
||||
RandomNumberGenerator.Fill(randomBytes);
|
||||
|
||||
var bytes = new byte[16];
|
||||
|
||||
// First 6 bytes: timestamp (48 bits)
|
||||
bytes[0] = (byte)((timestamp >> 40) & 0xFF);
|
||||
bytes[1] = (byte)((timestamp >> 32) & 0xFF);
|
||||
bytes[2] = (byte)((timestamp >> 24) & 0xFF);
|
||||
bytes[3] = (byte)((timestamp >> 16) & 0xFF);
|
||||
bytes[4] = (byte)((timestamp >> 8) & 0xFF);
|
||||
bytes[5] = (byte)(timestamp & 0xFF);
|
||||
|
||||
// Version 7 (4 bits) + random (12 bits)
|
||||
bytes[6] = (byte)(0x70 | (randomBytes[0] & 0x0F));
|
||||
bytes[7] = randomBytes[1];
|
||||
|
||||
// Variant (2 bits) + random (62 bits)
|
||||
bytes[8] = (byte)(0x80 | (randomBytes[2] & 0x3F));
|
||||
Array.Copy(randomBytes, 3, bytes, 9, 7);
|
||||
|
||||
// Construct as big-endian so the RFC 9562 byte layout (and time ordering) survives ToString().
return new Guid(bytes, bigEndian: true).ToString();
|
||||
}
|
||||
}
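As an aside, a brief sketch (the demo class is hypothetical; it only uses the public factory members shown above) of why time-ordered UUIDv7 event IDs matter here: two events emitted in sequence produce lexicographically increasing identifiers, which is what makes them usable for downstream de-duplication windows.

using System;
using System.Threading;
using StellaOps.Policy.Engine.Notifications;

internal static class EventIdOrderingSketch
{
    public static void Demo()
    {
        var factory = new PolicyProfileNotificationFactory();

        var first = factory.CreateProfileCreatedEvent("tenant-1", "default", "1", actorId: null, hash: null);
        Thread.Sleep(5); // land in a later millisecond so the 48-bit timestamp prefix differs
        var second = factory.CreateProfileCreatedEvent("tenant-1", "default", "2", actorId: null, hash: null);

        // Expected to print True: the timestamp prefix dominates the string ordering.
        Console.WriteLine(string.CompareOrdinal(first.EventId, second.EventId) < 0);
    }
}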
|
||||
@@ -0,0 +1,467 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.RiskProfile.Lifecycle;
|
||||
using StellaOps.Policy.RiskProfile.Models;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Notifications;
|
||||
|
||||
/// <summary>
|
||||
/// Service for publishing policy profile lifecycle notifications.
|
||||
/// Integrates with the RiskProfileLifecycleService to emit events.
|
||||
/// </summary>
|
||||
public sealed class PolicyProfileNotificationService
|
||||
{
|
||||
private readonly IPolicyProfileNotificationPublisher _publisher;
|
||||
private readonly PolicyProfileNotificationFactory _factory;
|
||||
private readonly PolicyProfileNotificationOptions _options;
|
||||
private readonly ILogger<PolicyProfileNotificationService> _logger;
|
||||
|
||||
public PolicyProfileNotificationService(
|
||||
IPolicyProfileNotificationPublisher publisher,
|
||||
PolicyProfileNotificationFactory factory,
|
||||
IOptions<PolicyProfileNotificationOptions> options,
|
||||
ILogger<PolicyProfileNotificationService> logger)
|
||||
{
|
||||
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
|
||||
_factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
_options = options?.Value ?? new PolicyProfileNotificationOptions();
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that a new profile version was created.
|
||||
/// </summary>
|
||||
public async Task NotifyProfileCreatedAsync(
|
||||
string tenantId,
|
||||
RiskProfileModel profile,
|
||||
string? actorId,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(profile);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var scope = ExtractEffectiveScope(profile);
|
||||
var notification = _factory.CreateProfileCreatedEvent(
|
||||
tenantId,
|
||||
profile.Id,
|
||||
profile.Version,
|
||||
actorId,
|
||||
hash,
|
||||
scope);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish profile created notification for {ProfileId}@{Version}",
|
||||
profile.Id, profile.Version);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that a profile version was activated.
|
||||
/// </summary>
|
||||
public async Task NotifyProfileActivatedAsync(
|
||||
string tenantId,
|
||||
RiskProfileModel profile,
|
||||
string? actorId,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(profile);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var scope = ExtractEffectiveScope(profile);
|
||||
var notification = _factory.CreateProfileActivatedEvent(
|
||||
tenantId,
|
||||
profile.Id,
|
||||
profile.Version,
|
||||
actorId,
|
||||
hash,
|
||||
scope);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish profile activated notification for {ProfileId}@{Version}",
|
||||
profile.Id, profile.Version);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that a profile version was deactivated (deprecated or archived).
|
||||
/// </summary>
|
||||
public async Task NotifyProfileDeactivatedAsync(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string? reason,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var notification = _factory.CreateProfileDeactivatedEvent(
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
actorId,
|
||||
reason,
|
||||
hash);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish profile deactivated notification for {ProfileId}@{Version}",
|
||||
profileId, profileVersion);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that risk thresholds were changed.
|
||||
/// </summary>
|
||||
public async Task NotifyThresholdChangedAsync(
|
||||
string tenantId,
|
||||
RiskProfileModel profile,
|
||||
string? actorId,
|
||||
string? reason,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(profile);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var thresholds = ExtractThresholds(profile);
|
||||
var scope = ExtractEffectiveScope(profile);
|
||||
var notification = _factory.CreateThresholdChangedEvent(
|
||||
tenantId,
|
||||
profile.Id,
|
||||
profile.Version,
|
||||
actorId,
|
||||
reason,
|
||||
thresholds,
|
||||
hash,
|
||||
scope);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish threshold changed notification for {ProfileId}@{Version}",
|
||||
profile.Id, profile.Version);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that an override was added to a profile.
|
||||
/// </summary>
|
||||
public async Task NotifyOverrideAddedAsync(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string overrideId,
|
||||
string overrideType,
|
||||
string? target,
|
||||
string? action,
|
||||
string? justification,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var overrideDetails = new NotificationOverrideDetails
|
||||
{
|
||||
OverrideId = overrideId,
|
||||
OverrideType = overrideType,
|
||||
Target = target,
|
||||
Action = action,
|
||||
Justification = justification
|
||||
};
|
||||
|
||||
var notification = _factory.CreateOverrideAddedEvent(
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
actorId,
|
||||
overrideDetails,
|
||||
hash);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish override added notification for {ProfileId}@{Version}",
|
||||
profileId, profileVersion);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that an override was removed from a profile.
|
||||
/// </summary>
|
||||
public async Task NotifyOverrideRemovedAsync(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string? actorId,
|
||||
string overrideId,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var overrideDetails = new NotificationOverrideDetails
|
||||
{
|
||||
OverrideId = overrideId
|
||||
};
|
||||
|
||||
var notification = _factory.CreateOverrideRemovedEvent(
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
actorId,
|
||||
overrideDetails,
|
||||
hash);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish override removed notification for {ProfileId}@{Version}",
|
||||
profileId, profileVersion);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies that simulation results are ready for consumption.
|
||||
/// </summary>
|
||||
public async Task NotifySimulationReadyAsync(
|
||||
string tenantId,
|
||||
string profileId,
|
||||
string profileVersion,
|
||||
string simulationId,
|
||||
int findingsCount,
|
||||
int highImpactCount,
|
||||
DateTimeOffset completedAt,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var simulationDetails = new NotificationSimulationDetails
|
||||
{
|
||||
SimulationId = simulationId,
|
||||
FindingsCount = findingsCount,
|
||||
HighImpactCount = highImpactCount,
|
||||
CompletedAt = completedAt
|
||||
};
|
||||
|
||||
var notification = _factory.CreateSimulationReadyEvent(
|
||||
tenantId,
|
||||
profileId,
|
||||
profileVersion,
|
||||
simulationDetails,
|
||||
hash);
|
||||
|
||||
await _publisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to publish simulation ready notification for {ProfileId}@{Version}",
|
||||
profileId, profileVersion);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Notifies based on a lifecycle event from the RiskProfileLifecycleService.
|
||||
/// </summary>
|
||||
public async Task NotifyFromLifecycleEventAsync(
|
||||
string tenantId,
|
||||
RiskProfileLifecycleEvent lifecycleEvent,
|
||||
RiskProfileModel? profile,
|
||||
string? hash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(lifecycleEvent);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
switch (lifecycleEvent.EventType)
|
||||
{
|
||||
case RiskProfileLifecycleEventType.Created:
|
||||
if (profile is not null)
|
||||
{
|
||||
await NotifyProfileCreatedAsync(tenantId, profile, lifecycleEvent.Actor, hash, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
break;
|
||||
|
||||
case RiskProfileLifecycleEventType.Activated:
|
||||
if (profile is not null)
|
||||
{
|
||||
await NotifyProfileActivatedAsync(tenantId, profile, lifecycleEvent.Actor, hash, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
break;
|
||||
|
||||
case RiskProfileLifecycleEventType.Deprecated:
|
||||
case RiskProfileLifecycleEventType.Archived:
|
||||
await NotifyProfileDeactivatedAsync(
|
||||
tenantId,
|
||||
lifecycleEvent.ProfileId,
|
||||
lifecycleEvent.Version,
|
||||
lifecycleEvent.Actor,
|
||||
lifecycleEvent.Reason,
|
||||
hash,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
break;
|
||||
|
||||
case RiskProfileLifecycleEventType.Restored:
|
||||
// Restored profiles go back to deprecated status; no dedicated notification
|
||||
_logger.LogDebug("Profile {ProfileId}@{Version} restored; no notification emitted",
|
||||
lifecycleEvent.ProfileId, lifecycleEvent.Version);
|
||||
break;
|
||||
|
||||
default:
|
||||
_logger.LogDebug("Unhandled lifecycle event type {EventType} for {ProfileId}@{Version}",
|
||||
lifecycleEvent.EventType, lifecycleEvent.ProfileId, lifecycleEvent.Version);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static NotificationEffectiveScope? ExtractEffectiveScope(RiskProfileModel profile)
|
||||
{
|
||||
// Extract scope information from profile metadata if available
|
||||
var metadata = profile.Metadata;
|
||||
if (metadata is null || metadata.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var scope = new NotificationEffectiveScope();
|
||||
var hasAny = false;
|
||||
|
||||
if (metadata.TryGetValue("tenants", out var tenantsObj) && tenantsObj is IEnumerable<object> tenants)
|
||||
{
|
||||
scope = scope with { Tenants = tenants.Select(t => t.ToString()!).ToList() };
|
||||
hasAny = true;
|
||||
}
|
||||
|
||||
if (metadata.TryGetValue("projects", out var projectsObj) && projectsObj is IEnumerable<object> projects)
|
||||
{
|
||||
scope = scope with { Projects = projects.Select(p => p.ToString()!).ToList() };
|
||||
hasAny = true;
|
||||
}
|
||||
|
||||
if (metadata.TryGetValue("purl_patterns", out var purlObj) && purlObj is IEnumerable<object> purls)
|
||||
{
|
||||
scope = scope with { PurlPatterns = purls.Select(p => p.ToString()!).ToList() };
|
||||
hasAny = true;
|
||||
}
|
||||
|
||||
if (metadata.TryGetValue("cpe_patterns", out var cpeObj) && cpeObj is IEnumerable<object> cpes)
|
||||
{
|
||||
scope = scope with { CpePatterns = cpes.Select(c => c.ToString()!).ToList() };
|
||||
hasAny = true;
|
||||
}
|
||||
|
||||
if (metadata.TryGetValue("tags", out var tagsObj) && tagsObj is IEnumerable<object> tags)
|
||||
{
|
||||
scope = scope with { Tags = tags.Select(t => t.ToString()!).ToList() };
|
||||
hasAny = true;
|
||||
}
|
||||
|
||||
return hasAny ? scope : null;
|
||||
}
|
||||
|
||||
private static NotificationThresholds ExtractThresholds(RiskProfileModel profile)
|
||||
{
|
||||
// Extract thresholds from profile overrides
|
||||
var thresholds = new NotificationThresholds();
|
||||
|
||||
// Map severity overrides to threshold values
|
||||
foreach (var severityOverride in profile.Overrides.Severity)
|
||||
{
|
||||
var targetSeverity = severityOverride.Set.ToString().ToLowerInvariant();
|
||||
var threshold = ExtractThresholdValue(severityOverride.When);
|
||||
|
||||
thresholds = targetSeverity switch
|
||||
{
|
||||
"info" or "informational" => thresholds with { Info = threshold },
|
||||
"low" => thresholds with { Low = threshold },
|
||||
"medium" => thresholds with { Medium = threshold },
|
||||
"high" => thresholds with { High = threshold },
|
||||
"critical" => thresholds with { Critical = threshold },
|
||||
_ => thresholds
|
||||
};
|
||||
}
|
||||
|
||||
return thresholds;
|
||||
}
|
||||
|
||||
private static double? ExtractThresholdValue(Dictionary<string, object> conditions)
|
||||
{
|
||||
// Try to extract a numeric threshold from conditions
|
||||
if (conditions.TryGetValue("score_gte", out var scoreGte) && scoreGte is double d1)
|
||||
{
|
||||
return d1;
|
||||
}
|
||||
|
||||
if (conditions.TryGetValue("score_gt", out var scoreGt) && scoreGt is double d2)
|
||||
{
|
||||
return d2;
|
||||
}
|
||||
|
||||
if (conditions.TryGetValue("threshold", out var threshold) && threshold is double d3)
|
||||
{
|
||||
return d3;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
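For orientation, a minimal call-site sketch for the service above, assuming a lifecycle handler that already has the event, the optional profile snapshot, and a content hash in scope (the variable names are illustrative, not part of this change):

        // Hypothetical call site: forward a lifecycle event so subscribers are notified.
        await notificationService.NotifyFromLifecycleEventAsync(
            tenantContext.TenantId,   // tenant owning the profile
            lifecycleEvent,           // event produced by RiskProfileLifecycleService
            profile,                  // may be null for Deprecated/Archived events
            contentHash,              // optional content hash attached to the notification
            cancellationToken);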
@@ -0,0 +1,33 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Notifications;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering policy profile notification services.
|
||||
/// </summary>
|
||||
public static class PolicyProfileNotificationServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds policy profile notification services to the service collection.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyProfileNotifications(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<PolicyProfileNotificationFactory>();
|
||||
services.TryAddSingleton<IPolicyProfileNotificationPublisher, LoggingPolicyProfileNotificationPublisher>();
|
||||
services.TryAddSingleton<PolicyProfileNotificationService>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds policy profile notification services with configuration.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyProfileNotifications(
|
||||
this IServiceCollection services,
|
||||
Action<PolicyProfileNotificationOptions> configure)
|
||||
{
|
||||
services.Configure(configure);
|
||||
return services.AddPolicyProfileNotifications();
|
||||
}
|
||||
}
|
||||
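A minimal registration sketch for the extensions above, assuming a standard ASP.NET Core host; the topic name and base URL values are placeholders:

var builder = WebApplication.CreateBuilder(args);

// Registers the factory, the logging publisher, and PolicyProfileNotificationService.
builder.Services.AddPolicyProfileNotifications(options =>
{
    options.Enabled = true;
    options.TopicName = "policy.profiles";               // placeholder topic
    options.BaseUrl = "https://policy.example.internal"; // placeholder base URL used for links
});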
@@ -0,0 +1,251 @@
|
||||
using System.Security.Claims;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tenancy;
|
||||
|
||||
/// <summary>
|
||||
/// Middleware that extracts tenant context from request headers and validates tenant access.
|
||||
/// Per RLS design at docs/modules/policy/prep/tenant-rls.md.
|
||||
/// </summary>
|
||||
public sealed partial class TenantContextMiddleware
|
||||
{
|
||||
private readonly RequestDelegate _next;
|
||||
private readonly TenantContextOptions _options;
|
||||
private readonly ILogger<TenantContextMiddleware> _logger;
|
||||
|
||||
// Valid tenant/project ID pattern: alphanumeric, dashes, underscores
|
||||
[GeneratedRegex("^[a-zA-Z0-9_-]+$", RegexOptions.Compiled)]
|
||||
private static partial Regex ValidIdPattern();
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
public TenantContextMiddleware(
|
||||
RequestDelegate next,
|
||||
IOptions<TenantContextOptions> options,
|
||||
ILogger<TenantContextMiddleware> logger)
|
||||
{
|
||||
_next = next ?? throw new ArgumentNullException(nameof(next));
|
||||
_options = options?.Value ?? new TenantContextOptions();
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task InvokeAsync(HttpContext context, ITenantContextAccessor tenantContextAccessor)
|
||||
{
|
||||
// Skip tenant validation for excluded paths
|
||||
if (!_options.Enabled || IsExcludedPath(context.Request.Path))
|
||||
{
|
||||
await _next(context);
|
||||
return;
|
||||
}
|
||||
|
||||
var validationResult = ValidateTenantContext(context);
|
||||
|
||||
if (!validationResult.IsValid)
|
||||
{
|
||||
await WriteTenantErrorResponse(context, validationResult);
|
||||
return;
|
||||
}
|
||||
|
||||
// Set tenant context for the request
|
||||
tenantContextAccessor.TenantContext = validationResult.Context;
|
||||
|
||||
using (_logger.BeginScope(new Dictionary<string, object?>
|
||||
{
|
||||
["tenant_id"] = validationResult.Context?.TenantId,
|
||||
["project_id"] = validationResult.Context?.ProjectId
|
||||
}))
|
||||
{
|
||||
await _next(context);
|
||||
}
|
||||
}
|
||||
|
||||
private bool IsExcludedPath(PathString path)
|
||||
{
|
||||
var pathValue = path.Value ?? string.Empty;
|
||||
return _options.ExcludedPaths.Any(excluded =>
|
||||
pathValue.StartsWith(excluded, StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
private TenantValidationResult ValidateTenantContext(HttpContext context)
|
||||
{
|
||||
// Extract tenant header
|
||||
var tenantHeader = context.Request.Headers[TenantContextConstants.TenantHeader].FirstOrDefault();
|
||||
|
||||
if (string.IsNullOrWhiteSpace(tenantHeader))
|
||||
{
|
||||
if (_options.RequireTenantHeader)
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Missing required {Header} header for {Path}",
|
||||
TenantContextConstants.TenantHeader,
|
||||
context.Request.Path);
|
||||
|
||||
return TenantValidationResult.Failure(
|
||||
TenantContextConstants.MissingTenantHeaderErrorCode,
|
||||
$"The {TenantContextConstants.TenantHeader} header is required.");
|
||||
}
|
||||
|
||||
// Use default tenant ID when header is not required
|
||||
tenantHeader = TenantContextConstants.DefaultTenantId;
|
||||
}
|
||||
|
||||
// Validate tenant ID format
|
||||
if (!IsValidTenantId(tenantHeader))
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Invalid tenant ID format: {TenantId}",
|
||||
tenantHeader);
|
||||
|
||||
return TenantValidationResult.Failure(
|
||||
TenantContextConstants.InvalidTenantIdErrorCode,
|
||||
"Invalid tenant ID format. Must be alphanumeric with dashes and underscores.");
|
||||
}
|
||||
|
||||
// Extract project header (optional)
|
||||
var projectHeader = context.Request.Headers[TenantContextConstants.ProjectHeader].FirstOrDefault();
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(projectHeader) && !IsValidProjectId(projectHeader))
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Invalid project ID format: {ProjectId}",
|
||||
projectHeader);
|
||||
|
||||
return TenantValidationResult.Failure(
|
||||
TenantContextConstants.InvalidTenantIdErrorCode,
|
||||
"Invalid project ID format. Must be alphanumeric with dashes and underscores.");
|
||||
}
|
||||
|
||||
// Determine write permission from scopes/claims
|
||||
var canWrite = DetermineWritePermission(context);
|
||||
|
||||
// Extract actor ID
|
||||
var actorId = ExtractActorId(context);
|
||||
|
||||
var tenantContext = TenantContext.ForTenant(
|
||||
tenantHeader,
|
||||
string.IsNullOrWhiteSpace(projectHeader) ? null : projectHeader,
|
||||
canWrite,
|
||||
actorId);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Tenant context established: tenant={TenantId}, project={ProjectId}, canWrite={CanWrite}, actor={ActorId}",
|
||||
tenantContext.TenantId,
|
||||
tenantContext.ProjectId ?? "(none)",
|
||||
tenantContext.CanWrite,
|
||||
tenantContext.ActorId ?? "(anonymous)");
|
||||
|
||||
return TenantValidationResult.Success(tenantContext);
|
||||
}
|
||||
|
||||
private bool IsValidTenantId(string tenantId)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenantId))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (tenantId.Length > _options.MaxTenantIdLength)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return ValidIdPattern().IsMatch(tenantId);
|
||||
}
|
||||
|
||||
private bool IsValidProjectId(string projectId)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(projectId))
|
||||
{
|
||||
return true; // Project ID is optional
|
||||
}
|
||||
|
||||
if (projectId.Length > _options.MaxProjectIdLength)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return ValidIdPattern().IsMatch(projectId);
|
||||
}
|
||||
|
||||
private static bool DetermineWritePermission(HttpContext context)
|
||||
{
|
||||
var user = context.User;
|
||||
if (user?.Identity?.IsAuthenticated != true)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for write-related scopes
|
||||
var hasWriteScope = user.Claims.Any(c =>
|
||||
c.Type == "scope" &&
|
||||
(c.Value.Contains("policy:write", StringComparison.OrdinalIgnoreCase) ||
|
||||
c.Value.Contains("policy:edit", StringComparison.OrdinalIgnoreCase) ||
|
||||
c.Value.Contains("policy:activate", StringComparison.OrdinalIgnoreCase)));
|
||||
|
||||
if (hasWriteScope)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for admin role
|
||||
var hasAdminRole = user.IsInRole("admin") ||
|
||||
user.IsInRole("policy-admin") ||
|
||||
user.HasClaim("role", "admin") ||
|
||||
user.HasClaim("role", "policy-admin");
|
||||
|
||||
return hasAdminRole;
|
||||
}
|
||||
|
||||
private static string? ExtractActorId(HttpContext context)
|
||||
{
|
||||
var user = context.User;
|
||||
|
||||
// Try standard claims
|
||||
var actorId = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
|
||||
?? user?.FindFirst(ClaimTypes.Upn)?.Value
|
||||
?? user?.FindFirst("sub")?.Value
|
||||
?? user?.FindFirst("client_id")?.Value;
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(actorId))
|
||||
{
|
||||
return actorId;
|
||||
}
|
||||
|
||||
// Fall back to header
|
||||
if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) &&
|
||||
!string.IsNullOrWhiteSpace(header))
|
||||
{
|
||||
return header.ToString();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static async Task WriteTenantErrorResponse(HttpContext context, TenantValidationResult result)
|
||||
{
|
||||
context.Response.StatusCode = StatusCodes.Status400BadRequest;
|
||||
context.Response.ContentType = "application/json";
|
||||
|
||||
var errorResponse = new TenantErrorResponse(
|
||||
result.ErrorCode ?? "UNKNOWN_ERROR",
|
||||
result.ErrorMessage ?? "An unknown error occurred.",
|
||||
context.Request.Path.Value ?? "/");
|
||||
|
||||
await context.Response.WriteAsync(
|
||||
JsonSerializer.Serialize(errorResponse, JsonOptions));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Error response for tenant validation failures.
|
||||
/// </summary>
|
||||
internal sealed record TenantErrorResponse(
|
||||
string ErrorCode,
|
||||
string Message,
|
||||
string Path);
|
||||
@@ -0,0 +1,233 @@
|
||||
namespace StellaOps.Policy.Engine.Tenancy;
|
||||
|
||||
/// <summary>
|
||||
/// Constants for tenant context headers and GUCs (PostgreSQL Grand Unified Configuration).
|
||||
/// Per RLS design at docs/modules/policy/prep/tenant-rls.md.
|
||||
/// </summary>
|
||||
public static class TenantContextConstants
|
||||
{
|
||||
/// <summary>
|
||||
/// HTTP header for tenant ID (mandatory).
|
||||
/// </summary>
|
||||
public const string TenantHeader = "X-Stella-Tenant";
|
||||
|
||||
/// <summary>
|
||||
/// HTTP header for project ID (optional).
|
||||
/// </summary>
|
||||
public const string ProjectHeader = "X-Stella-Project";
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL GUC for tenant ID.
|
||||
/// </summary>
|
||||
public const string TenantGuc = "app.tenant_id";
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL GUC for project ID.
|
||||
/// </summary>
|
||||
public const string ProjectGuc = "app.project_id";
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL GUC for write permission.
|
||||
/// </summary>
|
||||
public const string CanWriteGuc = "app.can_write";
|
||||
|
||||
/// <summary>
|
||||
/// Default tenant ID for legacy data migration.
|
||||
/// </summary>
|
||||
public const string DefaultTenantId = "public";
|
||||
|
||||
/// <summary>
|
||||
/// Error code for missing tenant header (deterministic).
|
||||
/// </summary>
|
||||
public const string MissingTenantHeaderErrorCode = "POLICY_TENANT_HEADER_REQUIRED";
|
||||
|
||||
/// <summary>
|
||||
/// Error code for invalid tenant ID format.
|
||||
/// </summary>
|
||||
public const string InvalidTenantIdErrorCode = "POLICY_TENANT_ID_INVALID";
|
||||
|
||||
/// <summary>
|
||||
/// Error code for tenant access denied (403).
|
||||
/// </summary>
|
||||
public const string TenantAccessDeniedErrorCode = "POLICY_TENANT_ACCESS_DENIED";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents the current tenant and project context for a request.
|
||||
/// </summary>
|
||||
public sealed record TenantContext
|
||||
{
|
||||
/// <summary>
|
||||
/// The tenant ID for the current request.
|
||||
/// </summary>
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The project ID for the current request (optional; null for tenant-wide operations).
|
||||
/// </summary>
|
||||
public string? ProjectId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the current request has write permission.
|
||||
/// </summary>
|
||||
public bool CanWrite { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The actor ID (user or system) making the request.
|
||||
/// </summary>
|
||||
public string? ActorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when the context was created.
|
||||
/// </summary>
|
||||
public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a tenant context for a specific tenant.
|
||||
/// </summary>
|
||||
public static TenantContext ForTenant(string tenantId, string? projectId = null, bool canWrite = false, string? actorId = null)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
|
||||
return new TenantContext
|
||||
{
|
||||
TenantId = tenantId,
|
||||
ProjectId = projectId,
|
||||
CanWrite = canWrite,
|
||||
ActorId = actorId,
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for tenant context middleware configuration.
|
||||
/// </summary>
|
||||
public sealed class TenantContextOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Configuration section name.
|
||||
/// </summary>
|
||||
public const string SectionName = "PolicyEngine:Tenancy";
|
||||
|
||||
/// <summary>
|
||||
/// Whether tenant validation is enabled (default: true).
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to require tenant header on all endpoints (default: true).
|
||||
    /// When false, a missing tenant header falls back to <see cref="TenantContextConstants.DefaultTenantId"/>.
|
||||
/// </summary>
|
||||
public bool RequireTenantHeader { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Paths to exclude from tenant validation (e.g., health checks).
|
||||
/// </summary>
|
||||
public List<string> ExcludedPaths { get; set; } = new()
|
||||
{
|
||||
"/healthz",
|
||||
"/readyz",
|
||||
"/.well-known"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Maximum length for tenant ID (default: 256).
|
||||
/// </summary>
|
||||
public int MaxTenantIdLength { get; set; } = 256;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum length for project ID (default: 256).
|
||||
/// </summary>
|
||||
public int MaxProjectIdLength { get; set; } = 256;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to allow multi-tenant queries (default: false).
|
||||
/// When true, users with appropriate scopes can query across tenants.
|
||||
/// </summary>
|
||||
public bool AllowMultiTenantQueries { get; set; } = false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for accessing the current tenant context.
|
||||
/// </summary>
|
||||
public interface ITenantContextAccessor
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets or sets the current tenant context.
|
||||
/// </summary>
|
||||
TenantContext? TenantContext { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of <see cref="ITenantContextAccessor"/> using AsyncLocal.
|
||||
/// </summary>
|
||||
public sealed class TenantContextAccessor : ITenantContextAccessor
|
||||
{
|
||||
private static readonly AsyncLocal<TenantContextHolder> _tenantContextCurrent = new();
|
||||
|
||||
/// <inheritdoc />
|
||||
public TenantContext? TenantContext
|
||||
{
|
||||
get => _tenantContextCurrent.Value?.Context;
|
||||
set
|
||||
{
|
||||
var holder = _tenantContextCurrent.Value;
|
||||
if (holder is not null)
|
||||
{
|
||||
                // Clear the current context trapped in the AsyncLocal, as it's done.
|
||||
holder.Context = null;
|
||||
}
|
||||
|
||||
if (value is not null)
|
||||
{
|
||||
// Use an object to hold the context in the AsyncLocal,
|
||||
                // so it can be cleared in all ExecutionContexts when it's cleared.
|
||||
_tenantContextCurrent.Value = new TenantContextHolder { Context = value };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TenantContextHolder
|
||||
{
|
||||
public TenantContext? Context;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of tenant context validation.
|
||||
/// </summary>
|
||||
public sealed record TenantValidationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the validation succeeded.
|
||||
/// </summary>
|
||||
public bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error code if validation failed.
|
||||
/// </summary>
|
||||
public string? ErrorCode { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if validation failed.
|
||||
/// </summary>
|
||||
public string? ErrorMessage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The validated tenant context if successful.
|
||||
/// </summary>
|
||||
public TenantContext? Context { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a successful validation result.
|
||||
/// </summary>
|
||||
public static TenantValidationResult Success(TenantContext context) =>
|
||||
new() { IsValid = true, Context = context };
|
||||
|
||||
/// <summary>
|
||||
/// Creates a failed validation result.
|
||||
/// </summary>
|
||||
public static TenantValidationResult Failure(string errorCode, string errorMessage) =>
|
||||
new() { IsValid = false, ErrorCode = errorCode, ErrorMessage = errorMessage };
|
||||
}
|
||||
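To connect these types to the RLS design they reference, here is a hypothetical helper showing how the GUC names could be pushed into a PostgreSQL transaction before tenant-scoped queries run. The Npgsql usage and the TenantGucApplier class are assumptions for illustration; the actual data-access layer may differ:

using Npgsql;

namespace StellaOps.Policy.Engine.Tenancy;

internal static class TenantGucApplier
{
    // Applies tenant GUCs with set_config(..., is_local: true) so the values are
    // scoped to the current transaction and visible to RLS policies.
    public static async Task ApplyAsync(
        NpgsqlConnection connection,
        TenantContext tenant,
        CancellationToken cancellationToken)
    {
        await using var command = connection.CreateCommand();
        command.CommandText =
            "SELECT set_config(@tenantGuc, @tenantId, true), set_config(@canWriteGuc, @canWrite, true)";
        command.Parameters.AddWithValue("tenantGuc", TenantContextConstants.TenantGuc);
        command.Parameters.AddWithValue("tenantId", tenant.TenantId);
        command.Parameters.AddWithValue("canWriteGuc", TenantContextConstants.CanWriteGuc);
        command.Parameters.AddWithValue("canWrite", tenant.CanWrite ? "true" : "false");
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }
}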
@@ -0,0 +1,109 @@
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tenancy;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering tenant context services.
|
||||
/// </summary>
|
||||
public static class TenantContextServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds tenant context services to the service collection.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddTenantContext(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<ITenantContextAccessor, TenantContextAccessor>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds tenant context services with configuration.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddTenantContext(
|
||||
this IServiceCollection services,
|
||||
Action<TenantContextOptions> configure)
|
||||
{
|
||||
services.Configure(configure);
|
||||
return services.AddTenantContext();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds tenant context services with configuration from configuration section.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddTenantContext(
|
||||
this IServiceCollection services,
|
||||
IConfiguration configuration,
|
||||
string sectionName = TenantContextOptions.SectionName)
|
||||
{
|
||||
services.Configure<TenantContextOptions>(configuration.GetSection(sectionName));
|
||||
return services.AddTenantContext();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for configuring tenant context middleware.
|
||||
/// </summary>
|
||||
public static class TenantContextApplicationBuilderExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds the tenant context middleware to the application pipeline.
|
||||
/// This middleware extracts tenant/project headers and validates tenant access.
|
||||
/// </summary>
|
||||
public static IApplicationBuilder UseTenantContext(this IApplicationBuilder app)
|
||||
{
|
||||
return app.UseMiddleware<TenantContextMiddleware>();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for endpoint routing to apply tenant requirements.
|
||||
/// </summary>
|
||||
public static class TenantContextEndpointExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Requires tenant context for the endpoint group.
|
||||
/// </summary>
|
||||
public static RouteGroupBuilder RequireTenantContext(this RouteGroupBuilder group)
|
||||
{
|
||||
group.AddEndpointFilter<TenantContextEndpointFilter>();
|
||||
return group;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds a tenant context requirement filter to a route handler.
|
||||
/// </summary>
|
||||
public static RouteHandlerBuilder RequireTenantContext(this RouteHandlerBuilder builder)
|
||||
{
|
||||
builder.AddEndpointFilter<TenantContextEndpointFilter>();
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Endpoint filter that validates tenant context is present.
|
||||
/// </summary>
|
||||
internal sealed class TenantContextEndpointFilter : IEndpointFilter
|
||||
{
|
||||
public async ValueTask<object?> InvokeAsync(
|
||||
EndpointFilterInvocationContext context,
|
||||
EndpointFilterDelegate next)
|
||||
{
|
||||
var tenantAccessor = context.HttpContext.RequestServices
|
||||
.GetService<ITenantContextAccessor>();
|
||||
|
||||
if (tenantAccessor?.TenantContext is null)
|
||||
{
|
||||
return Results.Problem(
|
||||
title: "Tenant context required",
|
||||
detail: $"The {TenantContextConstants.TenantHeader} header is required for this endpoint.",
|
||||
statusCode: StatusCodes.Status400BadRequest,
|
||||
extensions: new Dictionary<string, object?>
|
||||
{
|
||||
["error_code"] = TenantContextConstants.MissingTenantHeaderErrorCode
|
||||
});
|
||||
}
|
||||
|
||||
return await next(context);
|
||||
}
|
||||
}
|
||||
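A minimal pipeline wiring sketch for the extensions above, assuming a minimal-API host; the route and endpoint body are illustrative only:

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddTenantContext(builder.Configuration); // binds the PolicyEngine:Tenancy section

var app = builder.Build();
app.UseTenantContext(); // must run before any tenant-scoped endpoints

app.MapGroup("/api/risk/profiles")
    .RequireTenantContext() // rejects requests that reached here without a tenant context
    .MapGet("/", (ITenantContextAccessor accessor) =>
        Results.Ok(new { tenant = accessor.TenantContext?.TenantId }));

app.Run();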
@@ -0,0 +1,481 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Engine.Notifications;
|
||||
using StellaOps.Policy.RiskProfile.Lifecycle;
|
||||
using StellaOps.Policy.RiskProfile.Models;
|
||||
using Xunit;
|
||||
|
||||
using MsOptions = Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tests.Notifications;
|
||||
|
||||
public sealed class PolicyProfileNotificationServiceTests
|
||||
{
|
||||
private readonly FakeNotificationPublisher _publisher;
|
||||
private readonly PolicyProfileNotificationFactory _factory;
|
||||
private readonly PolicyProfileNotificationOptions _options;
|
||||
private readonly PolicyProfileNotificationService _service;
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
|
||||
public PolicyProfileNotificationServiceTests()
|
||||
{
|
||||
_publisher = new FakeNotificationPublisher();
|
||||
_timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-12-07T12:00:00Z"));
|
||||
_options = new PolicyProfileNotificationOptions
|
||||
{
|
||||
Enabled = true,
|
||||
TopicName = "test.policy.profiles",
|
||||
BaseUrl = "https://policy.test.local"
|
||||
};
|
||||
_factory = new PolicyProfileNotificationFactory(_timeProvider, _options);
|
||||
|
||||
_service = new PolicyProfileNotificationService(
|
||||
_publisher,
|
||||
_factory,
|
||||
MsOptions.Options.Create(_options),
|
||||
NullLogger<PolicyProfileNotificationService>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyProfileCreatedAsync_PublishesEvent()
|
||||
{
|
||||
// Arrange
|
||||
var profile = CreateTestProfile();
|
||||
|
||||
// Act
|
||||
await _service.NotifyProfileCreatedAsync(
|
||||
"tenant-123",
|
||||
profile,
|
||||
"alice@example.com",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileCreated, evt.EventType);
|
||||
Assert.Equal("tenant-123", evt.TenantId);
|
||||
Assert.Equal("test-profile", evt.ProfileId);
|
||||
Assert.Equal("1.0.0", evt.ProfileVersion);
|
||||
Assert.NotNull(evt.Actor);
|
||||
Assert.Equal("user", evt.Actor.Type);
|
||||
Assert.Equal("alice@example.com", evt.Actor.Id);
|
||||
Assert.NotNull(evt.Hash);
|
||||
Assert.Equal("abc123hash", evt.Hash.Value);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyProfileActivatedAsync_PublishesEvent()
|
||||
{
|
||||
// Arrange
|
||||
var profile = CreateTestProfile();
|
||||
|
||||
// Act
|
||||
await _service.NotifyProfileActivatedAsync(
|
||||
"tenant-123",
|
||||
profile,
|
||||
"alice@example.com",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileActivated, evt.EventType);
|
||||
Assert.Equal("tenant-123", evt.TenantId);
|
||||
Assert.Equal("test-profile", evt.ProfileId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyProfileDeactivatedAsync_PublishesEvent()
|
||||
{
|
||||
// Act
|
||||
await _service.NotifyProfileDeactivatedAsync(
|
||||
"tenant-123",
|
||||
"test-profile",
|
||||
"1.0.0",
|
||||
"alice@example.com",
|
||||
"Deprecated in favor of v2.0.0",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileDeactivated, evt.EventType);
|
||||
Assert.Equal("Deprecated in favor of v2.0.0", evt.ChangeReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyThresholdChangedAsync_PublishesEventWithThresholds()
|
||||
{
|
||||
// Arrange
|
||||
var profile = CreateTestProfileWithThresholds();
|
||||
|
||||
// Act
|
||||
await _service.NotifyThresholdChangedAsync(
|
||||
"tenant-123",
|
||||
profile,
|
||||
"alice@example.com",
|
||||
"Increased high/critical thresholds",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ThresholdChanged, evt.EventType);
|
||||
Assert.NotNull(evt.Thresholds);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyOverrideAddedAsync_PublishesEventWithDetails()
|
||||
{
|
||||
// Act
|
||||
await _service.NotifyOverrideAddedAsync(
|
||||
"tenant-123",
|
||||
"test-profile",
|
||||
"1.0.0",
|
||||
"alice@example.com",
|
||||
"override-001",
|
||||
"severity",
|
||||
"CVE-2024-1234",
|
||||
"suppress",
|
||||
"False positive confirmed by security team",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.OverrideAdded, evt.EventType);
|
||||
Assert.NotNull(evt.OverrideDetails);
|
||||
Assert.Equal("override-001", evt.OverrideDetails.OverrideId);
|
||||
Assert.Equal("severity", evt.OverrideDetails.OverrideType);
|
||||
Assert.Equal("CVE-2024-1234", evt.OverrideDetails.Target);
|
||||
Assert.Equal("False positive confirmed by security team", evt.OverrideDetails.Justification);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyOverrideRemovedAsync_PublishesEvent()
|
||||
{
|
||||
// Act
|
||||
await _service.NotifyOverrideRemovedAsync(
|
||||
"tenant-123",
|
||||
"test-profile",
|
||||
"1.0.0",
|
||||
"alice@example.com",
|
||||
"override-001",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.OverrideRemoved, evt.EventType);
|
||||
Assert.NotNull(evt.OverrideDetails);
|
||||
Assert.Equal("override-001", evt.OverrideDetails.OverrideId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifySimulationReadyAsync_PublishesEventWithDetails()
|
||||
{
|
||||
// Act
|
||||
await _service.NotifySimulationReadyAsync(
|
||||
"tenant-123",
|
||||
"test-profile",
|
||||
"1.0.0",
|
||||
"sim-001",
|
||||
findingsCount: 42,
|
||||
highImpactCount: 5,
|
||||
completedAt: _timeProvider.GetUtcNow(),
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.SimulationReady, evt.EventType);
|
||||
Assert.NotNull(evt.SimulationDetails);
|
||||
Assert.Equal("sim-001", evt.SimulationDetails.SimulationId);
|
||||
Assert.Equal(42, evt.SimulationDetails.FindingsCount);
|
||||
Assert.Equal(5, evt.SimulationDetails.HighImpactCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyFromLifecycleEventAsync_Created_PublishesNotification()
|
||||
{
|
||||
// Arrange
|
||||
var profile = CreateTestProfile();
|
||||
var lifecycleEvent = new RiskProfileLifecycleEvent(
|
||||
EventId: "evt-001",
|
||||
ProfileId: "test-profile",
|
||||
Version: "1.0.0",
|
||||
EventType: RiskProfileLifecycleEventType.Created,
|
||||
OldStatus: null,
|
||||
NewStatus: RiskProfileLifecycleStatus.Draft,
|
||||
Timestamp: _timeProvider.GetUtcNow(),
|
||||
Actor: "alice@example.com",
|
||||
Reason: null);
|
||||
|
||||
// Act
|
||||
await _service.NotifyFromLifecycleEventAsync(
|
||||
"tenant-123",
|
||||
lifecycleEvent,
|
||||
profile,
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileCreated, evt.EventType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyFromLifecycleEventAsync_Activated_PublishesNotification()
|
||||
{
|
||||
// Arrange
|
||||
var profile = CreateTestProfile();
|
||||
var lifecycleEvent = new RiskProfileLifecycleEvent(
|
||||
EventId: "evt-002",
|
||||
ProfileId: "test-profile",
|
||||
Version: "1.0.0",
|
||||
EventType: RiskProfileLifecycleEventType.Activated,
|
||||
OldStatus: RiskProfileLifecycleStatus.Draft,
|
||||
NewStatus: RiskProfileLifecycleStatus.Active,
|
||||
Timestamp: _timeProvider.GetUtcNow(),
|
||||
Actor: "alice@example.com",
|
||||
Reason: null);
|
||||
|
||||
// Act
|
||||
await _service.NotifyFromLifecycleEventAsync(
|
||||
"tenant-123",
|
||||
lifecycleEvent,
|
||||
profile,
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileActivated, evt.EventType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyFromLifecycleEventAsync_Deprecated_PublishesDeactivatedNotification()
|
||||
{
|
||||
// Arrange
|
||||
var lifecycleEvent = new RiskProfileLifecycleEvent(
|
||||
EventId: "evt-003",
|
||||
ProfileId: "test-profile",
|
||||
Version: "1.0.0",
|
||||
EventType: RiskProfileLifecycleEventType.Deprecated,
|
||||
OldStatus: RiskProfileLifecycleStatus.Active,
|
||||
NewStatus: RiskProfileLifecycleStatus.Deprecated,
|
||||
Timestamp: _timeProvider.GetUtcNow(),
|
||||
Actor: "alice@example.com",
|
||||
Reason: "Superseded by v2.0.0");
|
||||
|
||||
// Act
|
||||
await _service.NotifyFromLifecycleEventAsync(
|
||||
"tenant-123",
|
||||
lifecycleEvent,
|
||||
profile: null,
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Single(_publisher.PublishedEvents);
|
||||
var evt = _publisher.PublishedEvents[0];
|
||||
Assert.Equal(PolicyProfileNotificationEventTypes.ProfileDeactivated, evt.EventType);
|
||||
Assert.Equal("Superseded by v2.0.0", evt.ChangeReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyProfileCreatedAsync_WhenDisabled_DoesNotPublish()
|
||||
{
|
||||
// Arrange
|
||||
var disabledOptions = new PolicyProfileNotificationOptions { Enabled = false };
|
||||
var disabledService = new PolicyProfileNotificationService(
|
||||
_publisher,
|
||||
_factory,
|
||||
MsOptions.Options.Create(disabledOptions),
|
||||
NullLogger<PolicyProfileNotificationService>.Instance);
|
||||
|
||||
var profile = CreateTestProfile();
|
||||
|
||||
// Act
|
||||
await disabledService.NotifyProfileCreatedAsync(
|
||||
"tenant-123",
|
||||
profile,
|
||||
"alice@example.com",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(_publisher.PublishedEvents);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NotifyProfileCreatedAsync_WhenPublisherThrows_LogsWarningAndContinues()
|
||||
{
|
||||
// Arrange
|
||||
var throwingPublisher = new ThrowingNotificationPublisher();
|
||||
var serviceWithThrowingPublisher = new PolicyProfileNotificationService(
|
||||
throwingPublisher,
|
||||
_factory,
|
||||
MsOptions.Options.Create(_options),
|
||||
NullLogger<PolicyProfileNotificationService>.Instance);
|
||||
|
||||
var profile = CreateTestProfile();
|
||||
|
||||
// Act (should not throw)
|
||||
await serviceWithThrowingPublisher.NotifyProfileCreatedAsync(
|
||||
"tenant-123",
|
||||
profile,
|
||||
"alice@example.com",
|
||||
"abc123hash",
|
||||
CancellationToken.None);
|
||||
|
||||
// Assert - no exception thrown
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EventTypes_AreCorrect()
|
||||
{
|
||||
Assert.Equal("policy.profile.created", PolicyProfileNotificationEventTypes.ProfileCreated);
|
||||
Assert.Equal("policy.profile.activated", PolicyProfileNotificationEventTypes.ProfileActivated);
|
||||
Assert.Equal("policy.profile.deactivated", PolicyProfileNotificationEventTypes.ProfileDeactivated);
|
||||
Assert.Equal("policy.profile.threshold_changed", PolicyProfileNotificationEventTypes.ThresholdChanged);
|
||||
Assert.Equal("policy.profile.override_added", PolicyProfileNotificationEventTypes.OverrideAdded);
|
||||
Assert.Equal("policy.profile.override_removed", PolicyProfileNotificationEventTypes.OverrideRemoved);
|
||||
Assert.Equal("policy.profile.simulation_ready", PolicyProfileNotificationEventTypes.SimulationReady);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Factory_GeneratesUniqueEventIds()
|
||||
{
|
||||
// Arrange & Act
|
||||
var event1 = _factory.CreateProfileCreatedEvent("t1", "p1", "1.0", null, null);
|
||||
var event2 = _factory.CreateProfileCreatedEvent("t1", "p1", "1.0", null, null);
|
||||
|
||||
// Assert
|
||||
Assert.NotEqual(event1.EventId, event2.EventId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Factory_IncludesBaseUrlInLinks()
|
||||
{
|
||||
// Arrange & Act
|
||||
var notification = _factory.CreateProfileActivatedEvent(
|
||||
"tenant-123",
|
||||
"my-profile",
|
||||
"2.0.0",
|
||||
"alice@example.com",
|
||||
"hash123",
|
||||
scope: null);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(notification.Links);
|
||||
Assert.Equal("https://policy.test.local/api/risk/profiles/my-profile", notification.Links.ProfileUrl);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Factory_DetectsUserActorType()
|
||||
{
|
||||
// Act
|
||||
var userEvent = _factory.CreateProfileCreatedEvent("t", "p", "1.0", "alice@example.com", null);
|
||||
var systemEvent = _factory.CreateProfileCreatedEvent("t", "p", "1.0", "policy-service", null);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("user", userEvent.Actor?.Type);
|
||||
Assert.Equal("system", systemEvent.Actor?.Type);
|
||||
}
|
||||
|
||||
private static RiskProfileModel CreateTestProfile()
|
||||
{
|
||||
return new RiskProfileModel
|
||||
{
|
||||
Id = "test-profile",
|
||||
Version = "1.0.0",
|
||||
Description = "Test profile for unit tests",
|
||||
Signals = new List<RiskSignal>
|
||||
{
|
||||
new() { Name = "cvss", Source = "vuln", Type = RiskSignalType.Numeric, Path = "$.cvss.score" }
|
||||
},
|
||||
Weights = new Dictionary<string, double> { ["cvss"] = 1.0 },
|
||||
Overrides = new RiskOverrides
|
||||
{
|
||||
Severity = new List<SeverityOverride>(),
|
||||
Decisions = new List<DecisionOverride>()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static RiskProfileModel CreateTestProfileWithThresholds()
|
||||
{
|
||||
return new RiskProfileModel
|
||||
{
|
||||
Id = "test-profile",
|
||||
Version = "1.0.0",
|
||||
Description = "Test profile with thresholds",
|
||||
Signals = new List<RiskSignal>
|
||||
{
|
||||
new() { Name = "cvss", Source = "vuln", Type = RiskSignalType.Numeric, Path = "$.cvss.score" }
|
||||
},
|
||||
Weights = new Dictionary<string, double> { ["cvss"] = 1.0 },
|
||||
Overrides = new RiskOverrides
|
||||
{
|
||||
Severity = new List<SeverityOverride>
|
||||
{
|
||||
new() { Set = RiskSeverity.Critical, When = new Dictionary<string, object> { ["score_gte"] = 0.9 } },
|
||||
new() { Set = RiskSeverity.High, When = new Dictionary<string, object> { ["score_gte"] = 0.75 } },
|
||||
new() { Set = RiskSeverity.Medium, When = new Dictionary<string, object> { ["score_gte"] = 0.5 } }
|
||||
},
|
||||
Decisions = new List<DecisionOverride>()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private sealed class FakeTimeProvider : TimeProvider
|
||||
{
|
||||
private DateTimeOffset _now;
|
||||
|
||||
public FakeTimeProvider(DateTimeOffset now) => _now = now;
|
||||
|
||||
public override DateTimeOffset GetUtcNow() => _now;
|
||||
|
||||
public void Advance(TimeSpan duration) => _now = _now.Add(duration);
|
||||
}
|
||||
|
||||
private sealed class FakeNotificationPublisher : IPolicyProfileNotificationPublisher
|
||||
{
|
||||
public List<PolicyProfileNotificationEvent> PublishedEvents { get; } = new();
|
||||
|
||||
public Task PublishAsync(PolicyProfileNotificationEvent notification, CancellationToken cancellationToken = default)
|
||||
{
|
||||
PublishedEvents.Add(notification);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task<bool> DeliverWebhookAsync(WebhookDeliveryRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class ThrowingNotificationPublisher : IPolicyProfileNotificationPublisher
|
||||
{
|
||||
public Task PublishAsync(PolicyProfileNotificationEvent notification, CancellationToken cancellationToken = default)
|
||||
{
|
||||
throw new InvalidOperationException("Publisher failed");
|
||||
}
|
||||
|
||||
public Task<bool> DeliverWebhookAsync(WebhookDeliveryRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
throw new InvalidOperationException("Publisher failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,526 @@
|
||||
using System.Security.Claims;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Engine.Tenancy;
|
||||
using Xunit;
|
||||
|
||||
using MsOptions = Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tests.Tenancy;
|
||||
|
||||
public sealed class TenantContextTests
|
||||
{
|
||||
[Fact]
|
||||
public void TenantContext_ForTenant_CreatesTenantContext()
|
||||
{
|
||||
// Arrange & Act
|
||||
var context = TenantContext.ForTenant("tenant-123", "project-456", canWrite: true, actorId: "user@example.com");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("tenant-123", context.TenantId);
|
||||
Assert.Equal("project-456", context.ProjectId);
|
||||
Assert.True(context.CanWrite);
|
||||
Assert.Equal("user@example.com", context.ActorId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContext_ForTenant_WithoutOptionalFields_CreatesTenantContext()
|
||||
{
|
||||
// Act
|
||||
var context = TenantContext.ForTenant("tenant-123");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("tenant-123", context.TenantId);
|
||||
Assert.Null(context.ProjectId);
|
||||
Assert.False(context.CanWrite);
|
||||
Assert.Null(context.ActorId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContext_ForTenant_ThrowsOnNullTenantId()
|
||||
{
|
||||
// Act & Assert
|
||||
        Assert.Throws<ArgumentNullException>(() => TenantContext.ForTenant(null!));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContext_ForTenant_ThrowsOnEmptyTenantId()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentException>(() => TenantContext.ForTenant(string.Empty));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContext_ForTenant_ThrowsOnWhitespaceTenantId()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentException>(() => TenantContext.ForTenant(" "));
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class TenantContextAccessorTests
|
||||
{
|
||||
[Fact]
|
||||
public void TenantContextAccessor_GetSet_WorksCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var accessor = new TenantContextAccessor();
|
||||
var context = TenantContext.ForTenant("tenant-123");
|
||||
|
||||
// Act
|
||||
accessor.TenantContext = context;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(accessor.TenantContext);
|
||||
Assert.Equal("tenant-123", accessor.TenantContext.TenantId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContextAccessor_InitialValue_IsNull()
|
||||
{
|
||||
// Arrange & Act
|
||||
var accessor = new TenantContextAccessor();
|
||||
|
||||
// Assert
|
||||
Assert.Null(accessor.TenantContext);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantContextAccessor_SetNull_ClearsContext()
|
||||
{
|
||||
// Arrange
|
||||
var accessor = new TenantContextAccessor();
|
||||
accessor.TenantContext = TenantContext.ForTenant("tenant-123");
|
||||
|
||||
// Act
|
||||
accessor.TenantContext = null;
|
||||
|
||||
// Assert
|
||||
Assert.Null(accessor.TenantContext);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class TenantValidationResultTests
|
||||
{
|
||||
[Fact]
|
||||
public void TenantValidationResult_Success_CreatesValidResult()
|
||||
{
|
||||
// Arrange
|
||||
var context = TenantContext.ForTenant("tenant-123");
|
||||
|
||||
// Act
|
||||
var result = TenantValidationResult.Success(context);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Null(result.ErrorCode);
|
||||
Assert.Null(result.ErrorMessage);
|
||||
Assert.NotNull(result.Context);
|
||||
Assert.Equal("tenant-123", result.Context.TenantId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TenantValidationResult_Failure_CreatesInvalidResult()
|
||||
{
|
||||
// Act
|
||||
var result = TenantValidationResult.Failure("ERR_CODE", "Error message");
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal("ERR_CODE", result.ErrorCode);
|
||||
Assert.Equal("Error message", result.ErrorMessage);
|
||||
Assert.Null(result.Context);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class TenantContextMiddlewareTests
|
||||
{
|
||||
private readonly NullLogger<TenantContextMiddleware> _logger;
|
||||
private readonly TenantContextAccessor _tenantAccessor;
|
||||
private readonly TenantContextOptions _options;
|
||||
|
||||
public TenantContextMiddlewareTests()
|
||||
{
|
||||
_logger = NullLogger<TenantContextMiddleware>.Instance;
|
||||
_tenantAccessor = new TenantContextAccessor();
|
||||
_options = new TenantContextOptions
|
||||
{
|
||||
Enabled = true,
|
||||
RequireTenantHeader = true,
|
||||
ExcludedPaths = new List<string> { "/healthz", "/readyz" }
|
||||
};
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_WithValidTenantHeader_SetsTenantContext()
|
||||
{
|
||||
// Arrange
|
||||
var nextCalled = false;
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => { nextCalled = true; return Task.CompletedTask; },
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", "tenant-123");
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.True(nextCalled);
|
||||
Assert.NotNull(_tenantAccessor.TenantContext);
|
||||
Assert.Equal("tenant-123", _tenantAccessor.TenantContext.TenantId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_WithTenantAndProjectHeaders_SetsBothInContext()
|
||||
{
|
||||
// Arrange
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => Task.CompletedTask,
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", "tenant-123", "project-456");
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(_tenantAccessor.TenantContext);
|
||||
Assert.Equal("tenant-123", _tenantAccessor.TenantContext.TenantId);
|
||||
Assert.Equal("project-456", _tenantAccessor.TenantContext.ProjectId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_MissingTenantHeader_Returns400WithErrorCode()
|
||||
{
|
||||
// Arrange
|
||||
var nextCalled = false;
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => { nextCalled = true; return Task.CompletedTask; },
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", tenantId: null);
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.False(nextCalled);
|
||||
Assert.Equal(StatusCodes.Status400BadRequest, context.Response.StatusCode);
|
||||
Assert.Null(_tenantAccessor.TenantContext);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_MissingTenantHeaderNotRequired_UsesDefaultTenant()
|
||||
{
|
||||
// Arrange
|
||||
var optionsNotRequired = new TenantContextOptions
|
||||
{
|
||||
Enabled = true,
|
||||
RequireTenantHeader = false
|
||||
};
|
||||
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => Task.CompletedTask,
|
||||
MsOptions.Options.Create(optionsNotRequired),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", tenantId: null);
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(_tenantAccessor.TenantContext);
|
||||
Assert.Equal(TenantContextConstants.DefaultTenantId, _tenantAccessor.TenantContext.TenantId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_ExcludedPath_SkipsValidation()
|
||||
{
|
||||
// Arrange
|
||||
var nextCalled = false;
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => { nextCalled = true; return Task.CompletedTask; },
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/healthz", tenantId: null);
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.True(nextCalled);
|
||||
Assert.Null(_tenantAccessor.TenantContext); // Not set for excluded paths
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Middleware_Disabled_SkipsValidation()
|
||||
{
|
||||
// Arrange
|
||||
var disabledOptions = new TenantContextOptions { Enabled = false };
|
||||
var nextCalled = false;
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => { nextCalled = true; return Task.CompletedTask; },
|
||||
MsOptions.Options.Create(disabledOptions),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", tenantId: null);
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.True(nextCalled);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("tenant-123")]
|
||||
[InlineData("TENANT_456")]
|
||||
[InlineData("tenant_with-mixed-123")]
|
||||
public async Task Middleware_ValidTenantIdFormat_Passes(string tenantId)
|
||||
{
|
||||
// Arrange
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => Task.CompletedTask,
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", tenantId);
|
||||
|
||||
// Act
|
||||
await middleware.InvokeAsync(context, _tenantAccessor);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(_tenantAccessor.TenantContext);
|
||||
Assert.Equal(tenantId, _tenantAccessor.TenantContext.TenantId);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("tenant 123")] // spaces
|
||||
[InlineData("tenant@123")] // special char
|
||||
[InlineData("tenant/123")] // slash
|
||||
[InlineData("tenant.123")] // dot
|
||||
public async Task Middleware_InvalidTenantIdFormat_Returns400(string tenantId)
|
||||
{
|
||||
// Arrange
|
||||
var nextCalled = false;
|
||||
var middleware = new TenantContextMiddleware(
|
||||
_ => { nextCalled = true; return Task.CompletedTask; },
|
||||
MsOptions.Options.Create(_options),
|
||||
_logger);
|
||||
|
||||
var context = CreateHttpContext("/api/risk/profiles", tenantId);
|
||||
|
||||
        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.False(nextCalled);
        Assert.Equal(StatusCodes.Status400BadRequest, context.Response.StatusCode);
    }

    [Fact]
    public async Task Middleware_TenantIdTooLong_Returns400()
    {
        // Arrange
        var longTenantId = new string('a', 300); // exceeds default 256 limit
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", longTenantId);

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.Equal(StatusCodes.Status400BadRequest, context.Response.StatusCode);
    }

    [Theory]
    [InlineData("project-123")]
    [InlineData("PROJECT_456")]
    [InlineData("proj_with-mixed-123")]
    public async Task Middleware_ValidProjectIdFormat_Passes(string projectId)
    {
        // Arrange
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", "tenant-123", projectId);

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.NotNull(_tenantAccessor.TenantContext);
        Assert.Equal(projectId, _tenantAccessor.TenantContext.ProjectId);
    }

    [Fact]
    public async Task Middleware_WithWriteScope_SetsCanWriteTrue()
    {
        // Arrange
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", "tenant-123");
        var claims = new[]
        {
            new Claim("sub", "user@example.com"),
            new Claim("scope", "policy:write")
        };
        context.User = new ClaimsPrincipal(new ClaimsIdentity(claims, "TestAuth"));

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.NotNull(_tenantAccessor.TenantContext);
        Assert.True(_tenantAccessor.TenantContext.CanWrite);
    }

    [Fact]
    public async Task Middleware_WithoutWriteScope_SetsCanWriteFalse()
    {
        // Arrange
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", "tenant-123");
        var claims = new[]
        {
            new Claim("sub", "user@example.com"),
            new Claim("scope", "policy:read")
        };
        context.User = new ClaimsPrincipal(new ClaimsIdentity(claims, "TestAuth"));

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.NotNull(_tenantAccessor.TenantContext);
        Assert.False(_tenantAccessor.TenantContext.CanWrite);
    }

    [Fact]
    public async Task Middleware_ExtractsActorIdFromSubClaim()
    {
        // Arrange
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", "tenant-123");
        var claims = new[] { new Claim("sub", "user-id-123") };
        context.User = new ClaimsPrincipal(new ClaimsIdentity(claims, "TestAuth"));

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.NotNull(_tenantAccessor.TenantContext);
        Assert.Equal("user-id-123", _tenantAccessor.TenantContext.ActorId);
    }

    [Fact]
    public async Task Middleware_ExtractsActorIdFromHeader()
    {
        // Arrange
        var middleware = new TenantContextMiddleware(
            _ => Task.CompletedTask,
            MsOptions.Options.Create(_options),
            _logger);

        var context = CreateHttpContext("/api/risk/profiles", "tenant-123");
        context.Request.Headers["X-StellaOps-Actor"] = "service-account-123";

        // Act
        await middleware.InvokeAsync(context, _tenantAccessor);

        // Assert
        Assert.NotNull(_tenantAccessor.TenantContext);
        Assert.Equal("service-account-123", _tenantAccessor.TenantContext.ActorId);
    }

    private static DefaultHttpContext CreateHttpContext(
        string path,
        string? tenantId,
        string? projectId = null)
    {
        var context = new DefaultHttpContext();
        context.Request.Path = path;

        if (!string.IsNullOrEmpty(tenantId))
        {
            context.Request.Headers[TenantContextConstants.TenantHeader] = tenantId;
        }

        if (!string.IsNullOrEmpty(projectId))
        {
            context.Request.Headers[TenantContextConstants.ProjectHeader] = projectId;
        }

        // Set up response body stream to capture output
        context.Response.Body = new MemoryStream();

        return context;
    }
}

public sealed class TenantContextConstantsTests
{
    [Fact]
    public void Constants_HaveExpectedValues()
    {
        Assert.Equal("X-Stella-Tenant", TenantContextConstants.TenantHeader);
        Assert.Equal("X-Stella-Project", TenantContextConstants.ProjectHeader);
        Assert.Equal("app.tenant_id", TenantContextConstants.TenantGuc);
        Assert.Equal("app.project_id", TenantContextConstants.ProjectGuc);
        Assert.Equal("app.can_write", TenantContextConstants.CanWriteGuc);
        Assert.Equal("public", TenantContextConstants.DefaultTenantId);
        Assert.Equal("POLICY_TENANT_HEADER_REQUIRED", TenantContextConstants.MissingTenantHeaderErrorCode);
        Assert.Equal("POLICY_TENANT_ID_INVALID", TenantContextConstants.InvalidTenantIdErrorCode);
        Assert.Equal("POLICY_TENANT_ACCESS_DENIED", TenantContextConstants.TenantAccessDeniedErrorCode);
    }
}

public sealed class TenantContextOptionsTests
{
    [Fact]
    public void Options_HaveCorrectDefaults()
    {
        // Arrange & Act
        var options = new TenantContextOptions();

        // Assert
        Assert.True(options.Enabled);
        Assert.True(options.RequireTenantHeader);
        Assert.Contains("/healthz", options.ExcludedPaths);
        Assert.Contains("/readyz", options.ExcludedPaths);
        Assert.Contains("/.well-known", options.ExcludedPaths);
        Assert.Equal(256, options.MaxTenantIdLength);
        Assert.Equal(256, options.MaxProjectIdLength);
        Assert.False(options.AllowMultiTenantQueries);
    }

    [Fact]
    public void SectionName_IsCorrect()
    {
        Assert.Equal("PolicyEngine:Tenancy", TenantContextOptions.SectionName);
    }
}
@@ -9,6 +9,10 @@ internal static class OrchestratorEventKinds
{
    public const string ScannerReportReady = "scanner.event.report.ready";
    public const string ScannerScanCompleted = "scanner.event.scan.completed";
    public const string ScannerScanStarted = "scanner.event.scan.started";
    public const string ScannerScanFailed = "scanner.event.scan.failed";
    public const string ScannerSbomGenerated = "scanner.event.sbom.generated";
    public const string ScannerVulnerabilityDetected = "scanner.event.vulnerability.detected";
}

internal sealed record OrchestratorEvent
@@ -74,6 +78,39 @@ internal sealed record OrchestratorEvent
    [JsonPropertyOrder(13)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableSortedDictionary<string, string>? Attributes { get; init; }

    [JsonPropertyName("notifier")]
    [JsonPropertyOrder(14)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public NotifierIngestionMetadata? Notifier { get; init; }
}

/// <summary>
/// Metadata for Notifier service ingestion per orchestrator-envelope.schema.json.
/// </summary>
internal sealed record NotifierIngestionMetadata
{
    [JsonPropertyName("severityThresholdMet")]
    [JsonPropertyOrder(0)]
    public bool SeverityThresholdMet { get; init; }

    [JsonPropertyName("notificationChannels")]
    [JsonPropertyOrder(1)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public IReadOnlyList<string>? NotificationChannels { get; init; }

    [JsonPropertyName("digestEligible")]
    [JsonPropertyOrder(2)]
    public bool DigestEligible { get; init; } = true;

    [JsonPropertyName("immediateDispatch")]
    [JsonPropertyOrder(3)]
    public bool ImmediateDispatch { get; init; }

    [JsonPropertyName("priority")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Priority { get; init; }
}

internal sealed record OrchestratorEventScope
@@ -287,3 +324,274 @@ internal sealed record FindingSummaryPayload
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Reachability { get; init; }
}

/// <summary>
/// Payload for scanner.event.scan.started events.
/// </summary>
internal sealed record ScanStartedEventPayload : OrchestratorEventPayload
{
    [JsonPropertyName("scanId")]
    [JsonPropertyOrder(0)]
    public string ScanId { get; init; } = string.Empty;

    [JsonPropertyName("jobId")]
    [JsonPropertyOrder(1)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? JobId { get; init; }

    [JsonPropertyName("target")]
    [JsonPropertyOrder(2)]
    public ScanTargetPayload Target { get; init; } = new();

    [JsonPropertyName("startedAt")]
    [JsonPropertyOrder(3)]
    public DateTimeOffset StartedAt { get; init; }

    [JsonPropertyName("status")]
    [JsonPropertyOrder(4)]
    public string Status { get; init; } = "started";
}

/// <summary>
/// Payload for scanner.event.scan.failed events.
/// </summary>
internal sealed record ScanFailedEventPayload : OrchestratorEventPayload
{
    [JsonPropertyName("scanId")]
    [JsonPropertyOrder(0)]
    public string ScanId { get; init; } = string.Empty;

    [JsonPropertyName("jobId")]
    [JsonPropertyOrder(1)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? JobId { get; init; }

    [JsonPropertyName("target")]
    [JsonPropertyOrder(2)]
    public ScanTargetPayload Target { get; init; } = new();

    [JsonPropertyName("startedAt")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? StartedAt { get; init; }

    [JsonPropertyName("failedAt")]
    [JsonPropertyOrder(4)]
    public DateTimeOffset FailedAt { get; init; }

    [JsonPropertyName("durationMs")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public long? DurationMs { get; init; }

    [JsonPropertyName("status")]
    [JsonPropertyOrder(6)]
    public string Status { get; init; } = "failed";

    [JsonPropertyName("error")]
    [JsonPropertyOrder(7)]
    public ScanErrorPayload Error { get; init; } = new();
}

/// <summary>
/// Payload for scanner.event.sbom.generated events.
/// </summary>
internal sealed record SbomGeneratedEventPayload : OrchestratorEventPayload
{
    [JsonPropertyName("scanId")]
    [JsonPropertyOrder(0)]
    public string ScanId { get; init; } = string.Empty;

    [JsonPropertyName("sbomId")]
    [JsonPropertyOrder(1)]
    public string SbomId { get; init; } = string.Empty;

    [JsonPropertyName("target")]
    [JsonPropertyOrder(2)]
    public ScanTargetPayload Target { get; init; } = new();

    [JsonPropertyName("generatedAt")]
    [JsonPropertyOrder(3)]
    public DateTimeOffset GeneratedAt { get; init; }

    [JsonPropertyName("format")]
    [JsonPropertyOrder(4)]
    public string Format { get; init; } = "cyclonedx";

    [JsonPropertyName("specVersion")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SpecVersion { get; init; }

    [JsonPropertyName("componentCount")]
    [JsonPropertyOrder(6)]
    public int ComponentCount { get; init; }

    [JsonPropertyName("sbomRef")]
    [JsonPropertyOrder(7)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SbomRef { get; init; }

    [JsonPropertyName("digest")]
    [JsonPropertyOrder(8)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Digest { get; init; }
}

/// <summary>
/// Payload for scanner.event.vulnerability.detected events.
/// </summary>
internal sealed record VulnerabilityDetectedEventPayload : OrchestratorEventPayload
{
    [JsonPropertyName("scanId")]
    [JsonPropertyOrder(0)]
    public string ScanId { get; init; } = string.Empty;

    [JsonPropertyName("vulnerability")]
    [JsonPropertyOrder(1)]
    public VulnerabilityInfoPayload Vulnerability { get; init; } = new();

    [JsonPropertyName("affectedComponent")]
    [JsonPropertyOrder(2)]
    public ComponentInfoPayload AffectedComponent { get; init; } = new();

    [JsonPropertyName("reachability")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Reachability { get; init; }

    [JsonPropertyName("detectedAt")]
    [JsonPropertyOrder(4)]
    public DateTimeOffset DetectedAt { get; init; }
}

/// <summary>
/// Target being scanned.
/// </summary>
internal sealed record ScanTargetPayload
{
    [JsonPropertyName("type")]
    [JsonPropertyOrder(0)]
    public string Type { get; init; } = "container_image";

    [JsonPropertyName("identifier")]
    [JsonPropertyOrder(1)]
    public string Identifier { get; init; } = string.Empty;

    [JsonPropertyName("digest")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Digest { get; init; }

    [JsonPropertyName("tag")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Tag { get; init; }

    [JsonPropertyName("platform")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Platform { get; init; }
}

/// <summary>
/// Error information for failed scans.
/// </summary>
internal sealed record ScanErrorPayload
{
    [JsonPropertyName("code")]
    [JsonPropertyOrder(0)]
    public string Code { get; init; } = "SCAN_FAILED";

    [JsonPropertyName("message")]
    [JsonPropertyOrder(1)]
    public string Message { get; init; } = string.Empty;

    [JsonPropertyName("details")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableDictionary<string, string>? Details { get; init; }

    [JsonPropertyName("recoverable")]
    [JsonPropertyOrder(3)]
    public bool Recoverable { get; init; }
}

/// <summary>
/// Vulnerability information.
/// </summary>
internal sealed record VulnerabilityInfoPayload
{
    [JsonPropertyName("id")]
    [JsonPropertyOrder(0)]
    public string Id { get; init; } = string.Empty;

    [JsonPropertyName("severity")]
    [JsonPropertyOrder(1)]
    public string Severity { get; init; } = "unknown";

    [JsonPropertyName("cvssScore")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public double? CvssScore { get; init; }

    [JsonPropertyName("cvssVector")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? CvssVector { get; init; }

    [JsonPropertyName("title")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Title { get; init; }

    [JsonPropertyName("fixAvailable")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public bool? FixAvailable { get; init; }

    [JsonPropertyName("fixedVersion")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? FixedVersion { get; init; }

    [JsonPropertyName("kevListed")]
    [JsonPropertyOrder(7)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public bool? KevListed { get; init; }

    [JsonPropertyName("epssScore")]
    [JsonPropertyOrder(8)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public double? EpssScore { get; init; }
}

/// <summary>
/// Component information.
/// </summary>
internal sealed record ComponentInfoPayload
{
    [JsonPropertyName("purl")]
    [JsonPropertyOrder(0)]
    public string Purl { get; init; } = string.Empty;

    [JsonPropertyName("name")]
    [JsonPropertyOrder(1)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Name { get; init; }

    [JsonPropertyName("version")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Version { get; init; }

    [JsonPropertyName("ecosystem")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Ecosystem { get; init; }

    [JsonPropertyName("location")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Location { get; init; }
}

@@ -211,6 +211,10 @@ internal static class OrchestratorEventSerializer

        AddDerivedType(info.PolymorphismOptions, typeof(ReportReadyEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(ScanCompletedEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(ScanStartedEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(ScanFailedEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(SbomGeneratedEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(VulnerabilityDetectedEventPayload));
    }

    private static void AddDerivedType(JsonPolymorphismOptions options, Type derivedType)

@@ -147,7 +147,7 @@ internal static class PythonContainerAdapter

        foreach (var sitePackages in DiscoverLayerSitePackages(rootPath))
        {
            foreach (var distInfo in EnumerateDistInfoDirectories(sitePackages))
            foreach (var distInfo in EnumerateMetadataDirectories(sitePackages))
            {
                discovered.Add(distInfo);
            }
@@ -156,7 +156,7 @@ internal static class PythonContainerAdapter
        // Also check root-level site-packages
        foreach (var sitePackages in DiscoverSitePackagesInDirectory(rootPath))
        {
            foreach (var distInfo in EnumerateDistInfoDirectories(sitePackages))
            foreach (var distInfo in EnumerateMetadataDirectories(sitePackages))
            {
                discovered.Add(distInfo);
            }
@@ -167,25 +167,27 @@ internal static class PythonContainerAdapter
            .ToArray();
    }

    private static IEnumerable<string> EnumerateDistInfoDirectories(string sitePackages)
    private static IEnumerable<string> EnumerateMetadataDirectories(string sitePackages)
    {
        if (!Directory.Exists(sitePackages))
        {
            yield break;
        }

        foreach (var pattern in new[] { "*.dist-info", "*.egg-info" })
        {
            IEnumerable<string>? directories = null;
            try
            {
                directories = Directory.EnumerateDirectories(sitePackages, "*.dist-info");
                directories = Directory.EnumerateDirectories(sitePackages, pattern);
            }
            catch (IOException)
            {
                yield break;
                continue;
            }
            catch (UnauthorizedAccessException)
            {
                yield break;
                continue;
            }

            foreach (var directory in directories)
@@ -193,6 +195,7 @@ internal static class PythonContainerAdapter
                yield return directory;
            }
        }
    }

    private static IEnumerable<string> DiscoverSitePackagesInDirectory(string directory)
    {

@@ -18,10 +18,13 @@ internal static class PythonDistributionLoader
            return null;
        }

        var metadataPath = Path.Combine(distInfoPath, "METADATA");
        var wheelPath = Path.Combine(distInfoPath, "WHEEL");
        var isEggInfo = distInfoPath.EndsWith(".egg-info", StringComparison.OrdinalIgnoreCase);
        var metadataPath = Path.Combine(distInfoPath, isEggInfo ? "PKG-INFO" : "METADATA");
        var wheelPath = isEggInfo ? null : Path.Combine(distInfoPath, "WHEEL");
        var entryPointsPath = Path.Combine(distInfoPath, "entry_points.txt");
        var recordPath = Path.Combine(distInfoPath, "RECORD");
        var recordPath = isEggInfo
            ? Path.Combine(distInfoPath, "installed-files.txt")
            : Path.Combine(distInfoPath, "RECORD");
        var installerPath = Path.Combine(distInfoPath, "INSTALLER");
        var directUrlPath = Path.Combine(distInfoPath, "direct_url.json");

@@ -42,11 +45,11 @@ internal static class PythonDistributionLoader
        var metadataEntries = new List<KeyValuePair<string, string?>>();
        var evidenceEntries = new List<LanguageComponentEvidence>();

        AddFileEvidence(context, metadataPath, "METADATA", evidenceEntries);
        AddFileEvidence(context, metadataPath, isEggInfo ? "PKG-INFO" : "METADATA", evidenceEntries);
        AddFileEvidence(context, wheelPath, "WHEEL", evidenceEntries);
        AddFileEvidence(context, entryPointsPath, "entry_points.txt", evidenceEntries);
        AddFileEvidence(context, installerPath, "INSTALLER", evidenceEntries);
        AddFileEvidence(context, recordPath, "RECORD", evidenceEntries);
        AddFileEvidence(context, recordPath, isEggInfo ? "installed-files.txt" : "RECORD", evidenceEntries);

        AppendMetadata(metadataEntries, "distInfoPath", PythonPathHelper.NormalizeRelative(context, distInfoPath));
        AppendMetadata(metadataEntries, "name", trimmedName);
@@ -150,7 +153,9 @@ internal static class PythonDistributionLoader
            AppendMetadata(metadataEntries, $"entryPoints.{group.Key}", string.Join(';', group.Value.Select(static ep => $"{ep.Name}={ep.Target}")));
        }

        var wheelInfo = await PythonWheelInfo.LoadAsync(wheelPath, cancellationToken).ConfigureAwait(false);
        if (!isEggInfo)
        {
            var wheelInfo = await PythonWheelInfo.LoadAsync(wheelPath!, cancellationToken).ConfigureAwait(false);
            if (wheelInfo is not null)
            {
                foreach (var pair in wheelInfo.ToMetadata())
@@ -158,6 +163,7 @@ internal static class PythonDistributionLoader
                    AppendMetadata(metadataEntries, pair.Key, pair.Value);
                }
            }
        }

        var installer = await ReadSingleLineAsync(installerPath, cancellationToken).ConfigureAwait(false);
        if (!string.IsNullOrWhiteSpace(installer))
@@ -200,7 +206,7 @@ internal static class PythonDistributionLoader

        evidenceEntries.AddRange(verification.Evidence);
        var usedByEntrypoint = verification.UsedByEntrypoint || EvaluateEntryPointUsage(context, distInfoPath, entryPoints);
        AppendMetadata(metadataEntries, "provenance", "dist-info");
        AppendMetadata(metadataEntries, "provenance", isEggInfo ? "egg-info" : "dist-info");

        return new PythonDistribution(
            trimmedName,
@@ -245,7 +251,7 @@ internal static class PythonDistributionLoader

    private static void AddFileEvidence(LanguageAnalyzerContext context, string path, string source, ICollection<LanguageComponentEvidence> evidence)
    {
        if (!File.Exists(path))
        if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
        {
            return;
        }
@@ -283,9 +289,13 @@ internal static class PythonDistributionLoader

        var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase);
        if (suffixIndex <= 0)
        {
            suffixIndex = directoryName.IndexOf(".egg-info", StringComparison.OrdinalIgnoreCase);
            if (suffixIndex <= 0)
            {
                return null;
            }
        }

        var trimmed = directoryName[..suffixIndex];
        var dashIndex = trimmed.LastIndexOf('-');
@@ -307,9 +317,13 @@ internal static class PythonDistributionLoader

        var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase);
        if (suffixIndex <= 0)
        {
            suffixIndex = directoryName.IndexOf(".egg-info", StringComparison.OrdinalIgnoreCase);
            if (suffixIndex <= 0)
            {
                return null;
            }
        }

        var trimmed = directoryName[..suffixIndex];
        var dashIndex = trimmed.LastIndexOf('-');
@@ -664,6 +678,13 @@ internal static class PythonRecordParser
            return Array.Empty<PythonRecordEntry>();
        }

        var fileName = Path.GetFileName(path);
        if (!string.IsNullOrWhiteSpace(fileName) &&
            fileName.EndsWith("installed-files.txt", StringComparison.OrdinalIgnoreCase))
        {
            return await LoadInstalledFilesAsync(path, cancellationToken).ConfigureAwait(false);
        }

        var entries = new List<PythonRecordEntry>();

        await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
@@ -763,6 +784,29 @@ internal static class PythonRecordParser
        values.Add(builder.ToString());
        return values;
    }

    private static async Task<IReadOnlyList<PythonRecordEntry>> LoadInstalledFilesAsync(string path, CancellationToken cancellationToken)
    {
        var entries = new List<PythonRecordEntry>();

        await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);

        while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var trimmed = line.Trim();
            if (string.IsNullOrEmpty(trimmed) || trimmed == ".")
            {
                continue;
            }

            entries.Add(new PythonRecordEntry(trimmed, hashAlgorithm: null, hashValue: null, size: null));
        }

        return entries;
    }
}

internal sealed class PythonRecordVerificationResult

@@ -291,22 +291,8 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
    {
        var directories = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Collect from root path recursively
        try
        {
            foreach (var dir in Directory.EnumerateDirectories(rootPath, "*.dist-info", Enumeration))
            {
                directories.Add(dir);
            }
        }
        catch (IOException)
        {
            // Ignore enumeration errors
        }
        catch (UnauthorizedAccessException)
        {
            // Ignore access errors
        }
        AddMetadataDirectories(rootPath, "*.dist-info", directories);
        AddMetadataDirectories(rootPath, "*.egg-info", directories);

        // Also collect from OCI container layers
        foreach (var dir in PythonContainerAdapter.DiscoverDistInfoDirectories(rootPath))
@@ -317,5 +303,24 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
        return directories
            .OrderBy(static path => path, StringComparer.Ordinal)
            .ToArray();

        static void AddMetadataDirectories(string basePath, string pattern, ISet<string> accumulator)
        {
            try
            {
                foreach (var dir in Directory.EnumerateDirectories(basePath, pattern, Enumeration))
                {
                    accumulator.Add(dir);
                }
            }
            catch (IOException)
            {
                // Ignore enumeration errors
            }
            catch (UnauthorizedAccessException)
            {
                // Ignore access errors
            }
        }
    }
}

@@ -0,0 +1,27 @@
plugins {
    alias(libs.plugins.kotlin.jvm)
    alias(libs.plugins.spring.boot)
}

group = "com.example"
version = "1.0.0"

repositories {
    mavenCentral()
}

dependencies {
    // Individual library references
    implementation(libs.kotlin.stdlib)
    implementation(libs.slf4j.api)
    implementation(libs.guava)

    // Bundle reference (expands to multiple libraries)
    implementation(libs.bundles.jackson)

    // Test bundle
    testImplementation(libs.bundles.testing)

    // Direct declaration alongside catalog
    runtimeOnly("ch.qos.logback:logback-classic:1.4.14")
}
@@ -0,0 +1,135 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"purl": "pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"name": "commons-lang3",
|
||||
"version": "3.14.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "commons-lang3",
|
||||
"groupId": "org.apache.commons",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"catalogAlias": "commons-lang",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.jetbrains.kotlin/kotlin-stdlib@1.9.21",
|
||||
"purl": "pkg:maven/org.jetbrains.kotlin/kotlin-stdlib@1.9.21",
|
||||
"name": "kotlin-stdlib",
|
||||
"version": "1.9.21",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "kotlin-stdlib",
|
||||
"groupId": "org.jetbrains.kotlin",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"versionRef": "kotlin",
|
||||
"catalogAlias": "kotlin-stdlib",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-core@2.16.0",
|
||||
"purl": "pkg:maven/com.fasterxml.jackson.core/jackson-core@2.16.0",
|
||||
"name": "jackson-core",
|
||||
"version": "2.16.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jackson-core",
|
||||
"groupId": "com.fasterxml.jackson.core",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"versionRef": "jackson",
|
||||
"catalogAlias": "jackson-core",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"name": "jackson-databind",
|
||||
"version": "2.16.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jackson-databind",
|
||||
"groupId": "com.fasterxml.jackson.core",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"versionRef": "jackson",
|
||||
"catalogAlias": "jackson-databind",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"purl": "pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"name": "slf4j-api",
|
||||
"version": "2.0.9",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "slf4j-api",
|
||||
"groupId": "org.slf4j",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"catalogAlias": "slf4j-api",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"purl": "pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"name": "guava",
|
||||
"version": "32.1.3-jre",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "guava",
|
||||
"groupId": "com.google.guava",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"versionRef": "guava",
|
||||
"catalogAlias": "guava",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"name": "junit-jupiter",
|
||||
"version": "5.10.1",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "junit-jupiter",
|
||||
"groupId": "org.junit.jupiter",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"versionRef": "junit",
|
||||
"catalogAlias": "junit-jupiter",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.mockito/mockito-core@5.8.0",
|
||||
"purl": "pkg:maven/org.mockito/mockito-core@5.8.0",
|
||||
"name": "mockito-core",
|
||||
"version": "5.8.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "mockito-core",
|
||||
"groupId": "org.mockito",
|
||||
"declaredOnly": "true",
|
||||
"versionSource": "version-catalog",
|
||||
"catalogAlias": "mockito-core",
|
||||
"buildFile": "libs.versions.toml"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,35 @@
[versions]
kotlin = "1.9.21"
spring-boot = "3.2.0"
jackson = { strictly = "2.16.0" }
junit = { prefer = "5.10.1" }
guava = "32.1.3-jre"

[libraries]
# Short notation
commons-lang = "org.apache.commons:commons-lang3:3.14.0"

# Module notation with version reference
kotlin-stdlib = { module = "org.jetbrains.kotlin:kotlin-stdlib", version.ref = "kotlin" }

# Full notation with group/name
jackson-core = { group = "com.fasterxml.jackson.core", name = "jackson-core", version.ref = "jackson" }
jackson-databind = { group = "com.fasterxml.jackson.core", name = "jackson-databind", version.ref = "jackson" }

# Direct version in table
slf4j-api = { module = "org.slf4j:slf4j-api", version = "2.0.9" }

# Without version (managed elsewhere)
guava = { module = "com.google.guava:guava", version.ref = "guava" }

# Test libraries
junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit" }
mockito-core = { module = "org.mockito:mockito-core", version = "5.8.0" }

[bundles]
jackson = ["jackson-core", "jackson-databind"]
testing = ["junit-jupiter", "mockito-core"]

[plugins]
kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" }
spring-boot = { id = "org.springframework.boot", version.ref = "spring-boot" }
@@ -0,0 +1,44 @@
plugins {
    id 'java'
    id 'application'
}

group = 'com.example'
version = '1.0.0'

repositories {
    mavenCentral()
}

dependencies {
    // String notation - compile scope
    implementation 'com.google.guava:guava:32.1.3-jre'

    // String notation - with parentheses
    implementation("org.apache.commons:commons-lang3:3.14.0")

    // Map notation - compile scope
    implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.9'

    // String notation - test scope
    testImplementation 'org.junit.jupiter:junit-jupiter:5.10.1'

    // String notation - provided scope
    compileOnly 'org.projectlombok:lombok:1.18.30'

    // String notation - runtime scope
    runtimeOnly 'ch.qos.logback:logback-classic:1.4.14'

    // Annotation processor
    annotationProcessor 'org.projectlombok:lombok:1.18.30'

    // Platform/BOM import
    implementation platform('org.springframework.boot:spring-boot-dependencies:3.2.0')

    // Classifier example
    implementation 'org.lwjgl:lwjgl:3.3.3:natives-linux'
}

application {
    mainClass = 'com.example.Main'
}
@@ -0,0 +1,108 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"purl": "pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"name": "guava",
|
||||
"version": "32.1.3-jre",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "guava",
|
||||
"groupId": "com.google.guava",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"purl": "pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"name": "commons-lang3",
|
||||
"version": "3.14.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "commons-lang3",
|
||||
"groupId": "org.apache.commons",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"purl": "pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"name": "slf4j-api",
|
||||
"version": "2.0.9",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "slf4j-api",
|
||||
"groupId": "org.slf4j",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"name": "junit-jupiter",
|
||||
"version": "5.10.1",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "junit-jupiter",
|
||||
"groupId": "org.junit.jupiter",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "test",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.projectlombok/lombok@1.18.30",
|
||||
"purl": "pkg:maven/org.projectlombok/lombok@1.18.30",
|
||||
"name": "lombok",
|
||||
"version": "1.18.30",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "lombok",
|
||||
"groupId": "org.projectlombok",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "provided",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/ch.qos.logback/logback-classic@1.4.14",
|
||||
"purl": "pkg:maven/ch.qos.logback/logback-classic@1.4.14",
|
||||
"name": "logback-classic",
|
||||
"version": "1.4.14",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "logback-classic",
|
||||
"groupId": "ch.qos.logback",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "runtime",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.lwjgl/lwjgl@3.3.3",
|
||||
"purl": "pkg:maven/org.lwjgl/lwjgl@3.3.3",
|
||||
"name": "lwjgl",
|
||||
"version": "3.3.3",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "lwjgl",
|
||||
"groupId": "org.lwjgl",
|
||||
"classifier": "natives-linux",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,2 @@
org.gradle.jvmargs=-Xmx2048m
org.gradle.caching=true
@@ -0,0 +1,50 @@
plugins {
    id("java")
    id("org.springframework.boot") version "3.2.0"
    kotlin("jvm") version "1.9.21"
    `java-library`
}

group = "com.example"
version = "2.0.0"

repositories {
    mavenCentral()
}

dependencies {
    // String coordinate notation
    implementation("org.jetbrains.kotlin:kotlin-stdlib:1.9.21")

    // Named arguments notation
    implementation(group = "com.fasterxml.jackson.core", name = "jackson-databind", version = "2.16.0")

    // Test dependencies
    testImplementation("org.junit.jupiter:junit-jupiter:5.10.1")
    testImplementation("io.mockk:mockk:1.13.8")

    // Provided scope
    compileOnly("jakarta.servlet:jakarta.servlet-api:6.0.0")

    // Runtime scope
    runtimeOnly("org.postgresql:postgresql:42.7.0")

    // Platform/BOM import
    implementation(platform("org.springframework.boot:spring-boot-dependencies:3.2.0"))

    // Enforced platform
    api(enforcedPlatform("com.google.cloud:libraries-bom:26.28.0"))

    // Annotation processor (kapt)
    kapt("org.mapstruct:mapstruct-processor:1.5.5.Final")

    // KSP processor
    ksp("io.insert-koin:koin-ksp-compiler:1.3.0")

    // Internal project dependency (should be skipped)
    implementation(project(":core-module"))
}

kotlin {
    jvmToolchain(17)
}
@@ -0,0 +1,122 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.jetbrains.kotlin/kotlin-stdlib@1.9.21",
|
||||
"purl": "pkg:maven/org.jetbrains.kotlin/kotlin-stdlib@1.9.21",
|
||||
"name": "kotlin-stdlib",
|
||||
"version": "1.9.21",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "kotlin-stdlib",
|
||||
"groupId": "org.jetbrains.kotlin",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"name": "jackson-databind",
|
||||
"version": "2.16.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jackson-databind",
|
||||
"groupId": "com.fasterxml.jackson.core",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"name": "junit-jupiter",
|
||||
"version": "5.10.1",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "junit-jupiter",
|
||||
"groupId": "org.junit.jupiter",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "test",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/io.mockk/mockk@1.13.8",
|
||||
"purl": "pkg:maven/io.mockk/mockk@1.13.8",
|
||||
"name": "mockk",
|
||||
"version": "1.13.8",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "mockk",
|
||||
"groupId": "io.mockk",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "test",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/jakarta.servlet/jakarta.servlet-api@6.0.0",
|
||||
"purl": "pkg:maven/jakarta.servlet/jakarta.servlet-api@6.0.0",
|
||||
"name": "jakarta.servlet-api",
|
||||
"version": "6.0.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jakarta.servlet-api",
|
||||
"groupId": "jakarta.servlet",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "provided",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.postgresql/postgresql@42.7.0",
|
||||
"purl": "pkg:maven/org.postgresql/postgresql@42.7.0",
|
||||
"name": "postgresql",
|
||||
"version": "42.7.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "postgresql",
|
||||
"groupId": "org.postgresql",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "runtime",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.mapstruct/mapstruct-processor@1.5.5.Final",
|
||||
"purl": "pkg:maven/org.mapstruct/mapstruct-processor@1.5.5.Final",
|
||||
"name": "mapstruct-processor",
|
||||
"version": "1.5.5.Final",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "mapstruct-processor",
|
||||
"groupId": "org.mapstruct",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/io.insert-koin/koin-ksp-compiler@1.3.0",
|
||||
"purl": "pkg:maven/io.insert-koin/koin-ksp-compiler@1.3.0",
|
||||
"name": "koin-ksp-compiler",
|
||||
"version": "1.3.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "koin-ksp-compiler",
|
||||
"groupId": "io.insert-koin",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "build.gradle.kts"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,82 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.springframework.boot/spring-boot-starter-web",
|
||||
"purl": "pkg:maven/org.springframework.boot/spring-boot-starter-web",
|
||||
"name": "spring-boot-starter-web",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "spring-boot-starter-web",
|
||||
"groupId": "org.springframework.boot",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "bom",
|
||||
"bomArtifact": "org.springframework.boot:spring-boot-dependencies:3.2.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-databind",
|
||||
"purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind",
|
||||
"name": "jackson-databind",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jackson-databind",
|
||||
"groupId": "com.fasterxml.jackson.core",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "bom",
|
||||
"bomArtifact": "org.springframework.boot:spring-boot-dependencies:3.2.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/software.amazon.awssdk/s3",
|
||||
"purl": "pkg:maven/software.amazon.awssdk/s3",
|
||||
"name": "s3",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "s3",
|
||||
"groupId": "software.amazon.awssdk",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "bom",
|
||||
"bomArtifact": "software.amazon.awssdk:bom:2.21.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"purl": "pkg:maven/org.apache.commons/commons-lang3@3.14.0",
|
||||
"name": "commons-lang3",
|
||||
"version": "3.14.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "commons-lang3",
|
||||
"groupId": "org.apache.commons",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "dependencyManagement",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.projectlombok/lombok@1.18.30",
|
||||
"purl": "pkg:maven/org.projectlombok/lombok@1.18.30",
|
||||
"name": "lombok",
|
||||
"version": "1.18.30",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "lombok",
|
||||
"groupId": "org.projectlombok",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "provided",
|
||||
"versionSource": "direct",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,88 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.example</groupId>
|
||||
<artifactId>bom-consumer</artifactId>
|
||||
<version>1.0.0</version>
|
||||
|
||||
<name>BOM Consumer</name>
|
||||
<description>Project that imports BOMs for version management</description>
|
||||
|
||||
<properties>
|
||||
<spring-boot.version>3.2.0</spring-boot.version>
|
||||
<spring-cloud.version>2023.0.0</spring-cloud.version>
|
||||
</properties>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<!-- Spring Boot BOM -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-dependencies</artifactId>
|
||||
<version>${spring-boot.version}</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Cloud BOM -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-dependencies</artifactId>
|
||||
<version>${spring-cloud.version}</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- AWS SDK BOM -->
|
||||
<dependency>
|
||||
<groupId>software.amazon.awssdk</groupId>
|
||||
<artifactId>bom</artifactId>
|
||||
<version>2.21.0</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Local version override -->
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>3.14.0</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<!-- Versions managed by Spring Boot BOM -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-web</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Versions managed by AWS SDK BOM -->
|
||||
<dependency>
|
||||
<groupId>software.amazon.awssdk</groupId>
|
||||
<artifactId>s3</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Version managed by local dependencyManagement -->
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Direct version declaration -->
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.30</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
@@ -0,0 +1,62 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"purl": "pkg:maven/com.google.guava/guava@32.1.3-jre",
|
||||
"name": "guava",
|
||||
"version": "32.1.3-jre",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "guava",
|
||||
"groupId": "com.google.guava",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"purl": "pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"name": "slf4j-api",
|
||||
"version": "2.0.9",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "slf4j-api",
|
||||
"groupId": "org.slf4j",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"name": "junit-jupiter",
|
||||
"version": "5.10.1",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "junit-jupiter",
|
||||
"groupId": "org.junit.jupiter",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "test",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.hibernate.orm/hibernate-core@6.4.0.Final",
|
||||
"purl": "pkg:maven/org.hibernate.orm/hibernate-core@6.4.0.Final",
|
||||
"name": "hibernate-core",
|
||||
"version": "6.4.0.Final",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "hibernate-core",
|
||||
"groupId": "org.hibernate.orm",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,65 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.example</groupId>
|
||||
<artifactId>licensed-app</artifactId>
|
||||
<version>1.0.0</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>Licensed Application</name>
|
||||
<description>Example project with license declarations</description>
|
||||
|
||||
<licenses>
|
||||
<license>
|
||||
<name>Apache License, Version 2.0</name>
|
||||
<url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
|
||||
<distribution>repo</distribution>
|
||||
</license>
|
||||
<license>
|
||||
<name>MIT License</name>
|
||||
<url>https://opensource.org/licenses/MIT</url>
|
||||
<distribution>repo</distribution>
|
||||
<comments>Dual licensed under Apache-2.0 and MIT</comments>
|
||||
</license>
|
||||
</licenses>
|
||||
|
||||
<properties>
|
||||
<maven.compiler.source>17</maven.compiler.source>
|
||||
<maven.compiler.target>17</maven.compiler.target>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<!-- Apache-2.0 licensed dependency -->
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>32.1.3-jre</version>
|
||||
</dependency>
|
||||
|
||||
<!-- MIT licensed dependency -->
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>2.0.9</version>
|
||||
</dependency>
|
||||
|
||||
<!-- EPL-2.0 licensed dependency -->
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<version>5.10.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- LGPL-2.1 licensed dependency -->
|
||||
<dependency>
|
||||
<groupId>org.hibernate.orm</groupId>
|
||||
<artifactId>hibernate-core</artifactId>
|
||||
<version>6.4.0.Final</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
@@ -0,0 +1,86 @@
|
||||
[
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.springframework/spring-core@6.1.0",
|
||||
"purl": "pkg:maven/org.springframework/spring-core@6.1.0",
|
||||
"name": "spring-core",
|
||||
"version": "6.1.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "spring-core",
|
||||
"groupId": "org.springframework",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "parent",
|
||||
"parentArtifact": "com.example:parent-pom:1.0.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.springframework/spring-context@6.1.0",
|
||||
"purl": "pkg:maven/org.springframework/spring-context@6.1.0",
|
||||
"name": "spring-context",
|
||||
"version": "6.1.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "spring-context",
|
||||
"groupId": "org.springframework",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "parent",
|
||||
"parentArtifact": "com.example:parent-pom:1.0.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
|
||||
"name": "jackson-databind",
|
||||
"version": "2.16.0",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "jackson-databind",
|
||||
"groupId": "com.fasterxml.jackson.core",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "parent",
|
||||
"parentArtifact": "com.example:parent-pom:1.0.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"purl": "pkg:maven/org.slf4j/slf4j-api@2.0.9",
|
||||
"name": "slf4j-api",
|
||||
"version": "2.0.9",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "slf4j-api",
|
||||
"groupId": "org.slf4j",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "compile",
|
||||
"versionSource": "direct",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
},
|
||||
{
|
||||
"analyzerId": "java",
|
||||
"componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
|
||||
"name": "junit-jupiter",
|
||||
"version": "5.10.1",
|
||||
"type": "maven",
|
||||
"metadata": {
|
||||
"artifactId": "junit-jupiter",
|
||||
"groupId": "org.junit.jupiter",
|
||||
"declaredOnly": "true",
|
||||
"declaredScope": "test",
|
||||
"versionSource": "parent",
|
||||
"parentArtifact": "com.example:parent-pom:1.0.0",
|
||||
"buildFile": "pom.xml"
|
||||
}
|
||||
}
|
||||
]
|
||||

@@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.example</groupId>
  <artifactId>parent-pom</artifactId>
  <version>1.0.0</version>
  <packaging>pom</packaging>

  <name>Parent POM</name>
  <description>Parent POM for version inheritance testing</description>

  <properties>
    <java.version>17</java.version>
    <spring.version>6.1.0</spring.version>
    <jackson.version>2.16.0</jackson.version>
    <junit.version>5.10.1</junit.version>
  </properties>

  <dependencyManagement>
    <dependencies>
      <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-core</artifactId>
        <version>${spring.version}</version>
      </dependency>
      <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-context</artifactId>
        <version>${spring.version}</version>
      </dependency>
      <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
        <version>${jackson.version}</version>
      </dependency>
      <dependency>
        <groupId>org.junit.jupiter</groupId>
        <artifactId>junit-jupiter</artifactId>
        <version>${junit.version}</version>
        <scope>test</scope>
      </dependency>
    </dependencies>
  </dependencyManagement>
</project>

@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>com.example</groupId>
    <artifactId>parent-pom</artifactId>
    <version>1.0.0</version>
    <relativePath>parent/pom.xml</relativePath>
  </parent>

  <artifactId>child-module</artifactId>
  <version>2.0.0</version>

  <name>Child Module</name>
  <description>Child module that inherits from parent</description>

  <dependencies>
    <!-- Inherits version from parent dependencyManagement -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-core</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
    </dependency>

    <!-- Direct version override -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>2.0.9</version>
    </dependency>

    <!-- Test scope inherited from parent -->
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
    </dependency>
  </dependencies>
</project>

@@ -0,0 +1,97 @@
[
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.springframework/spring-core@6.1.0",
    "purl": "pkg:maven/org.springframework/spring-core@6.1.0",
    "name": "spring-core",
    "version": "6.1.0",
    "type": "maven",
    "metadata": {
      "artifactId": "spring-core",
      "groupId": "org.springframework",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "versionProperty": "spring.version",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.springframework/spring-context@6.1.0",
    "purl": "pkg:maven/org.springframework/spring-context@6.1.0",
    "name": "spring-context",
    "version": "6.1.0",
    "type": "maven",
    "metadata": {
      "artifactId": "spring-context",
      "groupId": "org.springframework",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "versionProperty": "spring-core.version",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
    "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.16.0",
    "name": "jackson-databind",
    "version": "2.16.0",
    "type": "maven",
    "metadata": {
      "artifactId": "jackson-databind",
      "groupId": "com.fasterxml.jackson.core",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "versionProperty": "jackson.version",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
    "purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
    "name": "junit-jupiter",
    "version": "5.10.1",
    "type": "maven",
    "metadata": {
      "artifactId": "junit-jupiter",
      "groupId": "org.junit.jupiter",
      "declaredOnly": "true",
      "declaredScope": "test",
      "versionProperty": "junit.version",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.slf4j/slf4j-api@2.0.9",
    "purl": "pkg:maven/org.slf4j/slf4j-api@2.0.9",
    "name": "slf4j-api",
    "version": "2.0.9",
    "type": "maven",
    "metadata": {
      "artifactId": "slf4j-api",
      "groupId": "org.slf4j",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.projectlombok/lombok@1.18.30",
    "purl": "pkg:maven/org.projectlombok/lombok@1.18.30",
    "name": "lombok",
    "version": "1.18.30",
    "type": "maven",
    "metadata": {
      "artifactId": "lombok",
      "groupId": "org.projectlombok",
      "declaredOnly": "true",
      "declaredScope": "provided",
      "versionProperty": "lombok.version",
      "buildFile": "pom.xml"
    }
  }
]

@@ -0,0 +1,79 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.example</groupId>
  <artifactId>properties-demo</artifactId>
  <version>1.0.0</version>

  <name>Properties Demo</name>
  <description>Project demonstrating property placeholder resolution</description>

  <properties>
    <!-- Standard version properties -->
    <java.version>17</java.version>
    <spring.version>6.1.0</spring.version>
    <jackson.version>2.16.0</jackson.version>
    <lombok.version>1.18.30</lombok.version>

    <!-- Nested property reference -->
    <spring-core.version>${spring.version}</spring-core.version>

    <!-- Property with suffix -->
    <junit.major>5</junit.major>
    <junit.minor>10</junit.minor>
    <junit.patch>1</junit.patch>
    <junit.version>${junit.major}.${junit.minor}.${junit.patch}</junit.version>

    <!-- Project properties -->
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <!-- Simple property reference -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-core</artifactId>
      <version>${spring.version}</version>
    </dependency>

    <!-- Nested property reference -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
      <version>${spring-core.version}</version>
    </dependency>

    <!-- Multiple property interpolation -->
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
      <version>${jackson.version}</version>
    </dependency>

    <!-- Composed version property -->
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <version>${junit.version}</version>
      <scope>test</scope>
    </dependency>

    <!-- Direct version (no property) -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>2.0.9</version>
    </dependency>

    <!-- Property in scope attribute -->
    <dependency>
      <groupId>org.projectlombok</groupId>
      <artifactId>lombok</artifactId>
      <version>${lombok.version}</version>
      <scope>provided</scope>
    </dependency>
  </dependencies>
</project>

@@ -0,0 +1,169 @@
[
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/com.google.guava/guava@32.1.3-jre",
    "purl": "pkg:maven/com.google.guava/guava@32.1.3-jre",
    "name": "guava",
    "version": "32.1.3-jre",
    "type": "maven",
    "metadata": {
      "artifactId": "guava",
      "groupId": "com.google.guava",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.apache.commons/commons-lang3@3.14.0",
    "purl": "pkg:maven/org.apache.commons/commons-lang3@3.14.0",
    "name": "commons-lang3",
    "version": "3.14.0",
    "type": "maven",
    "metadata": {
      "artifactId": "commons-lang3",
      "groupId": "org.apache.commons",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/jakarta.servlet/jakarta.servlet-api@6.0.0",
    "purl": "pkg:maven/jakarta.servlet/jakarta.servlet-api@6.0.0",
    "name": "jakarta.servlet-api",
    "version": "6.0.0",
    "type": "maven",
    "metadata": {
      "artifactId": "jakarta.servlet-api",
      "groupId": "jakarta.servlet",
      "declaredOnly": "true",
      "declaredScope": "provided",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.projectlombok/lombok@1.18.30",
    "purl": "pkg:maven/org.projectlombok/lombok@1.18.30",
    "name": "lombok",
    "version": "1.18.30",
    "type": "maven",
    "metadata": {
      "artifactId": "lombok",
      "groupId": "org.projectlombok",
      "declaredOnly": "true",
      "declaredScope": "provided",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.postgresql/postgresql@42.7.0",
    "purl": "pkg:maven/org.postgresql/postgresql@42.7.0",
    "name": "postgresql",
    "version": "42.7.0",
    "type": "maven",
    "metadata": {
      "artifactId": "postgresql",
      "groupId": "org.postgresql",
      "declaredOnly": "true",
      "declaredScope": "runtime",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/ch.qos.logback/logback-classic@1.4.14",
    "purl": "pkg:maven/ch.qos.logback/logback-classic@1.4.14",
    "name": "logback-classic",
    "version": "1.4.14",
    "type": "maven",
    "metadata": {
      "artifactId": "logback-classic",
      "groupId": "ch.qos.logback",
      "declaredOnly": "true",
      "declaredScope": "runtime",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
    "purl": "pkg:maven/org.junit.jupiter/junit-jupiter@5.10.1",
    "name": "junit-jupiter",
    "version": "5.10.1",
    "type": "maven",
    "metadata": {
      "artifactId": "junit-jupiter",
      "groupId": "org.junit.jupiter",
      "declaredOnly": "true",
      "declaredScope": "test",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.mockito/mockito-core@5.8.0",
    "purl": "pkg:maven/org.mockito/mockito-core@5.8.0",
    "name": "mockito-core",
    "version": "5.8.0",
    "type": "maven",
    "metadata": {
      "artifactId": "mockito-core",
      "groupId": "org.mockito",
      "declaredOnly": "true",
      "declaredScope": "test",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.assertj/assertj-core@3.24.2",
    "purl": "pkg:maven/org.assertj/assertj-core@3.24.2",
    "name": "assertj-core",
    "version": "3.24.2",
    "type": "maven",
    "metadata": {
      "artifactId": "assertj-core",
      "groupId": "org.assertj",
      "declaredOnly": "true",
      "declaredScope": "test",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/com.example.legacy/legacy-lib@1.0.0",
    "purl": "pkg:maven/com.example.legacy/legacy-lib@1.0.0",
    "name": "legacy-lib",
    "version": "1.0.0",
    "type": "maven",
    "metadata": {
      "artifactId": "legacy-lib",
      "groupId": "com.example.legacy",
      "declaredOnly": "true",
      "declaredScope": "system",
      "systemPath": "${project.basedir}/lib/legacy-lib.jar",
      "buildFile": "pom.xml"
    }
  },
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/org.springframework/spring-context@6.1.0",
    "purl": "pkg:maven/org.springframework/spring-context@6.1.0",
    "name": "spring-context",
    "version": "6.1.0",
    "type": "maven",
    "metadata": {
      "artifactId": "spring-context",
      "groupId": "org.springframework",
      "declaredOnly": "true",
      "declaredScope": "compile",
      "optional": "true",
      "buildFile": "pom.xml"
    }
  }
]

@@ -0,0 +1,114 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.example</groupId>
  <artifactId>scoped-deps</artifactId>
  <version>1.0.0</version>
  <packaging>jar</packaging>

  <name>Scoped Dependencies Example</name>
  <description>Tests all Maven dependency scopes</description>

  <properties>
    <maven.compiler.source>17</maven.compiler.source>
    <maven.compiler.target>17</maven.compiler.target>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <!-- compile scope (default) - available in all classpaths -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <version>32.1.3-jre</version>
      <!-- scope defaults to compile -->
    </dependency>

    <!-- compile scope (explicit) -->
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <version>3.14.0</version>
      <scope>compile</scope>
    </dependency>

    <!-- provided scope - available at compile time, not included in final package -->
    <dependency>
      <groupId>jakarta.servlet</groupId>
      <artifactId>jakarta.servlet-api</artifactId>
      <version>6.0.0</version>
      <scope>provided</scope>
    </dependency>

    <!-- provided scope - annotation processor -->
    <dependency>
      <groupId>org.projectlombok</groupId>
      <artifactId>lombok</artifactId>
      <version>1.18.30</version>
      <scope>provided</scope>
    </dependency>

    <!-- runtime scope - not needed for compilation, only for execution -->
    <dependency>
      <groupId>org.postgresql</groupId>
      <artifactId>postgresql</artifactId>
      <version>42.7.0</version>
      <scope>runtime</scope>
    </dependency>

    <!-- runtime scope - logging implementation -->
    <dependency>
      <groupId>ch.qos.logback</groupId>
      <artifactId>logback-classic</artifactId>
      <version>1.4.14</version>
      <scope>runtime</scope>
    </dependency>

    <!-- test scope - only available during test compilation and execution -->
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <version>5.10.1</version>
      <scope>test</scope>
    </dependency>

    <!-- test scope - mocking framework -->
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <version>5.8.0</version>
      <scope>test</scope>
    </dependency>

    <!-- test scope - assertions library -->
    <dependency>
      <groupId>org.assertj</groupId>
      <artifactId>assertj-core</artifactId>
      <version>3.24.2</version>
      <scope>test</scope>
    </dependency>

    <!-- system scope - deprecated but still used - local JAR -->
    <dependency>
      <groupId>com.example.legacy</groupId>
      <artifactId>legacy-lib</artifactId>
      <version>1.0.0</version>
      <scope>system</scope>
      <systemPath>${project.basedir}/lib/legacy-lib.jar</systemPath>
    </dependency>

    <!-- import scope - only valid in dependencyManagement for BOMs -->
    <!-- Note: import scope is tested in maven-bom fixture -->

    <!-- optional dependency - not transitive -->
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
      <version>6.1.0</version>
      <optional>true</optional>
    </dependency>
  </dependencies>
</project>

@@ -0,0 +1,28 @@
[
  {
    "analyzerId": "java",
    "componentKey": "purl::pkg:maven/com.example.osgi/service@1.0.0",
    "purl": "pkg:maven/com.example.osgi/service@1.0.0",
    "name": "service",
    "version": "1.0.0",
    "type": "maven",
    "metadata": {
      "jarPath": "osgi-service.jar",
      "osgi.symbolicName": "com.example.osgi.service",
      "osgi.version": "1.0.0.qualifier",
      "osgi.bundleName": "Example OSGi Service Bundle",
      "osgi.vendor": "Example Corp",
      "osgi.executionEnvironment": "JavaSE-17",
      "osgi.importPackage": "org.osgi.framework;version=\"[1.8,2.0)\",org.osgi.service.component;version=\"[1.4,2.0)\",org.slf4j;version=\"[2.0,3.0)\"",
      "osgi.exportPackage": "com.example.osgi.service.api;version=\"1.0.0\",com.example.osgi.service.spi;version=\"1.0.0\"",
      "osgi.requireBundle": "org.apache.felix.scr;bundle-version=\"[2.1,3.0)\""
    },
    "evidence": [
      {
        "kind": "file",
        "source": "MANIFEST.MF",
        "locator": "osgi-service.jar!META-INF/MANIFEST.MF"
      }
    ]
  }
]

@@ -0,0 +1,23 @@
{
  "description": "OSGi bundle fixture - tests detection of Bundle-SymbolicName and Import/Export-Package headers",
  "jarName": "osgi-service.jar",
  "manifest": {
    "Bundle-SymbolicName": "com.example.osgi.service",
    "Bundle-Version": "1.0.0.qualifier",
    "Bundle-Name": "Example OSGi Service Bundle",
    "Bundle-Vendor": "Example Corp",
    "Bundle-RequiredExecutionEnvironment": "JavaSE-17",
    "Import-Package": [
      "org.osgi.framework;version=\"[1.8,2.0)\"",
      "org.osgi.service.component;version=\"[1.4,2.0)\"",
      "org.slf4j;version=\"[2.0,3.0)\""
    ],
    "Export-Package": [
      "com.example.osgi.service.api;version=\"1.0.0\"",
      "com.example.osgi.service.spi;version=\"1.0.0\""
    ],
    "Require-Bundle": [
      "org.apache.felix.scr;bundle-version=\"[2.1,3.0)\""
    ]
  }
}

Some files were not shown because too many files have changed in this diff.